; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12

; These patterns are produced by LoopVectorizer for interleaved stores.
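;
; As a point of reference (a minimal sketch, not code taken from this test;
; all names below are illustrative), the kind of scalar source loop that the
; LoopVectorizer turns into the interleaved shuffle-and-store IR checked in
; this file looks like the following for stride 7:
;
;   void store_i16_stride7(const short *a, const short *b, const short *c,
;                          const short *d, const short *e, const short *f,
;                          const short *g, short *out, int n) {
;     for (int i = 0; i < n; ++i) {
;       out[7*i + 0] = a[i];  // one element from each of the seven inputs
;       out[7*i + 1] = b[i];  // is written per iteration, which after
;       out[7*i + 2] = c[i];  // vectorization becomes the wide interleaved
;       out[7*i + 3] = d[i];  // stores (<14 x i16>, <28 x i16>, ...) seen
;       out[7*i + 4] = e[i];  // in the IR at the end of each function below
;       out[7*i + 5] = f[i];
;       out[7*i + 6] = g[i];
;     }
;   }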

define void @store_i16_stride7_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
; SSE-LABEL: store_i16_stride7_vf2:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
; SSE-NEXT: movdqa (%rdi), %xmm0
; SSE-NEXT: movdqa (%rdx), %xmm1
; SSE-NEXT: movdqa (%r8), %xmm2
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
; SSE-NEXT: movdqa %xmm0, %xmm3
; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,1]
; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[0,2,1,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,6,6,6,6]
; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,65535,65535,65535,0,0,0,65535]
; SSE-NEXT: pand %xmm4, %xmm3
; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,6,4,7,7]
; SSE-NEXT: pandn %xmm5, %xmm4
; SSE-NEXT: por %xmm3, %xmm4
; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[3,1,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[1,2,0,3,4,5,6,7]
; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,0,0,0,65535,65535]
; SSE-NEXT: pand %xmm1, %xmm0
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[2,2,2,2,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,5,6,7]
; SSE-NEXT: pandn %xmm2, %xmm1
; SSE-NEXT: por %xmm0, %xmm1
; SSE-NEXT: movq %xmm1, 16(%rax)
; SSE-NEXT: movdqa %xmm4, (%rax)
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,3,2,3]
; SSE-NEXT: movd %xmm0, 24(%rax)
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i16_stride7_vf2:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm0
; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm1
; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm2
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,4,5,8,9,12,13,u,u,u,u,u,u,2,3]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm1[u,u,u,u,u,u,u,u,0,1,4,5,8,9,u,u]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3],xmm3[4,5,6],xmm2[7]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[6,7,10,11,14,15,u,u,u,u,u,u,12,13,14,15]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[u,u,u,u,u,u,2,3,6,7,10,11,u,u,u,u]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm1[3,4,5],xmm0[6,7]
; AVX1-ONLY-NEXT: vpextrd $2, %xmm1, 24(%rax)
; AVX1-ONLY-NEXT: vmovq %xmm0, 16(%rax)
; AVX1-ONLY-NEXT: vmovdqa %xmm2, (%rax)
; AVX1-ONLY-NEXT: retq
;
; AVX2-ONLY-LABEL: store_i16_stride7_vf2:
; AVX2-ONLY: # %bb.0:
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX2-ONLY-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovdqa (%rsi), %xmm1
; AVX2-ONLY-NEXT: vmovdqa (%rdx), %xmm2
; AVX2-ONLY-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX2-ONLY-NEXT: vinserti128 $1, (%r9), %ymm1, %ymm1
; AVX2-ONLY-NEXT: vinserti128 $1, (%r8), %ymm0, %ymm0
; AVX2-ONLY-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
; AVX2-ONLY-NEXT: vinserti128 $1, (%r10), %ymm2, %ymm1
; AVX2-ONLY-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,1,4,5,8,9,12,13],zero,zero,zero,zero,zero,zero,ymm0[2,3],zero,zero,zero,zero,zero,zero,ymm0[18,19,22,23,26,27],zero,zero,zero,zero
; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,zero,zero,ymm0[0,1,4,5,8,9],zero,zero,ymm0[22,23,26,27,30,31],zero,zero,zero,zero,zero,zero,ymm0[24,25,20,21]
; AVX2-ONLY-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX2-ONLY-NEXT: vextracti128 $1, %ymm1, %xmm1
; AVX2-ONLY-NEXT: vpextrd $2, %xmm1, 24(%rax)
; AVX2-ONLY-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-ONLY-NEXT: vmovq %xmm1, 16(%rax)
; AVX2-ONLY-NEXT: vmovdqa %xmm0, (%rax)
; AVX2-ONLY-NEXT: vzeroupper
; AVX2-ONLY-NEXT: retq
;
; AVX512F-LABEL: store_i16_stride7_vf2:
; AVX512F: # %bb.0:
; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512F-NEXT: vmovdqa (%rdi), %xmm0
; AVX512F-NEXT: vmovdqa (%rsi), %xmm1
; AVX512F-NEXT: vmovdqa (%rdx), %xmm2
; AVX512F-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512F-NEXT: vinserti128 $1, (%r9), %ymm1, %ymm1
; AVX512F-NEXT: vinserti128 $1, (%r8), %ymm0, %ymm0
; AVX512F-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
; AVX512F-NEXT: vinserti128 $1, (%r10), %ymm2, %ymm1
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
; AVX512F-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,1,4,5,8,9,12,13],zero,zero,zero,zero,zero,zero,ymm0[2,3],zero,zero,zero,zero,zero,zero,ymm0[18,19,22,23,26,27,u,u,u,u]
; AVX512F-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX512F-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,zero,zero,ymm0[0,1,4,5,8,9],zero,zero,ymm0[22,23,26,27,30,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX512F-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX512F-NEXT: vextracti128 $1, %ymm1, %xmm1
; AVX512F-NEXT: vpextrd $2, %xmm1, 24(%rax)
; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512F-NEXT: vmovq %xmm1, 16(%rax)
; AVX512F-NEXT: vmovdqa %xmm0, (%rax)
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
; AVX512BW-LABEL: store_i16_stride7_vf2:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-NEXT: vmovdqa (%rsi), %xmm1
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm2
; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512BW-NEXT: vinserti128 $1, (%r9), %ymm1, %ymm1
; AVX512BW-NEXT: vinserti128 $1, (%r8), %ymm0, %ymm0
; AVX512BW-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
; AVX512BW-NEXT: vinserti128 $1, (%r10), %ymm2, %ymm1
; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm2 = <0,2,16,18,8,10,24,1,3,17,19,9,11,25,u,u>
; AVX512BW-NEXT: vpermi2w %ymm1, %ymm0, %ymm2
; AVX512BW-NEXT: vextracti128 $1, %ymm2, %xmm0
; AVX512BW-NEXT: vpextrd $2, %xmm0, 24(%rax)
; AVX512BW-NEXT: vmovq %xmm0, 16(%rax)
; AVX512BW-NEXT: vmovdqa %xmm2, (%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %in.vec0 = load <2 x i16>, ptr %in.vecptr0, align 64
  %in.vec1 = load <2 x i16>, ptr %in.vecptr1, align 64
  %in.vec2 = load <2 x i16>, ptr %in.vecptr2, align 64
  %in.vec3 = load <2 x i16>, ptr %in.vecptr3, align 64
  %in.vec4 = load <2 x i16>, ptr %in.vecptr4, align 64
  %in.vec5 = load <2 x i16>, ptr %in.vecptr5, align 64
  %in.vec6 = load <2 x i16>, ptr %in.vecptr6, align 64
  %1 = shufflevector <2 x i16> %in.vec0, <2 x i16> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %2 = shufflevector <2 x i16> %in.vec2, <2 x i16> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %3 = shufflevector <2 x i16> %in.vec4, <2 x i16> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %4 = shufflevector <4 x i16> %1, <4 x i16> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %5 = shufflevector <2 x i16> %in.vec6, <2 x i16> poison, <4 x i32> <i32 0, i32 1, i32 undef, i32 undef>
  %6 = shufflevector <4 x i16> %3, <4 x i16> %5, <6 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5>
  %7 = shufflevector <6 x i16> %6, <6 x i16> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 undef, i32 undef>
  %8 = shufflevector <8 x i16> %4, <8 x i16> %7, <14 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13>
  %interleaved.vec = shufflevector <14 x i16> %8, <14 x i16> poison, <14 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 12, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11, i32 13>
  store <14 x i16> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i16_stride7_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
; SSE-LABEL: store_i16_stride7_vf4:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
; SSE-NEXT: movq {{.*#+}} xmm3 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm5 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm4 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm6 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
; SSE-NEXT: punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm6[0]
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm1[0,1,0,1]
; SSE-NEXT: movdqa {{.*#+}} xmm6 = [65535,65535,65535,65535,65535,0,65535,65535]
; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm2[0,0,2,3]
; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,5,5,5,5]
; SSE-NEXT: pand %xmm6, %xmm8
; SSE-NEXT: pandn %xmm7, %xmm6
; SSE-NEXT: por %xmm8, %xmm6
; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm4[0,1,1,3,4,5,6,7]
; SSE-NEXT: punpcklqdq {{.*#+}} xmm8 = xmm8[0],xmm3[0]
; SSE-NEXT: movdqa {{.*#+}} xmm7 = [65535,65535,0,65535,65535,65535,0,65535]
; SSE-NEXT: movdqa %xmm5, %xmm10
; SSE-NEXT: movdqa %xmm3, %xmm9
; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm3[1,1,1,1,4,5,6,7]
; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,2,3]
; SSE-NEXT: pshufhw {{.*#+}} xmm12 = xmm5[0,1,2,3,4,4,4,4]
; SSE-NEXT: pand %xmm7, %xmm12
; SSE-NEXT: pandn %xmm8, %xmm7
; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,65535,65535,0,0,0,65535,65535]
; SSE-NEXT: por %xmm12, %xmm7
; SSE-NEXT: pand %xmm5, %xmm7
; SSE-NEXT: pandn %xmm6, %xmm5
; SSE-NEXT: por %xmm7, %xmm5
; SSE-NEXT: psrld $16, %xmm10
; SSE-NEXT: punpckldq {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1]
; SSE-NEXT: movdqa {{.*#+}} xmm7 = [65535,65535,65535,65535,65535,0,0,65535]
; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm0[0,1,2,3,4,5,6,6]
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,1,2,3]
; SSE-NEXT: pand %xmm7, %xmm6
; SSE-NEXT: pandn %xmm9, %xmm7
; SSE-NEXT: por %xmm6, %xmm7
; SSE-NEXT: movdqa {{.*#+}} xmm6 = [65535,65535,0,0,0,65535,65535,65535]
; SSE-NEXT: pand %xmm6, %xmm7
; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm2[3,1,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm8[0,1,2,0,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm1[1,1,1,1]
; SSE-NEXT: punpcklqdq {{.*#+}} xmm9 = xmm9[0],xmm10[0]
; SSE-NEXT: pandn %xmm9, %xmm6
; SSE-NEXT: por %xmm7, %xmm6
; SSE-NEXT: movdqa {{.*#+}} xmm7 = [65535,65535,65535,0,65535,65535,65535,65535]
; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm8[0,3,1,3,4,5,6,7]
; SSE-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm8
; SSE-NEXT: psrlq $48, %xmm4
; SSE-NEXT: por %xmm8, %xmm4
; SSE-NEXT: pand %xmm7, %xmm4
; SSE-NEXT: pandn %xmm1, %xmm7
; SSE-NEXT: por %xmm4, %xmm7
; SSE-NEXT: punpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm3[0]
; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm11[2,1]
; SSE-NEXT: movaps {{.*#+}} xmm3 = [65535,65535,65535,65535,0,0,0,65535]
; SSE-NEXT: andps %xmm3, %xmm0
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,1,0,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; SSE-NEXT: andnps %xmm2, %xmm3
; SSE-NEXT: orps %xmm0, %xmm3
; SSE-NEXT: movaps %xmm3, (%rax)
; SSE-NEXT: movq %xmm7, 48(%rax)
; SSE-NEXT: movdqa %xmm6, 32(%rax)
; SSE-NEXT: movdqa %xmm5, 16(%rax)
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i16_stride7_vf4:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm1[0,1,1,3]
; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,7,7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[3,1,2,1]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,0,2,3,4,5,6,7]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm4[0,1,2,3,4],xmm3[5,6],xmm4[7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[3,1,2,3]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm4[0,1,2,0,4,5,6,7]
; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = mem[0,0]
; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm6[1,1,1,1]
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm5[2,3,4],xmm3[5,6,7]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,3,1,3,4,5,6,7]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2],xmm6[3],xmm4[4,5,6,7]
; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm5 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0],xmm4[1,2,3,4,5,6,7]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm1[0,1,8,9,u,u,u,u,u,u,u,u,2,3,2,3]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm2[0,1,0,2,4,5,6,7]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1],xmm7[2,3],xmm5[4,5,6,7]
; AVX1-ONLY-NEXT: vpxor %xmm7, %xmm7, %xmm7
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm0[0],xmm7[1,2,3],xmm0[4],xmm7[5,6,7]
; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
; AVX1-ONLY-NEXT: vpackusdw %xmm8, %xmm7, %xmm7
; AVX1-ONLY-NEXT: vpackusdw %xmm7, %xmm7, %xmm7
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3],xmm7[4,5,6],xmm5[7]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[10,11,u,u,u,u,u,u,u,u,u,u,4,5,12,13]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,3,4,5,6,7]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm2[1,2],xmm1[3,4,5,6,7]
; AVX1-ONLY-NEXT: vmovsldup {{.*#+}} xmm0 = xmm0[0,0,2,2]
; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,5,5,5,5]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm6[5],xmm0[6,7]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3,4,5],xmm1[6,7]
; AVX1-ONLY-NEXT: vmovdqa %xmm0, 16(%rax)
; AVX1-ONLY-NEXT: vmovdqa %xmm5, (%rax)
; AVX1-ONLY-NEXT: vmovq %xmm4, 48(%rax)
; AVX1-ONLY-NEXT: vmovdqa %xmm3, 32(%rax)
; AVX1-ONLY-NEXT: retq
;
; AVX2-SLOW-LABEL: store_i16_stride7_vf4:
; AVX2-SLOW: # %bb.0:
; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,2,1,3]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm2[0,1,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm2[2,3],zero,zero,ymm2[18,19,26,27],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm2[2,3,0,1]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,zero,zero,ymm5[0,1,8,9,4,5,6,7,4,5],zero,zero,ymm5[26,27],zero,zero,zero,zero,ymm5[24,25,20,21,22,23,20,21,28,29]
; AVX2-SLOW-NEXT: vpor %ymm5, %ymm4, %ymm4
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,0,2]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm0[2,3,2,3,2,3,2,3],zero,zero,zero,zero,ymm0[0,1,2,3,18,19,18,19,18,19,18,19,26,27],zero,zero,ymm0[16,17,18,19]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,ymm1[0,1,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm1[18,19],zero,zero,zero,zero
; AVX2-SLOW-NEXT: vpor %ymm5, %ymm6, %ymm5
; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[12,13,6,7],zero,zero,zero,zero,ymm0[4,5,4,5,4,5,4,5,28,29,22,23,30,31],zero,zero,ymm0[20,21,20,21,20,21,20,21]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[4,5,12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm1[22,23],zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[14,15,14,15,14,15,14,15,4,5,6,7,14,15,14,15,30,31,30,31,30,31,30,31,20,21,22,23,30,31,30,31]
; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[3,1,2,1]
; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,0,2,3,4,5,6,7]
; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,0,0,u,u,u,u,u,u,u,u,u,u,u,u,u,u>
; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm2, %ymm1, %ymm1
; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,u,u,u,u,u,u,u,u>
; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX2-SLOW-NEXT: vmovdqa %ymm4, (%rax)
; AVX2-SLOW-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-SLOW-NEXT: vmovq %xmm1, 48(%rax)
; AVX2-SLOW-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX2-SLOW-NEXT: vzeroupper
; AVX2-SLOW-NEXT: retq
;
; AVX2-FAST-LABEL: store_i16_stride7_vf4:
; AVX2-FAST: # %bb.0:
; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <5,7,1,3,7,u,u,u>
; AVX2-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm1
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[0,1,4,5],zero,zero,zero,zero,zero,zero,ymm1[10,11,14,15,2,3,18,19],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [1,3,5,7,1,3,5,7]
; AVX2-FAST-NEXT: # ymm3 = mem[0,1,0,1]
; AVX2-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm3
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,zero,ymm3[0,1,4,5,8,9],zero,zero,zero,zero,zero,zero,zero,zero,ymm3[18,19,22,23,26,27],zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-FAST-NEXT: vpor %ymm3, %ymm1, %ymm1
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm2[0,1,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm2[2,3],zero,zero,ymm2[18,19,26,27],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,0,1]
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,zero,zero,ymm2[0,1,8,9,4,5,6,7,4,5],zero,zero,ymm2[26,27],zero,zero,zero,zero,ymm2[24,25,20,21,22,23,20,21,28,29]
; AVX2-FAST-NEXT: vpor %ymm2, %ymm3, %ymm2
; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,2,4,6,0,2,4,6]
; AVX2-FAST-NEXT: # ymm3 = mem[0,1,0,1]
; AVX2-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm0
; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,0,1,4,5,8,9,u,u,u,u,u,u,u,u,18,19,22,23,26,27,u,u,u,u]
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm2, %ymm0, %ymm0
; AVX2-FAST-NEXT: vmovdqa %ymm0, (%rax)
; AVX2-FAST-NEXT: vextracti128 $1, %ymm1, %xmm0
; AVX2-FAST-NEXT: vmovq %xmm0, 48(%rax)
; AVX2-FAST-NEXT: vmovdqa %xmm1, 32(%rax)
; AVX2-FAST-NEXT: vzeroupper
; AVX2-FAST-NEXT: retq
;
; AVX2-FAST-PERLANE-LABEL: store_i16_stride7_vf4:
; AVX2-FAST-PERLANE: # %bb.0:
; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,2,1,3]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm2[0,1,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm2[2,3],zero,zero,ymm2[18,19,26,27],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm2[2,3,0,1]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,zero,zero,ymm5[0,1,8,9,4,5,6,7,4,5],zero,zero,ymm5[26,27],zero,zero,zero,zero,ymm5[24,25,20,21,22,23,20,21,28,29]
; AVX2-FAST-PERLANE-NEXT: vpor %ymm5, %ymm4, %ymm4
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,0,2]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm5 = ymm0[2,3,2,3,2,3,2,3],zero,zero,zero,zero,ymm0[0,1,2,3,18,19,18,19,18,19,18,19,26,27],zero,zero,ymm0[16,17,18,19]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,ymm1[0,1,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm1[18,19],zero,zero,zero,zero
; AVX2-FAST-PERLANE-NEXT: vpor %ymm5, %ymm6, %ymm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[12,13,6,7],zero,zero,zero,zero,ymm0[4,5,4,5,4,5,4,5,28,29,22,23,30,31],zero,zero,ymm0[20,21,20,21,20,21,20,21]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[4,5,12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm1[22,23],zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-FAST-PERLANE-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[14,15,14,15,14,15,14,15,4,5,6,7,14,15,14,15,30,31,30,31,30,31,30,31,20,21,22,23,30,31,30,31]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm2 = xmm3[4,5,12,13,4,5,6,7,8,9,10,11,4,5,6,7]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,0,0,u,u,u,u,u,u,u,u,u,u,u,u,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm2, %ymm1, %ymm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,u,u,u,u,u,u,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, (%rax)
; AVX2-FAST-PERLANE-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-FAST-PERLANE-NEXT: vmovq %xmm1, 48(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX2-FAST-PERLANE-NEXT: vzeroupper
; AVX2-FAST-PERLANE-NEXT: retq
;
; AVX512F-SLOW-LABEL: store_i16_stride7_vf4:
; AVX512F-SLOW: # %bb.0:
; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,2,1,3]
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,0,2]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm0[u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm0[0,1,u,u,u,u,u,u,u,u,18,19,26,27],zero,zero,ymm0[u,u,u,u]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u],zero,zero,zero,zero,ymm0[4,5,u,u,u,u,u,u,u,u,22,23,30,31],zero,zero,ymm0[u,u,u,u,u,u,u,u]
; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm4, %zmm0
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm1[u,u,u,u,u,u,u,u,0,1,8,9],zero,zero,ymm1[u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[18,19,u,u,u,u]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,4,5,12,13],zero,zero,ymm1[u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[22,23,u,u,u,u,u,u,u,u]
; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm4, %zmm1
; AVX512F-SLOW-NEXT: vporq %zmm0, %zmm1, %zmm0
; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm2[2,3,0,1]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[0,1,8,9,u,u,u,u,u,u],zero,zero,ymm1[26,27],zero,zero,zero,zero,ymm1[u,u,u,u,u,u,20,21,28,29]
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,ymm2[u,u,u,u,u,u,6,7,14,15],zero,zero,ymm2[30,31,u,u,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm1, %zmm1
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[0,1,8,9],zero,zero,zero,zero,ymm2[u,u,u,u,u,u,2,3],zero,zero,ymm2[18,19,26,27,u,u,u,u,u,u],zero,zero,zero,zero
; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[4,5,12,13,u,u,u,u,u,u],zero,zero,zero,zero,ymm3[6,7],zero,zero,ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
; AVX512F-SLOW-NEXT: vporq %zmm1, %zmm2, %zmm1
; AVX512F-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
; AVX512F-SLOW-NEXT: vextracti32x4 $2, %zmm1, 32(%rax)
; AVX512F-SLOW-NEXT: vextracti32x4 $3, %zmm1, %xmm0
; AVX512F-SLOW-NEXT: vmovq %xmm0, 48(%rax)
; AVX512F-SLOW-NEXT: vmovdqa %ymm1, (%rax)
; AVX512F-SLOW-NEXT: vzeroupper
; AVX512F-SLOW-NEXT: retq
;
; AVX512F-FAST-LABEL: store_i16_stride7_vf4:
; AVX512F-FAST: # %bb.0:
; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
; AVX512F-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <0,2,4,u>
; AVX512F-FAST-NEXT: vpermi2q %ymm3, %ymm0, %ymm1
; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [0,2,4,6,0,2,4,6]
; AVX512F-FAST-NEXT: # ymm0 = mem[0,1,0,1]
; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm0
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,zero,zero,ymm0[0,1,4,5,8,9],zero,zero,zero,zero,zero,zero,zero,zero,ymm0[18,19,22,23,26,27],zero,zero,zero,zero
; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [1,3,5,7,1,3,5,7]
; AVX512F-FAST-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm1
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[0,1,4,5,8,9],zero,zero,zero,zero,zero,zero,zero,zero,ymm1[18,19,22,23,26,27],zero,zero,zero,zero,zero,zero,zero,zero
; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[0,1,8,9],zero,zero,zero,zero,ymm2[u,u,u,u,u,u,2,3],zero,zero,ymm2[18,19,26,27,u,u,u,u,u,u],zero,zero,zero,zero
; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm2[2,3,0,1]
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,zero,ymm3[0,1,8,9,u,u,u,u,u,u],zero,zero,ymm3[26,27],zero,zero,zero,zero,ymm3[u,u,u,u,u,u,20,21,28,29]
; AVX512F-FAST-NEXT: vpternlogq $168, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm3
; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <5,7,1,3,7,u,u,u>
; AVX512F-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm1
; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[0,1,4,5],zero,zero,zero,zero,zero,zero,ymm1[10,11,14,15,2,3,18,19],zero,zero,zero,zero,zero,zero,ymm1[u,u,u,u,u,u,u,u]
; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm3, %zmm1
; AVX512F-FAST-NEXT: vporq %zmm0, %zmm1, %zmm0
; AVX512F-FAST-NEXT: vextracti32x4 $2, %zmm0, 32(%rax)
; AVX512F-FAST-NEXT: vextracti32x4 $3, %zmm0, %xmm1
; AVX512F-FAST-NEXT: vmovq %xmm1, 48(%rax)
; AVX512F-FAST-NEXT: vmovdqa %ymm0, (%rax)
; AVX512F-FAST-NEXT: vzeroupper
; AVX512F-FAST-NEXT: retq
;
; AVX512BW-SLOW-LABEL: store_i16_stride7_vf4:
; AVX512BW-SLOW: # %bb.0:
; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512BW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX512BW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512BW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
; AVX512BW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,4,8,12,32,40,36,1,5,9,13,33,41,37,2,6,10,14,34,42,38,3,7,11,15,35,43,39,u,u,u,u>
; AVX512BW-SLOW-NEXT: vpermi2w %zmm0, %zmm2, %zmm1
; AVX512BW-SLOW-NEXT: vextracti32x4 $2, %zmm1, 32(%rax)
; AVX512BW-SLOW-NEXT: vextracti32x4 $3, %zmm1, %xmm0
; AVX512BW-SLOW-NEXT: vmovq %xmm0, 48(%rax)
; AVX512BW-SLOW-NEXT: vmovdqa %ymm1, (%rax)
; AVX512BW-SLOW-NEXT: vzeroupper
; AVX512BW-SLOW-NEXT: retq
;
; AVX512BW-FAST-LABEL: store_i16_stride7_vf4:
; AVX512BW-FAST: # %bb.0:
; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512BW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX512BW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <0,2,4,u>
; AVX512BW-FAST-NEXT: vpermi2q %ymm3, %ymm0, %ymm1
; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm0
; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,4,8,12,16,20,24,1,5,9,13,17,21,25,2,6,10,14,18,22,26,3,7,11,15,19,23,27,u,u,u,u>
; AVX512BW-FAST-NEXT: vpermw %zmm0, %zmm1, %zmm0
; AVX512BW-FAST-NEXT: vextracti32x4 $2, %zmm0, 32(%rax)
; AVX512BW-FAST-NEXT: vextracti32x4 $3, %zmm0, %xmm1
; AVX512BW-FAST-NEXT: vmovq %xmm1, 48(%rax)
; AVX512BW-FAST-NEXT: vmovdqa %ymm0, (%rax)
; AVX512BW-FAST-NEXT: vzeroupper
; AVX512BW-FAST-NEXT: retq
  %in.vec0 = load <4 x i16>, ptr %in.vecptr0, align 64
  %in.vec1 = load <4 x i16>, ptr %in.vecptr1, align 64
  %in.vec2 = load <4 x i16>, ptr %in.vecptr2, align 64
  %in.vec3 = load <4 x i16>, ptr %in.vecptr3, align 64
  %in.vec4 = load <4 x i16>, ptr %in.vecptr4, align 64
  %in.vec5 = load <4 x i16>, ptr %in.vecptr5, align 64
  %in.vec6 = load <4 x i16>, ptr %in.vecptr6, align 64
  %1 = shufflevector <4 x i16> %in.vec0, <4 x i16> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %2 = shufflevector <4 x i16> %in.vec2, <4 x i16> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %3 = shufflevector <4 x i16> %in.vec4, <4 x i16> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %4 = shufflevector <8 x i16> %1, <8 x i16> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %5 = shufflevector <4 x i16> %in.vec6, <4 x i16> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <8 x i16> %3, <8 x i16> %5, <12 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11>
  %7 = shufflevector <12 x i16> %6, <12 x i16> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 undef, i32 undef, i32 undef, i32 undef>
  %8 = shufflevector <16 x i16> %4, <16 x i16> %7, <28 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27>
  %interleaved.vec = shufflevector <28 x i16> %8, <28 x i16> poison, <28 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 24, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 25, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 26, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23, i32 27>
  store <28 x i16> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i16_stride7_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
; SSE-LABEL: store_i16_stride7_vf8:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movdqa (%rdi), %xmm3
; SSE-NEXT: movdqa (%rsi), %xmm8
; SSE-NEXT: movdqa (%rdx), %xmm5
; SSE-NEXT: movdqa (%rcx), %xmm11
; SSE-NEXT: movdqa (%r8), %xmm4
; SSE-NEXT: movdqa (%r9), %xmm10
; SSE-NEXT: movdqa (%rax), %xmm2
; SSE-NEXT: movdqa %xmm5, %xmm0
; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm11[4],xmm0[5],xmm11[5],xmm0[6],xmm11[6],xmm0[7],xmm11[7]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm3, %xmm6
; SSE-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4],xmm8[4],xmm6[5],xmm8[5],xmm6[6],xmm8[6],xmm6[7],xmm8[7]
; SSE-NEXT: movdqa %xmm6, %xmm1
; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm0[2,3]
; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm2[2,3,2,3]
; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,0,65535,65535,65535,65535,65535,65535]
; SSE-NEXT: movdqa %xmm4, %xmm7
; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm10[0],xmm7[1],xmm10[1],xmm7[2],xmm10[2],xmm7[3],xmm10[3]
; SSE-NEXT: movdqa %xmm10, %xmm13
; SSE-NEXT: punpckhwd {{.*#+}} xmm13 = xmm13[4],xmm4[4],xmm13[5],xmm4[5],xmm13[6],xmm4[6],xmm13[7],xmm4[7]
; SSE-NEXT: pshuflw {{.*#+}} xmm13 = xmm13[2,2,2,2,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,5,5,4]
; SSE-NEXT: pand %xmm12, %xmm13
; SSE-NEXT: pandn %xmm9, %xmm12
; SSE-NEXT: por %xmm13, %xmm12
; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,2],xmm12[0,3]
; SSE-NEXT: movdqa {{.*#+}} xmm9 = [65535,65535,65535,0,65535,65535,65535,65535]
; SSE-NEXT: pandn %xmm2, %xmm9
; SSE-NEXT: movdqa %xmm7, %xmm12
; SSE-NEXT: movdqa %xmm7, %xmm13
; SSE-NEXT: psrldq {{.*#+}} xmm13 = xmm13[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; SSE-NEXT: por %xmm9, %xmm13
; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm5[2,2,2,2]
; SSE-NEXT: movdqa {{.*#+}} xmm14 = [65535,65535,65535,65535,65535,65535,0,65535]
; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm11[0,1,2,3,4,4,4,4]
; SSE-NEXT: pand %xmm14, %xmm15
; SSE-NEXT: pandn %xmm9, %xmm14
; SSE-NEXT: movaps {{.*#+}} xmm9 = [65535,0,0,0,65535,65535,65535,65535]
; SSE-NEXT: por %xmm15, %xmm14
; SSE-NEXT: movdqa %xmm6, %xmm15
; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[0,1],xmm14[3,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm14 = xmm11[3,3,3,3,4,5,6,7]
; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm15[0,2]
; SSE-NEXT: andps %xmm9, %xmm14
; SSE-NEXT: andnps %xmm13, %xmm9
; SSE-NEXT: orps %xmm14, %xmm9
; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm2[0,1,0,1]
; SSE-NEXT: movdqa {{.*#+}} xmm14 = [65535,65535,65535,65535,65535,0,65535,65535]
; SSE-NEXT: pslldq {{.*#+}} xmm12 = zero,zero,xmm12[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
; SSE-NEXT: pand %xmm14, %xmm12
; SSE-NEXT: pandn %xmm13, %xmm14
; SSE-NEXT: por %xmm12, %xmm14
; SSE-NEXT: movdqa %xmm11, %xmm12
; SSE-NEXT: psrld $16, %xmm12
; SSE-NEXT: movdqa %xmm5, %xmm13
; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm12[0],xmm13[1],xmm12[1]
; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,0,0,65535,65535,65535,65535,65535]
; SSE-NEXT: movdqa %xmm8, %xmm15
; SSE-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm3[0],xmm15[1],xmm3[1],xmm15[2],xmm3[2],xmm15[3],xmm3[3]
; SSE-NEXT: pshuflw {{.*#+}} xmm15 = xmm15[2,2,2,2,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,4]
; SSE-NEXT: pand %xmm12, %xmm15
; SSE-NEXT: pandn %xmm13, %xmm12
; SSE-NEXT: movdqa %xmm5, %xmm13
; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm11[0],xmm5[1],xmm11[1],xmm5[2],xmm11[2],xmm5[3],xmm11[3]
; SSE-NEXT: por %xmm15, %xmm12
; SSE-NEXT: movdqa {{.*#+}} xmm15 = [65535,65535,65535,0,0,0,65535,65535]
; SSE-NEXT: pand %xmm15, %xmm12
; SSE-NEXT: pandn %xmm14, %xmm15
; SSE-NEXT: movdqa %xmm4, %xmm14
; SSE-NEXT: punpckhwd {{.*#+}} xmm14 = xmm14[4],xmm10[4],xmm14[5],xmm10[5],xmm14[6],xmm10[6],xmm14[7],xmm10[7]
; SSE-NEXT: por %xmm12, %xmm15
; SSE-NEXT: psrlq $48, %xmm11
; SSE-NEXT: punpckhqdq {{.*#+}} xmm13 = xmm13[1],xmm11[1]
; SSE-NEXT: movdqa {{.*#+}} xmm11 = [65535,65535,65535,0,0,65535,65535,65535]
; SSE-NEXT: pandn %xmm13, %xmm11
; SSE-NEXT: movdqa %xmm6, %xmm13
; SSE-NEXT: psrldq {{.*#+}} xmm13 = xmm13[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; SSE-NEXT: por %xmm11, %xmm13
; SSE-NEXT: psrld $16, %xmm10
; SSE-NEXT: movdqa %xmm4, %xmm11
; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm10[2],xmm11[3],xmm10[3]
; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,65535,65535,65535,65535,0,0,65535]
; SSE-NEXT: movdqa %xmm0, %xmm12
; SSE-NEXT: pandn %xmm11, %xmm12
; SSE-NEXT: por %xmm13, %xmm12
; SSE-NEXT: movdqa {{.*#+}} xmm10 = [0,65535,65535,65535,65535,65535,65535,0]
; SSE-NEXT: pand %xmm10, %xmm12
; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm2[3,3,3,3]
; SSE-NEXT: pandn %xmm11, %xmm10
; SSE-NEXT: por %xmm12, %xmm10
; SSE-NEXT: movdqa %xmm3, %xmm12
; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm3[1,1,1,1,4,5,6,7]
; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm8[0],xmm3[1],xmm8[1],xmm3[2],xmm8[2],xmm3[3],xmm8[3]
; SSE-NEXT: psrld $16, %xmm8
; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm8[0],xmm12[1],xmm8[1]
; SSE-NEXT: movdqa %xmm0, %xmm8
; SSE-NEXT: pandn %xmm12, %xmm8
; SSE-NEXT: pshufhw {{.*#+}} xmm12 = xmm5[0,1,2,3,4,5,6,6]
; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm12[2,1,2,3]
; SSE-NEXT: pand %xmm0, %xmm12
; SSE-NEXT: por %xmm8, %xmm12
; SSE-NEXT: movdqa {{.*#+}} xmm8 = [65535,65535,0,0,0,65535,65535,65535]
; SSE-NEXT: pand %xmm8, %xmm12
; SSE-NEXT: movdqa %xmm7, %xmm13
; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,2],xmm2[1,1]
; SSE-NEXT: pandn %xmm13, %xmm8
; SSE-NEXT: por %xmm12, %xmm8
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
; SSE-NEXT: pslldq {{.*#+}} xmm12 = zero,zero,zero,zero,zero,zero,xmm12[0,1,2,3,4,5,6,7,8,9]
; SSE-NEXT: pslldq {{.*#+}} xmm6 = zero,zero,xmm6[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
; SSE-NEXT: pand %xmm0, %xmm6
; SSE-NEXT: pandn %xmm12, %xmm0
; SSE-NEXT: por %xmm6, %xmm0
; SSE-NEXT: movaps %xmm2, %xmm6
; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,3],xmm14[0,1]
; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4,4,5,5,6,6,7,7]
; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,0],xmm4[2,1]
; SSE-NEXT: movaps {{.*#+}} xmm4 = [65535,65535,65535,0,0,0,0,65535]
; SSE-NEXT: andps %xmm4, %xmm6
; SSE-NEXT: andnps %xmm0, %xmm4
; SSE-NEXT: orps %xmm6, %xmm4
; SSE-NEXT: punpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm3[0]
; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,0],xmm11[2,1]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[0,0,1,1]
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
; SSE-NEXT: movaps {{.*#+}} xmm2 = [65535,65535,65535,65535,0,0,0,65535]
; SSE-NEXT: andps %xmm2, %xmm5
; SSE-NEXT: andnps %xmm0, %xmm2
; SSE-NEXT: orps %xmm5, %xmm2
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movaps %xmm2, (%rax)
; SSE-NEXT: movaps %xmm4, 64(%rax)
; SSE-NEXT: movdqa %xmm15, 16(%rax)
; SSE-NEXT: movdqa %xmm8, 32(%rax)
; SSE-NEXT: movaps %xmm9, 48(%rax)
; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0,1,3]
; SSE-NEXT: movaps %xmm1, 80(%rax)
; SSE-NEXT: movdqa %xmm10, 96(%rax)
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i16_stride7_vf8:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm7
; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm8
; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm2
; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm4
; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm0
; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm3
; AVX1-ONLY-NEXT: vmovdqa (%r10), %xmm1
; AVX1-ONLY-NEXT: vpsrld $16, %xmm4, %xmm5
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm5 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[2,2,2,2,4,5,6,7]
; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,5,5,4]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0],xmm5[1,2],xmm6[3,4,5,6,7]
; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm9[0,0,1,1]
; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[0,1,2,2,4,5,6,7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[0,1,2,1]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm10[0,1],xmm6[2,3],xmm10[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm5
; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm6 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
; AVX1-ONLY-NEXT: vandps %ymm6, %ymm5, %ymm5
; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm11 = zero,zero,xmm10[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm1[0,1,0,1]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2,3,4],xmm12[5],xmm11[6,7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm10[0,1,0,1]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm1[0,0,0,0]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,5],xmm13[6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm12, %ymm11
; AVX1-ONLY-NEXT: vandnps %ymm11, %ymm6, %ymm6
; AVX1-ONLY-NEXT: vorps %ymm6, %ymm5, %ymm5
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm2[2,2,2,2]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm11 = xmm4[3,3,3,3,4,5,6,7]
; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,4,4,4]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,5],xmm6[6],xmm11[7]
; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm6[0,1,0,1]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2,3],xmm12[4,5],xmm11[6,7]
; AVX1-ONLY-NEXT: vpsrld $16, %xmm8, %xmm8
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1]
; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm8 = xmm9[0,1,2,3,4,5,6,6]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[2,1,2,3]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm8[0,1,2,3,4],xmm7[5,6],xmm8[7]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm7, %ymm7
; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm8 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
; AVX1-ONLY-NEXT: vandps %ymm7, %ymm8, %ymm7
; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm9 = xmm10[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1,2],xmm1[3],xmm9[4,5,6,7]
; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm10 = xmm10[0,2],xmm1[1,3]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm10, %ymm9
; AVX1-ONLY-NEXT: vandnps %ymm9, %ymm8, %ymm8
; AVX1-ONLY-NEXT: vorps %ymm7, %ymm8, %ymm7
; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm6[2,2,3,3]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1,2,3],xmm8[4,5,6,7]
; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm8 = zero,zero,zero,zero,zero,zero,xmm8[0,1,2,3,4,5,6,7,8,9]
; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm10 = zero,zero,xmm6[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm10[0,1,2,3,4],xmm8[5,6],xmm10[7]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm8, %ymm8
; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm9 = [65535,65535,65535,0,0,0,0,65535,65535,65535,0,0,0,0,65535,65535]
; AVX1-ONLY-NEXT: vandnps %ymm8, %ymm9, %ymm8
; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[2,2,2,2,4,5,6,7]
; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,5,5,4]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm1[2,3,2,3]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0],xmm11[1],xmm10[2,3,4,5,6,7]
; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm0[4],xmm3[4],xmm0[5],xmm3[5],xmm0[6],xmm3[6],xmm0[7],xmm3[7]
; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm11 = xmm11[0,1,2,2,4,5,6,7]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[0,1,2,1]
; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm1[2,2,3,3]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1],xmm12[2,3],xmm11[4,5,6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm11, %ymm10
; AVX1-ONLY-NEXT: vandps %ymm9, %ymm10, %ymm9
; AVX1-ONLY-NEXT: vorps %ymm8, %ymm9, %ymm8
; AVX1-ONLY-NEXT: vpsrlq $48, %xmm4, %xmm4
; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm2 = xmm2[1],xmm4[1]
; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm4 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm4[0,1,2],xmm2[3,4],xmm4[5,6,7]
; AVX1-ONLY-NEXT: vpsrld $16, %xmm3, %xmm3
; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm0 = xmm0[2],xmm3[2],xmm0[3],xmm3[3]
; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,u,u,u,u,u,u,u,u,6,7,10,11,12,13]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm2[1,2,3,4],xmm0[5,6,7]
; AVX1-ONLY-NEXT: vmovdqa %xmm0, 96(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm8, 64(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm7, 32(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm5, (%rax)
; AVX1-ONLY-NEXT: vzeroupper
; AVX1-ONLY-NEXT: retq
;
; AVX2-SLOW-LABEL: store_i16_stride7_vf8:
; AVX2-SLOW: # %bb.0:
; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm2
; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm4
; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm5
; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm1
; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm3
; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm7
; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm8
; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm1, %ymm9
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm8[4,5,u,u,u,u,u,u,u,u,u,u,u,u,6,7,22,23,u,u,u,u,u,u,u,u,u,u,u,u,24,25]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm8[2,3,0,1]
; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm10 = ymm10[2,2,2,2,4,5,6,7,10,10,10,10,12,13,14,15]
; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm10[0,1,2,2,4,5,6,6]
; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0],ymm10[1],ymm6[2,3,4,5],ymm10[6],ymm6[7,8],ymm10[9],ymm6[10,11,12,13],ymm10[14],ymm6[15]
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm7[0,2,1,3]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u]
; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255>
; AVX2-SLOW-NEXT: vpblendvb %ymm11, %ymm6, %ymm10, %ymm6
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm9[0,2,0,2]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = ymm10[u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u,u,u,u,u,u,u]
; AVX2-SLOW-NEXT: vpbroadcastd 4(%r10), %ymm12
; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
; AVX2-SLOW-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm12 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm6, %ymm11, %ymm6
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm8[1,3,1,3]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm11[2,3,10,11],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm11[20,21,28,29],zero,zero,zero,zero
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm7[1,3,1,3]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm12 = zero,zero,zero,zero,zero,zero,ymm12[2,3,10,11],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[20,21,28,29],zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-SLOW-NEXT: vpor %ymm11, %ymm12, %ymm11
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[1,3,3,1]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[0,1,8,9,u,u,u,u,u,u,u,u,u,u,2,3,18,19,u,u,u,u,u,u,u,u,u,u,28,29,20,21]
; AVX2-SLOW-NEXT: vpbroadcastd 8(%r10), %ymm12
; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
; AVX2-SLOW-NEXT: vpblendvb %ymm13, %ymm9, %ymm12, %ymm9
; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm12 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm9, %ymm11, %ymm9
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,0,2]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = zero,zero,zero,zero,ymm8[0,1,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[18,19,26,27],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,0]
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[0,1,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm7[2,3,18,19],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm7[28,29,20,21]
; AVX2-SLOW-NEXT: vpor %ymm7, %ymm8, %ymm7
; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm10[u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u,u,u]
; AVX2-SLOW-NEXT: vpbroadcastd (%r10), %ymm10
; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
; AVX2-SLOW-NEXT: vpblendvb %ymm11, %ymm8, %ymm10, %ymm8
; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
; AVX2-SLOW-NEXT: vpblendvb %ymm10, %ymm7, %ymm8, %ymm7
; AVX2-SLOW-NEXT: vpsrlq $48, %xmm5, %xmm5
; AVX2-SLOW-NEXT: vpunpckhqdq {{.*#+}} xmm4 = xmm4[1],xmm5[1]
; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm2[4],xmm0[5],xmm2[5],xmm0[6],xmm2[6],xmm0[7],xmm2[7]
; AVX2-SLOW-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm4[3,4],xmm0[5,6,7]
; AVX2-SLOW-NEXT: vpsrld $16, %xmm3, %xmm2
; AVX2-SLOW-NEXT: vpunpckhdq {{.*#+}} xmm1 = xmm1[2],xmm2[2],xmm1[3],xmm2[3]
; AVX2-SLOW-NEXT: vpbroadcastd 12(%r10), %xmm2
; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4],xmm1[5,6],xmm2[7]
; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4],xmm1[5,6,7]
; AVX2-SLOW-NEXT: vmovdqa %xmm0, 96(%rax)
; AVX2-SLOW-NEXT: vmovdqa %ymm7, (%rax)
; AVX2-SLOW-NEXT: vmovdqa %ymm9, 64(%rax)
; AVX2-SLOW-NEXT: vmovdqa %ymm6, 32(%rax)
; AVX2-SLOW-NEXT: vzeroupper
; AVX2-SLOW-NEXT: retq
;
892 ; AVX2-FAST-LABEL: store_i16_stride7_vf8:
893 ; AVX2-FAST: # %bb.0:
894 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
895 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
896 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm0
897 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm2
898 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm4
899 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm5
900 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm1
901 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm3
902 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm7
903 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm8
904 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm1, %ymm9
905 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm7[0,2,1,3]
906 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[6,7,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[16,17,24,25],zero,zero,zero,zero
907 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <1,5,u,u,5,2,6,u>
908 ; AVX2-FAST-NEXT: vpermd %ymm8, %ymm10, %ymm10
909 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[0,1,4,5],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm10[2,3,18,19],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm10[20,21,24,25]
910 ; AVX2-FAST-NEXT: vpor %ymm6, %ymm10, %ymm6
911 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm9[0,2,0,2]
912 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm10[u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u,u,u,u,u,u,u]
913 ; AVX2-FAST-NEXT: vpbroadcastd 4(%r10), %ymm12
914 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
915 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
916 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
917 ; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm6, %ymm11, %ymm6
918 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm8[1,3,1,3]
919 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm11[2,3,10,11],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm11[20,21,28,29],zero,zero,zero,zero
920 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm7[1,3,1,3]
921 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = zero,zero,zero,zero,zero,zero,ymm12[2,3,10,11],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[20,21,28,29],zero,zero,zero,zero,zero,zero,zero,zero
922 ; AVX2-FAST-NEXT: vpor %ymm11, %ymm12, %ymm11
923 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[1,3,3,1]
924 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[0,1,8,9,u,u,u,u,u,u,u,u,u,u,2,3,18,19,u,u,u,u,u,u,u,u,u,u,28,29,20,21]
925 ; AVX2-FAST-NEXT: vpbroadcastd 8(%r10), %ymm12
926 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
927 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm9, %ymm12, %ymm9
928 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
929 ; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm9, %ymm11, %ymm9
930 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,0,2]
931 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = zero,zero,zero,zero,ymm8[0,1,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[18,19,26,27],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
932 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,0]
933 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[0,1,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm7[2,3,18,19],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm7[28,29,20,21]
934 ; AVX2-FAST-NEXT: vpor %ymm7, %ymm8, %ymm7
935 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm10[u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u,u,u]
936 ; AVX2-FAST-NEXT: vpbroadcastd (%r10), %ymm10
937 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
938 ; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm8, %ymm10, %ymm8
939 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
940 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm7, %ymm8, %ymm7
941 ; AVX2-FAST-NEXT: vpsrlq $48, %xmm5, %xmm5
942 ; AVX2-FAST-NEXT: vpunpckhqdq {{.*#+}} xmm4 = xmm4[1],xmm5[1]
943 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm2[4],xmm0[5],xmm2[5],xmm0[6],xmm2[6],xmm0[7],xmm2[7]
944 ; AVX2-FAST-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
945 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm4[3,4],xmm0[5,6,7]
946 ; AVX2-FAST-NEXT: vpsrld $16, %xmm3, %xmm2
947 ; AVX2-FAST-NEXT: vpunpckhdq {{.*#+}} xmm1 = xmm1[2],xmm2[2],xmm1[3],xmm2[3]
948 ; AVX2-FAST-NEXT: vpbroadcastd 12(%r10), %xmm2
949 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4],xmm1[5,6],xmm2[7]
950 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4],xmm1[5,6,7]
951 ; AVX2-FAST-NEXT: vmovdqa %xmm0, 96(%rax)
952 ; AVX2-FAST-NEXT: vmovdqa %ymm7, (%rax)
953 ; AVX2-FAST-NEXT: vmovdqa %ymm9, 64(%rax)
954 ; AVX2-FAST-NEXT: vmovdqa %ymm6, 32(%rax)
955 ; AVX2-FAST-NEXT: vzeroupper
956 ; AVX2-FAST-NEXT: retq
958 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride7_vf8:
959 ; AVX2-FAST-PERLANE: # %bb.0:
960 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
961 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r10
962 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm0
963 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm2
964 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm4
965 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm5
966 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm1
967 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm3
968 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm7
969 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm8
970 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm1, %ymm9
971 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm6 = ymm8[4,5,u,u,u,u,u,u,u,u,u,u,u,u,6,7,22,23,u,u,u,u,u,u,u,u,u,u,u,u,24,25]
972 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm8[2,3,0,1]
973 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,4,5,u,u,u,u,u,u,u,u,8,9,u,u,u,u,20,21,u,u,u,u,u,u,u,u,24,25,u,u]
974 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0],ymm10[1],ymm6[2,3,4,5],ymm10[6],ymm6[7,8],ymm10[9],ymm6[10,11,12,13],ymm10[14],ymm6[15]
975 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm7[0,2,1,3]
976 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,6,7,14,15,u,u,u,u,u,u,u,u,u,u,16,17,24,25,u,u,u,u]
977 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255>
978 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm6, %ymm10, %ymm6
979 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm9[0,2,0,2]
980 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm11 = ymm10[u,u,u,u,4,5,12,13,u,u,u,u,u,u,u,u,u,u,22,23,30,31,u,u,u,u,u,u,u,u,u,u]
981 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 4(%r10), %ymm12
982 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
983 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
984 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
985 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm6, %ymm11, %ymm6
986 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm8[1,3,1,3]
987 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm11 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm11[2,3,10,11],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm11[20,21,28,29],zero,zero,zero,zero
988 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm7[1,3,1,3]
989 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm12 = zero,zero,zero,zero,zero,zero,ymm12[2,3,10,11],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[20,21,28,29],zero,zero,zero,zero,zero,zero,zero,zero
990 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm11, %ymm12, %ymm11
991 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[1,3,3,1]
992 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[0,1,8,9,u,u,u,u,u,u,u,u,u,u,2,3,18,19,u,u,u,u,u,u,u,u,u,u,28,29,20,21]
993 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 8(%r10), %ymm12
994 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
995 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm9, %ymm12, %ymm9
996 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
997 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm9, %ymm11, %ymm9
998 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,0,2]
999 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm8 = zero,zero,zero,zero,ymm8[0,1,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[18,19,26,27],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1000 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,0]
1001 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[0,1,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm7[2,3,18,19],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm7[28,29,20,21]
1002 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm7, %ymm8, %ymm7
1003 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm8 = ymm10[u,u,u,u,u,u,u,u,0,1,8,9,u,u,u,u,u,u,u,u,u,u,18,19,26,27,u,u,u,u,u,u]
1004 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd (%r10), %ymm10
1005 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
1006 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm8, %ymm10, %ymm8
1007 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
1008 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm7, %ymm8, %ymm7
1009 ; AVX2-FAST-PERLANE-NEXT: vpsrlq $48, %xmm5, %xmm5
1010 ; AVX2-FAST-PERLANE-NEXT: vpunpckhqdq {{.*#+}} xmm4 = xmm4[1],xmm5[1]
1011 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm2[4],xmm0[5],xmm2[5],xmm0[6],xmm2[6],xmm0[7],xmm2[7]
1012 ; AVX2-FAST-PERLANE-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1013 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm4[3,4],xmm0[5,6,7]
1014 ; AVX2-FAST-PERLANE-NEXT: vpsrld $16, %xmm3, %xmm2
1015 ; AVX2-FAST-PERLANE-NEXT: vpunpckhdq {{.*#+}} xmm1 = xmm1[2],xmm2[2],xmm1[3],xmm2[3]
1016 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 12(%r10), %xmm2
1017 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4],xmm1[5,6],xmm2[7]
1018 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4],xmm1[5,6,7]
1019 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, 96(%rax)
1020 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm7, (%rax)
1021 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm9, 64(%rax)
1022 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm6, 32(%rax)
1023 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
1024 ; AVX2-FAST-PERLANE-NEXT: retq
1026 ; AVX512F-SLOW-LABEL: store_i16_stride7_vf8:
1027 ; AVX512F-SLOW: # %bb.0:
1028 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1029 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1030 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %xmm0
1031 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %xmm1
1032 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %xmm3
1033 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %xmm4
1034 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %xmm5
1035 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %xmm6
1036 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm7
1037 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm2
1038 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm5, %ymm8
1039 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm2[0,2,0,2]
1040 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = zero,zero,zero,zero,ymm9[0,1,8,9,u,u,u,u,u,u],zero,zero,zero,zero,ymm9[18,19,26,27,u,u,u,u,u,u],zero,zero,zero,zero
1041 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm7[0,2,1,3]
1042 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = zero,zero,zero,zero,ymm10[u,u,u,u,u,u,6,7,14,15],zero,zero,zero,zero,ymm10[u,u,u,u,u,u,16,17,24,25],zero,zero,zero,zero
1043 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm9, %zmm9
1044 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm2[4,5,u,u,u,u,u,u,u,u,u,u,u,u,6,7,22,23,u,u,u,u,u,u,u,u,u,u,u,u,24,25]
1045 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm2[2,3,0,1]
1046 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm11 = ymm11[2,2,2,2,4,5,6,7,10,10,10,10,12,13,14,15]
1047 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm11[0,1,2,2,4,5,6,6]
1048 ; AVX512F-SLOW-NEXT: vpblendw {{.*#+}} ymm10 = ymm10[0],ymm11[1],ymm10[2,3,4,5],ymm11[6],ymm10[7,8],ymm11[9],ymm10[10,11,12,13],ymm11[14],ymm10[15]
1049 ; AVX512F-SLOW-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm10, %ymm10
1050 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm7[0,2,2,0]
1051 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = ymm11[0,1,8,9],zero,zero,zero,zero,ymm11[u,u,u,u,u,u,2,3,18,19],zero,zero,zero,zero,ymm11[u,u,u,u,u,u,28,29,20,21]
1052 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm11, %zmm10
1053 ; AVX512F-SLOW-NEXT: vporq %zmm9, %zmm10, %zmm9
1054 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm8[0,2,0,2]
1055 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = ymm10[u,u,u,u,u,u,u,u,0,1,8,9],zero,zero,ymm10[u,u,u,u,u,u,u,u,18,19,26,27],zero,zero,ymm10[u,u,u,u]
1056 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,4,5,12,13],zero,zero,ymm10[u,u,u,u,u,u,u,u,22,23,30,31],zero,zero,ymm10[u,u,u,u,u,u,u,u]
1057 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm11, %zmm10
1058 ; AVX512F-SLOW-NEXT: vpbroadcastd (%r10), %ymm11
1059 ; AVX512F-SLOW-NEXT: vpbroadcastd 4(%r10), %ymm12
1060 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm11
1061 ; AVX512F-SLOW-NEXT: vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm11
1062 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm11
1063 ; AVX512F-SLOW-NEXT: vpsrlq $48, %xmm4, %xmm4
1064 ; AVX512F-SLOW-NEXT: vpunpckhqdq {{.*#+}} xmm3 = xmm3[1],xmm4[1]
1065 ; AVX512F-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
1066 ; AVX512F-SLOW-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1067 ; AVX512F-SLOW-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm3[3,4],xmm0[5,6,7]
1068 ; AVX512F-SLOW-NEXT: vpsrld $16, %xmm6, %xmm1
1069 ; AVX512F-SLOW-NEXT: vpunpckhdq {{.*#+}} xmm1 = xmm5[2],xmm1[2],xmm5[3],xmm1[3]
1070 ; AVX512F-SLOW-NEXT: vpbroadcastd 12(%r10), %xmm3
1071 ; AVX512F-SLOW-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2,3,4],xmm1[5,6],xmm3[7]
1072 ; AVX512F-SLOW-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4],xmm1[5,6,7]
1073 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
1074 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u],zero,zero,zero,zero,ymm1[2,3,10,11,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[20,21,28,29,u,u,u,u]
1075 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm7[1,3,1,3]
1076 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,2,3,10,11],zero,zero,zero,zero,ymm2[u,u,u,u,u,u,20,21,28,29],zero,zero,zero,zero,ymm2[u,u,u,u]
1077 ; AVX512F-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
1078 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm8[1,3,3,1]
1079 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[0,1,8,9],zero,zero,ymm2[u,u,u,u,u,u,u,u,2,3,18,19],zero,zero,ymm2[u,u,u,u,u,u,u,u,28,29,20,21]
1080 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
1081 ; AVX512F-SLOW-NEXT: vpternlogd $206, 8(%r10){1to8}, %ymm2, %ymm3
1082 ; AVX512F-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm3
1083 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm3, %zmm1
1084 ; AVX512F-SLOW-NEXT: vmovdqa %xmm0, 96(%rax)
1085 ; AVX512F-SLOW-NEXT: vmovdqa %ymm1, 64(%rax)
1086 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm11, (%rax)
1087 ; AVX512F-SLOW-NEXT: vzeroupper
1088 ; AVX512F-SLOW-NEXT: retq
1090 ; AVX512F-FAST-LABEL: store_i16_stride7_vf8:
1091 ; AVX512F-FAST: # %bb.0:
1092 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1093 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
1094 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm0
1095 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %xmm1
1096 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm2
1097 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %xmm3
1098 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %xmm4
1099 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %xmm5
1100 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm6
1101 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm7
1102 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm8
1103 ; AVX512F-FAST-NEXT: vpsrlq $48, %xmm3, %xmm3
1104 ; AVX512F-FAST-NEXT: vpunpckhqdq {{.*#+}} xmm2 = xmm2[1],xmm3[1]
1105 ; AVX512F-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
1106 ; AVX512F-FAST-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1107 ; AVX512F-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3,4],xmm0[5,6,7]
1108 ; AVX512F-FAST-NEXT: vpsrld $16, %xmm5, %xmm1
1109 ; AVX512F-FAST-NEXT: vpunpckhdq {{.*#+}} xmm1 = xmm4[2],xmm1[2],xmm4[3],xmm1[3]
1110 ; AVX512F-FAST-NEXT: vpbroadcastd 12(%r10), %xmm2
1111 ; AVX512F-FAST-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4],xmm1[5,6],xmm2[7]
1112 ; AVX512F-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4],xmm1[5,6,7]
1113 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm7[1,3,1,3]
1114 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u],zero,zero,zero,zero,ymm1[2,3,10,11,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[20,21,28,29,u,u,u,u]
1115 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm6[1,3,1,3]
1116 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,2,3,10,11],zero,zero,zero,zero,ymm2[u,u,u,u,u,u,20,21,28,29],zero,zero,zero,zero,ymm2[u,u,u,u]
1117 ; AVX512F-FAST-NEXT: vpor %ymm1, %ymm2, %ymm1
1118 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm8[1,3,3,1]
1119 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[0,1,8,9],zero,zero,ymm2[u,u,u,u,u,u,u,u,2,3,18,19],zero,zero,ymm2[u,u,u,u,u,u,u,u,28,29,20,21]
1120 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
1121 ; AVX512F-FAST-NEXT: vpternlogd $206, 8(%r10){1to8}, %ymm2, %ymm3
1122 ; AVX512F-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm3
1123 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm3, %zmm1
1124 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm7[0,2,0,2]
1125 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,zero,zero,ymm2[0,1,8,9,u,u,u,u,u,u],zero,zero,zero,zero,ymm2[18,19,26,27,u,u,u,u,u,u],zero,zero,zero,zero
1126 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm6[0,2,1,3]
1127 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,zero,ymm3[u,u,u,u,u,u,6,7,14,15],zero,zero,zero,zero,ymm3[u,u,u,u,u,u,16,17,24,25],zero,zero,zero,zero
1128 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
1129 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm6[0,2,2,0]
1130 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[0,1,8,9],zero,zero,zero,zero,ymm3[u,u,u,u,u,u,2,3,18,19],zero,zero,zero,zero,ymm3[u,u,u,u,u,u,28,29,20,21]
1131 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <1,5,u,u,5,2,6,u>
1132 ; AVX512F-FAST-NEXT: vpermd %ymm7, %ymm4, %ymm4
1133 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,1,4,5,u,u,u,u,u,u],zero,zero,zero,zero,ymm4[2,3,18,19,u,u,u,u,u,u],zero,zero,zero,zero,ymm4[20,21,24,25]
1134 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
1135 ; AVX512F-FAST-NEXT: vporq %zmm2, %zmm3, %zmm2
1136 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm8[0,2,0,2]
1137 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm3[u,u,u,u,u,u,u,u,0,1,8,9],zero,zero,ymm3[u,u,u,u,u,u,u,u,18,19,26,27],zero,zero,ymm3[u,u,u,u]
1138 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,4,5,12,13],zero,zero,ymm3[u,u,u,u,u,u,u,u,22,23,30,31],zero,zero,ymm3[u,u,u,u,u,u,u,u]
1139 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
1140 ; AVX512F-FAST-NEXT: vpbroadcastd (%r10), %ymm4
1141 ; AVX512F-FAST-NEXT: vpbroadcastd 4(%r10), %ymm5
1142 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
1143 ; AVX512F-FAST-NEXT: vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm4
1144 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm4
1145 ; AVX512F-FAST-NEXT: vmovdqa %xmm0, 96(%rax)
1146 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm4, (%rax)
1147 ; AVX512F-FAST-NEXT: vmovdqa %ymm1, 64(%rax)
1148 ; AVX512F-FAST-NEXT: vzeroupper
1149 ; AVX512F-FAST-NEXT: retq
1151 ; AVX512BW-LABEL: store_i16_stride7_vf8:
1152 ; AVX512BW: # %bb.0:
1153 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1154 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1155 ; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
1156 ; AVX512BW-NEXT: vmovdqa (%rdx), %xmm1
1157 ; AVX512BW-NEXT: vmovdqa (%r8), %xmm2
1158 ; AVX512BW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
1159 ; AVX512BW-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
1160 ; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
1161 ; AVX512BW-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm1
1162 ; AVX512BW-NEXT: vinserti32x4 $2, (%r10), %zmm1, %zmm1
1163 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,8,16,24,32,40,48,1,9,17,25,33,41,49,2,10,18,26,34,42,50,3,11,19,27,35,43,51,4,12,20,28]
1164 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
1165 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <36,44,52,5,13,21,29,37,45,53,6,14,22,30,38,46,54,7,15,23,31,39,47,55,u,u,u,u,u,u,u,u>
1166 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm3
1167 ; AVX512BW-NEXT: vextracti32x4 $2, %zmm3, 96(%rax)
1168 ; AVX512BW-NEXT: vmovdqa64 %zmm2, (%rax)
1169 ; AVX512BW-NEXT: vmovdqa %ymm3, 64(%rax)
1170 ; AVX512BW-NEXT: vzeroupper
1171 ; AVX512BW-NEXT: retq
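; The IR below concatenates the seven <8 x i16> inputs into a single
; <56 x i16> vector and applies the stride-7 interleave mask, so element i
; of input vector j lands at output position 7*i + j of the wide store.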
1172 %in.vec0 = load <8 x i16>, ptr %in.vecptr0, align 64
1173 %in.vec1 = load <8 x i16>, ptr %in.vecptr1, align 64
1174 %in.vec2 = load <8 x i16>, ptr %in.vecptr2, align 64
1175 %in.vec3 = load <8 x i16>, ptr %in.vecptr3, align 64
1176 %in.vec4 = load <8 x i16>, ptr %in.vecptr4, align 64
1177 %in.vec5 = load <8 x i16>, ptr %in.vecptr5, align 64
1178 %in.vec6 = load <8 x i16>, ptr %in.vecptr6, align 64
1179 %1 = shufflevector <8 x i16> %in.vec0, <8 x i16> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1180 %2 = shufflevector <8 x i16> %in.vec2, <8 x i16> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1181 %3 = shufflevector <8 x i16> %in.vec4, <8 x i16> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1182 %4 = shufflevector <16 x i16> %1, <16 x i16> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1183 %5 = shufflevector <8 x i16> %in.vec6, <8 x i16> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1184 %6 = shufflevector <16 x i16> %3, <16 x i16> %5, <24 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23>
1185 %7 = shufflevector <24 x i16> %6, <24 x i16> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1186 %8 = shufflevector <32 x i16> %4, <32 x i16> %7, <56 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55>
1187 %interleaved.vec = shufflevector <56 x i16> %8, <56 x i16> poison, <56 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 50, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 51, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 52, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 53, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 54, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47, i32 55>
1188 store <56 x i16> %interleaved.vec, ptr %out.vec, align 64
1189 ret void
1190 }
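; The vf16 variant below repeats the same stride-7 interleaving, presumably
; with <16 x i16> inputs feeding a single <112 x i16> store.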
1192 define void @store_i16_stride7_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
1193 ; SSE-LABEL: store_i16_stride7_vf16:
1194 ; SSE: # %bb.0:
1195 ; SSE-NEXT: subq $216, %rsp
1196 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1197 ; SSE-NEXT: movdqa 16(%rdi), %xmm6
1198 ; SSE-NEXT: movdqa 16(%rsi), %xmm4
1199 ; SSE-NEXT: movdqa 16(%rdx), %xmm15
1200 ; SSE-NEXT: movdqa 16(%rcx), %xmm1
1201 ; SSE-NEXT: movdqa 16(%r8), %xmm8
1202 ; SSE-NEXT: movdqa 16(%r9), %xmm7
1203 ; SSE-NEXT: movdqa 16(%rax), %xmm3
1204 ; SSE-NEXT: movdqa %xmm1, %xmm0
1205 ; SSE-NEXT: movdqa %xmm1, %xmm5
1206 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1207 ; SSE-NEXT: psrlq $48, %xmm0
1208 ; SSE-NEXT: movdqa %xmm15, %xmm1
1209 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm1 = xmm1[1],xmm0[1]
1210 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,65535,65535,0,0,65535,65535,65535]
1211 ; SSE-NEXT: pandn %xmm1, %xmm2
1212 ; SSE-NEXT: movdqa %xmm6, %xmm0
1213 ; SSE-NEXT: movdqa %xmm6, %xmm10
1214 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1215 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm4[4],xmm0[5],xmm4[5],xmm0[6],xmm4[6],xmm0[7],xmm4[7]
1216 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1217 ; SSE-NEXT: movdqa %xmm4, %xmm9
1218 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1219 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1220 ; SSE-NEXT: por %xmm2, %xmm0
1221 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1222 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm8[2,2,3,3]
1223 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,65535,65535,65535,65535,0,65535,65535]
1224 ; SSE-NEXT: movdqa %xmm4, %xmm2
1225 ; SSE-NEXT: pandn %xmm1, %xmm2
1226 ; SSE-NEXT: por %xmm0, %xmm2
1227 ; SSE-NEXT: movdqa %xmm8, %xmm1
1228 ; SSE-NEXT: movdqa %xmm7, %xmm6
1229 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1230 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
1231 ; SSE-NEXT: movdqa %xmm1, %xmm7
1232 ; SSE-NEXT: movdqa %xmm6, %xmm1
1233 ; SSE-NEXT: psrld $16, %xmm1
1234 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,0],xmm2[2,0]
1235 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,0]
1236 ; SSE-NEXT: movaps {{.*#+}} xmm6 = [0,65535,65535,65535,65535,65535,65535,0]
1237 ; SSE-NEXT: andps %xmm6, %xmm0
1238 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1239 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[3,3,3,3]
1240 ; SSE-NEXT: andnps %xmm1, %xmm6
1241 ; SSE-NEXT: orps %xmm0, %xmm6
1242 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1243 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm3[0,1,0,1]
1244 ; SSE-NEXT: movdqa %xmm4, %xmm1
1245 ; SSE-NEXT: pandn %xmm0, %xmm1
1246 ; SSE-NEXT: movdqa %xmm7, %xmm0
1247 ; SSE-NEXT: movdqa %xmm7, %xmm8
1248 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1249 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1250 ; SSE-NEXT: pand %xmm4, %xmm0
1251 ; SSE-NEXT: por %xmm1, %xmm0
1252 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [65535,65535,65535,0,0,0,65535,65535]
1253 ; SSE-NEXT: movdqa %xmm7, %xmm1
1254 ; SSE-NEXT: pandn %xmm0, %xmm1
1255 ; SSE-NEXT: movdqa %xmm5, %xmm0
1256 ; SSE-NEXT: psrld $16, %xmm0
1257 ; SSE-NEXT: movdqa %xmm15, %xmm3
1258 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
1259 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,0,65535,65535,65535,65535,65535]
1260 ; SSE-NEXT: movdqa %xmm0, %xmm6
1261 ; SSE-NEXT: pandn %xmm3, %xmm6
1262 ; SSE-NEXT: movdqa %xmm9, %xmm3
1263 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm10[0],xmm3[1],xmm10[1],xmm3[2],xmm10[2],xmm3[3],xmm10[3]
1264 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[2,2,2,2,4,5,6,7]
1265 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,4]
1266 ; SSE-NEXT: pand %xmm0, %xmm3
1267 ; SSE-NEXT: por %xmm6, %xmm3
1268 ; SSE-NEXT: pand %xmm7, %xmm3
1269 ; SSE-NEXT: por %xmm1, %xmm3
1270 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1271 ; SSE-NEXT: movdqa (%rax), %xmm3
1272 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[2,3,2,3]
1273 ; SSE-NEXT: movdqa %xmm3, %xmm9
1274 ; SSE-NEXT: movdqa {{.*#+}} xmm11 = [65535,0,65535,65535,65535,65535,65535,65535]
1275 ; SSE-NEXT: movdqa %xmm11, %xmm3
1276 ; SSE-NEXT: pandn %xmm1, %xmm3
1277 ; SSE-NEXT: movdqa (%r8), %xmm2
1278 ; SSE-NEXT: movdqa (%r9), %xmm10
1279 ; SSE-NEXT: movdqa %xmm10, %xmm1
1280 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1281 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
1282 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1283 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,2,2,2,4,5,6,7]
1284 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm1[0,1,2,3,4,5,5,4]
1285 ; SSE-NEXT: pand %xmm11, %xmm6
1286 ; SSE-NEXT: por %xmm3, %xmm6
1287 ; SSE-NEXT: movdqa (%rdx), %xmm13
1288 ; SSE-NEXT: movdqa (%rcx), %xmm5
1289 ; SSE-NEXT: movdqa %xmm13, %xmm14
1290 ; SSE-NEXT: punpckhwd {{.*#+}} xmm14 = xmm14[4],xmm5[4],xmm14[5],xmm5[5],xmm14[6],xmm5[6],xmm14[7],xmm5[7]
1291 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1292 ; SSE-NEXT: movdqa (%rdi), %xmm11
1293 ; SSE-NEXT: movdqa (%rsi), %xmm1
1294 ; SSE-NEXT: movdqa %xmm11, %xmm12
1295 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1296 ; SSE-NEXT: punpckhwd {{.*#+}} xmm12 = xmm12[4],xmm1[4],xmm12[5],xmm1[5],xmm12[6],xmm1[6],xmm12[7],xmm1[7]
1297 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1298 ; SSE-NEXT: movdqa %xmm12, %xmm3
1299 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[2,2],xmm14[2,3]
1300 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,2],xmm6[0,3]
1301 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1302 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm10[0],xmm2[1],xmm10[1],xmm2[2],xmm10[2],xmm2[3],xmm10[3]
1303 ; SSE-NEXT: movdqa %xmm2, (%rsp) # 16-byte Spill
1304 ; SSE-NEXT: pslldq {{.*#+}} xmm2 = zero,zero,xmm2[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1305 ; SSE-NEXT: pand %xmm4, %xmm2
1306 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm9[0,1,0,1]
1307 ; SSE-NEXT: movdqa %xmm9, %xmm14
1308 ; SSE-NEXT: pandn %xmm6, %xmm4
1309 ; SSE-NEXT: por %xmm2, %xmm4
1310 ; SSE-NEXT: movdqa %xmm5, %xmm3
1311 ; SSE-NEXT: movdqa %xmm5, %xmm2
1312 ; SSE-NEXT: psrld $16, %xmm3
1313 ; SSE-NEXT: movdqa %xmm13, %xmm6
1314 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm3[0],xmm6[1],xmm3[1]
1315 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm11[0],xmm1[1],xmm11[1],xmm1[2],xmm11[2],xmm1[3],xmm11[3]
1316 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm1[2,2,2,2,4,5,6,7]
1317 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,4]
1318 ; SSE-NEXT: pand %xmm0, %xmm3
1319 ; SSE-NEXT: pandn %xmm6, %xmm0
1320 ; SSE-NEXT: por %xmm3, %xmm0
1321 ; SSE-NEXT: pand %xmm7, %xmm0
1322 ; SSE-NEXT: pandn %xmm4, %xmm7
1323 ; SSE-NEXT: por %xmm0, %xmm7
1324 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1325 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [65535,65535,65535,0,65535,65535,65535,65535]
1326 ; SSE-NEXT: movdqa %xmm7, %xmm0
1327 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1328 ; SSE-NEXT: movdqa %xmm8, %xmm3
1329 ; SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1330 ; SSE-NEXT: por %xmm0, %xmm3
1331 ; SSE-NEXT: movdqa {{.*#+}} xmm8 = [65535,0,0,0,65535,65535,65535,65535]
1332 ; SSE-NEXT: movdqa %xmm8, %xmm6
1333 ; SSE-NEXT: pandn %xmm3, %xmm6
1334 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm15[2,2,2,2]
1335 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,65535,65535,65535,65535,65535,0,65535]
1336 ; SSE-NEXT: movdqa %xmm0, %xmm4
1337 ; SSE-NEXT: pandn %xmm3, %xmm4
1338 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
1339 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm5[0,1,2,3,4,4,4,4]
1340 ; SSE-NEXT: pand %xmm0, %xmm3
1341 ; SSE-NEXT: por %xmm4, %xmm3
1342 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
1343 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm3[3,3]
1344 ; SSE-NEXT: movdqa %xmm15, %xmm10
1345 ; SSE-NEXT: punpckhwd {{.*#+}} xmm10 = xmm10[4],xmm5[4],xmm10[5],xmm5[5],xmm10[6],xmm5[6],xmm10[7],xmm5[7]
1346 ; SSE-NEXT: movdqa %xmm5, %xmm1
1347 ; SSE-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm5[0],xmm15[1],xmm5[1],xmm15[2],xmm5[2],xmm15[3],xmm5[3]
1348 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1349 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[3,3,3,3,4,5,6,7]
1350 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,1],xmm9[0,2]
1351 ; SSE-NEXT: andps %xmm8, %xmm1
1352 ; SSE-NEXT: orps %xmm6, %xmm1
1353 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1354 ; SSE-NEXT: movdqa %xmm14, %xmm11
1355 ; SSE-NEXT: pandn %xmm14, %xmm7
1356 ; SSE-NEXT: movdqa (%rsp), %xmm3 # 16-byte Reload
1357 ; SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1358 ; SSE-NEXT: por %xmm7, %xmm3
1359 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm2[0,1,2,3,4,4,4,4]
1360 ; SSE-NEXT: pand %xmm0, %xmm4
1361 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm13[2,2,2,2]
1362 ; SSE-NEXT: pandn %xmm5, %xmm0
1363 ; SSE-NEXT: por %xmm4, %xmm0
1364 ; SSE-NEXT: movdqa %xmm12, %xmm4
1365 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm0[3,3]
1366 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[3,3,3,3,4,5,6,7]
1367 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm4[0,2]
1368 ; SSE-NEXT: andps %xmm8, %xmm0
1369 ; SSE-NEXT: pandn %xmm3, %xmm8
1370 ; SSE-NEXT: por %xmm0, %xmm8
1371 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1372 ; SSE-NEXT: movdqa %xmm13, %xmm0
1373 ; SSE-NEXT: punpcklwd {{.*#+}} xmm13 = xmm13[0],xmm2[0],xmm13[1],xmm2[1],xmm13[2],xmm2[2],xmm13[3],xmm2[3]
1374 ; SSE-NEXT: psrlq $48, %xmm2
1375 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm2[1]
1376 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,0,0,65535,65535,65535]
1377 ; SSE-NEXT: pandn %xmm0, %xmm1
1378 ; SSE-NEXT: movdqa %xmm12, %xmm0
1379 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1380 ; SSE-NEXT: por %xmm1, %xmm0
1381 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
1382 ; SSE-NEXT: movdqa %xmm3, %xmm2
1383 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1384 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
1385 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1386 ; SSE-NEXT: psrld $16, %xmm1
1387 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
1388 ; SSE-NEXT: movdqa %xmm14, %xmm1
1389 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm3[4],xmm1[5],xmm3[5],xmm1[6],xmm3[6],xmm1[7],xmm3[7]
1390 ; SSE-NEXT: movdqa {{.*#+}} xmm8 = [0,65535,65535,65535,65535,0,0,0]
1391 ; SSE-NEXT: pand %xmm8, %xmm0
1392 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,4,7]
1393 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,1]
1394 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,5,4]
1395 ; SSE-NEXT: pandn %xmm1, %xmm8
1396 ; SSE-NEXT: por %xmm0, %xmm8
1397 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
1398 ; SSE-NEXT: movaps %xmm4, %xmm0
1399 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,0],xmm10[2,0]
1400 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1401 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm0[2,0]
1402 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1403 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm0[4],xmm4[5],xmm0[5],xmm4[6],xmm0[6],xmm4[7],xmm0[7]
1404 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,5,6,6,7]
1405 ; SSE-NEXT: movaps {{.*#+}} xmm2 = [0,65535,65535,65535,65535,65535,65535,0]
1406 ; SSE-NEXT: andps %xmm2, %xmm1
1407 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
1408 ; SSE-NEXT: andnps %xmm0, %xmm2
1409 ; SSE-NEXT: orps %xmm1, %xmm2
1410 ; SSE-NEXT: movaps {{.*#+}} xmm1 = [65535,0,65535,65535,65535,65535,65535,65535]
1411 ; SSE-NEXT: andps %xmm1, %xmm2
1412 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
1413 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[2,3,2,3]
1414 ; SSE-NEXT: andnps %xmm0, %xmm1
1415 ; SSE-NEXT: orps %xmm2, %xmm1
1416 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1417 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1418 ; SSE-NEXT: movdqa %xmm0, %xmm1
1419 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm0[1,1,1,1,4,5,6,7]
1420 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1421 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1422 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
1423 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1424 ; SSE-NEXT: movdqa %xmm2, %xmm0
1425 ; SSE-NEXT: psrld $16, %xmm0
1426 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1427 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,65535,65535,65535,65535,0,0,65535]
1428 ; SSE-NEXT: movdqa %xmm0, %xmm2
1429 ; SSE-NEXT: pandn %xmm1, %xmm2
1430 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm15[0,1,2,3,4,5,6,6]
1431 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
1432 ; SSE-NEXT: pand %xmm0, %xmm1
1433 ; SSE-NEXT: por %xmm2, %xmm1
1434 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
1435 ; SSE-NEXT: movaps %xmm14, %xmm5
1436 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,2],xmm7[1,1]
1437 ; SSE-NEXT: movaps {{.*#+}} xmm6 = [65535,65535,0,0,0,65535,65535,65535]
1438 ; SSE-NEXT: movaps %xmm6, %xmm3
1439 ; SSE-NEXT: andnps %xmm5, %xmm3
1440 ; SSE-NEXT: pand %xmm6, %xmm1
1441 ; SSE-NEXT: orps %xmm1, %xmm3
1442 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
1443 ; SSE-NEXT: movdqa %xmm15, %xmm1
1444 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm15[1,1,1,1,4,5,6,7]
1445 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1446 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
1447 ; SSE-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
1448 ; SSE-NEXT: psrld $16, %xmm9
1449 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm9[0],xmm1[1],xmm9[1]
1450 ; SSE-NEXT: movdqa %xmm0, %xmm9
1451 ; SSE-NEXT: pandn %xmm1, %xmm9
1452 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm13[0,1,2,3,4,5,6,6]
1453 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
1454 ; SSE-NEXT: pand %xmm0, %xmm1
1455 ; SSE-NEXT: por %xmm9, %xmm1
1456 ; SSE-NEXT: movaps (%rsp), %xmm5 # 16-byte Reload
1457 ; SSE-NEXT: movaps %xmm5, %xmm9
1458 ; SSE-NEXT: movdqa %xmm11, %xmm2
1459 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,2],xmm11[1,1]
1460 ; SSE-NEXT: pand %xmm6, %xmm1
1461 ; SSE-NEXT: andnps %xmm9, %xmm6
1462 ; SSE-NEXT: orps %xmm1, %xmm6
1463 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
1464 ; SSE-NEXT: pslldq {{.*#+}} xmm9 = zero,zero,zero,zero,zero,zero,xmm9[0,1,2,3,4,5,6,7,8,9]
1465 ; SSE-NEXT: movdqa %xmm0, %xmm1
1466 ; SSE-NEXT: pandn %xmm9, %xmm1
1467 ; SSE-NEXT: pslldq {{.*#+}} xmm12 = zero,zero,xmm12[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1468 ; SSE-NEXT: pand %xmm0, %xmm12
1469 ; SSE-NEXT: por %xmm1, %xmm12
1470 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,0,0,0,0,65535]
1471 ; SSE-NEXT: movdqa %xmm1, %xmm9
1472 ; SSE-NEXT: pandn %xmm12, %xmm9
1473 ; SSE-NEXT: shufpd $1, {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
1474 ; SSE-NEXT: # xmm11 = xmm11[1],mem[0]
1475 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
1476 ; SSE-NEXT: punpckhwd {{.*#+}} xmm12 = xmm12[4,4,5,5,6,6,7,7]
1477 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0],xmm12[2,1]
1478 ; SSE-NEXT: andps %xmm1, %xmm11
1479 ; SSE-NEXT: orps %xmm9, %xmm11
1480 ; SSE-NEXT: pslldq {{.*#+}} xmm10 = zero,zero,zero,zero,zero,zero,xmm10[0,1,2,3,4,5,6,7,8,9]
1481 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
1482 ; SSE-NEXT: pslldq {{.*#+}} xmm9 = zero,zero,xmm9[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1483 ; SSE-NEXT: pand %xmm0, %xmm9
1484 ; SSE-NEXT: pandn %xmm10, %xmm0
1485 ; SSE-NEXT: por %xmm9, %xmm0
1486 ; SSE-NEXT: movdqa %xmm7, %xmm9
1487 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,3],xmm4[0,1]
1488 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
1489 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4,4,5,5,6,6,7,7]
1490 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,0],xmm4[2,1]
1491 ; SSE-NEXT: andps %xmm1, %xmm7
1492 ; SSE-NEXT: pandn %xmm0, %xmm1
1493 ; SSE-NEXT: por %xmm7, %xmm1
1494 ; SSE-NEXT: punpcklqdq {{.*#+}} xmm13 = xmm13[0],xmm15[0]
1495 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
1496 ; SSE-NEXT: # xmm13 = xmm13[2,0],mem[2,1]
1497 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[0,0,1,1]
1498 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[0,0,1,1]
1499 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
1500 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,65535,65535,65535,0,0,0,65535]
1501 ; SSE-NEXT: movdqa %xmm4, %xmm5
1502 ; SSE-NEXT: pandn %xmm0, %xmm5
1503 ; SSE-NEXT: andps %xmm4, %xmm13
1504 ; SSE-NEXT: por %xmm13, %xmm5
1505 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1506 ; SSE-NEXT: unpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1507 ; SSE-NEXT: # xmm0 = xmm0[0],mem[0]
1508 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1509 ; SSE-NEXT: # xmm0 = xmm0[2,0],mem[2,1]
1510 ; SSE-NEXT: movaps %xmm0, %xmm2
1511 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[0,0,1,1]
1512 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm9[0,0,1,1]
1513 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm7[0],xmm0[1],xmm7[1]
1514 ; SSE-NEXT: andps %xmm4, %xmm2
1515 ; SSE-NEXT: pandn %xmm0, %xmm4
1516 ; SSE-NEXT: por %xmm2, %xmm4
1517 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1518 ; SSE-NEXT: movdqa %xmm4, 112(%rax)
1519 ; SSE-NEXT: movdqa %xmm5, (%rax)
1520 ; SSE-NEXT: movdqa %xmm1, 176(%rax)
1521 ; SSE-NEXT: movaps %xmm11, 64(%rax)
1522 ; SSE-NEXT: movaps %xmm6, 32(%rax)
1523 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1524 ; SSE-NEXT: movaps %xmm0, 48(%rax)
1525 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1526 ; SSE-NEXT: movaps %xmm0, 160(%rax)
1527 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1528 ; SSE-NEXT: movaps %xmm0, 16(%rax)
1529 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1530 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
1531 ; SSE-NEXT: movaps %xmm0, 80(%rax)
1532 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1533 ; SSE-NEXT: movaps %xmm0, 128(%rax)
1534 ; SSE-NEXT: movaps %xmm3, 144(%rax)
1535 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1536 ; SSE-NEXT: movaps %xmm0, 192(%rax)
1537 ; SSE-NEXT: movdqa %xmm8, 96(%rax)
1538 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1539 ; SSE-NEXT: movaps %xmm0, 208(%rax)
1540 ; SSE-NEXT: addq $216, %rsp
1541 ; SSE-NEXT: retq
1543 ; AVX1-ONLY-LABEL: store_i16_stride7_vf16:
1544 ; AVX1-ONLY: # %bb.0:
1545 ; AVX1-ONLY-NEXT: subq $40, %rsp
1546 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1547 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm5
1548 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm14
1549 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm7
1550 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm15
1551 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm15, %xmm0
1552 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm0 = xmm14[1],xmm0[1]
1553 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm14[4],xmm15[4],xmm14[5],xmm15[5],xmm14[6],xmm15[6],xmm14[7],xmm15[7]
1554 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm2
1555 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm0 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
1556 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm0, %ymm2
1557 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm3
1558 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1559 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm4
1560 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1561 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
1562 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm9[2,2,3,3]
1563 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm6 = xmm9[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1564 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm3, %ymm3
1565 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm3, %ymm3
1566 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm3, %ymm2
1567 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm6 = [0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535]
1568 ; AVX1-ONLY-NEXT: vandps %ymm6, %ymm2, %ymm2
1569 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm3
1570 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm8
1571 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm8, %xmm10
1572 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm10 = xmm3[2],xmm10[2],xmm3[3],xmm10[3]
1573 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm8[4],xmm3[4],xmm8[5],xmm3[5],xmm8[6],xmm3[6],xmm8[7],xmm3[7]
1574 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[2,2,2,2,4,5,6,7]
1575 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,5,5,4]
1576 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm10
1577 ; AVX1-ONLY-NEXT: vandnps %ymm10, %ymm6, %ymm6
1578 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm2, %ymm2
1579 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm10
1580 ; AVX1-ONLY-NEXT: vmovdqa 16(%rax), %xmm13
1581 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm13[3,3,3,3]
1582 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm12[0],xmm10[1,2,3,4,5,6],xmm12[7]
1583 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1584 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,3,2,3]
1585 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm10[1],xmm2[2,3,4,5,6,7]
1586 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1587 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm9[0,1,0,1]
1588 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm9 = zero,zero,xmm9[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1589 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm2, %ymm9
1590 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm14[2,2,2,2]
1591 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm10 = xmm15[3,3,3,3,4,5,6,7]
1592 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,4,4,4]
1593 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,5],xmm2[6],xmm10[7]
1594 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm6
1595 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9]
1596 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm10, %ymm10
1597 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm2
1598 ; AVX1-ONLY-NEXT: vandnps %ymm9, %ymm0, %ymm9
1599 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm10, %ymm0
1600 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm9, %ymm0
1601 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm3[4],xmm8[4],xmm3[5],xmm8[5],xmm3[6],xmm8[6],xmm3[7],xmm8[7]
1602 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm9 = xmm9[0,1,2,2,4,5,6,7]
1603 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[0,1,2,1]
1604 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,2,3,3]
1605 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1],xmm10[2,3],xmm9[4,5,6,7]
1606 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm10 = [65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0]
1607 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm10, %ymm0
1608 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm3[0],xmm8[0],xmm3[1],xmm8[1],xmm3[2],xmm8[2],xmm3[3],xmm8[3]
1609 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm3 = xmm8[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1610 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm13[3],xmm3[4,5,6,7]
1611 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm3, %ymm3
1612 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm10, %ymm3
1613 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm0, %ymm0
1614 ; AVX1-ONLY-NEXT: vmovups %ymm0, (%rsp) # 32-byte Spill
1615 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm6, %xmm0
1616 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
1617 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm2[4],xmm6[4],xmm2[5],xmm6[5],xmm2[6],xmm6[6],xmm2[7],xmm6[7]
1618 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm9[0,1,0,1]
1619 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
1620 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1621 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm7[3,3,3,3,4,5,6,7]
1622 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
1623 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1624 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm5[2,2,2,2]
1625 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm3[0,1,2,3,4,5],xmm10[6],xmm3[7]
1626 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm7[0],xmm5[1],xmm7[1],xmm5[2],xmm7[2],xmm5[3],xmm7[3]
1627 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm12 = xmm3[0,1,2,3,4,5,6,6]
1628 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,1,2,3]
1629 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm10
1630 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm1 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535]
1631 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm1, %ymm0
1632 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm10, %ymm10
1633 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm10, %ymm1
1634 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm10
1635 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm5
1636 ; AVX1-ONLY-NEXT: vmovdqa (%rax), %xmm12
1637 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm10[0],xmm5[0],xmm10[1],xmm5[1],xmm10[2],xmm5[2],xmm10[3],xmm5[3]
1638 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm11 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1639 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2],xmm12[3],xmm11[4,5,6,7]
1640 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm4[0,2],xmm12[1,3]
1641 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm7, %ymm7
1642 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm11 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
1643 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm11, %ymm1
1644 ; AVX1-ONLY-NEXT: vandnps %ymm7, %ymm11, %ymm7
1645 ; AVX1-ONLY-NEXT: vorps %ymm7, %ymm1, %ymm0
1646 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1647 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm15, %xmm1
1648 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm14[0],xmm1[0],xmm14[1],xmm1[1]
1649 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
1650 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm7 = xmm15[0,1,2,3,4,5,6,6]
1651 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,1,2,3]
1652 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm1, %ymm1
1653 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
1654 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm11, %xmm7
1655 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1656 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm7 = xmm0[0],xmm7[0],xmm0[1],xmm7[1]
1657 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm0[0],xmm11[1],xmm0[1],xmm11[2],xmm0[2],xmm11[3],xmm0[3]
1658 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm11 = xmm11[2,2,2,2,4,5,6,7]
1659 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,5,5,4]
1660 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm11, %ymm7
1661 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm11 = [65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0]
1662 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm11, %ymm1
1663 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm11, %ymm7
1664 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm7, %ymm1
1665 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm7 = zero,zero,xmm8[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1666 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm13[0,1,0,1]
1667 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2,3,4],xmm11[5],xmm7[6,7]
1668 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm11 = xmm8[0,2],xmm13[1,3]
1669 ; AVX1-ONLY-NEXT: vmovaps %xmm8, %xmm14
1670 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm7, %ymm7
1671 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm11 = [65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535]
1672 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm11, %ymm1
1673 ; AVX1-ONLY-NEXT: vandnps %ymm7, %ymm11, %ymm7
1674 ; AVX1-ONLY-NEXT: vorps %ymm7, %ymm1, %ymm7
1675 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
1676 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm11, %xmm1
1677 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
1678 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm8[0],xmm1[0],xmm8[1],xmm1[1]
1679 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,0,1,1]
1680 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm1
1681 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm6[0],xmm2[1],xmm6[1],xmm2[2],xmm6[2],xmm2[3],xmm6[3]
1682 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
1683 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,1,2,2,4,5,6,7]
1684 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
1685 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,2,2,2,4,5,6,7]
1686 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,4]
1687 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
1688 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
1689 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm3, %ymm1
1690 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm2, %ymm2
1691 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm2, %ymm1
1692 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm2 = zero,zero,xmm4[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1693 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm12[0,1,0,1]
1694 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4],xmm3[5],xmm2[6,7]
1695 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm4[0,1,0,1]
1696 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm12[0,0,0,0]
1697 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1,2],xmm3[3]
1698 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
1699 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
1700 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm1, %ymm1
1701 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm2, %ymm0
1702 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
1703 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm8[4],xmm11[4],xmm8[5],xmm11[5],xmm8[6],xmm11[6],xmm8[7],xmm11[7]
1704 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm2 = zero,zero,zero,zero,zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9]
1705 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
1706 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm2 = zero,zero,xmm9[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1707 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm9[2,2,3,3]
1708 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
1709 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535]
1710 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm3, %ymm1
1711 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm2, %ymm2
1712 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm2, %ymm1
1713 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm5[4],xmm10[4],xmm5[5],xmm10[5],xmm5[6],xmm10[6],xmm5[7],xmm10[7]
1714 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,2,2,2,4,5,6,7]
1715 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,4]
1716 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm12[2,3,2,3]
1717 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1],xmm2[2,3,4,5,6,7]
1718 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm10[4],xmm5[4],xmm10[5],xmm5[5],xmm10[6],xmm5[6],xmm10[7],xmm5[7]
1719 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,1,2,2,4,5,6,7]
1720 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
1721 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm12[2,2,3,3]
1722 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3],xmm3[4,5,6,7]
1723 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
1724 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [65535,65535,65535,0,0,0,0,65535,65535,65535,0,0,0,0,65535,65535]
1725 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm3, %ymm1
1726 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm2, %ymm2
1727 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm2, %ymm1
1728 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm11, %xmm2
1729 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm2 = xmm8[1],xmm2[1]
1730 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm15[0,0,1,1]
1731 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
1732 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
1733 ; AVX1-ONLY-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
1734 ; AVX1-ONLY-NEXT: # xmm3 = xmm3[0],mem[0],xmm3[1],mem[1],xmm3[2],mem[2],xmm3[3],mem[3]
1735 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm4 = xmm9[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
1736 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,1,2,2,4,5,6,7]
1737 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
1738 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
1739 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
1740 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm4, %ymm2
1741 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm3, %ymm3
1742 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm3, %ymm2
1743 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm14[0,1,0,1]
1744 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm4 = xmm13[0,0,0,0]
1745 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm3 = xmm3[0,1,2],xmm4[3]
1746 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm5, %xmm4
1747 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm4 = xmm10[2],xmm4[2],xmm10[3],xmm4[3]
1748 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm12[4],xmm4[4],xmm12[5],xmm4[5],xmm12[6],xmm4[6],xmm12[7],xmm4[7]
1749 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[8,9,u,u,u,u,u,u,u,u,6,7,10,11,12,13]
1750 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
1751 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
1752 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm2, %ymm2
1753 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm4, %ymm3
1754 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm2, %ymm2
1755 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1756 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 96(%rax)
1757 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 64(%rax)
1758 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
1759 ; AVX1-ONLY-NEXT: vmovaps %ymm7, 128(%rax)
1760 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1761 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
1762 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
1763 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
1764 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1765 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 192(%rax)
1766 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1767 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 208(%rax)
1768 ; AVX1-ONLY-NEXT: addq $40, %rsp
1769 ; AVX1-ONLY-NEXT: vzeroupper
1770 ; AVX1-ONLY-NEXT: retq
1771 ;
1772 ; AVX2-SLOW-LABEL: store_i16_stride7_vf16:
1773 ; AVX2-SLOW: # %bb.0:
1774 ; AVX2-SLOW-NEXT: subq $40, %rsp
1775 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1776 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm7
1777 ; AVX2-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1778 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm6
1779 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm5
1780 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm13
1781 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm3
1782 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm2
1783 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %ymm1
1784 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1785 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <3,u,u,u,4,u,u,4>
1786 ; AVX2-SLOW-NEXT: vpermd %ymm7, %ymm0, %ymm0
1787 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm6[0,3,2,3,4,7,6,7]
1788 ; AVX2-SLOW-NEXT: vmovdqa %ymm6, %ymm7
1789 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm8 = ymm8[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
1790 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255>
1791 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
1792 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = <u,3,u,u,u,4,u,u>
1793 ; AVX2-SLOW-NEXT: vpermd %ymm5, %ymm8, %ymm8
1794 ; AVX2-SLOW-NEXT: vmovdqa %ymm5, %ymm6
1795 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm9 = ymm13[0,0,0,0,4,5,6,7,8,8,8,8,12,13,14,15]
1796 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm9 = ymm9[0,1,2,3,7,7,7,7,8,9,10,11,15,15,15,15]
1797 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u>
1798 ; AVX2-SLOW-NEXT: vpblendvb %ymm10, %ymm8, %ymm9, %ymm8
1799 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255>
1800 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
1801 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,3,u,u,u,4,u>
1802 ; AVX2-SLOW-NEXT: vpermd %ymm3, %ymm8, %ymm8
1803 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, %ymm4
1804 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm2[0,1,0,3,4,5,4,7]
1805 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm9 = ymm9[0,1,2,3,4,4,7,7,8,9,10,11,12,12,15,15]
1806 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u>
1807 ; AVX2-SLOW-NEXT: vpblendvb %ymm10, %ymm8, %ymm9, %ymm8
1808 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <3,u,u,3,u,u,u,4>
1809 ; AVX2-SLOW-NEXT: vpermd %ymm1, %ymm9, %ymm9
1810 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = <0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u>
1811 ; AVX2-SLOW-NEXT: vpblendvb %ymm10, %ymm8, %ymm9, %ymm8
1812 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255]
1813 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
1814 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
1815 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm10
1816 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm11
1817 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
1818 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,3,3,4,5,6,7]
1819 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
1820 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm12
1821 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm5
1822 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm5[4],xmm12[4],xmm5[5],xmm12[5],xmm5[6],xmm12[6],xmm5[7],xmm12[7]
1823 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[2,1,2,3,4,5,6,7]
1824 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,5,4]
1825 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,1,3]
1826 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u>
1827 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm8, %ymm0, %ymm1
1828 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm9
1829 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm14
1830 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm14[4],xmm9[4],xmm14[5],xmm9[5],xmm14[6],xmm9[6],xmm14[7],xmm9[7]
1831 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
1832 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,1,1]
1833 ; AVX2-SLOW-NEXT: vpbroadcastd 8(%rax), %ymm0
1834 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
1835 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm8, %ymm0, %ymm0
1836 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
1837 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm0, %ymm1, %ymm0
1838 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1839 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm0 = xmm10[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
1840 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm11[1,1,2,2]
1841 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1],xmm1[2,3],xmm0[4],xmm1[5,6],xmm0[7]
1842 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm5[3,3,3,3,4,5,6,7]
1843 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
1844 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm12[1,1,2,3]
1845 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm1 = xmm15[0,1],xmm1[2],xmm15[3,4],xmm1[5],xmm15[6,7]
1846 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
1847 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
1848 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255>
1849 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm0, %ymm1, %ymm1
1850 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm14[0],xmm9[0],xmm14[1],xmm9[1],xmm14[2],xmm9[2],xmm14[3],xmm9[3]
1851 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm0[0,1,2,3,4,5,7,6]
1852 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[0,2,3,3]
1853 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
1854 ; AVX2-SLOW-NEXT: vpbroadcastd 4(%rax), %ymm14
1855 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
1856 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm9, %ymm14, %ymm9
1857 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm14 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
1858 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm1, %ymm9, %ymm1
1859 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1860 ; AVX2-SLOW-NEXT: vmovdqa %ymm7, %ymm8
1861 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm7[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
1862 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
1863 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
1864 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm3[3,3,3,3,7,7,7,7]
1865 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm14[0,1,2],ymm1[3],ymm14[4,5],ymm1[6],ymm14[7,8,9,10],ymm1[11],ymm14[12,13],ymm1[14],ymm14[15]
1866 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm14 = ymm13[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
1867 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm14[2,2,2,2,6,6,6,6]
1868 ; AVX2-SLOW-NEXT: vmovdqa %ymm6, %ymm9
1869 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm6[3,3,3,3,7,7,7,7]
1870 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm14 = ymm15[0],ymm14[1],ymm15[2,3],ymm14[4],ymm15[5,6,7,8],ymm14[9],ymm15[10,11],ymm14[12],ymm15[13,14,15]
1871 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
1872 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,2,2,3]
1873 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
1874 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm1, %ymm14, %ymm1
1875 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, %ymm7
1876 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm14 = ymm2[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
1877 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm14[2,2,2,3,6,6,6,7]
1878 ; AVX2-SLOW-NEXT: vmovdqa %ymm4, %ymm2
1879 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm4[3,3,3,3,7,7,7,7]
1880 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm14 = ymm14[0,1],ymm15[2],ymm14[3,4],ymm15[5],ymm14[6,7,8,9],ymm15[10],ymm14[11,12],ymm15[13],ymm14[14,15]
1881 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
1882 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = <0,0,u,u,255,255,255,255,255,255,255,255,0,0,0,0,u,u,255,255,255,255,255,255,255,255,0,0,0,0,u,u>
1883 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm1, %ymm14, %ymm1
1884 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
1885 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm4[2,3,3,3,6,7,7,7]
1886 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,3,2]
1887 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0]
1888 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm1, %ymm14, %ymm14
1889 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm12[0],xmm5[0],xmm12[1],xmm5[1],xmm12[2],xmm5[2],xmm12[3],xmm5[3]
1890 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
1891 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
1892 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
1893 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[0,1,3,2,4,5,6,7]
1894 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[0,0,1,1]
1895 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,1,3]
1896 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = <255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255>
1897 ; AVX2-SLOW-NEXT: vpblendvb %ymm11, %ymm1, %ymm10, %ymm1
1898 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,2,4,5,6,7]
1899 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
1900 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
1901 ; AVX2-SLOW-NEXT: vpbroadcastd (%rax), %ymm10
1902 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
1903 ; AVX2-SLOW-NEXT: vpblendvb %ymm11, %ymm0, %ymm10, %ymm0
1904 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
1905 ; AVX2-SLOW-NEXT: vpblendvb %ymm10, %ymm1, %ymm0, %ymm10
1906 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm6[2,2,2,2,6,6,6,6]
1907 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
1908 ; AVX2-SLOW-NEXT: vmovdqa %ymm13, %ymm6
1909 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
1910 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm0[2,2,2,3]
1911 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, %ymm0
1912 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[2,2,2,2,6,6,6,6]
1913 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm11 = ymm8[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
1914 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm11[2,2,2,2,6,6,6,6]
1915 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm11[1],ymm1[2,3],ymm11[4],ymm1[5,6,7,8],ymm11[9],ymm1[10,11],ymm11[12],ymm1[13,14,15]
1916 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
1917 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
1918 ; AVX2-SLOW-NEXT: vpblendvb %ymm11, %ymm12, %ymm1, %ymm3
1919 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, %ymm15
1920 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm2[1,2,2,3,5,6,6,7]
1921 ; AVX2-SLOW-NEXT: vmovdqa %ymm7, %ymm2
1922 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm11 = ymm7[3,3,3,3,4,5,6,7,11,11,11,11,12,13,14,15]
1923 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm11 = ymm11[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
1924 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm11[2],ymm1[3,4],ymm11[5],ymm1[6,7,8,9],ymm11[10],ymm1[11,12],ymm11[13],ymm1[14,15]
1925 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
1926 ; AVX2-SLOW-NEXT: vmovdqa %ymm4, %ymm13
1927 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm4[0,1,2,2,4,5,6,6]
1928 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,1,3,3]
1929 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255>
1930 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm1, %ymm11, %ymm1
1931 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = [255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0]
1932 ; AVX2-SLOW-NEXT: vpblendvb %ymm11, %ymm3, %ymm1, %ymm7
1933 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm0[1,1,1,1,5,5,5,5]
1934 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm8[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
1935 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,0,2,1,4,4,6,5]
1936 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm5[0,1],ymm1[2],ymm5[3,4],ymm1[5],ymm5[6,7,8,9],ymm1[10],ymm5[11,12],ymm1[13],ymm5[14,15]
1937 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
1938 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm9[0,1,1,3,4,5,5,7]
1939 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm4 = ymm6[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
1940 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,0,0,0,4,4,4,4]
1941 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7,8,9],ymm4[10],ymm3[11,12],ymm4[13],ymm3[14,15]
1942 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,2]
1943 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
1944 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm1, %ymm3, %ymm1
1945 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm15[0,0,2,1,4,4,6,5]
1946 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
1947 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,0,0,4,4,4,4]
1948 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7,8,9,10],ymm3[11],ymm2[12,13],ymm3[14],ymm2[15]
1949 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,3,3]
1950 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm13[0,1,1,3,4,5,5,7]
1951 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
1952 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u>
1953 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
1954 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255]
1955 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
1956 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1957 ; AVX2-SLOW-NEXT: vmovups (%rsp), %ymm2 # 32-byte Reload
1958 ; AVX2-SLOW-NEXT: vmovaps %ymm2, 96(%rax)
1959 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 128(%rax)
1960 ; AVX2-SLOW-NEXT: vmovdqa %ymm7, 160(%rax)
1961 ; AVX2-SLOW-NEXT: vmovdqa %ymm10, (%rax)
1962 ; AVX2-SLOW-NEXT: vmovdqa %ymm14, 192(%rax)
1963 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1964 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
1965 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1966 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 64(%rax)
1967 ; AVX2-SLOW-NEXT: addq $40, %rsp
1968 ; AVX2-SLOW-NEXT: vzeroupper
1969 ; AVX2-SLOW-NEXT: retq
1970 ;
1971 ; AVX2-FAST-LABEL: store_i16_stride7_vf16:
1972 ; AVX2-FAST: # %bb.0:
1973 ; AVX2-FAST-NEXT: pushq %rax
1974 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1975 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm5
1976 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm7
1977 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm4
1978 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm6
1979 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm15
1980 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm3
1981 ; AVX2-FAST-NEXT: vmovdqa (%rax), %ymm1
1982 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm0 = ymm4[2,2,2,2,6,6,6,6]
1983 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
1984 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm8[0,1],ymm0[2],ymm8[3,4],ymm0[5],ymm8[6,7,8,9],ymm0[10],ymm8[11,12],ymm0[13],ymm8[14,15]
1985 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
1986 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm7[u,u,8,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,u,u,u,u,26,27,u,u,u,u,u,u]
1987 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm9 = ymm5[2,2,2,2,6,6,6,6]
1988 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2,3],ymm8[4],ymm9[5,6,7,8],ymm8[9],ymm9[10,11],ymm8[12],ymm9[13,14,15]
1989 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,2,3]
1990 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
1991 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
1992 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [4,5,2,2,6,6,6,6]
1993 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm8, %ymm8
1994 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm3[u,u,u,u,u,u,u,u,u,u,8,9,u,u,u,u,u,u,u,u,22,23,u,u,u,u,24,25,u,u,u,u]
1995 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm10 = ymm15[1,2,2,3,5,6,6,7]
1996 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm9 = ymm10[0,1],ymm9[2],ymm10[3,4],ymm9[5],ymm10[6,7,8,9],ymm9[10],ymm10[11,12],ymm9[13],ymm10[14,15]
1997 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,1,3,2]
1998 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255>
1999 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm9, %ymm8, %ymm8
2000 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0]
2001 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2002 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2003 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm0 = ymm5[1,1,1,1,5,5,5,5]
2004 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,u,u,20,21,24,25,u,u,22,23,22,23]
2005 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm8[0,1],ymm0[2],ymm8[3,4],ymm0[5],ymm8[6,7,8,9],ymm0[10],ymm8[11,12],ymm0[13],ymm8[14,15]
2006 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
2007 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm6[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
2008 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm9 = ymm4[0,1,1,3,4,5,5,7]
2009 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1],ymm8[2],ymm9[3,4],ymm8[5],ymm9[6,7,8,9],ymm8[10],ymm9[11,12],ymm8[13],ymm9[14,15]
2010 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,2]
2011 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
2012 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2013 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,1,4,5,4,5,5,7]
2014 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm8, %ymm8
2015 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2016 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm9 = ymm15[0,0,2,1,4,4,6,5]
2017 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,u,u,18,19,20,21,u,u,20,21]
2018 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm9 = ymm10[0,1,2],ymm9[3],ymm10[4,5],ymm9[6],ymm10[7,8,9,10],ymm9[11],ymm10[12,13],ymm9[14],ymm10[15]
2019 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,3,3]
2020 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u>
2021 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm9, %ymm8, %ymm8
2022 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255]
2023 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2024 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2025 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <3,u,u,u,4,u,u,4>
2026 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm0, %ymm0
2027 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm7[0,1,0,1,14,15,14,15,8,9,10,11,12,13,14,15,16,17,16,17,30,31,30,31,24,25,26,27,28,29,30,31]
2028 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255>
2029 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2030 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = <u,3,u,u,u,4,u,u>
2031 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm8, %ymm8
2032 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm6[0,1,0,1,0,1,0,1,14,15,14,15,14,15,14,15,16,17,16,17,16,17,16,17,30,31,30,31,30,31,30,31]
2033 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u>
2034 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm8, %ymm9, %ymm8
2035 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255>
2036 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2037 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,3,u,u,u,4,u>
2038 ; AVX2-FAST-NEXT: vpermd %ymm15, %ymm8, %ymm8
2039 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm3[0,1,2,3,4,5,6,7,0,1,0,1,14,15,14,15,16,17,18,19,20,21,22,23,16,17,16,17,30,31,30,31]
2040 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u>
2041 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm8, %ymm9, %ymm8
2042 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <3,u,u,3,u,u,u,4>
2043 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm9, %ymm9
2044 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u>
2045 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm8, %ymm9, %ymm8
2046 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255]
2047 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2048 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2049 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm10
2050 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm12
2051 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm12[4],xmm10[4],xmm12[5],xmm10[5],xmm12[6],xmm10[6],xmm12[7],xmm10[7]
2052 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
2053 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,3]
2054 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm13
2055 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm14
2056 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
2057 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[0,2,3,3,4,5,6,7]
2058 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,2,1]
2059 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u>
2060 ; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm0, %ymm8, %ymm11
2061 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm8
2062 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm2
2063 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm8[4],xmm2[5],xmm8[5],xmm2[6],xmm8[6],xmm2[7],xmm8[7]
2064 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
2065 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,1,1]
2066 ; AVX2-FAST-NEXT: vpbroadcastd 8(%rax), %ymm0
2067 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
2068 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm1, %ymm0, %ymm0
2069 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
2070 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm0, %ymm11, %ymm11
2071 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm13[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
2072 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm1 = xmm14[1,1,2,2]
2073 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1],xmm1[2,3],xmm0[4],xmm1[5,6],xmm0[7]
2074 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm1 = xmm12[u,u,u,u,6,7,u,u,u,u,8,9,u,u,u,u]
2075 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm9 = xmm10[1,1,2,3]
2076 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm1 = xmm9[0,1],xmm1[2],xmm9[3,4],xmm1[5],xmm9[6,7]
2077 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
2078 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2079 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255>
2080 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm0, %ymm1, %ymm1
2081 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm8[0],xmm2[1],xmm8[1],xmm2[2],xmm8[2],xmm2[3],xmm8[3]
2082 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
2083 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
2084 ; AVX2-FAST-NEXT: vpbroadcastd 4(%rax), %ymm8
2085 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
2086 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm2, %ymm8, %ymm2
2087 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
2088 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm1, %ymm2, %ymm8
2089 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,14,15,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u]
2090 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm5[3,3,3,3,7,7,7,7]
2091 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7,8,9,10],ymm1[11],ymm2[12,13],ymm1[14],ymm2[15]
2092 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm6[u,u,12,13,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u,u,u,u,u]
2093 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[3,3,3,3,7,7,7,7]
2094 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2,3],ymm2[4],ymm4[5,6,7,8],ymm2[9],ymm4[10,11],ymm2[12],ymm4[13,14,15]
2095 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
2096 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
2097 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
2098 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm1, %ymm2, %ymm1
2099 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm15[3,3,3,3,7,7,7,7]
2100 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,u,u,28,29,26,27,u,u,30,31,30,31]
2101 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7,8,9],ymm2[10],ymm3[11,12],ymm2[13],ymm3[14,15]
2102 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2103 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <0,0,u,u,255,255,255,255,255,255,255,255,0,0,0,0,u,u,255,255,255,255,255,255,255,255,0,0,0,0,u,u>
2104 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
2105 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [6,7,3,3,7,7,6,7]
2106 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
2107 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0]
2108 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
2109 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm10[0],xmm12[0],xmm10[1],xmm12[1],xmm10[2],xmm12[2],xmm10[3],xmm12[3]
2110 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
2111 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
2112 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
2113 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
2114 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
2115 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255>
2116 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
2117 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
2118 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
2119 ; AVX2-FAST-NEXT: vpbroadcastd (%rax), %ymm3
2120 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
2121 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm0, %ymm3, %ymm0
2122 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
2123 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm2, %ymm0, %ymm0
2124 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2125 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2126 ; AVX2-FAST-NEXT: vmovaps %ymm2, 96(%rax)
2127 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2128 ; AVX2-FAST-NEXT: vmovaps %ymm2, 128(%rax)
2129 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2130 ; AVX2-FAST-NEXT: vmovaps %ymm2, 160(%rax)
2131 ; AVX2-FAST-NEXT: vmovdqa %ymm0, (%rax)
2132 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 192(%rax)
2133 ; AVX2-FAST-NEXT: vmovdqa %ymm8, 32(%rax)
2134 ; AVX2-FAST-NEXT: vmovdqa %ymm11, 64(%rax)
2135 ; AVX2-FAST-NEXT: popq %rax
2136 ; AVX2-FAST-NEXT: vzeroupper
2137 ; AVX2-FAST-NEXT: retq
2138 ;
2139 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride7_vf16:
2140 ; AVX2-FAST-PERLANE: # %bb.0:
2141 ; AVX2-FAST-PERLANE-NEXT: subq $40, %rsp
2142 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2143 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm7
2144 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm6
2145 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm4
2146 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm5
2147 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm3
2148 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2149 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm2
2150 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %ymm1
2151 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2152 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <3,u,u,u,4,u,u,4>
2153 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm7, %ymm0, %ymm0
2154 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm8 = ymm6[0,1,0,1,14,15,14,15,8,9,10,11,12,13,14,15,16,17,16,17,30,31,30,31,24,25,26,27,28,29,30,31]
2155 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = <u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255>
2156 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2157 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm8 = <u,3,u,u,u,4,u,u>
2158 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm4, %ymm8, %ymm8
2159 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, %ymm12
2160 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm9 = ymm5[0,1,0,1,0,1,0,1,14,15,14,15,14,15,14,15,16,17,16,17,16,17,16,17,30,31,30,31,30,31,30,31]
2161 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm5, %ymm4
2162 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u>
2163 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm8, %ymm9, %ymm8
2164 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = <u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255>
2165 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2166 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,3,u,u,u,4,u>
2167 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm3, %ymm8, %ymm8
2168 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm9 = ymm2[0,1,2,3,4,5,6,7,0,1,0,1,14,15,14,15,16,17,18,19,20,21,22,23,16,17,16,17,30,31,30,31]
2169 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u>
2170 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm8, %ymm9, %ymm8
2171 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = <3,u,u,3,u,u,u,4>
2172 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm1, %ymm9, %ymm9
2173 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = <0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u>
2174 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm8, %ymm9, %ymm8
2175 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = [0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255]
2176 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2177 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
2178 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm10
2179 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm11
2180 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
2181 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
2182 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,3]
2183 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm3
2184 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm5
2185 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
2186 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[0,2,3,3,4,5,6,7]
2187 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,2,1]
2188 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = <u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u>
2189 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm1
2190 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm9
2191 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm14
2192 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm14[4],xmm9[4],xmm14[5],xmm9[5],xmm14[6],xmm9[6],xmm14[7],xmm9[7]
2193 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
2194 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,1,1]
2195 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 8(%rax), %ymm0
2196 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
2197 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm8, %ymm0, %ymm0
2198 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm8 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
2199 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm0, %ymm1, %ymm0
2200 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2201 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm0 = xmm3[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
2202 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm1 = xmm5[1,1,2,2]
2203 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1],xmm1[2,3],xmm0[4],xmm1[5,6],xmm0[7]
2204 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm1 = xmm11[u,u,u,u,6,7,u,u,u,u,8,9,u,u,u,u]
2205 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm15 = xmm10[1,1,2,3]
2206 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm1 = xmm15[0,1],xmm1[2],xmm15[3,4],xmm1[5],xmm15[6,7]
2207 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
2208 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
2209 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255>
2210 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm0, %ymm1, %ymm1
2211 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm14[0],xmm9[0],xmm14[1],xmm9[1],xmm14[2],xmm9[2],xmm14[3],xmm9[3]
2212 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm9 = xmm0[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
2213 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,1,3]
2214 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 4(%rax), %ymm14
2215 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
2216 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm9, %ymm14, %ymm9
2217 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm14 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
2218 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm14, %ymm1, %ymm9, %ymm1
2219 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2220 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,14,15,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u]
2221 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm6, %ymm9
2222 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm14 = ymm7[3,3,3,3,7,7,7,7]
2223 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm7, %ymm8
2224 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm14[0,1,2],ymm1[3],ymm14[4,5],ymm1[6],ymm14[7,8,9,10],ymm1[11],ymm14[12,13],ymm1[14],ymm14[15]
2225 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm14 = ymm4[u,u,12,13,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u,u,u,u,u]
2226 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm12, %ymm7
2227 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm15 = ymm12[3,3,3,3,7,7,7,7]
2228 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm14 = ymm15[0],ymm14[1],ymm15[2,3],ymm14[4],ymm15[5,6,7,8],ymm14[9],ymm15[10,11],ymm14[12],ymm15[13,14,15]
2229 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
2230 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,2,2,3]
2231 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
2232 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm1, %ymm14, %ymm1
2233 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
2234 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm14 = ymm6[3,3,3,3,7,7,7,7]
2235 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, %ymm12
2236 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm15 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,u,u,28,29,26,27,u,u,30,31,30,31]
2237 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm14 = ymm15[0,1],ymm14[2],ymm15[3,4],ymm14[5],ymm15[6,7,8,9],ymm14[10],ymm15[11,12],ymm14[13],ymm15[14,15]
2238 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
2239 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = <0,0,u,u,255,255,255,255,255,255,255,255,0,0,0,0,u,u,255,255,255,255,255,255,255,255,0,0,0,0,u,u>
2240 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm1, %ymm14, %ymm1
2241 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
2242 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm14 = ymm13[2,3,3,3,6,7,7,7]
2243 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,3,2]
2244 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0]
2245 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm1, %ymm14, %ymm14
2246 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
2247 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
2248 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
2249 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
2250 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
2251 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,1,3]
2252 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = <255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255>
2253 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm1, %ymm10, %ymm1
2254 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
2255 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
2256 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd (%rax), %ymm10
2257 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
2258 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm0, %ymm10, %ymm0
2259 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
2260 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm1, %ymm0, %ymm10
2261 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm0 = ymm7[2,2,2,2,6,6,6,6]
2262 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
2263 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
2264 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm0[2,2,2,3]
2265 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm9[u,u,8,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,u,u,u,u,26,27,u,u,u,u,u,u]
2266 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm11 = ymm8[2,2,2,2,6,6,6,6]
2267 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm11[0],ymm1[1],ymm11[2,3],ymm1[4],ymm11[5,6,7,8],ymm1[9],ymm11[10,11],ymm1[12],ymm11[13,14,15]
2268 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
2269 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
2270 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm2, %ymm1, %ymm3
2271 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm12, %ymm2
2272 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm12[u,u,u,u,u,u,u,u,u,u,8,9,u,u,u,u,u,u,u,u,22,23,u,u,u,u,24,25,u,u,u,u]
2273 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm6, %ymm15
2274 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm11 = ymm6[1,2,2,3,5,6,6,7]
2275 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm11[0,1],ymm1[2],ymm11[3,4],ymm1[5],ymm11[6,7,8,9],ymm1[10],ymm11[11,12],ymm1[13],ymm11[14,15]
2276 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
2277 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm11 = ymm13[0,1,2,2,4,5,6,6]
2278 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,1,3,3]
2279 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255>
2280 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm1, %ymm11, %ymm1
2281 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = [255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0]
2282 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm3, %ymm1, %ymm6
2283 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm8[1,1,1,1,5,5,5,5]
2284 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm5 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,u,u,20,21,24,25,u,u,22,23,22,23]
2285 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm5[0,1],ymm1[2],ymm5[3,4],ymm1[5],ymm5[6,7,8,9],ymm1[10],ymm5[11,12],ymm1[13],ymm5[14,15]
2286 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
2287 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm4 = ymm4[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
2288 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm7[0,1,1,3,4,5,5,7]
2289 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7,8,9],ymm4[10],ymm3[11,12],ymm4[13],ymm3[14,15]
2290 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,2]
2291 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
2292 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm1, %ymm3, %ymm1
2293 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm15[0,0,2,1,4,4,6,5]
2294 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,u,u,18,19,20,21,u,u,20,21]
2295 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7,8,9,10],ymm3[11],ymm2[12,13],ymm3[14],ymm2[15]
2296 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,3,3]
2297 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm13[0,1,1,3,4,5,5,7]
2298 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
2299 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u>
2300 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
2301 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255]
2302 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
2303 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2304 ; AVX2-FAST-PERLANE-NEXT: vmovups (%rsp), %ymm2 # 32-byte Reload
2305 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm2, 96(%rax)
2306 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 128(%rax)
2307 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm6, 160(%rax)
2308 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm10, (%rax)
2309 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm14, 192(%rax)
2310 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2311 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 32(%rax)
2312 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2313 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 64(%rax)
2314 ; AVX2-FAST-PERLANE-NEXT: addq $40, %rsp
2315 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
2316 ; AVX2-FAST-PERLANE-NEXT: retq
2317 ;
2318 ; AVX512F-ONLY-SLOW-LABEL: store_i16_stride7_vf16:
2319 ; AVX512F-ONLY-SLOW: # %bb.0:
2320 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2321 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm8
2322 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %ymm9
2323 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %ymm6
2324 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %ymm7
2325 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm2
2326 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm3
2327 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rax), %ymm13
2328 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u],zero,zero,ymm7[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm7[16,17,u,u,u,u,u,u,u,u]
2329 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm6[u,u,u,u,u,u,14,15],zero,zero,ymm6[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm6[u,u,u,u,u,u,u,u]
2330 ; AVX512F-ONLY-SLOW-NEXT: vporq %ymm1, %ymm4, %ymm16
2331 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %xmm14
2332 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm15
2333 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
2334 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm0, %xmm21
2335 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm9[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm9[16,17,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
2336 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm8[12,13,14,15],zero,zero,ymm8[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm8[u,u,u,u,u,u,u,u,16,17,18,19]
2337 ; AVX512F-ONLY-SLOW-NEXT: vporq %ymm1, %ymm4, %ymm17
2338 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm1
2339 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm4
2340 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm3[u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[16,17,u,u,u,u]
2341 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,14,15],zero,zero,ymm2[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm2[u,u,u,u]
2342 ; AVX512F-ONLY-SLOW-NEXT: vporq %ymm5, %ymm10, %ymm19
2343 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 8(%rax), %ymm5
2344 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
2345 ; AVX512F-ONLY-SLOW-NEXT: vpandn %ymm5, %ymm10, %ymm5
2346 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm13[12,13,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm13[14,15,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm13[16,17,u,u]
2347 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm5, %zmm5
2348 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm6[0,1,1,3,4,5,5,7]
2349 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm7[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
2350 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
2351 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm10[0,1],ymm12[2],ymm10[3,4],ymm12[5],ymm10[6,7,8,9],ymm12[10],ymm10[11,12],ymm12[13],ymm10[14,15]
2352 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm0, %ymm22
2353 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm6[2,2,2,2,6,6,6,6]
2354 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm12 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
2355 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0,1],ymm10[2],ymm12[3,4],ymm10[5],ymm12[6,7,8,9],ymm10[10],ymm12[11,12],ymm10[13],ymm12[14,15]
2356 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm8[1,1,1,1,5,5,5,5]
2357 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm11 = ymm9[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
2358 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm11[0,0,2,1,4,4,6,5]
2359 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm11 = ymm11[0,1],ymm10[2],ymm11[3,4],ymm10[5],ymm11[6,7,8,9],ymm10[10],ymm11[11,12],ymm10[13],ymm11[14,15]
2360 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
2361 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm20
2362 ; AVX512F-ONLY-SLOW-NEXT: vprold $16, %xmm4, %xmm11
2363 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm1[1,1,2,3]
2364 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm11 = xmm12[0,1],xmm11[2],xmm12[3,4],xmm11[5],xmm12[6,7]
2365 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
2366 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[0,1,3,2,4,5,6,7]
2367 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [0,16,0,1,17,17,2,0,0,16,0,1,17,17,2,0]
2368 ; AVX512F-ONLY-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
2369 ; AVX512F-ONLY-SLOW-NEXT: vpermi2d %zmm12, %zmm11, %zmm18
2370 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm8[2,2,2,2,6,6,6,6]
2371 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm12 = ymm9[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
2372 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
2373 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm11[0],ymm12[1],ymm11[2,3],ymm12[4],ymm11[5,6,7,8],ymm12[9],ymm11[10,11],ymm12[12],ymm11[13,14,15]
2374 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %xmm11
2375 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
2376 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[1,1,2,2]
2377 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm14 = xmm15[0],xmm14[1],xmm15[2,3],xmm14[4],xmm15[5,6],xmm14[7]
2378 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %xmm15
2379 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
2380 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm15[4],xmm11[4],xmm15[5],xmm11[5],xmm15[6],xmm11[6],xmm15[7],xmm11[7]
2381 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
2382 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
2383 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm0
2384 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm0, %zmm1
2385 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm2[0,0,2,1,4,4,6,5]
2386 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm14 = ymm3[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
2387 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm14[0,0,0,0,4,4,4,4]
2388 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm14 = ymm14[0,1,2],ymm0[3],ymm14[4,5],ymm0[6],ymm14[7,8,9,10],ymm0[11],ymm14[12,13],ymm0[14],ymm14[15]
2389 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm11[0],xmm15[1],xmm11[1],xmm15[2],xmm11[2],xmm15[3],xmm11[3]
2390 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm0[0,1,2,3,4,5,7,6]
2391 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,2,4,5,6,7]
2392 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm15 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
2393 ; AVX512F-ONLY-SLOW-NEXT: vpermi2d %zmm11, %zmm0, %zmm15
2394 ; AVX512F-ONLY-SLOW-NEXT: vprold $16, %ymm3, %ymm0
2395 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm2[1,2,2,3,5,6,6,7]
2396 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm11[0,1],ymm0[2],ymm11[3,4],ymm0[5],ymm11[6,7,8,9],ymm0[10],ymm11[11,12],ymm0[13],ymm11[14,15]
2397 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[3,3,3,3,7,7,7,7]
2398 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm9 = ymm9[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
2399 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[2,2,2,2,6,6,6,6]
2400 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3],ymm8[4,5],ymm9[6],ymm8[7,8,9,10],ymm9[11],ymm8[12,13],ymm9[14],ymm8[15]
2401 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[3,3,3,3,7,7,7,7]
2402 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm7 = ymm7[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
2403 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[2,2,2,2,6,6,6,6]
2404 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2,3],ymm7[4],ymm6[5,6,7,8],ymm7[9],ymm6[10,11],ymm7[12],ymm6[13,14,15]
2405 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm21, %xmm7
2406 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[0,2,3,3,4,5,6,7]
2407 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
2408 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm10[2,1,2,3,4,5,6,7]
2409 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,5,4]
2410 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,1,3]
2411 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
2412 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm22[2,1,3,2]
2413 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm12[0,2,2,3]
2414 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm14[2,2,3,3]
2415 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,2]
2416 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
2417 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
2418 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[3,3,3,3,7,7,7,7]
2419 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
2420 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,2,2,3,6,6,6,7]
2421 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7,8,9],ymm2[10],ymm3[11,12],ymm2[13],ymm3[14,15]
2422 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2423 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
2424 ; AVX512F-ONLY-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
2425 ; AVX512F-ONLY-SLOW-NEXT: vpermd %zmm13, %zmm3, %zmm3
2426 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm13 = ymm13[2,3,3,3,6,7,7,7]
2427 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm7, %zmm7
2428 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rcx
2429 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm17, %zmm9, %zmm9
2430 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm9
2431 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm19, %zmm4, %zmm4
2432 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm5
2433 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm5
2434 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm10, %zmm4
2435 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm7 = zmm20[2,2,2,3,6,6,6,7]
2436 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm7
2437 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm12, %zmm0
2438 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm3
2439 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm3
2440 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm0 = zmm1[0,0,1,1,4,4,5,5]
2441 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm0
2442 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd (%rax), %ymm1
2443 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 4(%rax), %ymm4
2444 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm1, %zmm1
2445 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm1
2446 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
2447 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm8, %ymm6
2448 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm13[2,1,3,2]
2449 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm0
2450 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm0
2451 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %ymm0, 192(%rcx)
2452 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, (%rcx)
2453 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 128(%rcx)
2454 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 64(%rcx)
2455 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
2456 ; AVX512F-ONLY-SLOW-NEXT: retq
;
; AVX512F-ONLY-FAST-LABEL: store_i16_stride7_vf16:
2459 ; AVX512F-ONLY-FAST: # %bb.0:
2460 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm5
2461 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %ymm7
2462 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %ymm4
2463 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %ymm6
2464 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm1
2465 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm2
2466 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm7[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm7[16,17,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
2467 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm5[12,13,14,15],zero,zero,ymm5[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm5[u,u,u,u,u,u,u,u,16,17,18,19]
2468 ; AVX512F-ONLY-FAST-NEXT: vporq %ymm0, %ymm3, %ymm16
2469 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %xmm11
2470 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %xmm12
2471 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u],zero,zero,ymm6[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm6[16,17,u,u,u,u,u,u,u,u]
2472 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm4[u,u,u,u,u,u,14,15],zero,zero,ymm4[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm4[u,u,u,u,u,u,u,u]
2473 ; AVX512F-ONLY-FAST-NEXT: vporq %ymm0, %ymm3, %ymm17
2474 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %xmm14
2475 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm2[u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[16,17,u,u,u,u]
2476 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm1[u,u,u,u,u,u,u,u,u,u,14,15],zero,zero,ymm1[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm1[u,u,u,u]
2477 ; AVX512F-ONLY-FAST-NEXT: vporq %ymm0, %ymm3, %ymm18
2478 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,u,u,u,u,26,27,u,u,u,u,u,u]
2479 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm5[2,2,2,2,6,6,6,6]
2480 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2,3],ymm0[4],ymm3[5,6,7,8],ymm0[9],ymm3[10,11],ymm0[12],ymm3[13,14,15]
2481 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm6[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
2482 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm9 = ymm4[0,1,1,3,4,5,5,7]
2483 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm9[0,1],ymm3[2],ymm9[3,4],ymm3[5],ymm9[6,7,8,9],ymm3[10],ymm9[11,12],ymm3[13],ymm9[14,15]
2484 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <2,u,3,2,u,10,10,11>
2485 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm9
2486 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm0 = ymm4[2,2,2,2,6,6,6,6]
2487 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
2488 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
2489 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm5[1,1,1,1,5,5,5,5]
2490 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,u,u,20,21,24,25,u,u,22,23,22,23]
2491 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm10[0,1],ymm3[2],ymm10[3,4],ymm3[5],ymm10[6,7,8,9],ymm3[10],ymm10[11,12],ymm3[13],ymm10[14,15]
2492 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm19
2493 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %ymm2, %ymm0
2494 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm1[1,2,2,3,5,6,6,7]
2495 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
2496 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm1[0,0,2,1,4,4,6,5]
2497 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm13 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,u,u,18,19,20,21,u,u,20,21]
2498 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm13[0,1,2],ymm3[3],ymm13[4,5],ymm3[6],ymm13[7,8,9,10],ymm3[11],ymm13[12,13],ymm3[14],ymm13[15]
2499 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm8
2500 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm13 = [2,2,3,3,10,9,11,10]
2501 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm13
2502 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %xmm12, %xmm0
2503 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm3 = xmm11[1,1,2,3]
2504 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0,1],xmm0[2],xmm3[3,4],xmm0[5],xmm3[6,7]
2505 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
2506 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
2507 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm15 = [0,8,1,9,0,8,1,9]
2508 ; AVX512F-ONLY-FAST-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3]
2509 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm15
2510 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
2511 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
2512 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
2513 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[1,1,2,2]
2514 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0],xmm14[1],xmm8[2,3],xmm14[4],xmm8[5,6],xmm14[7]
2515 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %xmm14
2516 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
2517 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm12 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
2518 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm11, %xmm11
2519 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm11, %zmm11
2520 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %xmm8
2521 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
2522 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm10, %xmm12
2523 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2524 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 8(%rax), %ymm10
2525 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm20 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
2526 ; AVX512F-ONLY-FAST-NEXT: vpandnq %ymm10, %ymm20, %ymm10
2527 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
2528 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm14 = xmm8[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
2529 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
2530 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,0,0,1,8,9,9,11]
2531 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm14, %zmm8, %zmm20
2532 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = mem[0,1,2,3,0,1,2,3]
2533 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm8[12,13,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[14,15,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[16,17,u,u]
2534 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm14, %zmm10, %zmm10
2535 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,14,15,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u]
2536 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[3,3,3,3,7,7,7,7]
2537 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0,1,2],ymm7[3],ymm5[4,5],ymm7[6],ymm5[7,8,9,10],ymm7[11],ymm5[12,13],ymm7[14],ymm5[15]
2538 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm6 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u,u,u,u,u]
2539 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[3,3,3,3,7,7,7,7]
2540 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3],ymm6[4],ymm4[5,6,7,8],ymm6[9],ymm4[10,11],ymm6[12],ymm4[13,14,15]
2541 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
2542 ; AVX512F-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
2543 ; AVX512F-ONLY-FAST-NEXT: vpermd %zmm8, %zmm6, %zmm6
2544 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <6,u,u,u,7,u,u,7>
2545 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm8, %ymm7, %ymm7
2546 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
2547 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,3]
2548 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,3,3,4,5,6,7]
2549 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
2550 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm12[0,0,1,1]
2551 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
2552 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
2553 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[3,3,3,3,7,7,7,7]
2554 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,u,u,28,29,26,27,u,u,30,31,30,31]
2555 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
2556 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
2557 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rcx
2558 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm16, %zmm3, %zmm2
2559 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm17, %zmm0, %zmm0
2560 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
2561 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm18, %zmm8, %zmm2
2562 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm10
2563 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm10
2564 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} zmm0 = zmm19[2,2,2,3,6,6,6,7]
2565 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm0
2566 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm6
2567 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm6
2568 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} zmm0 = zmm11[0,0,1,1,4,4,5,5]
2569 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm0
2570 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd (%rax), %ymm2
2571 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 4(%rax), %ymm3
2572 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
2573 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm20, %zmm2
2574 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
2575 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm5, %ymm4
2576 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm7, %ymm1
2577 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm4, %ymm1
2578 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm1, 192(%rcx)
2579 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, (%rcx)
2580 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 128(%rcx)
2581 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 64(%rcx)
2582 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
2583 ; AVX512F-ONLY-FAST-NEXT: retq
;
; AVX512DQ-SLOW-LABEL: store_i16_stride7_vf16:
2586 ; AVX512DQ-SLOW: # %bb.0:
2587 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2588 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %ymm8
2589 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %ymm9
2590 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %ymm6
2591 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %ymm7
2592 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %ymm2
2593 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %ymm3
2594 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rax), %ymm13
2595 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u],zero,zero,ymm7[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm7[16,17,u,u,u,u,u,u,u,u]
2596 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm6[u,u,u,u,u,u,14,15],zero,zero,ymm6[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm6[u,u,u,u,u,u,u,u]
2597 ; AVX512DQ-SLOW-NEXT: vporq %ymm1, %ymm4, %ymm16
2598 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %xmm14
2599 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm15
2600 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
2601 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm0, %xmm21
2602 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm9[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm9[16,17,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
2603 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm8[12,13,14,15],zero,zero,ymm8[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm8[u,u,u,u,u,u,u,u,16,17,18,19]
2604 ; AVX512DQ-SLOW-NEXT: vporq %ymm1, %ymm4, %ymm17
2605 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %xmm1
2606 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %xmm4
2607 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm3[u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[16,17,u,u,u,u]
2608 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,14,15],zero,zero,ymm2[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm2[u,u,u,u]
2609 ; AVX512DQ-SLOW-NEXT: vporq %ymm5, %ymm10, %ymm19
2610 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 8(%rax), %ymm5
2611 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
2612 ; AVX512DQ-SLOW-NEXT: vpandn %ymm5, %ymm10, %ymm5
2613 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm13[12,13,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm13[14,15,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm13[16,17,u,u]
2614 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm5, %zmm5
2615 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm6[0,1,1,3,4,5,5,7]
2616 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm7[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
2617 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
2618 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm10[0,1],ymm12[2],ymm10[3,4],ymm12[5],ymm10[6,7,8,9],ymm12[10],ymm10[11,12],ymm12[13],ymm10[14,15]
2619 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm0, %ymm22
2620 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm6[2,2,2,2,6,6,6,6]
2621 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm12 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
2622 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0,1],ymm10[2],ymm12[3,4],ymm10[5],ymm12[6,7,8,9],ymm10[10],ymm12[11,12],ymm10[13],ymm12[14,15]
2623 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm8[1,1,1,1,5,5,5,5]
2624 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm11 = ymm9[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
2625 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm11[0,0,2,1,4,4,6,5]
2626 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm11 = ymm11[0,1],ymm10[2],ymm11[3,4],ymm10[5],ymm11[6,7,8,9],ymm10[10],ymm11[11,12],ymm10[13],ymm11[14,15]
2627 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
2628 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm20
2629 ; AVX512DQ-SLOW-NEXT: vprold $16, %xmm4, %xmm11
2630 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm1[1,1,2,3]
2631 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm11 = xmm12[0,1],xmm11[2],xmm12[3,4],xmm11[5],xmm12[6,7]
2632 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
2633 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[0,1,3,2,4,5,6,7]
2634 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x8 {{.*#+}} zmm18 = [0,16,0,1,17,17,2,0,0,16,0,1,17,17,2,0]
2635 ; AVX512DQ-SLOW-NEXT: # zmm18 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
2636 ; AVX512DQ-SLOW-NEXT: vpermi2d %zmm12, %zmm11, %zmm18
2637 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm8[2,2,2,2,6,6,6,6]
2638 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm12 = ymm9[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
2639 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
2640 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm11[0],ymm12[1],ymm11[2,3],ymm12[4],ymm11[5,6,7,8],ymm12[9],ymm11[10,11],ymm12[12],ymm11[13,14,15]
2641 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %xmm11
2642 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
2643 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[1,1,2,2]
2644 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm14 = xmm15[0],xmm14[1],xmm15[2,3],xmm14[4],xmm15[5,6],xmm14[7]
2645 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %xmm15
2646 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
2647 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm15[4],xmm11[4],xmm15[5],xmm11[5],xmm15[6],xmm11[6],xmm15[7],xmm11[7]
2648 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
2649 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
2650 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm0
2651 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm0, %zmm1
2652 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm2[0,0,2,1,4,4,6,5]
2653 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm14 = ymm3[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
2654 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm14[0,0,0,0,4,4,4,4]
2655 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm14 = ymm14[0,1,2],ymm0[3],ymm14[4,5],ymm0[6],ymm14[7,8,9,10],ymm0[11],ymm14[12,13],ymm0[14],ymm14[15]
2656 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm11[0],xmm15[1],xmm11[1],xmm15[2],xmm11[2],xmm15[3],xmm11[3]
2657 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm0[0,1,2,3,4,5,7,6]
2658 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,2,4,5,6,7]
2659 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm15 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
2660 ; AVX512DQ-SLOW-NEXT: vpermi2d %zmm11, %zmm0, %zmm15
2661 ; AVX512DQ-SLOW-NEXT: vprold $16, %ymm3, %ymm0
2662 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm2[1,2,2,3,5,6,6,7]
2663 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm11[0,1],ymm0[2],ymm11[3,4],ymm0[5],ymm11[6,7,8,9],ymm0[10],ymm11[11,12],ymm0[13],ymm11[14,15]
2664 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[3,3,3,3,7,7,7,7]
2665 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm9 = ymm9[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
2666 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[2,2,2,2,6,6,6,6]
2667 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3],ymm8[4,5],ymm9[6],ymm8[7,8,9,10],ymm9[11],ymm8[12,13],ymm9[14],ymm8[15]
2668 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[3,3,3,3,7,7,7,7]
2669 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm7 = ymm7[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
2670 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[2,2,2,2,6,6,6,6]
2671 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2,3],ymm7[4],ymm6[5,6,7,8],ymm7[9],ymm6[10,11],ymm7[12],ymm6[13,14,15]
2672 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm21, %xmm7
2673 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[0,2,3,3,4,5,6,7]
2674 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
2675 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm10[2,1,2,3,4,5,6,7]
2676 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,5,4]
2677 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,1,3]
2678 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
2679 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm22[2,1,3,2]
2680 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm12[0,2,2,3]
2681 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm14[2,2,3,3]
2682 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,2]
2683 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
2684 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
2685 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[3,3,3,3,7,7,7,7]
2686 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
2687 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,2,2,3,6,6,6,7]
2688 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7,8,9],ymm2[10],ymm3[11,12],ymm2[13],ymm3[14,15]
2689 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2690 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x8 {{.*#+}} zmm3 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
2691 ; AVX512DQ-SLOW-NEXT: # zmm3 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
2692 ; AVX512DQ-SLOW-NEXT: vpermd %zmm13, %zmm3, %zmm3
2693 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm13 = ymm13[2,3,3,3,6,7,7,7]
2694 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm7, %zmm7
2695 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rcx
2696 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm17, %zmm9, %zmm9
2697 ; AVX512DQ-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm9
2698 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm19, %zmm4, %zmm4
2699 ; AVX512DQ-SLOW-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm5
2700 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm5
2701 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm10, %zmm4
2702 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} zmm7 = zmm20[2,2,2,3,6,6,6,7]
2703 ; AVX512DQ-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm7
2704 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm12, %zmm0
2705 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm3
2706 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm3
2707 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} zmm0 = zmm1[0,0,1,1,4,4,5,5]
2708 ; AVX512DQ-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm0
2709 ; AVX512DQ-SLOW-NEXT: vpbroadcastd (%rax), %ymm1
2710 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 4(%rax), %ymm4
2711 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm1, %zmm1
2712 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm1
2713 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
2714 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm8, %ymm6
2715 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm13[2,1,3,2]
2716 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm0
2717 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm0
2718 ; AVX512DQ-SLOW-NEXT: vmovdqa %ymm0, 192(%rcx)
2719 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, (%rcx)
2720 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, 128(%rcx)
2721 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, 64(%rcx)
2722 ; AVX512DQ-SLOW-NEXT: vzeroupper
2723 ; AVX512DQ-SLOW-NEXT: retq
;
; AVX512DQ-FAST-LABEL: store_i16_stride7_vf16:
2726 ; AVX512DQ-FAST: # %bb.0:
2727 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %ymm5
2728 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %ymm7
2729 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %ymm4
2730 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %ymm6
2731 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %ymm1
2732 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %ymm2
2733 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm7[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm7[16,17,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
2734 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm5[12,13,14,15],zero,zero,ymm5[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm5[u,u,u,u,u,u,u,u,16,17,18,19]
2735 ; AVX512DQ-FAST-NEXT: vporq %ymm0, %ymm3, %ymm16
2736 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %xmm11
2737 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %xmm12
2738 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u],zero,zero,ymm6[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm6[16,17,u,u,u,u,u,u,u,u]
2739 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm4[u,u,u,u,u,u,14,15],zero,zero,ymm4[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm4[u,u,u,u,u,u,u,u]
2740 ; AVX512DQ-FAST-NEXT: vporq %ymm0, %ymm3, %ymm17
2741 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %xmm14
2742 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm2[u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[16,17,u,u,u,u]
2743 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm1[u,u,u,u,u,u,u,u,u,u,14,15],zero,zero,ymm1[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm1[u,u,u,u]
2744 ; AVX512DQ-FAST-NEXT: vporq %ymm0, %ymm3, %ymm18
2745 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,u,u,u,u,26,27,u,u,u,u,u,u]
2746 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm5[2,2,2,2,6,6,6,6]
2747 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2,3],ymm0[4],ymm3[5,6,7,8],ymm0[9],ymm3[10,11],ymm0[12],ymm3[13,14,15]
2748 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm6[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
2749 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm9 = ymm4[0,1,1,3,4,5,5,7]
2750 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm9[0,1],ymm3[2],ymm9[3,4],ymm3[5],ymm9[6,7,8,9],ymm3[10],ymm9[11,12],ymm3[13],ymm9[14,15]
2751 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = <2,u,3,2,u,10,10,11>
2752 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm9
2753 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm0 = ymm4[2,2,2,2,6,6,6,6]
2754 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
2755 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
2756 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm5[1,1,1,1,5,5,5,5]
2757 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,u,u,20,21,24,25,u,u,22,23,22,23]
2758 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm10[0,1],ymm3[2],ymm10[3,4],ymm3[5],ymm10[6,7,8,9],ymm3[10],ymm10[11,12],ymm3[13],ymm10[14,15]
2759 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm19
2760 ; AVX512DQ-FAST-NEXT: vprold $16, %ymm2, %ymm0
2761 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm1[1,2,2,3,5,6,6,7]
2762 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
2763 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm1[0,0,2,1,4,4,6,5]
2764 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm13 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,u,u,18,19,20,21,u,u,20,21]
2765 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm13[0,1,2],ymm3[3],ymm13[4,5],ymm3[6],ymm13[7,8,9,10],ymm3[11],ymm13[12,13],ymm3[14],ymm13[15]
2766 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm8
2767 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm13 = [2,2,3,3,10,9,11,10]
2768 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm13
2769 ; AVX512DQ-FAST-NEXT: vprold $16, %xmm12, %xmm0
2770 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm3 = xmm11[1,1,2,3]
2771 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0,1],xmm0[2],xmm3[3,4],xmm0[5],xmm3[6,7]
2772 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
2773 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
2774 ; AVX512DQ-FAST-NEXT: vbroadcasti32x8 {{.*#+}} zmm15 = [0,8,1,9,0,8,1,9]
2775 ; AVX512DQ-FAST-NEXT: # zmm15 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
2776 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm15
2777 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
2778 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
2779 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
2780 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[1,1,2,2]
2781 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0],xmm14[1],xmm8[2,3],xmm14[4],xmm8[5,6],xmm14[7]
2782 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %xmm14
2783 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
2784 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm12 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
2785 ; AVX512DQ-FAST-NEXT: vpshufb %xmm12, %xmm11, %xmm11
2786 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm11, %zmm11
2787 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %xmm8
2788 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
2789 ; AVX512DQ-FAST-NEXT: vpshufb %xmm12, %xmm10, %xmm12
2790 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2791 ; AVX512DQ-FAST-NEXT: vpbroadcastd 8(%rax), %ymm10
2792 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm20 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
2793 ; AVX512DQ-FAST-NEXT: vpandnq %ymm10, %ymm20, %ymm10
2794 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
2795 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm14 = xmm8[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
2796 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
2797 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [0,0,0,1,8,9,9,11]
2798 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm14, %zmm8, %zmm20
2799 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = mem[0,1,2,3,0,1,2,3]
2800 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm8[12,13,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[14,15,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[16,17,u,u]
2801 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm14, %zmm10, %zmm10
2802 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,14,15,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u]
2803 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[3,3,3,3,7,7,7,7]
2804 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0,1,2],ymm7[3],ymm5[4,5],ymm7[6],ymm5[7,8,9,10],ymm7[11],ymm5[12,13],ymm7[14],ymm5[15]
2805 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm6 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u,u,u,u,u]
2806 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[3,3,3,3,7,7,7,7]
2807 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3],ymm6[4],ymm4[5,6,7,8],ymm6[9],ymm4[10,11],ymm6[12],ymm4[13,14,15]
2808 ; AVX512DQ-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
2809 ; AVX512DQ-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
2810 ; AVX512DQ-FAST-NEXT: vpermd %zmm8, %zmm6, %zmm6
2811 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <6,u,u,u,7,u,u,7>
2812 ; AVX512DQ-FAST-NEXT: vpermd %ymm8, %ymm7, %ymm7
2813 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
2814 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,3]
2815 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,3,3,4,5,6,7]
2816 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
2817 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm12[0,0,1,1]
2818 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
2819 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
2820 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[3,3,3,3,7,7,7,7]
2821 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,u,u,28,29,26,27,u,u,30,31,30,31]
2822 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
2823 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
2824 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rcx
2825 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm16, %zmm3, %zmm2
2826 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm17, %zmm0, %zmm0
2827 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
2828 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm18, %zmm8, %zmm2
2829 ; AVX512DQ-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm10
2830 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm10
2831 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} zmm0 = zmm19[2,2,2,3,6,6,6,7]
2832 ; AVX512DQ-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm0
2833 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm6
2834 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm6
2835 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} zmm0 = zmm11[0,0,1,1,4,4,5,5]
2836 ; AVX512DQ-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm0
2837 ; AVX512DQ-FAST-NEXT: vpbroadcastd (%rax), %ymm2
2838 ; AVX512DQ-FAST-NEXT: vpbroadcastd 4(%rax), %ymm3
2839 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
2840 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm20, %zmm2
2841 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
2842 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm5, %ymm4
2843 ; AVX512DQ-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm7, %ymm1
2844 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm4, %ymm1
2845 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm1, 192(%rcx)
2846 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, (%rcx)
2847 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 128(%rcx)
2848 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, 64(%rcx)
2849 ; AVX512DQ-FAST-NEXT: vzeroupper
2850 ; AVX512DQ-FAST-NEXT: retq
;
; AVX512BW-LABEL: store_i16_stride7_vf16:
2853 ; AVX512BW: # %bb.0:
2854 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2855 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
2856 ; AVX512BW-NEXT: vmovdqa (%rdi), %ymm0
2857 ; AVX512BW-NEXT: vmovdqa (%rdx), %ymm1
2858 ; AVX512BW-NEXT: vmovdqa (%r8), %ymm2
2859 ; AVX512BW-NEXT: vmovdqa (%r10), %ymm3
2860 ; AVX512BW-NEXT: vinserti64x4 $1, (%rsi), %zmm0, %zmm0
2861 ; AVX512BW-NEXT: vinserti64x4 $1, (%rcx), %zmm1, %zmm1
2862 ; AVX512BW-NEXT: vinserti64x4 $1, (%r9), %zmm2, %zmm2
2863 ; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,14,30,46,62,u,u,u,15,31,47,63,u,u,u>
2864 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm4
2865 ; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm5 = <29,45,u,u,u,u,14,30,46,u,u,u,u,15,31,47>
2866 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm5
2867 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <u,u,u,u,0,16,32,u,u,u,u,1,17,33,u,u,u,u,2,18,34,u,u,u,u,3,19,35,u,u,u,u>
2868 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm6
2869 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,16,32,48,u,u,u,1,17,33,49,u,u,u,2,18,34,50,u,u,u,3,19,35,51,u,u,u,4,20,36,52>
2870 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm7
2871 ; AVX512BW-NEXT: movl $236730480, %ecx # imm = 0xE1C3870
2872 ; AVX512BW-NEXT: kmovd %ecx, %k1
2873 ; AVX512BW-NEXT: vmovdqu16 %zmm6, %zmm7 {%k1}
2874 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <4,20,36,u,u,u,u,5,21,37,u,u,u,u,6,22,38,u,u,u,u,7,23,39,u,u,u,u,8,24,40,u>
2875 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm6
2876 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,u,u,5,21,37,53,u,u,u,6,22,38,54,u,u,u,7,23,39,55,u,u,u,8,24,40,56,u,u,u,9>
2877 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm8
2878 ; AVX512BW-NEXT: movl $1893843847, %ecx # imm = 0x70E1C387
2879 ; AVX512BW-NEXT: kmovd %ecx, %k1
2880 ; AVX512BW-NEXT: vmovdqu16 %zmm6, %zmm8 {%k1}
2881 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <u,u,u,9,25,41,u,u,u,u,10,26,42,u,u,u,u,11,27,43,u,u,u,u,12,28,44,u,u,u,u,13>
2882 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm2, %zmm6
2883 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <57,9,25,u,u,u,42,58,10,26,u,u,u,43,59,11,27,u,u,u,44,60,12,28,u,u,u,45,61,13,29,u>
2884 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
2885 ; AVX512BW-NEXT: movl $-2029118408, %ecx # imm = 0x870E1C38
2886 ; AVX512BW-NEXT: kmovd %ecx, %k1
2887 ; AVX512BW-NEXT: vmovdqu16 %zmm6, %zmm2 {%k1}
2888 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 128(%rax)
2889 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 64(%rax)
2890 ; AVX512BW-NEXT: vmovdqa64 %zmm7, (%rax)
2891 ; AVX512BW-NEXT: movw $-7741, %cx # imm = 0xE1C3
2892 ; AVX512BW-NEXT: kmovd %ecx, %k1
2893 ; AVX512BW-NEXT: vmovdqu16 %ymm5, %ymm4 {%k1}
2894 ; AVX512BW-NEXT: vmovdqa %ymm4, 192(%rax)
2895 ; AVX512BW-NEXT: vzeroupper
2896 ; AVX512BW-NEXT: retq
2897 %in.vec0 = load <16 x i16>, ptr %in.vecptr0, align 64
2898 %in.vec1 = load <16 x i16>, ptr %in.vecptr1, align 64
2899 %in.vec2 = load <16 x i16>, ptr %in.vecptr2, align 64
2900 %in.vec3 = load <16 x i16>, ptr %in.vecptr3, align 64
2901 %in.vec4 = load <16 x i16>, ptr %in.vecptr4, align 64
2902 %in.vec5 = load <16 x i16>, ptr %in.vecptr5, align 64
2903 %in.vec6 = load <16 x i16>, ptr %in.vecptr6, align 64
2904 %1 = shufflevector <16 x i16> %in.vec0, <16 x i16> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
2905 %2 = shufflevector <16 x i16> %in.vec2, <16 x i16> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
2906 %3 = shufflevector <16 x i16> %in.vec4, <16 x i16> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
2907 %4 = shufflevector <32 x i16> %1, <32 x i16> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
2908 %5 = shufflevector <16 x i16> %in.vec6, <16 x i16> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2909 %6 = shufflevector <32 x i16> %3, <32 x i16> %5, <48 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47>
2910 %7 = shufflevector <48 x i16> %6, <48 x i16> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2911 %8 = shufflevector <64 x i16> %4, <64 x i16> %7, <112 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111>
2912 %interleaved.vec = shufflevector <112 x i16> %8, <112 x i16> poison, <112 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 96, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 97, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 98, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 99, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 100, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 101, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 102, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 103, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 104, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 105, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 106, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 107, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 108, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 109, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 110, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95, i32 111>
  store <112 x i16> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

2917 define void @store_i16_stride7_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
; SSE-LABEL: store_i16_stride7_vf32:
; SSE:       # %bb.0:
2920 ; SSE-NEXT: subq $680, %rsp # imm = 0x2A8
2921 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2922 ; SSE-NEXT: movdqa 48(%rdi), %xmm3
2923 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2924 ; SSE-NEXT: movdqa 48(%rsi), %xmm2
2925 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2926 ; SSE-NEXT: movdqa 48(%rdx), %xmm1
2927 ; SSE-NEXT: movdqa 48(%rcx), %xmm5
2928 ; SSE-NEXT: movdqa 48(%r8), %xmm9
2929 ; SSE-NEXT: movdqa 48(%r9), %xmm4
2930 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2931 ; SSE-NEXT: movaps 48(%rax), %xmm7
2932 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,2,2,2]
2933 ; SSE-NEXT: movdqa %xmm1, %xmm10
2934 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2935 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [65535,65535,65535,65535,65535,65535,0,65535]
2936 ; SSE-NEXT: movdqa %xmm6, %xmm1
2937 ; SSE-NEXT: pandn %xmm0, %xmm1
2938 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm5[3,3,3,3,4,5,6,7]
2939 ; SSE-NEXT: movdqa %xmm5, %xmm11
2940 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2941 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
2942 ; SSE-NEXT: pand %xmm6, %xmm0
2943 ; SSE-NEXT: por %xmm1, %xmm0
2944 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
2945 ; SSE-NEXT: movdqa %xmm3, %xmm1
2946 ; SSE-NEXT: movdqa %xmm3, %xmm5
2947 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2948 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm0[3,0]
2949 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,0,65535,65535,65535,65535,65535,65535]
2950 ; SSE-NEXT: pand %xmm3, %xmm0
2951 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm9[1,1,1,1]
2952 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2953 ; SSE-NEXT: pandn %xmm2, %xmm3
2954 ; SSE-NEXT: por %xmm0, %xmm3
2955 ; SSE-NEXT: movdqa %xmm4, %xmm0
2956 ; SSE-NEXT: psrld $16, %xmm0
2957 ; SSE-NEXT: movdqa %xmm0, %xmm2
2958 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[1,0],xmm3[0,0]
2959 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,0],xmm1[0,2]
2960 ; SSE-NEXT: movaps {{.*#+}} xmm1 = [65535,65535,65535,0,65535,65535,65535,65535]
2961 ; SSE-NEXT: andps %xmm1, %xmm2
2962 ; SSE-NEXT: andnps %xmm7, %xmm1
2963 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2964 ; SSE-NEXT: orps %xmm2, %xmm1
2965 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2966 ; SSE-NEXT: movdqa %xmm11, %xmm1
2967 ; SSE-NEXT: psrlq $48, %xmm1
2968 ; SSE-NEXT: movdqa %xmm10, %xmm2
2969 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm2 = xmm2[1],xmm1[1]
2970 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,0,0,65535,65535,65535]
2971 ; SSE-NEXT: pandn %xmm2, %xmm1
2972 ; SSE-NEXT: movdqa %xmm5, %xmm2
2973 ; SSE-NEXT: psrldq {{.*#+}} xmm2 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
2974 ; SSE-NEXT: por %xmm1, %xmm2
2975 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm9[2,2,3,3]
2976 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,65535,65535,0,65535,65535]
2977 ; SSE-NEXT: movdqa %xmm1, %xmm4
2978 ; SSE-NEXT: movdqa %xmm1, %xmm8
2979 ; SSE-NEXT: pandn %xmm3, %xmm4
2980 ; SSE-NEXT: por %xmm2, %xmm4
2981 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,0],xmm4[2,0]
2982 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm0[2,0]
2983 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [0,65535,65535,65535,65535,65535,65535,0]
2984 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[3,3,3,3]
2985 ; SSE-NEXT: movdqa %xmm1, %xmm3
2986 ; SSE-NEXT: pandn %xmm0, %xmm3
2987 ; SSE-NEXT: andps %xmm1, %xmm2
2988 ; SSE-NEXT: por %xmm2, %xmm3
2989 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2990 ; SSE-NEXT: movdqa (%rax), %xmm2
2991 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,1,0,1]
2992 ; SSE-NEXT: movdqa %xmm2, %xmm7
2993 ; SSE-NEXT: movdqa %xmm8, %xmm2
2994 ; SSE-NEXT: pandn %xmm0, %xmm2
2995 ; SSE-NEXT: movdqa (%r8), %xmm0
2996 ; SSE-NEXT: movdqa (%r9), %xmm1
2997 ; SSE-NEXT: movdqa %xmm0, %xmm3
2998 ; SSE-NEXT: movdqa %xmm0, %xmm12
2999 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3000 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
3001 ; SSE-NEXT: movdqa %xmm1, %xmm13
3002 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3003 ; SSE-NEXT: movdqa %xmm3, %xmm0
3004 ; SSE-NEXT: movdqa %xmm3, %xmm5
3005 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3006 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3007 ; SSE-NEXT: pand %xmm8, %xmm0
3008 ; SSE-NEXT: por %xmm2, %xmm0
3009 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,0,0,0,65535,65535]
3010 ; SSE-NEXT: movdqa %xmm1, %xmm2
3011 ; SSE-NEXT: movdqa %xmm1, %xmm10
3012 ; SSE-NEXT: pandn %xmm0, %xmm2
3013 ; SSE-NEXT: movdqa (%rcx), %xmm1
3014 ; SSE-NEXT: movdqa %xmm1, %xmm0
3015 ; SSE-NEXT: psrld $16, %xmm0
3016 ; SSE-NEXT: movdqa (%rdx), %xmm4
3017 ; SSE-NEXT: movdqa %xmm4, %xmm3
3018 ; SSE-NEXT: movdqa %xmm4, %xmm14
3019 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
3020 ; SSE-NEXT: movdqa {{.*#+}} xmm15 = [65535,0,0,65535,65535,65535,65535,65535]
3021 ; SSE-NEXT: movdqa %xmm15, %xmm4
3022 ; SSE-NEXT: pandn %xmm3, %xmm4
3023 ; SSE-NEXT: movdqa (%rdi), %xmm11
3024 ; SSE-NEXT: movdqa (%rsi), %xmm9
3025 ; SSE-NEXT: movdqa %xmm9, %xmm3
3026 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3027 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm11[0],xmm3[1],xmm11[1],xmm3[2],xmm11[2],xmm3[3],xmm11[3]
3028 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3029 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[2,2,2,2,4,5,6,7]
3030 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,4]
3031 ; SSE-NEXT: pand %xmm15, %xmm3
3032 ; SSE-NEXT: por %xmm4, %xmm3
3033 ; SSE-NEXT: pand %xmm10, %xmm3
3034 ; SSE-NEXT: por %xmm2, %xmm3
3035 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3036 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,65535,65535,0,65535,65535,65535,65535]
3037 ; SSE-NEXT: pandn %xmm7, %xmm2
3038 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3039 ; SSE-NEXT: movdqa %xmm5, %xmm3
3040 ; SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
3041 ; SSE-NEXT: por %xmm2, %xmm3
3042 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,0,0,65535,65535,65535,65535]
3043 ; SSE-NEXT: movdqa %xmm0, %xmm2
3044 ; SSE-NEXT: pandn %xmm3, %xmm2
3045 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3046 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm14[2,2,2,2]
3047 ; SSE-NEXT: movdqa %xmm6, %xmm4
3048 ; SSE-NEXT: pandn %xmm3, %xmm4
3049 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm1[0,1,2,3,4,4,4,4]
3050 ; SSE-NEXT: pand %xmm6, %xmm3
3051 ; SSE-NEXT: por %xmm4, %xmm3
3052 ; SSE-NEXT: punpckhwd {{.*#+}} xmm11 = xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
3053 ; SSE-NEXT: movdqa %xmm11, %xmm4
3054 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[3,3]
3055 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm1[3,3,3,3,4,5,6,7]
3056 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3057 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm4[0,2]
3058 ; SSE-NEXT: andps %xmm0, %xmm3
3059 ; SSE-NEXT: orps %xmm2, %xmm3
3060 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3061 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[2,3,2,3]
3062 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,65535,65535,65535,65535]
3063 ; SSE-NEXT: movdqa %xmm0, %xmm3
3064 ; SSE-NEXT: pandn %xmm2, %xmm3
3065 ; SSE-NEXT: movdqa %xmm13, %xmm2
3066 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm12[4],xmm2[5],xmm12[5],xmm2[6],xmm12[6],xmm2[7],xmm12[7]
3067 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[2,2,2,2,4,5,6,7]
3068 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,4]
3069 ; SSE-NEXT: pand %xmm0, %xmm2
3070 ; SSE-NEXT: por %xmm3, %xmm2
3071 ; SSE-NEXT: movdqa %xmm14, %xmm4
3072 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
3073 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3074 ; SSE-NEXT: movdqa %xmm11, %xmm3
3075 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[2,2],xmm4[2,3]
3076 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,2],xmm2[0,3]
3077 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3078 ; SSE-NEXT: movdqa 16(%rax), %xmm3
3079 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[0,1,0,1]
3080 ; SSE-NEXT: movdqa %xmm3, %xmm7
3081 ; SSE-NEXT: movdqa %xmm8, %xmm5
3082 ; SSE-NEXT: movdqa %xmm8, %xmm3
3083 ; SSE-NEXT: pandn %xmm2, %xmm3
3084 ; SSE-NEXT: movdqa 16(%r8), %xmm10
3085 ; SSE-NEXT: movdqa 16(%r9), %xmm8
3086 ; SSE-NEXT: movdqa %xmm10, %xmm4
3087 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3088 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm8[0],xmm4[1],xmm8[1],xmm4[2],xmm8[2],xmm4[3],xmm8[3]
3089 ; SSE-NEXT: movdqa %xmm8, (%rsp) # 16-byte Spill
3090 ; SSE-NEXT: movdqa %xmm4, %xmm2
3091 ; SSE-NEXT: movdqa %xmm4, %xmm12
3092 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3093 ; SSE-NEXT: pslldq {{.*#+}} xmm2 = zero,zero,xmm2[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3094 ; SSE-NEXT: pand %xmm5, %xmm2
3095 ; SSE-NEXT: por %xmm3, %xmm2
3096 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,0,0,0,65535,65535]
3097 ; SSE-NEXT: movdqa %xmm1, %xmm3
3098 ; SSE-NEXT: pandn %xmm2, %xmm3
3099 ; SSE-NEXT: movdqa 16(%rcx), %xmm5
3100 ; SSE-NEXT: movdqa %xmm5, %xmm2
3101 ; SSE-NEXT: psrld $16, %xmm2
3102 ; SSE-NEXT: movdqa 16(%rdx), %xmm0
3103 ; SSE-NEXT: movdqa %xmm0, %xmm4
3104 ; SSE-NEXT: movdqa %xmm0, %xmm9
3105 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
3106 ; SSE-NEXT: movdqa %xmm15, %xmm2
3107 ; SSE-NEXT: pandn %xmm4, %xmm2
3108 ; SSE-NEXT: movdqa 16(%rdi), %xmm0
3109 ; SSE-NEXT: movdqa 16(%rsi), %xmm13
3110 ; SSE-NEXT: movdqa %xmm13, %xmm4
3111 ; SSE-NEXT: movdqa %xmm13, %xmm14
3112 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3113 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
3114 ; SSE-NEXT: movdqa %xmm0, %xmm13
3115 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3116 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[2,2,2,2,4,5,6,7]
3117 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,5,4]
3118 ; SSE-NEXT: pand %xmm15, %xmm4
3119 ; SSE-NEXT: por %xmm2, %xmm4
3120 ; SSE-NEXT: pand %xmm1, %xmm4
3121 ; SSE-NEXT: por %xmm3, %xmm4
3122 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3123 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,65535,65535,0,65535,65535,65535,65535]
3124 ; SSE-NEXT: pandn %xmm7, %xmm2
3125 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3126 ; SSE-NEXT: movdqa %xmm12, %xmm3
3127 ; SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
3128 ; SSE-NEXT: por %xmm2, %xmm3
3129 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,0,0,65535,65535,65535,65535]
3130 ; SSE-NEXT: movdqa %xmm0, %xmm2
3131 ; SSE-NEXT: pandn %xmm3, %xmm2
3132 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3133 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm9[2,2,2,2]
3134 ; SSE-NEXT: movdqa %xmm6, %xmm4
3135 ; SSE-NEXT: pandn %xmm3, %xmm4
3136 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm5[0,1,2,3,4,4,4,4]
3137 ; SSE-NEXT: pand %xmm6, %xmm3
3138 ; SSE-NEXT: por %xmm4, %xmm3
3139 ; SSE-NEXT: punpckhwd {{.*#+}} xmm13 = xmm13[4],xmm14[4],xmm13[5],xmm14[5],xmm13[6],xmm14[6],xmm13[7],xmm14[7]
3140 ; SSE-NEXT: movdqa %xmm13, %xmm4
3141 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[3,3]
3142 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm5[3,3,3,3,4,5,6,7]
3143 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3144 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm4[0,2]
3145 ; SSE-NEXT: andps %xmm0, %xmm3
3146 ; SSE-NEXT: orps %xmm2, %xmm3
3147 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3148 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[2,3,2,3]
3149 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,65535,65535,65535,65535]
3150 ; SSE-NEXT: movdqa %xmm0, %xmm3
3151 ; SSE-NEXT: pandn %xmm2, %xmm3
3152 ; SSE-NEXT: punpckhwd {{.*#+}} xmm8 = xmm8[4],xmm10[4],xmm8[5],xmm10[5],xmm8[6],xmm10[6],xmm8[7],xmm10[7]
3153 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm8[2,2,2,2,4,5,6,7]
3154 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,4]
3155 ; SSE-NEXT: pand %xmm0, %xmm2
3156 ; SSE-NEXT: por %xmm3, %xmm2
3157 ; SSE-NEXT: movdqa %xmm9, %xmm4
3158 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
3159 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3160 ; SSE-NEXT: movdqa %xmm13, %xmm3
3161 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[2,2],xmm4[2,3]
3162 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,2],xmm2[0,3]
3163 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3164 ; SSE-NEXT: movdqa 32(%rax), %xmm3
3165 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[0,1,0,1]
3166 ; SSE-NEXT: movdqa %xmm3, %xmm7
3167 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,65535,65535,65535,65535,0,65535,65535]
3168 ; SSE-NEXT: movdqa %xmm5, %xmm3
3169 ; SSE-NEXT: pandn %xmm2, %xmm3
3170 ; SSE-NEXT: movdqa 32(%r8), %xmm10
3171 ; SSE-NEXT: movdqa 32(%r9), %xmm9
3172 ; SSE-NEXT: movdqa %xmm10, %xmm4
3173 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3174 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm9[0],xmm4[1],xmm9[1],xmm4[2],xmm9[2],xmm4[3],xmm9[3]
3175 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3176 ; SSE-NEXT: movdqa %xmm4, %xmm2
3177 ; SSE-NEXT: movdqa %xmm4, %xmm14
3178 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3179 ; SSE-NEXT: pslldq {{.*#+}} xmm2 = zero,zero,xmm2[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3180 ; SSE-NEXT: pand %xmm5, %xmm2
3181 ; SSE-NEXT: por %xmm3, %xmm2
3182 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,65535,65535,0,0,0,65535,65535]
3183 ; SSE-NEXT: movdqa %xmm5, %xmm3
3184 ; SSE-NEXT: pandn %xmm2, %xmm3
3185 ; SSE-NEXT: movdqa 32(%rcx), %xmm12
3186 ; SSE-NEXT: movdqa %xmm12, %xmm2
3187 ; SSE-NEXT: psrld $16, %xmm2
3188 ; SSE-NEXT: movdqa 32(%rdx), %xmm0
3189 ; SSE-NEXT: movdqa %xmm0, %xmm4
3190 ; SSE-NEXT: movdqa %xmm0, %xmm8
3191 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
3192 ; SSE-NEXT: movdqa %xmm15, %xmm2
3193 ; SSE-NEXT: pandn %xmm4, %xmm2
3194 ; SSE-NEXT: movdqa 32(%rdi), %xmm1
3195 ; SSE-NEXT: movdqa 32(%rsi), %xmm0
3196 ; SSE-NEXT: movdqa %xmm0, %xmm4
3197 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3198 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
3199 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3200 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[2,2,2,2,4,5,6,7]
3201 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,5,4]
3202 ; SSE-NEXT: pand %xmm15, %xmm4
3203 ; SSE-NEXT: por %xmm2, %xmm4
3204 ; SSE-NEXT: pand %xmm5, %xmm4
3205 ; SSE-NEXT: por %xmm3, %xmm4
3206 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3207 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,65535,65535,0,65535,65535,65535,65535]
3208 ; SSE-NEXT: pandn %xmm7, %xmm3
3209 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3210 ; SSE-NEXT: movdqa %xmm14, %xmm2
3211 ; SSE-NEXT: psrldq {{.*#+}} xmm2 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
3212 ; SSE-NEXT: por %xmm3, %xmm2
3213 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm12[0,1,2,3,4,4,4,4]
3214 ; SSE-NEXT: pand %xmm6, %xmm3
3215 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3216 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm8[2,2,2,2]
3217 ; SSE-NEXT: pandn %xmm4, %xmm6
3218 ; SSE-NEXT: por %xmm3, %xmm6
3219 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
3220 ; SSE-NEXT: movdqa %xmm1, %xmm3
3221 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3222 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm6[3,3]
3223 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm12[3,3,3,3,4,5,6,7]
3224 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[0,2]
3225 ; SSE-NEXT: movaps {{.*#+}} xmm5 = [65535,0,0,0,65535,65535,65535,65535]
3226 ; SSE-NEXT: andps %xmm5, %xmm4
3227 ; SSE-NEXT: andnps %xmm2, %xmm5
3228 ; SSE-NEXT: orps %xmm4, %xmm5
3229 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3230 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[2,3,2,3]
3231 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,65535,65535,65535,65535]
3232 ; SSE-NEXT: movdqa %xmm0, %xmm3
3233 ; SSE-NEXT: pandn %xmm2, %xmm3
3234 ; SSE-NEXT: punpckhwd {{.*#+}} xmm9 = xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
3235 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm9[2,2,2,2,4,5,6,7]
3236 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,4]
3237 ; SSE-NEXT: pand %xmm0, %xmm2
3238 ; SSE-NEXT: movdqa %xmm0, %xmm9
3239 ; SSE-NEXT: por %xmm3, %xmm2
3240 ; SSE-NEXT: movdqa %xmm8, %xmm0
3241 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm12[4],xmm0[5],xmm12[5],xmm0[6],xmm12[6],xmm0[7],xmm12[7]
3242 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3243 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm0[2,3]
3244 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,2],xmm2[0,3]
3245 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3246 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3247 ; SSE-NEXT: movdqa %xmm1, %xmm2
3248 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3249 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm8[0],xmm2[1],xmm8[1],xmm2[2],xmm8[2],xmm2[3],xmm8[3]
3250 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3251 ; SSE-NEXT: pslldq {{.*#+}} xmm2 = zero,zero,xmm2[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3252 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [65535,65535,65535,65535,65535,0,65535,65535]
3253 ; SSE-NEXT: pand %xmm10, %xmm2
3254 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
3255 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[0,1,0,1]
3256 ; SSE-NEXT: pandn %xmm3, %xmm10
3257 ; SSE-NEXT: por %xmm2, %xmm10
3258 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3259 ; SSE-NEXT: movdqa %xmm5, %xmm6
3260 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3261 ; SSE-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4],xmm3[4],xmm6[5],xmm3[5],xmm6[6],xmm3[6],xmm6[7],xmm3[7]
3262 ; SSE-NEXT: movdqa %xmm5, %xmm2
3263 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
3264 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3265 ; SSE-NEXT: psrld $16, %xmm3
3266 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
3267 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3268 ; SSE-NEXT: punpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
3269 ; SSE-NEXT: # xmm3 = xmm3[0],mem[0],xmm3[1],mem[1],xmm3[2],mem[2],xmm3[3],mem[3]
3270 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[2,2,2,2,4,5,6,7]
3271 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,4]
3272 ; SSE-NEXT: pand %xmm15, %xmm3
3273 ; SSE-NEXT: pandn %xmm2, %xmm15
3274 ; SSE-NEXT: por %xmm3, %xmm15
3275 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,65535,65535,0,0,0,65535,65535]
3276 ; SSE-NEXT: pand %xmm2, %xmm15
3277 ; SSE-NEXT: pandn %xmm10, %xmm2
3278 ; SSE-NEXT: por %xmm15, %xmm2
3279 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3280 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,2,2,2]
3281 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,0],xmm6[2,0]
3282 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3283 ; SSE-NEXT: pslldq {{.*#+}} xmm6 = zero,zero,zero,zero,zero,zero,xmm6[0,1,2,3,4,5,6,7,8,9]
3284 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,65535,65535,65535,65535,0,0,65535]
3285 ; SSE-NEXT: movdqa %xmm4, %xmm2
3286 ; SSE-NEXT: pandn %xmm6, %xmm2
3287 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3288 ; SSE-NEXT: pslldq {{.*#+}} xmm3 = zero,zero,xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3289 ; SSE-NEXT: pand %xmm4, %xmm3
3290 ; SSE-NEXT: por %xmm2, %xmm3
3291 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [0,65535,65535,65535,65535,65535,65535,0]
3292 ; SSE-NEXT: movdqa %xmm1, %xmm2
3293 ; SSE-NEXT: pandn %xmm0, %xmm2
3294 ; SSE-NEXT: pand %xmm1, %xmm3
3295 ; SSE-NEXT: por %xmm3, %xmm2
3296 ; SSE-NEXT: psrldq {{.*#+}} xmm8 = xmm8[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3297 ; SSE-NEXT: movdqa %xmm9, %xmm3
3298 ; SSE-NEXT: pandn %xmm8, %xmm3
3299 ; SSE-NEXT: pand %xmm9, %xmm2
3300 ; SSE-NEXT: por %xmm2, %xmm3
3301 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,0,65535,65535,65535,65535,65535]
3302 ; SSE-NEXT: pand %xmm1, %xmm3
3303 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[2,2,3,3]
3304 ; SSE-NEXT: pandn %xmm0, %xmm1
3305 ; SSE-NEXT: por %xmm3, %xmm1
3306 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3307 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3308 ; SSE-NEXT: movdqa %xmm8, %xmm0
3309 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3310 ; SSE-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm15[0],xmm8[1],xmm15[1],xmm8[2],xmm15[2],xmm8[3],xmm15[3]
3311 ; SSE-NEXT: psrlq $48, %xmm15
3312 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm15[1]
3313 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [65535,65535,65535,0,0,65535,65535,65535]
3314 ; SSE-NEXT: movdqa %xmm6, %xmm2
3315 ; SSE-NEXT: pandn %xmm0, %xmm2
3316 ; SSE-NEXT: movdqa %xmm11, %xmm0
3317 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
3318 ; SSE-NEXT: por %xmm2, %xmm0
3319 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3320 ; SSE-NEXT: movdqa %xmm1, %xmm3
3321 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3322 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
3323 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3324 ; SSE-NEXT: psrld $16, %xmm2
3325 ; SSE-NEXT: movdqa %xmm1, %xmm3
3326 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3327 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3328 ; SSE-NEXT: movdqa %xmm5, %xmm2
3329 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
3330 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,5,4,7]
3331 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[3,1,2,1]
3332 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,5,4]
3333 ; SSE-NEXT: movdqa {{.*#+}} xmm15 = [0,65535,65535,65535,65535,0,0,0]
3334 ; SSE-NEXT: movdqa %xmm15, %xmm1
3335 ; SSE-NEXT: pandn %xmm2, %xmm1
3336 ; SSE-NEXT: pand %xmm15, %xmm0
3337 ; SSE-NEXT: por %xmm0, %xmm1
3338 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3339 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
3340 ; SSE-NEXT: movdqa %xmm9, %xmm0
3341 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3342 ; SSE-NEXT: punpcklwd {{.*#+}} xmm9 = xmm9[0],xmm14[0],xmm9[1],xmm14[1],xmm9[2],xmm14[2],xmm9[3],xmm14[3]
3343 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3344 ; SSE-NEXT: psrlq $48, %xmm14
3345 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm14[1]
3346 ; SSE-NEXT: movdqa %xmm6, %xmm2
3347 ; SSE-NEXT: pandn %xmm0, %xmm2
3348 ; SSE-NEXT: movdqa %xmm13, %xmm0
3349 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
3350 ; SSE-NEXT: por %xmm2, %xmm0
3351 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3352 ; SSE-NEXT: movdqa %xmm1, %xmm3
3353 ; SSE-NEXT: movdqa (%rsp), %xmm2 # 16-byte Reload
3354 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
3355 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3356 ; SSE-NEXT: psrld $16, %xmm2
3357 ; SSE-NEXT: movdqa %xmm1, %xmm3
3358 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3359 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3360 ; SSE-NEXT: movdqa %xmm10, %xmm2
3361 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
3362 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,5,4,7]
3363 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[3,1,2,1]
3364 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,5,4]
3365 ; SSE-NEXT: movdqa %xmm15, %xmm1
3366 ; SSE-NEXT: pandn %xmm2, %xmm1
3367 ; SSE-NEXT: pand %xmm15, %xmm0
3368 ; SSE-NEXT: por %xmm0, %xmm1
3369 ; SSE-NEXT: movdqa %xmm1, (%rsp) # 16-byte Spill
3370 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3371 ; SSE-NEXT: movdqa %xmm1, %xmm0
3372 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm12[0],xmm1[1],xmm12[1],xmm1[2],xmm12[2],xmm1[3],xmm12[3]
3373 ; SSE-NEXT: movdqa %xmm1, %xmm14
3374 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3375 ; SSE-NEXT: psrlq $48, %xmm12
3376 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm12[1]
3377 ; SSE-NEXT: movdqa %xmm6, %xmm1
3378 ; SSE-NEXT: pandn %xmm0, %xmm1
3379 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3380 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
3381 ; SSE-NEXT: por %xmm1, %xmm0
3382 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3383 ; SSE-NEXT: movdqa %xmm6, %xmm3
3384 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3385 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
3386 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3387 ; SSE-NEXT: psrld $16, %xmm2
3388 ; SSE-NEXT: movdqa %xmm6, %xmm3
3389 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3390 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
3391 ; SSE-NEXT: movdqa %xmm7, %xmm2
3392 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
3393 ; SSE-NEXT: pand %xmm15, %xmm0
3394 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,5,4,7]
3395 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[3,1,2,1]
3396 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,5,4]
3397 ; SSE-NEXT: pandn %xmm2, %xmm15
3398 ; SSE-NEXT: por %xmm0, %xmm15
3399 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3400 ; SSE-NEXT: movdqa %xmm1, %xmm0
3401 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[1,1,1,1,4,5,6,7]
3402 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3403 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3404 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
3405 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3406 ; SSE-NEXT: psrld $16, %xmm2
3407 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
3408 ; SSE-NEXT: movdqa %xmm4, %xmm2
3409 ; SSE-NEXT: pandn %xmm0, %xmm2
3410 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm8[0,1,2,3,4,5,6,6]
3411 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
3412 ; SSE-NEXT: pand %xmm4, %xmm0
3413 ; SSE-NEXT: por %xmm2, %xmm0
3414 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3415 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,2],xmm5[1,1]
3416 ; SSE-NEXT: movaps %xmm5, %xmm3
3417 ; SSE-NEXT: movaps {{.*#+}} xmm12 = [65535,65535,0,0,0,65535,65535,65535]
3418 ; SSE-NEXT: movaps %xmm12, %xmm1
3419 ; SSE-NEXT: andnps %xmm2, %xmm1
3420 ; SSE-NEXT: pand %xmm12, %xmm0
3421 ; SSE-NEXT: orps %xmm0, %xmm1
3422 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3423 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3424 ; SSE-NEXT: movdqa %xmm1, %xmm0
3425 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[1,1,1,1,4,5,6,7]
3426 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3427 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3428 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
3429 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3430 ; SSE-NEXT: psrld $16, %xmm2
3431 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
3432 ; SSE-NEXT: movdqa %xmm4, %xmm2
3433 ; SSE-NEXT: pandn %xmm0, %xmm2
3434 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm9[0,1,2,3,4,5,6,6]
3435 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
3436 ; SSE-NEXT: pand %xmm4, %xmm0
3437 ; SSE-NEXT: por %xmm2, %xmm0
3438 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3439 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,2],xmm10[1,1]
3440 ; SSE-NEXT: movaps %xmm12, %xmm1
3441 ; SSE-NEXT: andnps %xmm2, %xmm1
3442 ; SSE-NEXT: pand %xmm12, %xmm0
3443 ; SSE-NEXT: orps %xmm0, %xmm1
3444 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3445 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3446 ; SSE-NEXT: movdqa %xmm1, %xmm0
3447 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[1,1,1,1,4,5,6,7]
3448 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3449 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3450 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
3451 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3452 ; SSE-NEXT: psrld $16, %xmm2
3453 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
3454 ; SSE-NEXT: movdqa %xmm4, %xmm2
3455 ; SSE-NEXT: pandn %xmm0, %xmm2
3456 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm14[0,1,2,3,4,5,6,6]
3457 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
3458 ; SSE-NEXT: pand %xmm4, %xmm0
3459 ; SSE-NEXT: por %xmm2, %xmm0
3460 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
3461 ; SSE-NEXT: movaps %xmm9, %xmm1
3462 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm7[1,1]
3463 ; SSE-NEXT: movaps %xmm7, %xmm5
3464 ; SSE-NEXT: movaps %xmm12, %xmm10
3465 ; SSE-NEXT: andnps %xmm1, %xmm10
3466 ; SSE-NEXT: pand %xmm12, %xmm0
3467 ; SSE-NEXT: orps %xmm0, %xmm10
3468 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3469 ; SSE-NEXT: movdqa %xmm1, %xmm0
3470 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[1,1,1,1,4,5,6,7]
3471 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3472 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3473 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
3474 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3475 ; SSE-NEXT: movdqa %xmm2, %xmm1
3476 ; SSE-NEXT: psrld $16, %xmm1
3477 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
3478 ; SSE-NEXT: movdqa %xmm4, %xmm1
3479 ; SSE-NEXT: pandn %xmm0, %xmm1
3480 ; SSE-NEXT: pshufhw $164, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
3481 ; SSE-NEXT: # xmm0 = mem[0,1,2,3,4,5,6,6]
3482 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
3483 ; SSE-NEXT: pand %xmm4, %xmm0
3484 ; SSE-NEXT: por %xmm1, %xmm0
3485 ; SSE-NEXT: pand %xmm12, %xmm0
3486 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3487 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3488 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm14[1,1]
3489 ; SSE-NEXT: andnps %xmm1, %xmm12
3490 ; SSE-NEXT: orps %xmm0, %xmm12
3491 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3492 ; SSE-NEXT: shufps $42, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
3493 ; SSE-NEXT: # xmm1 = xmm1[2,2],mem[2,0]
3494 ; SSE-NEXT: pshufhw $233, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
3495 ; SSE-NEXT: # xmm0 = mem[0,1,2,3,5,6,6,7]
3496 ; SSE-NEXT: movaps {{.*#+}} xmm2 = [0,65535,65535,65535,65535,65535,65535,0]
3497 ; SSE-NEXT: andps %xmm2, %xmm1
3498 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
3499 ; SSE-NEXT: andnps %xmm0, %xmm2
3500 ; SSE-NEXT: orps %xmm1, %xmm2
3501 ; SSE-NEXT: movaps {{.*#+}} xmm1 = [65535,0,65535,65535,65535,65535,65535,65535]
3502 ; SSE-NEXT: andps %xmm1, %xmm2
3503 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[2,3,2,3]
3504 ; SSE-NEXT: andnps %xmm0, %xmm1
3505 ; SSE-NEXT: orps %xmm2, %xmm1
3506 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3507 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3508 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9]
3509 ; SSE-NEXT: movdqa %xmm4, %xmm0
3510 ; SSE-NEXT: pandn %xmm1, %xmm0
3511 ; SSE-NEXT: pslldq {{.*#+}} xmm11 = zero,zero,xmm11[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3512 ; SSE-NEXT: pand %xmm4, %xmm11
3513 ; SSE-NEXT: por %xmm0, %xmm11
3514 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,65535,65535,0,0,0,0,65535]
3515 ; SSE-NEXT: movdqa %xmm2, %xmm1
3516 ; SSE-NEXT: pandn %xmm11, %xmm1
3517 ; SSE-NEXT: movaps %xmm3, %xmm11
3518 ; SSE-NEXT: shufpd $1, {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
3519 ; SSE-NEXT: # xmm11 = xmm11[1],mem[0]
3520 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3521 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4,4,5,5,6,6,7,7]
3522 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0],xmm0[2,1]
3523 ; SSE-NEXT: andps %xmm2, %xmm11
3524 ; SSE-NEXT: orps %xmm1, %xmm11
3525 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3526 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9]
3527 ; SSE-NEXT: movdqa %xmm4, %xmm1
3528 ; SSE-NEXT: pandn %xmm0, %xmm1
3529 ; SSE-NEXT: pslldq {{.*#+}} xmm13 = zero,zero,xmm13[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3530 ; SSE-NEXT: pand %xmm4, %xmm13
3531 ; SSE-NEXT: por %xmm1, %xmm13
3532 ; SSE-NEXT: movdqa %xmm2, %xmm1
3533 ; SSE-NEXT: pandn %xmm13, %xmm1
3534 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
3535 ; SSE-NEXT: movapd %xmm7, %xmm13
3536 ; SSE-NEXT: shufpd $1, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
3537 ; SSE-NEXT: # xmm13 = xmm13[1],mem[0]
3538 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3539 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4,4,5,5,6,6,7,7]
3540 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[2,0],xmm0[2,1]
3541 ; SSE-NEXT: andps %xmm2, %xmm13
3542 ; SSE-NEXT: orps %xmm1, %xmm13
3543 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3544 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9]
3545 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3546 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3547 ; SSE-NEXT: pand %xmm4, %xmm1
3548 ; SSE-NEXT: pandn %xmm0, %xmm4
3549 ; SSE-NEXT: por %xmm1, %xmm4
3550 ; SSE-NEXT: movaps %xmm5, %xmm1
3551 ; SSE-NEXT: shufpd $1, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
3552 ; SSE-NEXT: # xmm1 = xmm1[1],mem[0]
3553 ; SSE-NEXT: punpckhwd {{.*#+}} xmm6 = xmm6[4,4,5,5,6,6,7,7]
3554 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm6[2,1]
3555 ; SSE-NEXT: andps %xmm2, %xmm1
3556 ; SSE-NEXT: pandn %xmm4, %xmm2
3557 ; SSE-NEXT: por %xmm1, %xmm2
3558 ; SSE-NEXT: punpcklqdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
3559 ; SSE-NEXT: # xmm8 = xmm8[0],mem[0]
3560 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
3561 ; SSE-NEXT: # xmm8 = xmm8[2,0],mem[2,1]
3562 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
3563 ; SSE-NEXT: # xmm4 = mem[0,0,1,1]
3564 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,0,1,1]
3565 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
3566 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [65535,65535,65535,65535,0,0,0,65535]
3567 ; SSE-NEXT: movdqa %xmm6, %xmm1
3568 ; SSE-NEXT: pandn %xmm4, %xmm1
3569 ; SSE-NEXT: andps %xmm6, %xmm8
3570 ; SSE-NEXT: por %xmm8, %xmm1
3571 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3572 ; SSE-NEXT: unpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
3573 ; SSE-NEXT: # xmm3 = xmm3[0],mem[0]
3574 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
3575 ; SSE-NEXT: # xmm3 = xmm3[2,0],mem[2,1]
3576 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
3577 ; SSE-NEXT: # xmm4 = mem[0,0,1,1]
3578 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm7[0,0,1,1]
3579 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm8[0],xmm4[1],xmm8[1]
3580 ; SSE-NEXT: movdqa %xmm6, %xmm8
3581 ; SSE-NEXT: pandn %xmm4, %xmm8
3582 ; SSE-NEXT: andps %xmm6, %xmm3
3583 ; SSE-NEXT: por %xmm3, %xmm8
3584 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3585 ; SSE-NEXT: unpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
3586 ; SSE-NEXT: # xmm0 = xmm0[0],mem[0]
3587 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
3588 ; SSE-NEXT: # xmm0 = xmm0[2,0],mem[2,1]
3589 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm9[0,0,1,1]
3590 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm5[0,0,1,1]
3591 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm9[0],xmm4[1],xmm9[1]
3592 ; SSE-NEXT: movdqa %xmm6, %xmm9
3593 ; SSE-NEXT: pandn %xmm4, %xmm9
3594 ; SSE-NEXT: andps %xmm6, %xmm0
3595 ; SSE-NEXT: por %xmm0, %xmm9
3596 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3597 ; SSE-NEXT: unpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
3598 ; SSE-NEXT: # xmm0 = xmm0[0],mem[0]
3599 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
3600 ; SSE-NEXT: # xmm0 = xmm0[2,0],mem[2,1]
3601 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
3602 ; SSE-NEXT: # xmm4 = mem[0,0,1,1]
3603 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm14[0,0,1,1]
3604 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm7[0],xmm4[1],xmm7[1]
3605 ; SSE-NEXT: andps %xmm6, %xmm0
3606 ; SSE-NEXT: pandn %xmm4, %xmm6
3607 ; SSE-NEXT: por %xmm0, %xmm6
3608 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
3609 ; SSE-NEXT: movdqa %xmm6, 336(%rax)
3610 ; SSE-NEXT: movdqa %xmm9, 224(%rax)
3611 ; SSE-NEXT: movdqa %xmm8, 112(%rax)
3612 ; SSE-NEXT: movdqa %xmm1, (%rax)
3613 ; SSE-NEXT: movdqa %xmm2, 288(%rax)
3614 ; SSE-NEXT: movaps %xmm13, 176(%rax)
3615 ; SSE-NEXT: movaps %xmm11, 64(%rax)
3616 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3617 ; SSE-NEXT: movaps %xmm0, 416(%rax)
3618 ; SSE-NEXT: movaps %xmm12, 368(%rax)
3619 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3620 ; SSE-NEXT: movaps %xmm0, 352(%rax)
3621 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3622 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
3623 ; SSE-NEXT: movaps %xmm0, 304(%rax)
3624 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3625 ; SSE-NEXT: movaps %xmm0, 272(%rax)
3626 ; SSE-NEXT: movaps %xmm10, 256(%rax)
3627 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3628 ; SSE-NEXT: movaps %xmm0, 240(%rax)
3629 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3630 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
3631 ; SSE-NEXT: movaps %xmm0, 192(%rax)
3632 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3633 ; SSE-NEXT: movaps %xmm0, 160(%rax)
3634 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3635 ; SSE-NEXT: movaps %xmm0, 144(%rax)
3636 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3637 ; SSE-NEXT: movaps %xmm0, 128(%rax)
3638 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3639 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
3640 ; SSE-NEXT: movaps %xmm0, 80(%rax)
3641 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3642 ; SSE-NEXT: movaps %xmm0, 48(%rax)
3643 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3644 ; SSE-NEXT: movaps %xmm0, 32(%rax)
3645 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3646 ; SSE-NEXT: movaps %xmm0, 16(%rax)
3647 ; SSE-NEXT: movdqa %xmm15, 320(%rax)
3648 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
3649 ; SSE-NEXT: movaps %xmm0, 208(%rax)
3650 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3651 ; SSE-NEXT: movaps %xmm0, 96(%rax)
3652 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3653 ; SSE-NEXT: movaps %xmm0, 432(%rax)
3654 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3655 ; SSE-NEXT: movaps %xmm0, 400(%rax)
3656 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3657 ; SSE-NEXT: movaps %xmm0, 384(%rax)
3658 ; SSE-NEXT: addq $680, %rsp # imm = 0x2A8
; SSE-NEXT: retq
;
3661 ; AVX1-ONLY-LABEL: store_i16_stride7_vf32:
3662 ; AVX1-ONLY: # %bb.0:
3663 ; AVX1-ONLY-NEXT: subq $584, %rsp # imm = 0x248
3664 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
3665 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm12
3666 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3667 ; AVX1-ONLY-NEXT: vmovdqa 48(%rsi), %xmm9
3668 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm10
3669 ; AVX1-ONLY-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3670 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm0
3671 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3672 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm9[4],xmm0[5],xmm9[5],xmm0[6],xmm9[6],xmm0[7],xmm9[7]
3673 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,1,0,1]
3674 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm2 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3675 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3676 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm11 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
3677 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm11, %ymm2
3678 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdx), %xmm4
3679 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[2,2,2,2]
3680 ; AVX1-ONLY-NEXT: vmovdqa 48(%rcx), %xmm3
3681 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm3[3,3,3,3,4,5,6,7]
3682 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,4,4,4]
3683 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5],xmm1[6],xmm5[7]
3684 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
3685 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm6 = zero,zero,zero,zero,zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9]
3686 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
3687 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm11, %ymm5
3688 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm5, %ymm6
3689 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm5
3690 ; AVX1-ONLY-NEXT: vmovdqa 48(%r8), %xmm2
3691 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm2[2,2,2,2]
3692 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0],xmm5[1,2,3,4,5,6],xmm7[7]
3693 ; AVX1-ONLY-NEXT: vmovdqa 48(%r9), %xmm13
3694 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm8 = xmm13[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
3695 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0],xmm8[1],xmm7[2,3,4,5,6,7]
3696 ; AVX1-ONLY-NEXT: vmovdqa 48(%rax), %xmm5
3697 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm5[2,2,3,3]
3698 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1],xmm8[2],xmm7[3,4,5,6,7]
3699 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3700 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm2[1,1,1,1]
3701 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0],xmm7[1],xmm6[2,3,4,5,6,7]
3702 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm13, %xmm7
3703 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1],xmm7[2,3],xmm6[4,5,6,7]
3704 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3705 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm3, %xmm6
3706 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm6 = xmm4[1],xmm6[1]
3707 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm1, %ymm1
3708 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm11, %ymm1
3709 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[2,2,3,3]
3710 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
3711 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm6, %ymm0
3712 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm11, %ymm0
3713 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
3714 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm1
3715 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm2[2,2,3,3]
3716 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4],xmm6[5],xmm1[6,7]
3717 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm7[6,7]
3718 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[3,3,3,3]
3719 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm6[0],xmm1[1,2,3,4,5,6],xmm6[7]
3720 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3721 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5],xmm2[6,7]
3722 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm13[0,1,2,3,5,6,6,7]
3723 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
3724 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4,5,6],xmm1[7]
3725 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm5[2,3,2,3]
3726 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
3727 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3728 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm10[4],xmm12[4],xmm10[5],xmm12[5],xmm10[6],xmm12[6],xmm10[7],xmm12[7]
3729 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3730 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[0,1,0,1]
3731 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3732 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
3733 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm7
3734 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3735 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm7[3,3,3,3,4,5,6,7]
3736 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
3737 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm8
3738 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3739 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm8[2,2,2,2]
3740 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm6[6],xmm1[7]
3741 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
3742 ; AVX1-ONLY-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3743 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm6 = zero,zero,zero,zero,zero,zero,xmm6[0,1,2,3,4,5,6,7,8,9]
3744 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm1, %ymm1
3745 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm11, %ymm0
3746 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm11, %ymm1
3747 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
3748 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm7
3749 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3750 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm11
3751 ; AVX1-ONLY-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3752 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm7[4],xmm11[4],xmm7[5],xmm11[5],xmm7[6],xmm11[6],xmm7[7],xmm11[7]
3753 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,2,2,4,5,6,7]
3754 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
3755 ; AVX1-ONLY-NEXT: vmovdqa 16(%rax), %xmm8
3756 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3757 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm8[2,2,3,3]
3758 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm6[2,3],xmm1[4,5,6,7]
3759 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm11[0],xmm7[1],xmm11[1],xmm7[2],xmm11[2],xmm7[3],xmm11[3]
3760 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3761 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm6 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
3762 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2],xmm8[3],xmm6[4,5,6,7]
3763 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm6, %ymm1
3764 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm6 = [65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0]
3765 ; AVX1-ONLY-NEXT: vandps %ymm6, %ymm0, %ymm0
3766 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm6, %ymm1
3767 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
3768 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3769 ; AVX1-ONLY-NEXT: vmovdqa 32(%rsi), %xmm11
3770 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm12
3771 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm11, %xmm0
3772 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm12[0],xmm0[0],xmm12[1],xmm0[1]
3773 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
3774 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3775 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
3776 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
3777 ; AVX1-ONLY-NEXT: vmovdqa 32(%rcx), %xmm7
3778 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm7[3,3,3,3,4,5,6,7]
3779 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
3780 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdx), %xmm10
3781 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm10[2,2,2,2]
3782 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm1[0,1,2,3,4,5],xmm6[6],xmm1[7]
3783 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm10[0],xmm7[0],xmm10[1],xmm7[1],xmm10[2],xmm7[2],xmm10[3],xmm7[3]
3784 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, %xmm1
3785 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3786 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm7 = xmm8[0,1,2,3,4,5,6,6]
3787 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,1,2,3]
3788 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm7, %ymm6
3789 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm7 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535]
3790 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm7, %ymm0
3791 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm6, %ymm6
3792 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm6, %ymm7
3793 ; AVX1-ONLY-NEXT: vmovdqa 32(%r8), %xmm14
3794 ; AVX1-ONLY-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3795 ; AVX1-ONLY-NEXT: vmovdqa 32(%r9), %xmm0
3796 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3797 ; AVX1-ONLY-NEXT: vmovdqa 32(%rax), %xmm6
3798 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3]
3799 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm15 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
3800 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm15 = xmm15[0,1,2],xmm6[3],xmm15[4,5,6,7]
3801 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm14 = xmm0[0,2],xmm6[1,3]
3802 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm14, %ymm14
3803 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm15 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
3804 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm15, %ymm7
3805 ; AVX1-ONLY-NEXT: vandnps %ymm14, %ymm15, %ymm14
3806 ; AVX1-ONLY-NEXT: vorps %ymm7, %ymm14, %ymm7
3807 ; AVX1-ONLY-NEXT: vmovups %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3808 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm3, %xmm7
3809 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm7 = xmm4[0],xmm7[0],xmm4[1],xmm7[1]
3810 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3811 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3812 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,6,6]
3813 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,1,2,3]
3814 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm7, %ymm3
3815 ; AVX1-ONLY-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3816 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm9, %xmm4
3817 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
3818 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm4 = xmm7[0],xmm4[0],xmm7[1],xmm4[1]
3819 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3]
3820 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[2,2,2,2,4,5,6,7]
3821 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5,5,4]
3822 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm7, %ymm4
3823 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm7 = [65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0]
3824 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm7, %ymm3
3825 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm4, %ymm4
3826 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm4, %ymm3
3827 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm2[0],xmm13[0],xmm2[1],xmm13[1],xmm2[2],xmm13[2],xmm2[3],xmm13[3]
3828 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3829 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm2 = zero,zero,xmm7[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3830 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3831 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[0,1,0,1]
3832 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4],xmm4[5],xmm2[6,7]
3833 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm4 = xmm7[0,2],xmm5[1,3]
3834 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
3835 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535]
3836 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm3, %ymm3
3837 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm4, %ymm2
3838 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm3, %ymm2
3839 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3840 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm1, %xmm2
3841 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm10[0],xmm2[0],xmm10[1],xmm2[1]
3842 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm8[0,0,1,1]
3843 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
3844 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
3845 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
3846 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,2,2,4,5,6,7]
3847 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,2,1]
3848 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,2,2,2,4,5,6,7]
3849 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,4]
3850 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
3851 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm5 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
3852 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm5, %ymm1
3853 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm2, %ymm2
3854 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm2, %ymm1
3855 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm2 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3856 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm6[0,1,0,1]
3857 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4],xmm3[5],xmm2[6,7]
3858 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm0 = xmm0[0,1,0,1]
3859 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm6[0,0,0,0]
3860 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm0 = xmm0[0,1,2],xmm3[3]
3861 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
3862 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
3863 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm1, %ymm1
3864 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm4, %ymm0
3865 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
3866 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3867 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm9
3868 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm7
3869 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm7, %xmm0
3870 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm9[0],xmm0[0],xmm9[1],xmm0[1]
3871 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3]
3872 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[0,0,1,1]
3873 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm2
3874 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm0
3875 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm14
3876 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3]
3877 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm11 = xmm11[0,1,2,2,4,5,6,7]
3878 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[0,1,2,1]
3879 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm0[0],xmm14[0],xmm0[1],xmm14[1],xmm0[2],xmm14[2],xmm0[3],xmm14[3]
3880 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[2,2,2,2,4,5,6,7]
3881 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,5,5,4]
3882 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm11, %ymm11
3883 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm5, %ymm2
3884 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm11, %ymm5
3885 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm5, %ymm15
3886 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm1
3887 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, (%rsp) # 16-byte Spill
3888 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm5
3889 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3890 ; AVX1-ONLY-NEXT: vmovdqa (%rax), %xmm2
3891 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
3892 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm1 = zero,zero,xmm5[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3893 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm2[0,1,0,1]
3894 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4],xmm12[5],xmm1[6,7]
3895 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm5[0,1,0,1]
3896 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm2[0,0,0,0]
3897 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm12[0,1,2,3,4,5],xmm11[6,7]
3898 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm11, %ymm1
3899 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm15, %ymm11
3900 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm4, %ymm1
3901 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm11, %ymm1
3902 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3903 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm0, %xmm1
3904 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm14[0],xmm1[0],xmm14[1],xmm1[1]
3905 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm14[4],xmm0[4],xmm14[5],xmm0[5],xmm14[6],xmm0[6],xmm14[7],xmm0[7]
3906 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3907 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
3908 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3909 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm7[3,3,3,3,4,5,6,7]
3910 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
3911 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm9[2,2,2,2]
3912 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm11[6],xmm1[7]
3913 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,6,6]
3914 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,1,2,3]
3915 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm1
3916 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm12 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535]
3917 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm12, %ymm0
3918 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm12, %ymm1
3919 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
3920 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm1 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
3921 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3922 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm2[3],xmm1[4,5,6,7]
3923 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm5[0,2],xmm2[1,3]
3924 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm1
3925 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
3926 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm0, %ymm0
3927 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm2, %ymm1
3928 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
3929 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3930 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3931 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm13, %xmm0
3932 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3933 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
3934 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm1[0],xmm13[0],xmm1[1],xmm13[1],xmm1[2],xmm13[2],xmm1[3],xmm13[3]
3935 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3936 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, %xmm4
3937 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, %xmm2
3938 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm3[0,1,2,3,4,5,6,6]
3939 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
3940 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
3941 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3942 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm5, %xmm1
3943 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3944 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
3945 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
3946 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,2,2,2,4,5,6,7]
3947 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,4]
3948 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm1
3949 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0]
3950 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm3, %ymm0
3951 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm1, %ymm1
3952 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
3953 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3954 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm1 = zero,zero,xmm5[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
3955 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3956 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm13[0,1,0,1]
3957 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4],xmm3[5],xmm1[6,7]
3958 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm3 = xmm5[0,2],xmm13[1,3]
3959 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
3960 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535]
3961 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm0, %ymm0
3962 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm3, %ymm1
3963 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
3964 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3965 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm2, %xmm0
3966 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm0 = xmm4[1],xmm0[1]
3967 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3968 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3969 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3970 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
3971 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm3 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
3972 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
3973 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
3974 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm2, %ymm0
3975 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm1, %ymm1
3976 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
3977 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3978 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm2, %xmm1
3979 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3980 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm1 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
3981 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
3982 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,2,2,2,4,5,6,7]
3983 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,4]
3984 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm13[2,3,2,3]
3985 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0],xmm5[1],xmm3[2,3,4,5,6,7]
3986 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm13[4],xmm1[4],xmm13[5],xmm1[5],xmm13[6],xmm1[6],xmm13[7],xmm1[7]
3987 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[8,9,8,9,8,9,8,9,12,13,6,7,10,11,12,13]
3988 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
3989 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm5 = [0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0]
3990 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm0, %ymm0
3991 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm5, %ymm3
3992 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm0, %ymm0
3993 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3994 ; AVX1-ONLY-NEXT: vmovdqa %xmm10, %xmm4
3995 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3996 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm10[4],xmm2[4],xmm10[5],xmm2[5],xmm10[6],xmm2[6],xmm10[7],xmm2[7]
3997 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm3 = zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9]
3998 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
3999 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
4000 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm3 = zero,zero,xmm8[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
4001 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm8[2,2,3,3]
4002 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, %xmm10
4003 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
4004 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm12, %ymm0
4005 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm12, %ymm3
4006 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm3, %ymm0
4007 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
4008 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4009 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm1[4],xmm11[4],xmm1[5],xmm11[5],xmm1[6],xmm11[6],xmm1[7],xmm11[7]
4010 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,2,2,2,4,5,6,7]
4011 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,4]
4012 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm6[2,3,2,3]
4013 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0],xmm5[1],xmm3[2,3,4,5,6,7]
4014 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm11[4],xmm1[4],xmm11[5],xmm1[5],xmm11[6],xmm1[6],xmm11[7],xmm1[7]
4015 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,2,2,4,5,6,7]
4016 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,1,2,1]
4017 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm6[2,2,3,3]
4018 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1],xmm8[2,3],xmm5[4,5,6,7]
4019 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
4020 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm15 = [65535,65535,65535,0,0,0,0,65535,65535,65535,0,0,0,0,65535,65535]
4021 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm15, %ymm0
4022 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm15, %ymm3
4023 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm3, %ymm0
4024 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4025 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm2, %xmm0
4026 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm0 = xmm4[1],xmm0[1]
4027 ; AVX1-ONLY-NEXT: vpermilps $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
4028 ; AVX1-ONLY-NEXT: # xmm3 = mem[0,0,1,1]
4029 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
4030 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4031 ; AVX1-ONLY-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm3 # 16-byte Folded Reload
4032 ; AVX1-ONLY-NEXT: # xmm3 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
4033 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm5 = xmm10[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
4034 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,1,2,2,4,5,6,7]
4035 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,1]
4036 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
4037 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm8 = [65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
4038 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm8, %ymm0
4039 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm8, %ymm3
4040 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm3, %ymm3
4041 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4042 ; AVX1-ONLY-NEXT: vpblendw $247, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm14 # 16-byte Folded Reload
4043 ; AVX1-ONLY-NEXT: # xmm14 = mem[0,1,2],xmm2[3],mem[4,5,6,7]
4044 ; AVX1-ONLY-NEXT: vpshufd $68, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
4045 ; AVX1-ONLY-NEXT: # xmm5 = mem[0,1,0,1]
4046 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm2[0,0,0,0]
4047 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5],xmm10[6,7]
4048 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm1, %xmm10
4049 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm10 = xmm11[2],xmm10[2],xmm11[3],xmm10[3]
4050 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm6[4],xmm10[4],xmm6[5],xmm10[5],xmm6[6],xmm10[6],xmm6[7],xmm10[7]
4051 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[8,9,8,9,8,9,8,9,12,13,6,7,10,11,12,13]
4052 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm5
4053 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm6 = [0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
4054 ; AVX1-ONLY-NEXT: vandps %ymm6, %ymm3, %ymm3
4055 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm6, %ymm5
4056 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm3, %ymm5
4057 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
4058 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm10 = zero,zero,zero,zero,zero,zero,xmm3[0,1,2,3,4,5,6,7,8,9]
4059 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm10, %ymm3
4060 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4061 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm10 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
4062 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm1[2,2,3,3]
4063 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, %xmm4
4064 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm10, %ymm10
4065 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm12, %ymm3
4066 ; AVX1-ONLY-NEXT: vandps %ymm12, %ymm10, %ymm10
4067 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm10, %ymm3
4068 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
4069 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4070 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm10 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
4071 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[2,2,2,2,4,5,6,7]
4072 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,5,5,4]
4073 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4074 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm0[2,3,2,3]
4075 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0],xmm11[1],xmm10[2,3,4,5,6,7]
4076 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm11 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
4077 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm11 = xmm11[0,1,2,2,4,5,6,7]
4078 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[0,1,2,1]
4079 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm0[2,2,3,3]
4080 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1],xmm12[2,3],xmm11[4,5,6,7]
4081 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm11, %ymm10
4082 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm15, %ymm3
4083 ; AVX1-ONLY-NEXT: vandps %ymm15, %ymm10, %ymm10
4084 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm10, %ymm3
4085 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm7, %xmm7
4086 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm7 = xmm9[1],xmm7[1]
4087 ; AVX1-ONLY-NEXT: vpermilps $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
4088 ; AVX1-ONLY-NEXT: # xmm9 = mem[0,0,1,1]
4089 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm7, %ymm7
4090 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
4091 ; AVX1-ONLY-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
4092 ; AVX1-ONLY-NEXT: # xmm9 = xmm9[0],mem[0],xmm9[1],mem[1],xmm9[2],mem[2],xmm9[3],mem[3]
4093 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm4 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
4094 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm9 = xmm9[0,1,2,2,4,5,6,7]
4095 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[0,1,2,1]
4096 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm4, %ymm4
4097 ; AVX1-ONLY-NEXT: vandnps %ymm7, %ymm8, %ymm7
4098 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm8, %ymm4
4099 ; AVX1-ONLY-NEXT: vorps %ymm7, %ymm4, %ymm4
4100 ; AVX1-ONLY-NEXT: vpermilps $68, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
4101 ; AVX1-ONLY-NEXT: # xmm7 = mem[0,1,0,1]
4102 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm8 = xmm13[0,0,0,0]
4103 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm7 = xmm7[0,1,2],xmm8[3]
4104 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm2, %xmm8
4105 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm8 = xmm1[2],xmm8[2],xmm1[3],xmm8[3]
4106 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm0[4],xmm8[4],xmm0[5],xmm8[5],xmm0[6],xmm8[6],xmm0[7],xmm8[7]
4107 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[8,9,8,9,8,9,8,9,12,13,6,7,10,11,12,13]
4108 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm1, %ymm1
4109 ; AVX1-ONLY-NEXT: vandps %ymm6, %ymm4, %ymm2
4110 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm6, %ymm1
4111 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm2, %ymm1
4112 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
4113 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 96(%rax)
4114 ; AVX1-ONLY-NEXT: vmovaps %ymm3, 64(%rax)
4115 ; AVX1-ONLY-NEXT: vmovaps %ymm5, 320(%rax)
4116 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4117 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
4118 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4119 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
4120 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4121 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
4122 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4123 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 32(%rax)
4124 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4125 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rax)
4126 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4127 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 224(%rax)
4128 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4129 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 352(%rax)
4130 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4131 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 256(%rax)
4132 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4133 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 160(%rax)
4134 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4135 ; AVX1-ONLY-NEXT: vmovaps %xmm1, 416(%rax)
4136 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4137 ; AVX1-ONLY-NEXT: vmovaps %xmm1, 432(%rax)
4138 ; AVX1-ONLY-NEXT: vmovdqa %xmm14, 384(%rax)
4139 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4140 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 400(%rax)
4141 ; AVX1-ONLY-NEXT: addq $584, %rsp # imm = 0x248
4142 ; AVX1-ONLY-NEXT: vzeroupper
4143 ; AVX1-ONLY-NEXT: retq
4145 ; AVX2-SLOW-LABEL: store_i16_stride7_vf32:
4146 ; AVX2-SLOW: # %bb.0:
4147 ; AVX2-SLOW-NEXT: subq $616, %rsp # imm = 0x268
4148 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm13
4149 ; AVX2-SLOW-NEXT: vmovdqu %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4150 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %ymm8
4151 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm15
4152 ; AVX2-SLOW-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4153 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %ymm10
4154 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm12
4155 ; AVX2-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4156 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %ymm5
4157 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm14
4158 ; AVX2-SLOW-NEXT: vmovdqu %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4159 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %ymm9
4160 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %ymm6
4161 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %ymm7
4162 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <3,u,u,u,4,u,u,4>
4163 ; AVX2-SLOW-NEXT: vpermd %ymm8, %ymm0, %ymm1
4164 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm10[0,3,2,3,4,7,6,7]
4165 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
4166 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255>
4167 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
4168 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,3,u,u,u,4,u,u>
4169 ; AVX2-SLOW-NEXT: vpermd %ymm5, %ymm2, %ymm4
4170 ; AVX2-SLOW-NEXT: vmovdqa %ymm5, %ymm11
4171 ; AVX2-SLOW-NEXT: vpermd %ymm13, %ymm0, %ymm0
4172 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm15[0,3,2,3,4,7,6,7]
4173 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm5[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
4174 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm0, %ymm5, %ymm0
4175 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm9[0,0,0,0,4,5,6,7,8,8,8,8,12,13,14,15]
4176 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,7,7,7,7,8,9,10,11,15,15,15,15]
4177 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u>
4178 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm4, %ymm3, %ymm3
4179 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255>
4180 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm1, %ymm3, %ymm1
4181 ; AVX2-SLOW-NEXT: vpermd %ymm12, %ymm2, %ymm2
4182 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm14[0,0,0,0,4,5,6,7,8,8,8,8,12,13,14,15]
4183 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,7,7,7,7,8,9,10,11,15,15,15,15]
4184 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm2, %ymm3, %ymm2
4185 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,3,u,u,u,4,u>
4186 ; AVX2-SLOW-NEXT: vpermd %ymm6, %ymm3, %ymm5
4187 ; AVX2-SLOW-NEXT: vmovdqa %ymm6, %ymm12
4188 ; AVX2-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4189 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm2, %ymm0
4190 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm7[0,1,0,3,4,5,4,7]
4191 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,4,7,7,8,9,10,11,12,12,15,15]
4192 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u>
4193 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm5, %ymm2, %ymm2
4194 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm5
4195 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4196 ; AVX2-SLOW-NEXT: vpermd %ymm5, %ymm3, %ymm3
4197 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm5
4198 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4199 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,1,0,3,4,5,4,7]
4200 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm5 = ymm5[0,1,2,3,4,4,7,7,8,9,10,11,12,12,15,15]
4201 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm3, %ymm5, %ymm3
4202 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
4203 ; AVX2-SLOW-NEXT: vmovdqa 32(%rax), %ymm6
4204 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <3,u,u,3,u,u,u,4>
4205 ; AVX2-SLOW-NEXT: vpermd %ymm6, %ymm4, %ymm5
4206 ; AVX2-SLOW-NEXT: vmovdqa %ymm6, %ymm13
4207 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = <0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u>
4208 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm2, %ymm5, %ymm2
4209 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %ymm5
4210 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4211 ; AVX2-SLOW-NEXT: vpermd %ymm5, %ymm4, %ymm4
4212 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm3, %ymm4, %ymm3
4213 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255]
4214 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm1, %ymm2, %ymm1
4215 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4216 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm3, %ymm0
4217 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4218 ; AVX2-SLOW-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4219 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
4220 ; AVX2-SLOW-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4221 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm11[2,2,2,2,6,6,6,6]
4222 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
4223 ; AVX2-SLOW-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4224 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm10[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
4225 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
4226 ; AVX2-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4227 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm8[2,2,2,2,6,6,6,6]
4228 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
4229 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
4230 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
4231 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
4232 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4233 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm12[1,2,2,3,5,6,6,7]
4234 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
4235 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0>
4236 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4237 ; AVX2-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4238 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm7[2,1,2,3,6,5,6,7]
4239 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
4240 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
4241 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255>
4242 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4243 ; AVX2-SLOW-NEXT: vmovdqu %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4244 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm13[0,1,2,2,4,5,6,6]
4245 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
4246 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255]
4247 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4248 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4249 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm10[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
4250 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
4251 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm8[3,3,3,3,7,7,7,7]
4252 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
4253 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm9[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
4254 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
4255 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm11[3,3,3,3,7,7,7,7]
4256 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
4257 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
4258 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
4259 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
4260 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4261 ; AVX2-SLOW-NEXT: vpbroadcastd 60(%r8), %ymm1
4262 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u>
4263 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4264 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm7[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
4265 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
4266 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u>
4267 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4268 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm13[2,3,3,3,6,7,7,7]
4269 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
4270 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0]
4271 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4272 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4273 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm0
4274 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4275 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %xmm6
4276 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
4277 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3]
4278 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm0
4279 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
4280 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %xmm11
4281 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %xmm10
4282 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
4283 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,3,2,4,5,6,7]
4284 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,0,1,1]
4285 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
4286 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255>
4287 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm0, %ymm1, %ymm12
4288 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm0
4289 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4290 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm13
4291 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm13[0],xmm0[1],xmm13[1],xmm0[2],xmm13[2],xmm0[3],xmm13[3]
4292 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm0
4293 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
4294 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm8
4295 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm7
4296 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
4297 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,3,2,4,5,6,7]
4298 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,0,1,1]
4299 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
4300 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm0, %ymm1, %ymm2
4301 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %xmm0
4302 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4303 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %xmm1
4304 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4305 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4306 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm4[0,1,3,2,4,5,6,7]
4307 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
4308 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
4309 ; AVX2-SLOW-NEXT: vpbroadcastd 32(%rax), %ymm9
4310 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
4311 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm0, %ymm9, %ymm0
4312 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm5
4313 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4314 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm1
4315 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4316 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
4317 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm1[0,1,3,2,4,5,6,7]
4318 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[0,1,1,3]
4319 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,0,1]
4320 ; AVX2-SLOW-NEXT: vpbroadcastd (%rax), %ymm14
4321 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm15, %ymm14, %ymm3
4322 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm14 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
4323 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm12, %ymm0, %ymm15
4324 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm2, %ymm3, %ymm0
4325 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
4326 ; AVX2-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm0 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
4327 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm11, %xmm2
4328 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm10[1,1,2,2]
4329 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2,3],xmm2[4],xmm3[5,6],xmm2[7]
4330 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm6[3,3,3,3,4,5,6,7]
4331 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
4332 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
4333 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm5[1,1,2,3]
4334 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm3 = xmm12[0,1],xmm3[2],xmm12[3,4],xmm3[5],xmm12[6,7]
4335 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
4336 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
4337 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm12 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255>
4338 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm2, %ymm3, %ymm9
4339 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm8, %xmm0
4340 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm7[1,1,2,2]
4341 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0],xmm0[1],xmm3[2,3],xmm0[4],xmm3[5,6],xmm0[7]
4342 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm13[3,3,3,3,4,5,6,7]
4343 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
4344 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4345 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm2[1,1,2,3]
4346 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm3 = xmm14[0,1],xmm3[2],xmm14[3,4],xmm3[5],xmm14[6,7]
4347 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
4348 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
4349 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm0, %ymm3, %ymm0
4350 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm4[0,1,2,3,4,5,7,6]
4351 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,2,3,3]
4352 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
4353 ; AVX2-SLOW-NEXT: vpbroadcastd 36(%rax), %ymm4
4354 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
4355 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm3, %ymm4, %ymm3
4356 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,7,6]
4357 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,2,3,3]
4358 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
4359 ; AVX2-SLOW-NEXT: vpbroadcastd 4(%rax), %ymm4
4360 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm1, %ymm4, %ymm1
4361 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
4362 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm9, %ymm3, %ymm14
4363 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm0
4364 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4365 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm10[4],xmm11[4],xmm10[5],xmm11[5],xmm10[6],xmm11[6],xmm10[7],xmm11[7]
4366 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
4367 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,3,3,4,5,6,7]
4368 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
4369 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
4370 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,4]
4371 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,3]
4372 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u>
4373 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm1, %ymm0, %ymm0
4374 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
4375 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm13[4],xmm2[4],xmm13[5],xmm2[5],xmm13[6],xmm2[6],xmm13[7],xmm2[7]
4376 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,3,3,4,5,6,7]
4377 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
4378 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,2,3,4,5,6,7]
4379 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,4]
4380 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,3]
4381 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm3, %ymm1, %ymm1
4382 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4383 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
4384 ; AVX2-SLOW-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
4385 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
4386 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm2, %xmm2
4387 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
4388 ; AVX2-SLOW-NEXT: vpbroadcastd 40(%rax), %ymm3
4389 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
4390 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
4391 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4392 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
4393 ; AVX2-SLOW-NEXT: # xmm3 = xmm3[4],mem[4],xmm3[5],mem[5],xmm3[6],mem[6],xmm3[7],mem[7]
4394 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm3, %xmm3
4395 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,1]
4396 ; AVX2-SLOW-NEXT: vpbroadcastd 8(%rax), %ymm5
4397 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm3, %ymm5, %ymm3
4398 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
4399 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm2, %ymm0, %ymm0
4400 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm3, %ymm1, %ymm1
4401 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
4402 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm10[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
4403 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
4404 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
4405 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm7[3,3,3,3,7,7,7,7]
4406 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5],ymm2[6],ymm3[7,8,9,10],ymm2[11],ymm3[12,13],ymm2[14],ymm3[15]
4407 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
4408 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm9[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
4409 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,2,2,2,6,6,6,6]
4410 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
4411 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm8[3,3,3,3,7,7,7,7]
4412 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3],ymm3[4],ymm4[5,6,7,8],ymm3[9],ymm4[10,11],ymm3[12],ymm4[13,14,15]
4413 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
4414 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
4415 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
4416 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
4417 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
4418 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm12[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
4419 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,2,2,3,6,6,6,7]
4420 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
4421 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm11[3,3,3,3,7,7,7,7]
4422 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7,8,9],ymm4[10],ymm3[11,12],ymm4[13],ymm3[14,15]
4423 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
4424 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
4425 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm13[2,3,3,3,6,7,7,7]
4426 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,2]
4427 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm5 = <255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0>
4428 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
4429 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0]
4430 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
4431 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
4432 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm8[2,2,2,2,6,6,6,6]
4433 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7,8,9],ymm4[10],ymm3[11,12],ymm4[13],ymm3[14,15]
4434 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm4 = ymm10[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
4435 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[2,2,2,2,6,6,6,6]
4436 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm7[2,2,2,2,6,6,6,6]
4437 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5,6,7,8],ymm4[9],ymm5[10,11],ymm4[12],ymm5[13,14,15]
4438 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
4439 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
4440 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm5 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
4441 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
4442 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm4 = ymm12[3,3,3,3,4,5,6,7,11,11,11,11,12,13,14,15]
4443 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm4 = ymm4[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
4444 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm11[1,2,2,3,5,6,6,7]
4445 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7,8,9],ymm4[10],ymm5[11,12],ymm4[13],ymm5[14,15]
4446 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,2]
4447 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm13[0,1,2,2,4,5,6,6]
4448 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
4449 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255>
4450 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
4451 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm5 = [255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0]
4452 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
4453 ; AVX2-SLOW-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
4454 ; AVX2-SLOW-NEXT: # ymm4 = mem[1,1,1,1,5,5,5,5]
4455 ; AVX2-SLOW-NEXT: vpshuflw $249, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
4456 ; AVX2-SLOW-NEXT: # ymm5 = mem[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
4457 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,0,2,1,4,4,6,5]
4458 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7,8,9],ymm4[10],ymm5[11,12],ymm4[13],ymm5[14,15]
4459 ; AVX2-SLOW-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
4460 ; AVX2-SLOW-NEXT: # ymm5 = mem[0,1,1,3,4,5,5,7]
4461 ; AVX2-SLOW-NEXT: vpshuflw $233, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
4462 ; AVX2-SLOW-NEXT: # ymm6 = mem[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
4463 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[0,0,0,0,4,4,4,4]
4464 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0,1],ymm6[2],ymm5[3,4],ymm6[5],ymm5[6,7,8,9],ymm6[10],ymm5[11,12],ymm6[13],ymm5[14,15]
4465 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
4466 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,2]
4467 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
4468 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
4469 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm7[1,1,1,1,5,5,5,5]
4470 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm7 = ymm10[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
4471 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[0,0,2,1,4,4,6,5]
4472 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm5 = ymm7[0,1],ymm5[2],ymm7[3,4],ymm5[5],ymm7[6,7,8,9],ymm5[10],ymm7[11,12],ymm5[13],ymm7[14,15]
4473 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm8[0,1,1,3,4,5,5,7]
4474 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm8 = ymm9[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
4475 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[0,0,0,0,4,4,4,4]
4476 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm7 = ymm7[0,1],ymm8[2],ymm7[3,4],ymm8[5],ymm7[6,7,8,9],ymm8[10],ymm7[11,12],ymm8[13],ymm7[14,15]
4477 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
4478 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,3,2]
4479 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm5, %ymm7, %ymm5
4480 ; AVX2-SLOW-NEXT: vpshufd $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
4481 ; AVX2-SLOW-NEXT: # ymm6 = mem[0,0,2,1,4,4,6,5]
4482 ; AVX2-SLOW-NEXT: vpshuflw $233, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
4483 ; AVX2-SLOW-NEXT: # ymm7 = mem[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
4484 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[0,0,0,0,4,4,4,4]
4485 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm7[0,1,2],ymm6[3],ymm7[4,5],ymm6[6],ymm7[7,8,9,10],ymm6[11],ymm7[12,13],ymm6[14],ymm7[15]
4486 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,3,3]
4487 ; AVX2-SLOW-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
4488 ; AVX2-SLOW-NEXT: # ymm7 = mem[0,1,1,3,4,5,5,7]
4489 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,3]
4490 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u>
4491 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm6, %ymm7, %ymm6
4492 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm11[0,0,2,1,4,4,6,5]
4493 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm9 = ymm12[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
4494 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[0,0,0,0,4,4,4,4]
4495 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm7 = ymm9[0,1,2],ymm7[3],ymm9[4,5],ymm7[6],ymm9[7,8,9,10],ymm7[11],ymm9[12,13],ymm7[14],ymm9[15]
4496 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,2,3,3]
4497 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm13[0,1,1,3,4,5,5,7]
4498 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
4499 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm7, %ymm9, %ymm7
4500 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255]
4501 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm4, %ymm6, %ymm4
4502 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm5, %ymm7, %ymm5
4503 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
4504 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
4505 ; AVX2-SLOW-NEXT: vmovaps %ymm6, 96(%rax)
4506 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
4507 ; AVX2-SLOW-NEXT: vmovaps %ymm6, 320(%rax)
4508 ; AVX2-SLOW-NEXT: vmovdqa %ymm5, 128(%rax)
4509 ; AVX2-SLOW-NEXT: vmovdqa %ymm4, 352(%rax)
4510 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, 160(%rax)
4511 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, 192(%rax)
4512 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 64(%rax)
4513 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4514 ; AVX2-SLOW-NEXT: vmovaps %ymm1, 32(%rax)
4515 ; AVX2-SLOW-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
4516 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%rax)
4517 ; AVX2-SLOW-NEXT: vmovdqa %ymm15, 224(%rax)
4518 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 288(%rax)
4519 ; AVX2-SLOW-NEXT: vmovdqa %ymm14, 256(%rax)
4520 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4521 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 416(%rax)
4522 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4523 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 384(%rax)
4524 ; AVX2-SLOW-NEXT: addq $616, %rsp # imm = 0x268
4525 ; AVX2-SLOW-NEXT: vzeroupper
4526 ; AVX2-SLOW-NEXT: retq
4528 ; AVX2-FAST-LABEL: store_i16_stride7_vf32:
4529 ; AVX2-FAST: # %bb.0:
4530 ; AVX2-FAST-NEXT: subq $312, %rsp # imm = 0x138
4531 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
4532 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm7
4533 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm1
4534 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm6
4535 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm2
4536 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm4
4537 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm10
4538 ; AVX2-FAST-NEXT: vmovdqa (%rax), %ymm3
4539 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29]
4540 ; AVX2-FAST-NEXT: vmovdqa %ymm1, %ymm11
4541 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm7[3,3,3,3,7,7,7,7]
4542 ; AVX2-FAST-NEXT: vmovdqa %ymm7, %ymm9
4543 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
4544 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
4545 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
4546 ; AVX2-FAST-NEXT: vmovdqa %ymm2, %ymm7
4547 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm6[3,3,3,3,7,7,7,7]
4548 ; AVX2-FAST-NEXT: vmovdqa %ymm6, %ymm8
4549 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
4550 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
4551 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
4552 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4553 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [6,7,3,3,7,7,6,7]
4554 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm1, %ymm1
4555 ; AVX2-FAST-NEXT: vmovdqa %ymm3, %ymm6
4556 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4557 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm4[3,3,3,3,7,7,7,7]
4558 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4559 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,u,u,28,29,26,27,u,u,30,31,30,31]
4560 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7,8,9],ymm2[10],ymm3[11,12],ymm2[13],ymm3[14,15]
4561 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
4562 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0>
4563 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm2, %ymm1, %ymm1
4564 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0]
4565 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4566 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4567 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [4,5,2,2,6,6,6,6]
4568 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm0, %ymm0
4569 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm10[u,u,u,u,u,u,u,u,u,u,8,9,u,u,u,u,u,u,u,u,22,23,u,u,u,u,24,25,u,u,u,u]
4570 ; AVX2-FAST-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4571 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm4[1,2,2,3,5,6,6,7]
4572 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
4573 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
4574 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255>
4575 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
4576 ; AVX2-FAST-NEXT: vmovdqa %ymm7, %ymm4
4577 ; AVX2-FAST-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4578 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
4579 ; AVX2-FAST-NEXT: vmovdqa %ymm8, %ymm5
4580 ; AVX2-FAST-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4581 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm8[2,2,2,2,6,6,6,6]
4582 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
4583 ; AVX2-FAST-NEXT: vmovdqa %ymm11, %ymm6
4584 ; AVX2-FAST-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4585 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm11[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
4586 ; AVX2-FAST-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4587 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm9[2,2,2,2,6,6,6,6]
4588 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6,7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13,14,15]
4589 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
4590 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
4591 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
4592 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
4593 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0]
4594 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
4595 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4596 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %ymm15
4597 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %ymm12
4598 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
4599 ; AVX2-FAST-NEXT: # ymm8 = mem[0,1,0,1]
4600 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm12, %ymm1
4601 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm15[1,1,1,1,5,5,5,5]
4602 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
4603 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %ymm14
4604 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %ymm11
4605 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm11[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
4606 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm14[0,1,1,3,4,5,5,7]
4607 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm7[0,1],ymm2[2],ymm7[3,4],ymm2[5],ymm7[6,7,8,9],ymm2[10],ymm7[11,12],ymm2[13],ymm7[14,15]
4608 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
4609 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,2]
4610 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
4611 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm7
4612 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm6, %ymm1
4613 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm9[1,1,1,1,5,5,5,5]
4614 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
4615 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm4[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
4616 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm8 = ymm5[0,1,1,3,4,5,5,7]
4617 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm8[0,1],ymm2[2],ymm8[3,4],ymm2[5],ymm8[6,7,8,9],ymm2[10],ymm8[11,12],ymm2[13],ymm8[14,15]
4618 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
4619 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,2]
4620 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm8
4621 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %ymm9
4622 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %ymm6
4623 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
4624 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm6, %ymm1
4625 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm13 = ymm9[0,0,2,1,4,4,6,5]
4626 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm13[3],ymm1[4,5],ymm13[6],ymm1[7,8,9,10],ymm13[11],ymm1[12,13],ymm13[14],ymm1[15]
4627 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm1[2,2,3,3]
4628 ; AVX2-FAST-NEXT: vmovdqa 32(%rax), %ymm5
4629 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,4,5,4,5,5,7]
4630 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm1, %ymm0
4631 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u>
4632 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm13, %ymm0, %ymm0
4633 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm10, %ymm3
4634 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
4635 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm13 = ymm4[0,0,2,1,4,4,6,5]
4636 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1,2],ymm13[3],ymm3[4,5],ymm13[6],ymm3[7,8,9,10],ymm13[11],ymm3[12,13],ymm13[14],ymm3[15]
4637 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
4638 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm1, %ymm1
4639 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,3,3]
4640 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm3, %ymm1, %ymm1
4641 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255]
4642 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm7, %ymm0, %ymm0
4643 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4644 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm8, %ymm1, %ymm0
4645 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4646 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <3,u,u,u,4,u,u,4>
4647 ; AVX2-FAST-NEXT: vpermd %ymm15, %ymm0, %ymm1
4648 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,1,0,1,14,15,14,15,8,9,10,11,12,13,14,15,16,17,16,17,30,31,30,31,24,25,26,27,28,29,30,31]
4649 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm12, %ymm3
4650 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255>
4651 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm1, %ymm3, %ymm7
4652 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
4653 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4654 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm1, %ymm1
4655 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm0, %ymm1, %ymm0
4656 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,3,u,u,u,4,u,u>
4657 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm1, %ymm2
4658 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,0,1,0,1,0,1,14,15,14,15,14,15,14,15,16,17,16,17,16,17,16,17,30,31,30,31,30,31,30,31]
4659 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm11, %ymm8
4660 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u>
4661 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm2, %ymm8, %ymm2
4662 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
4663 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
4664 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm8, %ymm3
4665 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm1, %ymm3, %ymm1
4666 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255>
4667 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm7, %ymm2, %ymm2
4668 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm0, %ymm1, %ymm0
4669 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,3,u,u,u,4,u>
4670 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm1, %ymm3
4671 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [0,1,2,3,4,5,6,7,0,1,0,1,14,15,14,15,16,17,18,19,20,21,22,23,16,17,16,17,30,31,30,31]
4672 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm6, %ymm8
4673 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u>
4674 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm3, %ymm8, %ymm3
4675 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm1, %ymm1
4676 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
4677 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm4, %ymm7
4678 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm1, %ymm7, %ymm1
4679 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <3,u,u,3,u,u,u,4>
4680 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm7, %ymm8
4681 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u>
4682 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm3, %ymm8, %ymm3
4683 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm7, %ymm7
4684 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm1, %ymm7, %ymm1
4685 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255]
4686 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm2, %ymm3, %ymm2
4687 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4688 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm0, %ymm1, %ymm0
4689 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4690 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
4691 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm14[2,2,2,2,6,6,6,6]
4692 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
4693 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm12[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
4694 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm15[2,2,2,2,6,6,6,6]
4695 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
4696 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
4697 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
4698 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
4699 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4700 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [5,6,2,3,6,7,5,6]
4701 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm1, %ymm1
4702 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0>
4703 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4704 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,u,u,u,u,u,u,u,u]
4705 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
4706 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255>
4707 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4708 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [4,5,2,2,6,6,6,6]
4709 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm1, %ymm1
4710 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255]
4711 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
4712 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4713 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29]
4714 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm15[3,3,3,3,7,7,7,7]
4715 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
4716 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
4717 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm11[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
4718 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm14[3,3,3,3,7,7,7,7]
4719 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
4720 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
4721 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-NEXT: vpbroadcastd 60(%r8), %ymm1
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u>
; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-NEXT: vpshufhw {{.*#+}} ymm1 = ymm6[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u>
; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [6,7,3,3,7,7,6,7]
; AVX2-FAST-NEXT: vpermd %ymm5, %ymm1, %ymm1
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0]
; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %xmm2
; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %xmm3
; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %xmm1
; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm0
; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-FAST-NEXT: vpshufb %xmm5, %xmm0, %xmm0
; AVX2-FAST-NEXT: vmovdqa %xmm5, %xmm4
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
; AVX2-FAST-NEXT: vmovdqa %xmm2, %xmm7
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm1
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255>
; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm0, %ymm1, %ymm12
; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm13
; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm9
; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm9[0],xmm13[0],xmm9[1],xmm13[1],xmm9[2],xmm13[2],xmm9[3],xmm13[3]
; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm0, %xmm1
; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm15
; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm8
; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm8[0],xmm15[0],xmm8[1],xmm15[1],xmm8[2],xmm15[2],xmm8[3],xmm15[3]
; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm2
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm2, %ymm1, %ymm10
; AVX2-FAST-NEXT: vmovdqa 32(%r9), %xmm0
; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-NEXT: vmovdqa 32(%r8), %xmm1
; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm5, %xmm1
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FAST-NEXT: vpbroadcastd 32(%rax), %ymm2
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm0
; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm1
; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm14
; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm1[0],xmm14[1],xmm1[1],xmm14[2],xmm1[2],xmm14[3],xmm1[3]
; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm6, %xmm4
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-FAST-NEXT: vpbroadcastd (%rax), %ymm11
; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm4, %ymm11, %ymm3
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm12, %ymm0, %ymm0
; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm10, %ymm3, %ymm0
; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-NEXT: vpbroadcastq {{.*#+}} xmm0 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm7, %xmm3
; AVX2-FAST-NEXT: vmovdqa %xmm7, %xmm10
; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm4 = xmm7[1,1,2,2]
; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm3 = xmm4[0],xmm3[1],xmm4[2,3],xmm3[4],xmm4[5,6],xmm3[7]
; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} xmm4 = [6,7,8,9,6,7,8,9,6,7,8,9,6,7,8,9]
; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm11
; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm12 = xmm1[1,1,2,3]
; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm11 = xmm12[0,1],xmm11[2],xmm12[3,4],xmm11[5],xmm12[6,7]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,1]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,2,1]
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255>
; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm3, %ymm11, %ymm3
; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm13, %xmm0
; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm11 = xmm9[1,1,2,2]
; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm11[0],xmm0[1],xmm11[2,3],xmm0[4],xmm11[5,6],xmm0[7]
; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm15, %xmm4
; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm11 = xmm8[1,1,2,3]
; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm4 = xmm11[0,1],xmm4[2],xmm11[3,4],xmm4[5],xmm11[6,7]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm0, %ymm4, %ymm0
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm5, %xmm5
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,1,3]
; AVX2-FAST-NEXT: vpbroadcastd 36(%rax), %ymm11
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm5, %ymm11, %ymm5
; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm6, %xmm4
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,1,3]
; AVX2-FAST-NEXT: vpbroadcastd 4(%rax), %ymm6
; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm4, %ymm6, %ymm4
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm3, %ymm5, %ymm6
; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm0, %ymm4, %ymm5
; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm7[4],xmm10[4],xmm7[5],xmm10[5],xmm7[6],xmm10[6],xmm7[7],xmm10[7]
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm4 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm0, %xmm0
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,3]
; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,2,3,3,4,5,6,7]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u>
; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm0, %ymm3, %ymm0
; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm15[4],xmm8[4],xmm15[5],xmm8[5],xmm15[6],xmm8[6],xmm15[7],xmm8[7]
; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm3, %xmm3
; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm9[4],xmm13[4],xmm9[5],xmm13[5],xmm9[6],xmm13[6],xmm9[7],xmm13[7]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,3]
; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,2,3,3,4,5,6,7]
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm3, %ymm4, %ymm3
; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm4 # 16-byte Folded Reload
; AVX2-FAST-NEXT: # xmm4 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm4, %xmm4
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
; AVX2-FAST-NEXT: vpbroadcastd 40(%rax), %ymm7
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm4, %ymm7, %ymm4
; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm1 # 16-byte Folded Reload
; AVX2-FAST-NEXT: # xmm1 = xmm14[4],mem[4],xmm14[5],mem[5],xmm14[6],mem[6],xmm14[7],mem[7]
; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm1, %xmm1
; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
; AVX2-FAST-NEXT: vpbroadcastd 8(%rax), %ymm2
; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm1, %ymm2, %ymm1
; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm4, %ymm0, %ymm0
; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm1, %ymm3, %ymm1
; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 96(%rax)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 320(%rax)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 128(%rax)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 352(%rax)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 160(%rax)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 192(%rax)
; AVX2-FAST-NEXT: vmovdqa %ymm1, 64(%rax)
; AVX2-FAST-NEXT: vmovdqa %ymm5, 32(%rax)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm1, (%rax)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm1, 224(%rax)
; AVX2-FAST-NEXT: vmovdqa %ymm0, 288(%rax)
; AVX2-FAST-NEXT: vmovdqa %ymm6, 256(%rax)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm0, 416(%rax)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm0, 384(%rax)
; AVX2-FAST-NEXT: addq $312, %rsp # imm = 0x138
; AVX2-FAST-NEXT: vzeroupper
; AVX2-FAST-NEXT: retq
; AVX2-FAST-PERLANE-LABEL: store_i16_stride7_vf32:
; AVX2-FAST-PERLANE: # %bb.0:
; AVX2-FAST-PERLANE-NEXT: subq $616, %rsp # imm = 0x268
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm11
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %ymm8
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm14
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %ymm9
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm13
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %ymm6
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm15
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %ymm10
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %ymm7
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <3,u,u,u,4,u,u,4>
; AVX2-FAST-PERLANE-NEXT: vpermd %ymm8, %ymm0, %ymm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = [0,1,0,1,14,15,14,15,8,9,10,11,12,13,14,15,16,17,16,17,30,31,30,31,24,25,26,27,28,29,30,31]
; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm9, %ymm3
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm1, %ymm3, %ymm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = <u,3,u,u,u,4,u,u>
; AVX2-FAST-PERLANE-NEXT: vpermd %ymm6, %ymm3, %ymm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm6, %ymm12
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,0,1,0,1,0,1,14,15,14,15,14,15,14,15,16,17,16,17,16,17,16,17,30,31,30,31,30,31,30,31]
; AVX2-FAST-PERLANE-NEXT: vpermd %ymm11, %ymm0, %ymm0
; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm14, %ymm2
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm0, %ymm2, %ymm0
; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm10, %ymm2
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm5, %ymm2, %ymm2
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm1, %ymm2, %ymm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %ymm11
; AVX2-FAST-PERLANE-NEXT: vpermd %ymm13, %ymm3, %ymm2
; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm15, %ymm3
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,3,u,u,u,4,u>
; AVX2-FAST-PERLANE-NEXT: vpermd %ymm7, %ymm3, %ymm4
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm0, %ymm2, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = [0,1,2,3,4,5,6,7,0,1,0,1,14,15,14,15,16,17,18,19,20,21,22,23,16,17,16,17,30,31,30,31]
; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm11, %ymm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm5
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpermd %ymm5, %ymm3, %ymm3
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm5
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm5, %ymm2
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm3, %ymm2, %ymm2
; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rax), %ymm6
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = <3,u,u,3,u,u,u,4>
; AVX2-FAST-PERLANE-NEXT: vpermd %ymm6, %ymm3, %ymm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm6, %ymm13
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = <0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %ymm5
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpermd %ymm5, %ymm3, %ymm3
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm2, %ymm3, %ymm2
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = [0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm1, %ymm4, %ymm1
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm12[2,2,2,2,6,6,6,6]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm9[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm8[2,2,2,2,6,6,6,6]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm7[1,2,2,3,5,6,6,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,u,u,u,u,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm13[0,1,2,2,4,5,6,6]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29]
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm8[3,3,3,3,7,7,7,7]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm10[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm12[3,3,3,3,7,7,7,7]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 60(%r8), %ymm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-PERLANE-NEXT: vpshufhw {{.*#+}} ymm1 = ymm11[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm13[2,3,3,3,6,7,7,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %xmm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, %xmm6
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, %xmm4
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %xmm12
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %xmm11
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm1, %xmm1
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = <255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm0, %ymm1, %ymm9
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm14
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm13
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm8
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm7
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm1, %xmm1
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm1, %ymm0, %ymm2
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %xmm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm0
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 32(%rax), %ymm10
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm0, %ymm10, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm10
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm10[0],xmm1[0],xmm10[1],xmm1[1],xmm10[2],xmm1[2],xmm10[3],xmm1[3]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm1, %xmm3
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
; AVX2-FAST-PERLANE-NEXT: vpbroadcastd (%rax), %ymm15
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm3, %ymm15, %ymm3
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm9, %ymm0, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpbroadcastq {{.*#+}} xmm0 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm12, %xmm2
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm3 = xmm11[1,1,2,2]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2,3],xmm2[4],xmm3[5,6],xmm2[7]
; AVX2-FAST-PERLANE-NEXT: vpbroadcastd {{.*#+}} xmm3 = [6,7,8,9,6,7,8,9,6,7,8,9,6,7,8,9]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm4
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm6, %xmm9
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm15 = xmm6[1,1,2,3]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm4 = xmm15[0,1],xmm4[2],xmm15[3,4],xmm4[5],xmm15[6,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm2, %ymm4, %ymm2
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm14, %xmm0
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm13[1,1,2,2]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm0 = xmm4[0],xmm0[1],xmm4[2,3],xmm0[4],xmm4[5,6],xmm0[7]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm8, %xmm3
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm7[1,1,2,3]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm3 = xmm4[0,1],xmm3[2],xmm4[3,4],xmm3[5],xmm4[6,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm0, %ymm3, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm4
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,1,3]
; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 36(%rax), %ymm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm4, %ymm5, %ymm4
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm1, %xmm1
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,3]
; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 4(%rax), %ymm3
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm1, %ymm3, %ymm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm2, %ymm4, %ymm15
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm0, %ymm1, %ymm0
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm9[4],xmm6[4],xmm9[5],xmm6[5],xmm9[6],xmm6[6],xmm9[7],xmm6[7]
; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm2 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,3]
; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,3,3,4,5,6,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm0, %ymm1, %ymm0
; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm1, %xmm1
; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm13[4],xmm14[4],xmm13[5],xmm14[5],xmm13[6],xmm14[6],xmm13[7],xmm14[7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,3]
; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,2,3,3,4,5,6,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
; AVX2-FAST-PERLANE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm2, %xmm2
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 40(%rax), %ymm3
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm10, %xmm3 # 16-byte Folded Reload
; AVX2-FAST-PERLANE-NEXT: # xmm3 = xmm10[4],mem[4],xmm10[5],mem[5],xmm10[6],mem[6],xmm10[7],mem[7]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm3, %xmm3
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,1]
; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 8(%rax), %ymm5
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm3, %ymm5, %ymm3
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm0, %ymm0
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm3, %ymm1, %ymm1
; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29]
; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm8[3,3,3,3,7,7,7,7]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5],ymm2[6],ymm3[7,8,9,10],ymm2[11],ymm3[12,13],ymm2[14],ymm3[15]
; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm3 = ymm11[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm9[3,3,3,3,7,7,7,7]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3],ymm3[4],ymm4[5,6,7,8],ymm3[9],ymm4[10,11],ymm3[12],ymm4[13,14,15]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,3]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm3 = ymm12[3,3,3,3,7,7,7,7]
; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,u,u,28,29,26,27,u,u,30,31,30,31]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7,8,9],ymm3[10],ymm4[11,12],ymm3[13],ymm4[14,15]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm14[2,3,3,3,6,7,7,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,2]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = <255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = [0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm3 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm4 = ymm9[2,2,2,2,6,6,6,6]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7,8,9],ymm4[10],ymm3[11,12],ymm4[13],ymm3[14,15]
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm10[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm8[2,2,2,2,6,6,6,6]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5,6,7,8],ymm4[9],ymm5[10,11],ymm4[12],ymm5[13,14,15]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm13[u,u,u,u,u,u,u,u,u,u,8,9,u,u,u,u,u,u,u,u,22,23,u,u,u,u,24,25,u,u,u,u]
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm12[1,2,2,3,5,6,6,7]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7,8,9],ymm4[10],ymm5[11,12],ymm4[13],ymm5[14,15]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,2]
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm5 = ymm14[0,1,2,2,4,5,6,6]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = [255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
; AVX2-FAST-PERLANE-NEXT: # ymm5 = mem[0,1,0,1]
; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm5, %ymm4, %ymm4
; AVX2-FAST-PERLANE-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
; AVX2-FAST-PERLANE-NEXT: # ymm6 = mem[1,1,1,1,5,5,5,5]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm6[2],ymm4[3,4],ymm6[5],ymm4[6,7,8,9],ymm6[10],ymm4[11,12],ymm6[13],ymm4[14,15]
; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm6[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
; AVX2-FAST-PERLANE-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
; AVX2-FAST-PERLANE-NEXT: # ymm7 = mem[0,1,1,3,4,5,5,7]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm6 = ymm7[0,1],ymm6[2],ymm7[3,4],ymm6[5],ymm7[6,7,8,9],ymm6[10],ymm7[11,12],ymm6[13],ymm7[14,15]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,2]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm4, %ymm6, %ymm4
; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm5, %ymm10, %ymm5
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm6 = ymm8[1,1,1,1,5,5,5,5]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0,1],ymm6[2],ymm5[3,4],ymm6[5],ymm5[6,7,8,9],ymm6[10],ymm5[11,12],ymm6[13],ymm5[14,15]
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm11[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm8 = ymm9[0,1,1,3,4,5,5,7]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm6 = ymm8[0,1],ymm6[2],ymm8[3,4],ymm6[5],ymm8[6,7,8,9],ymm6[10],ymm8[11,12],ymm6[13],ymm8[14,15]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,2]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm5, %ymm6, %ymm5
; AVX2-FAST-PERLANE-NEXT: vpbroadcastd {{.*#+}} ymm6 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm7, %ymm7
; AVX2-FAST-PERLANE-NEXT: vpshufd $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
; AVX2-FAST-PERLANE-NEXT: # ymm8 = mem[0,0,2,1,4,4,6,5]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm7 = ymm7[0,1,2],ymm8[3],ymm7[4,5],ymm8[6],ymm7[7,8,9,10],ymm8[11],ymm7[12,13],ymm8[14],ymm7[15]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,2,3,3]
; AVX2-FAST-PERLANE-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
; AVX2-FAST-PERLANE-NEXT: # ymm8 = mem[0,1,1,3,4,5,5,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,2,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = <u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u>
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm7, %ymm8, %ymm7
; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm13, %ymm6
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm8 = ymm12[0,0,2,1,4,4,6,5]
; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1,2],ymm8[3],ymm6[4,5],ymm8[6],ymm6[7,8,9,10],ymm8[11],ymm6[12,13],ymm8[14],ymm6[15]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,3,3]
; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm8 = ymm14[0,1,1,3,4,5,5,7]
; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,2,3]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm6, %ymm8, %ymm6
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm8 = [255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255]
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm4, %ymm7, %ymm4
; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm5, %ymm6, %ymm5
; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm6, 96(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm6, 320(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm5, 128(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, 352(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm3, 160(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, 192(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 64(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 32(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%rax)
; AVX2-FAST-PERLANE-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 224(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 288(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm15, 256(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 416(%rax)
; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 384(%rax)
; AVX2-FAST-PERLANE-NEXT: addq $616, %rsp # imm = 0x268
; AVX2-FAST-PERLANE-NEXT: vzeroupper
; AVX2-FAST-PERLANE-NEXT: retq
; AVX512F-ONLY-SLOW-LABEL: store_i16_stride7_vf32:
; AVX512F-ONLY-SLOW: # %bb.0:
; AVX512F-ONLY-SLOW-NEXT: subq $648, %rsp # imm = 0x288
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, (%rsp) # 32-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm0, %ymm1, %ymm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %ymm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm1, %ymm4, %ymm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm4, %ymm30
; AVX512F-ONLY-SLOW-NEXT: vpor %ymm2, %ymm3, %ymm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %ymm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm4, %ymm2, %ymm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm9, %ymm5, %ymm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm5, %ymm31
; AVX512F-ONLY-SLOW-NEXT: vpor %ymm2, %ymm3, %ymm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm15
; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm2, %ymm15, %ymm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm6
; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm3, %ymm6, %ymm7
; AVX512F-ONLY-SLOW-NEXT: vpor %ymm5, %ymm7, %ymm5
; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r9), %xmm7
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r8), %xmm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rsi), %ymm5
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm4, %ymm5, %ymm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdi), %ymm11
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm9, %ymm11, %ymm9
; AVX512F-ONLY-SLOW-NEXT: vpor %ymm4, %ymm9, %ymm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rcx), %ymm13
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm0, %ymm13, %ymm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdx), %ymm14
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm1, %ymm14, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r9), %ymm4
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm2, %ymm4, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r8), %ymm0
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm3, %ymm0, %ymm2
; AVX512F-ONLY-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm6[3,3,3,3,7,7,7,7]
; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm15[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,3,6,6,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [4,5,4,5,4,5,6,7,16,17,16,17,16,17,17,19]
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm1[0,1,3,2,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermi2d %zmm3, %zmm2, %zmm9
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %xmm9
; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm10 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,7,6]
; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm10, %xmm2, %xmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <16,18,19,19,19,19,u,u,0,1,0,1,2,3,2,3>
; AVX512F-ONLY-SLOW-NEXT: vpermi2d %zmm1, %zmm2, %zmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm2
; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm7 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm7, %xmm9, %xmm1
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[1,1,2,2]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0],xmm1[1],xmm3[2,3],xmm1[4],xmm3[5,6],xmm1[7]
; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm9[0],xmm2[1],xmm9[1],xmm2[2],xmm9[2],xmm2[3],xmm9[3]
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,1,3,2,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,16,u,u,17,17,u,u,0,u,u,1,2,u,u,3>
; AVX512F-ONLY-SLOW-NEXT: vpermi2d %zmm3, %zmm1, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %xmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %xmm12
; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm12[0],xmm3[0],xmm12[1],xmm3[1],xmm12[2],xmm3[2],xmm12[3],xmm3[3]
; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm8 = xmm1[0,1,2,3,4,5,7,6]
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,3,2,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm26 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
; AVX512F-ONLY-SLOW-NEXT: vpermi2d %zmm8, %zmm1, %zmm26
; AVX512F-ONLY-SLOW-NEXT: vprold $16, %ymm15, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm6[1,2,2,3,5,6,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm8[0,1],ymm1[2],ymm8[3,4],ymm1[5],ymm8[6,7,8,9],ymm1[10],ymm8[11,12],ymm1[13],ymm8[14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[0,0,2,1,4,4,6,5]
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm8 = ymm15[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[0,0,0,0,4,4,4,4]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm8[0,1,2],ymm6[3],ymm8[4,5],ymm6[6],ymm8[7,8,9,10],ymm6[11],ymm8[12,13],ymm6[14],ymm8[15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = [2,2,3,3,10,9,11,10]
; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm1, %zmm6, %zmm28
; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm9[4],xmm2[5],xmm9[5],xmm2[6],xmm9[6],xmm2[7],xmm9[7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm1, %xmm25
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rax), %ymm8
; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 8(%rax), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
; AVX512F-ONLY-SLOW-NEXT: vpandn %ymm1, %ymm2, %ymm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm6, %ymm8, %ymm2
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm12[4],xmm3[4],xmm12[5],xmm3[5],xmm12[6],xmm3[6],xmm12[7],xmm3[7]
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm5[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm11[1,1,1,1,5,5,5,5]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm3[2],ymm1[3,4],ymm3[5],ymm1[6,7,8,9],ymm3[10],ymm1[11,12],ymm3[13],ymm1[14,15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm13[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,0,0,4,4,4,4]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm14[0,1,1,3,4,5,5,7]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0,1],ymm1[2],ymm3[3,4],ymm1[5],ymm3[6,7,8,9],ymm1[10],ymm3[11,12],ymm1[13],ymm3[14,15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rax), %ymm1
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm1[0,1,1,3,4,5,5,7]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
; AVX512F-ONLY-SLOW-NEXT: vpandn %ymm3, %ymm8, %ymm3
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm6, %ymm1, %ymm6
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm6, %zmm2
; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm4[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,0,0,4,4,4,4]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm0[0,0,2,1,4,4,6,5]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1,2],ymm6[3],ymm3[4,5],ymm6[6],ymm3[7,8,9,10],ymm6[11],ymm3[12,13],ymm6[14],ymm3[15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm27
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
; AVX512F-ONLY-SLOW-NEXT: # ymm15 = mem[0,1,0,1]
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm15, %ymm13, %ymm3
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm14[2,2,2,2,6,6,6,6]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1],ymm6[2],ymm3[3,4],ymm6[5],ymm3[6,7,8,9],ymm6[10],ymm3[11,12],ymm6[13],ymm3[14,15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm22
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm14[3,3,3,3,7,7,7,7]
; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm6 = ymm13[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[2,2,2,2,6,6,6,6]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm6[1],ymm3[2,3],ymm6[4],ymm3[5,6,7,8],ymm6[9],ymm3[10,11],ymm6[12],ymm3[13,14,15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm17
; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm5[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,2,2,2,6,6,6,6]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm11[2,2,2,2,6,6,6,6]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm6[0],ymm3[1],ymm6[2,3],ymm3[4],ymm6[5,6,7,8],ymm3[9],ymm6[10,11],ymm3[12],ymm6[13,14,15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm16
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm11[3,3,3,3,7,7,7,7]
; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm5 = ymm5[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[2,2,2,2,6,6,6,6]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm13 = ymm3[0,1,2],ymm5[3],ymm3[4,5],ymm5[6],ymm3[7,8,9,10],ymm5[11],ymm3[12,13],ymm5[14],ymm3[15]
; AVX512F-ONLY-SLOW-NEXT: vprold $16, %ymm4, %ymm3
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm0[1,2,2,3,5,6,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm3[2],ymm5[3,4],ymm3[5],ymm5[6,7,8,9],ymm3[10],ymm5[11,12],ymm3[13],ymm5[14,15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm24
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[3,3,3,3,7,7,7,7]
; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm4[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,2,2,3,6,6,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm0, %ymm23
; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [6,5,0,0,7,6,0,7,6,5,0,0,7,6,0,7]
; AVX512F-ONLY-SLOW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermd %zmm1, %zmm0, %zmm29
; AVX512F-ONLY-SLOW-NEXT: vmovdqu (%rsp), %ymm11 # 32-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm11[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm30[3,3,3,3,7,7,7,7]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm9 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm14[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm31[3,3,3,3,7,7,7,7]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm8 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdi), %xmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rsi), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vprold $16, %xmm1, %xmm3
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[1,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm2 = xmm6[0,1],xmm3[2],xmm6[3,4],xmm3[5],xmm6[6,7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm21
; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm0, %xmm20
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rcx), %xmm1
; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm7, %xmm1, %xmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdx), %xmm6
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[1,1,2,2]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm0 = xmm7[0],xmm0[1],xmm7[2,3],xmm0[4],xmm7[5,6],xmm0[7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm0, %ymm19
; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm1[0],xmm6[1],xmm1[1],xmm6[2],xmm1[2],xmm6[3],xmm1[3]
; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm1[4],xmm6[5],xmm1[5],xmm6[6],xmm1[6],xmm6[7],xmm1[7]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm0, %xmm18
; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm10, %xmm12, %xmm2
; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm10, %xmm3, %xmm4
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm3
; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm6
; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3]
; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm10, %xmm7, %xmm10
; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm3[4],xmm6[5],xmm3[5],xmm6[6],xmm3[6],xmm6[7],xmm3[7]
; AVX512F-ONLY-SLOW-NEXT: vprold $16, %xmm6, %xmm6
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[1,1,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm7 = xmm3[0,1],xmm6[2],xmm3[3,4],xmm6[5],xmm3[6,7]
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm11[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,0,0,4,4,4,4]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm30[0,1,1,3,4,5,5,7]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1],ymm3[2],ymm6[3,4],ymm3[5],ymm6[6,7,8,9],ymm3[10],ymm6[11,12],ymm3[13],ymm6[14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm15, %ymm11, %ymm3
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm30[2,2,2,2,6,6,6,6]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm15 = ymm3[0,1],ymm12[2],ymm3[3,4],ymm12[5],ymm3[6,7,8,9],ymm12[10],ymm3[11,12],ymm12[13],ymm3[14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm14[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm31[1,1,1,1,5,5,5,5]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm12[2],ymm3[3,4],ymm12[5],ymm3[6,7,8,9],ymm12[10],ymm3[11,12],ymm12[13],ymm3[14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm31[2,2,2,2,6,6,6,6]
; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm14 = ymm14[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm14[2,2,2,2,6,6,6,6]
; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0],ymm14[1],ymm12[2,3],ymm14[4],ymm12[5,6,7,8],ymm14[9],ymm12[10,11],ymm14[12],ymm12[13,14,15]
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm25, %xmm1
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm1[0,2,3,3,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,2,1]
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[2,1,2,3,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,5,4]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,1,3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm30 = ymm22[2,2,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm31 = ymm17[0,2,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm16[0,2,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,1,3,3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,3,2,4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,0,1,1]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm31, %zmm30, %zmm30
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm0, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm13 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm30, %zmm13, %zmm0
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,1,3]
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm9, %zmm5
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm8, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm5, %zmm13, %zmm4
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm14, %zmm5 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm8 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm31 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm5, %zmm31, %zmm8
; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm1 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm2[0,1,2,3],zmm1[4,5,6,7]
; AVX512F-ONLY-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm2 = mem[2,2,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
; AVX512F-ONLY-SLOW-NEXT: # ymm5 = mem[2,1,3,2]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm27[2,2,3,3]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm24[2,1,3,2]
; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm16 = ymm23[2,2,2,3]
; AVX512F-ONLY-SLOW-NEXT: vpshufd $254, {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Folded Reload
5506 ; AVX512F-ONLY-SLOW-NEXT: # ymm17 = mem[2,3,3,3,6,7,7,7]
5507 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm22 = ymm21[0,0,2,1]
5508 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm20, %xmm9
5509 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm11 = xmm9[2,1,2,3,4,5,6,7]
5510 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,5,5,4]
5511 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,1,3]
5512 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm30 = ymm19[0,0,1,1]
5513 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm18, %xmm9
5514 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm9[0,2,3,3,4,5,6,7]
5515 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
5516 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,1,1]
5517 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
5518 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,2]
5519 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
5520 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
5521 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,2,3]
5522 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
5523 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm1
5524 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm1
5525 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
5526 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm8, %zmm2
5527 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
5528 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm8, %zmm5
5529 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm5
5530 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm0, %zmm2
5531 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
5532 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm8[0,1,2,3],zmm2[4,5,6,7]
5533 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
5534 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm2
5535 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm2
5536 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm14, %zmm5
5537 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm5
5538 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm29
5539 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm17[2,1,3,2]
5540 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 32(%rax), %ymm5
5541 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm0, %zmm0
5542 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
5543 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm0
5544 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm0
5545 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm22, %zmm4
5546 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm30, %zmm5
5547 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm4, %zmm31, %zmm5
5548 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 36(%rax), %ymm4
5549 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 40(%rax), %ymm8
5550 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm4, %zmm4
5551 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
5552 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm4
5553 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm4
5554 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm10, %zmm5
5555 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
5556 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm5
5557 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd (%rax), %ymm7
5558 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 4(%rax), %ymm8
5559 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm7, %zmm7
5560 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm26, %zmm7
5561 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm7
5562 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm6, %zmm5
5563 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm3, %zmm3
5564 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm3
5565 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
5566 ; AVX512F-ONLY-SLOW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
5567 ; AVX512F-ONLY-SLOW-NEXT: vpermd (%rax), %zmm5, %zmm5
5568 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm28, %zmm5
5569 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm5
5570 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
5571 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, 128(%rax)
5572 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, (%rax)
5573 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, 320(%rax)
5574 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 256(%rax)
5575 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 192(%rax)
5576 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, 64(%rax)
5577 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm29, 384(%rax)
5578 ; AVX512F-ONLY-SLOW-NEXT: addq $648, %rsp # imm = 0x288
5579 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
5580 ; AVX512F-ONLY-SLOW-NEXT: retq
5581 ;
5582 ; AVX512F-ONLY-FAST-LABEL: store_i16_stride7_vf32:
5583 ; AVX512F-ONLY-FAST: # %bb.0:
5584 ; AVX512F-ONLY-FAST-NEXT: subq $232, %rsp
5585 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %ymm1
5586 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
5587 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm1, %ymm2
5588 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm1, %ymm9
5589 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5590 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %ymm4
5591 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
5592 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm1, %ymm4, %ymm3
5593 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm4, %ymm11
5594 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5595 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm2, %ymm3, %ymm2
5596 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5597 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %ymm8
5598 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rsi), %ymm10
5599 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
5600 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm4, %ymm8, %ymm2
5601 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm8, (%rsp) # 32-byte Spill
5602 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm6
5603 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
5604 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm5, %ymm6, %ymm3
5605 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm6, %ymm18
5606 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5607 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm2, %ymm3, %ymm2
5608 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5609 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm13
5610 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
5611 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm2, %ymm13, %ymm6
5612 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm15
5613 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
5614 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm3, %ymm15, %ymm7
5615 ; AVX512F-ONLY-FAST-NEXT: vporq %ymm6, %ymm7, %ymm25
5616 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm4, %ymm10, %ymm4
5617 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdi), %ymm6
5618 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm5, %ymm6, %ymm5
5619 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm4, %ymm5, %ymm4
5620 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5621 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rcx), %ymm4
5622 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm4, %ymm0
5623 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm4, %ymm31
5624 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdx), %ymm12
5625 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm1, %ymm12, %ymm1
5626 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
5627 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5628 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r9), %ymm1
5629 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm2, %ymm1, %ymm0
5630 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm1, %ymm14
5631 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r8), %ymm4
5632 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm3, %ymm4, %ymm1
5633 ; AVX512F-ONLY-FAST-NEXT: vporq %ymm0, %ymm1, %ymm21
5634 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
5635 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm10, %ymm1
5636 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm6[3,3,3,3,7,7,7,7]
5637 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7,8,9,10],ymm1[11],ymm2[12,13],ymm1[14],ymm2[15]
5638 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm10[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
5639 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm10, %ymm29
5640 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm6[2,2,2,2,6,6,6,6]
5641 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6,7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13,14,15]
5642 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = <u,2,2,3,10,u,11,u>
5643 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm2, %zmm22
5644 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm9[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
5645 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm11[3,3,3,3,7,7,7,7]
5646 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
5647 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rcx), %xmm10
5648 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdx), %xmm11
5649 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
5650 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
5651 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = <u,2,2,3,8,u,9,u>
5652 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm1, %zmm23
5653 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm8, %ymm0
5654 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm18[3,3,3,3,7,7,7,7]
5655 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
5656 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdi), %xmm0
5657 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
5658 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rsi), %xmm1
5659 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
5660 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm5, %xmm3, %xmm3
5661 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm5, %xmm8
5662 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm19 = [2,1,3,3,8,8,9,9]
5663 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm3, %zmm2, %zmm19
5664 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
5665 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm15[3,3,3,3,7,7,7,7]
5666 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7,8,9],ymm3[10],ymm2[11,12],ymm3[13],ymm2[14,15]
5667 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [2,2,2,3,8,8,8,9]
5668 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r9), %xmm3
5669 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5670 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r8), %xmm5
5671 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5672 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
5673 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
5674 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm3, %xmm7, %xmm5
5675 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm7, %xmm24
5676 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm5, %zmm2, %zmm20
5677 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
5678 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %xmm1, %xmm1
5679 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
5680 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2],xmm0[3,4],xmm1[5],xmm0[6,7]
5681 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm9 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
5682 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm9, %xmm2, %xmm1
5683 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm26 = <u,0,u,1,8,8,9,u>
5684 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm26
5685 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %xmm2
5686 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %xmm1
5687 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
5688 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm3, %xmm0, %xmm3
5689 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
5690 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm27 = [0,0,0,1,8,9,9,11]
5691 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm27
5692 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %ymm13, %ymm0
5693 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm15[1,2,2,3,5,6,6,7]
5694 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
5695 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd {{.*#+}} ymm5 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
5696 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm5, %ymm13, %ymm3
5697 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm15[0,0,2,1,4,4,6,5]
5698 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1,2],ymm7[3],ymm3[4,5],ymm7[6],ymm3[7,8,9,10],ymm7[11],ymm3[12,13],ymm7[14],ymm3[15]
5699 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm28 = [2,2,3,3,10,9,11,10]
5700 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm28
5701 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %xmm15
5702 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %xmm0
5703 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm0[4],xmm15[4],xmm0[5],xmm15[5],xmm0[6],xmm15[6],xmm0[7],xmm15[7]
5704 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm9, %xmm3, %xmm9
5705 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
5706 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm25, %zmm0, %zmm2
5707 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm8, %xmm1, %xmm1
5708 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm8, %xmm18
5709 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm25 = <0,0,1,1,12,13,u,15>
5710 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm2, %zmm1, %zmm25
5711 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
5712 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 8(%rax), %ymm1
5713 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
5714 ; AVX512F-ONLY-FAST-NEXT: vpandn %ymm1, %ymm2, %ymm1
5715 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rax), %ymm3
5716 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
5717 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm2, %ymm3, %ymm7
5718 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm3, %ymm16
5719 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm1, %zmm30
5720 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm29, %ymm1
5721 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
5722 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[1,1,1,1,5,5,5,5]
5723 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm8 = ymm1[0,1],ymm6[2],ymm1[3,4],ymm6[5],ymm1[6,7,8,9],ymm6[10],ymm1[11,12],ymm6[13],ymm1[14,15]
5724 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm31, %ymm13
5725 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm13[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
5726 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm12[0,1,1,3,4,5,5,7]
5727 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm6[0,1],ymm1[2],ymm6[3,4],ymm1[5],ymm6[6,7,8,9],ymm1[10],ymm6[11,12],ymm1[13],ymm6[14,15]
5728 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm14, %ymm3
5729 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm5, %ymm14, %ymm1
5730 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm5 = ymm4[0,0,2,1,4,4,6,5]
5731 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm5[3],ymm1[4,5],ymm5[6],ymm1[7,8,9,10],ymm5[11],ymm1[12,13],ymm5[14],ymm1[15]
5732 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm29 = <0,1,u,3,10,10,11,11>
5733 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm1, %zmm21, %zmm29
5734 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,4,u,u,u,5,u>
5735 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rax), %ymm6
5736 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm6, %ymm1, %ymm1
5737 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
5738 ; AVX512F-ONLY-FAST-NEXT: vpandn %ymm1, %ymm5, %ymm1
5739 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm2, %ymm6, %ymm2
5740 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm14
5741 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm13[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
5742 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm5 = ymm12[3,3,3,3,7,7,7,7]
5743 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0],ymm2[1],ymm5[2,3],ymm2[4],ymm5[5,6,7,8],ymm2[9],ymm5[10,11],ymm2[12],ymm5[13,14,15]
5744 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
5745 ; AVX512F-ONLY-FAST-NEXT: # ymm5 = mem[0,1,0,1]
5746 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm5, %ymm13, %ymm13
5747 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
5748 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm13 = ymm13[0,1],ymm12[2],ymm13[3,4],ymm12[5],ymm13[6,7,8,9],ymm12[10],ymm13[11,12],ymm12[13],ymm13[14,15]
5749 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = [2,2,2,3,8,10,10,11]
5750 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm21, %zmm13
5751 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
5752 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm4[3,3,3,3,7,7,7,7]
5753 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm12[2],ymm2[3,4],ymm12[5],ymm2[6,7,8,9],ymm12[10],ymm2[11,12],ymm12[13],ymm2[14,15]
5754 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %ymm3, %ymm12
5755 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[1,2,2,3,5,6,6,7]
5756 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm12 = ymm4[0,1],ymm12[2],ymm4[3,4],ymm12[5],ymm4[6,7,8,9],ymm12[10],ymm4[11,12],ymm12[13],ymm4[14,15]
5757 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm31 = [2,1,3,2,10,10,10,11]
5758 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm31, %zmm12
5759 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
5760 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm22, %zmm17, %zmm13
5761 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm12
5762 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rax), %zmm3
5763 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [30,5,0,0,31,6,0,31,30,5,0,0,31,6,0,31]
5764 ; AVX512F-ONLY-FAST-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
5765 ; AVX512F-ONLY-FAST-NEXT: vpermi2d %zmm3, %zmm6, %zmm4
5766 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm12, %zmm4
5767 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
5768 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq {{.*#+}} xmm12 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
5769 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm10, %xmm10
5770 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[1,1,2,2]
5771 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm10 = xmm11[0],xmm10[1],xmm11[2,3],xmm10[4],xmm11[5,6],xmm10[7]
5772 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,2,3,3,4,5,6,7]
5773 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = [0,0,1,1,8,8,10,9]
5774 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm11, %zmm10
5775 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5776 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm6 # 16-byte Folded Reload
5777 ; AVX512F-ONLY-FAST-NEXT: # xmm6 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
5778 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm24, %xmm1
5779 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm13 = xmm1[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
5780 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm18, %xmm1
5781 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm6, %xmm6
5782 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,1,1,3,8,8,9,9]
5783 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm22, %zmm13
5784 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %xmm0, %xmm6
5785 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm2 = xmm15[1,1,2,3]
5786 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm6[2],xmm2[3,4],xmm6[5],xmm2[6,7]
5787 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
5788 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
5789 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
5790 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %xmm2
5791 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm2, %xmm6
5792 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm11
5793 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm12 = xmm11[1,1,2,2]
5794 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm6 = xmm12[0],xmm6[1],xmm12[2,3],xmm6[4],xmm12[5,6],xmm6[7]
5795 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm11[4],xmm2[4],xmm11[5],xmm2[5],xmm11[6],xmm2[6],xmm11[7],xmm2[7]
5796 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm11[0],xmm2[0],xmm11[1],xmm2[1],xmm11[2],xmm2[2],xmm11[3],xmm2[3]
5797 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
5798 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm22, %zmm2
5799 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 36(%rax), %ymm6
5800 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 40(%rax), %ymm11
5801 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm6, %zmm6
5802 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm6
5803 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
5804 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm26, %zmm11, %zmm10
5805 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm6
5806 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
5807 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
5808 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm5, %ymm10, %ymm0
5809 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5810 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm5 = ymm1[2,2,2,2,6,6,6,6]
5811 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm5[2],ymm0[3,4],ymm5[5],ymm0[6,7,8,9],ymm5[10],ymm0[11,12],ymm5[13],ymm0[14,15]
5812 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm10[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
5813 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm10 = ymm1[0,1,1,3,4,5,5,7]
5814 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm5 = ymm10[0,1],ymm5[2],ymm10[3,4],ymm5[5],ymm10[6,7,8,9],ymm5[10],ymm10[11,12],ymm5[13],ymm10[14,15]
5815 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm10 = xmm12[0,2,3,3,4,5,6,7]
5816 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
5817 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,1,3]
5818 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
5819 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,3,2]
5820 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm31, %zmm5
5821 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd (%rax), %ymm0
5822 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 4(%rax), %ymm12
5823 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm12, %zmm0, %zmm0
5824 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm27, %zmm0
5825 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
5826 ; AVX512F-ONLY-FAST-NEXT: vmovdqu (%rsp), %ymm13 # 32-byte Reload
5827 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm13[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
5828 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5829 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm1[2,2,2,2,6,6,6,6]
5830 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm12[0],ymm2[1],ymm12[2,3],ymm2[4],ymm12[5,6,7,8],ymm2[9],ymm12[10,11],ymm2[12],ymm12[13,14,15]
5831 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
5832 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm13 = ymm1[1,1,1,1,5,5,5,5]
5833 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0,1],ymm13[2],ymm12[3,4],ymm13[5],ymm12[6,7,8,9],ymm13[10],ymm12[11,12],ymm13[13],ymm12[14,15]
5834 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm21, %zmm12
5835 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm12
5836 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <6,u,u,u,7,u,u,7>
5837 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm16, %ymm2, %ymm2
5838 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm16, %zmm3, %zmm3
5839 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
5840 ; AVX512F-ONLY-FAST-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
5841 ; AVX512F-ONLY-FAST-NEXT: vpermd %zmm3, %zmm5, %zmm3
5842 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm28, %zmm3
5843 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm12, %zmm3
5844 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm5 # 32-byte Folded Reload
5845 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm9, %zmm9 # 32-byte Folded Reload
5846 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm5, %zmm11, %zmm9
5847 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm23, %zmm17, %zmm19
5848 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm25, %zmm30
5849 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm30
5850 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
5851 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm5, %zmm5
5852 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
5853 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm8, %zmm7
5854 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm7
5855 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm29, %zmm14
5856 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm14
5857 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 32(%rax), %ymm5
5858 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm2, %zmm2
5859 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm20, %zmm2
5860 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm19, %zmm2
5861 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
5862 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 128(%rax)
5863 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, (%rax)
5864 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, 320(%rax)
5865 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 256(%rax)
5866 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 192(%rax)
5867 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm4, 384(%rax)
5868 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm30, 64(%rax)
5869 ; AVX512F-ONLY-FAST-NEXT: addq $232, %rsp
5870 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
5871 ; AVX512F-ONLY-FAST-NEXT: retq
5872 ;
5873 ; AVX512DQ-SLOW-LABEL: store_i16_stride7_vf32:
5874 ; AVX512DQ-SLOW: # %bb.0:
5875 ; AVX512DQ-SLOW-NEXT: subq $648, %rsp # imm = 0x288
5876 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %ymm1
5877 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, (%rsp) # 32-byte Spill
5878 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
5879 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm0, %ymm1, %ymm2
5880 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %ymm4
5881 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
5882 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm1, %ymm4, %ymm3
5883 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm4, %ymm30
5884 ; AVX512DQ-SLOW-NEXT: vpor %ymm2, %ymm3, %ymm2
5885 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5886 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %ymm2
5887 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5888 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
5889 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm4, %ymm2, %ymm2
5890 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %ymm5
5891 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
5892 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm9, %ymm5, %ymm3
5893 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm5, %ymm31
5894 ; AVX512DQ-SLOW-NEXT: vpor %ymm2, %ymm3, %ymm2
5895 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5896 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %ymm15
5897 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
5898 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm2, %ymm15, %ymm5
5899 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %ymm6
5900 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
5901 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm3, %ymm6, %ymm7
5902 ; AVX512DQ-SLOW-NEXT: vpor %ymm5, %ymm7, %ymm5
5903 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5904 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r9), %xmm7
5905 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r8), %xmm8
5906 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rsi), %ymm5
5907 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm4, %ymm5, %ymm4
5908 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdi), %ymm11
5909 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm9, %ymm11, %ymm9
5910 ; AVX512DQ-SLOW-NEXT: vpor %ymm4, %ymm9, %ymm4
5911 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5912 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rcx), %ymm13
5913 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm0, %ymm13, %ymm0
5914 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdx), %ymm14
5915 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm1, %ymm14, %ymm1
5916 ; AVX512DQ-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
5917 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5918 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r9), %ymm4
5919 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm2, %ymm4, %ymm1
5920 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r8), %ymm0
5921 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm3, %ymm0, %ymm2
5922 ; AVX512DQ-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
5923 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5924 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm6[3,3,3,3,7,7,7,7]
5925 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm15[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
5926 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,3,6,6,6,7]
5927 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
5928 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
5929 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [4,5,4,5,4,5,6,7,16,17,16,17,16,17,17,19]
5930 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm1[0,1,3,2,4,5,6,7]
5931 ; AVX512DQ-SLOW-NEXT: vpermi2d %zmm3, %zmm2, %zmm9
5932 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5933 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %xmm9
5934 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
5935 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} xmm10 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
5936 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,7,6]
5937 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm10, %xmm2, %xmm2
5938 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = <16,18,19,19,19,19,u,u,0,1,0,1,2,3,2,3>
5939 ; AVX512DQ-SLOW-NEXT: vpermi2d %zmm1, %zmm2, %zmm3
5940 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5941 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm2
5942 ; AVX512DQ-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm7 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
5943 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm7, %xmm9, %xmm1
5944 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[1,1,2,2]
5945 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0],xmm1[1],xmm3[2,3],xmm1[4],xmm3[5,6],xmm1[7]
5946 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm9[0],xmm2[1],xmm9[1],xmm2[2],xmm9[2],xmm2[3],xmm9[3]
5947 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,1,3,2,4,5,6,7]
5948 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <u,16,u,u,17,17,u,u,0,u,u,1,2,u,u,3>
5949 ; AVX512DQ-SLOW-NEXT: vpermi2d %zmm3, %zmm1, %zmm8
5950 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5951 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %xmm3
5952 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %xmm12
5953 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm12[0],xmm3[0],xmm12[1],xmm3[1],xmm12[2],xmm3[2],xmm12[3],xmm3[3]
5954 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm8 = xmm1[0,1,2,3,4,5,7,6]
5955 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,3,2,4,5,6,7]
5956 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm26 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
5957 ; AVX512DQ-SLOW-NEXT: vpermi2d %zmm8, %zmm1, %zmm26
5958 ; AVX512DQ-SLOW-NEXT: vprold $16, %ymm15, %ymm1
5959 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm6[1,2,2,3,5,6,6,7]
5960 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm8[0,1],ymm1[2],ymm8[3,4],ymm1[5],ymm8[6,7,8,9],ymm1[10],ymm8[11,12],ymm1[13],ymm8[14,15]
5961 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[0,0,2,1,4,4,6,5]
5962 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm8 = ymm15[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
5963 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[0,0,0,0,4,4,4,4]
5964 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm8[0,1,2],ymm6[3],ymm8[4,5],ymm6[6],ymm8[7,8,9,10],ymm6[11],ymm8[12,13],ymm6[14],ymm8[15]
5965 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = [2,2,3,3,10,9,11,10]
5966 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm1, %zmm6, %zmm28
5967 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
5968 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm9[4],xmm2[5],xmm9[5],xmm2[6],xmm9[6],xmm2[7],xmm9[7]
5969 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm1, %xmm25
5970 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rax), %ymm8
5971 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5972 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 8(%rax), %ymm1
5973 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
5974 ; AVX512DQ-SLOW-NEXT: vpandn %ymm1, %ymm2, %ymm1
5975 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
5976 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm6, %ymm8, %ymm2
5977 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
5978 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5979 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm12[4],xmm3[4],xmm12[5],xmm3[5],xmm12[6],xmm3[6],xmm12[7],xmm3[7]
5980 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm5[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
5981 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
5982 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm11[1,1,1,1,5,5,5,5]
5983 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm3[2],ymm1[3,4],ymm3[5],ymm1[6,7,8,9],ymm3[10],ymm1[11,12],ymm3[13],ymm1[14,15]
5984 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5985 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm13[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
5986 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,0,0,4,4,4,4]
5987 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm14[0,1,1,3,4,5,5,7]
5988 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0,1],ymm1[2],ymm3[3,4],ymm1[5],ymm3[6,7,8,9],ymm1[10],ymm3[11,12],ymm1[13],ymm3[14,15]
5989 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5990 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rax), %ymm1
5991 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm1[0,1,1,3,4,5,5,7]
5992 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,2,3]
5993 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
5994 ; AVX512DQ-SLOW-NEXT: vpandn %ymm3, %ymm8, %ymm3
5995 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm6, %ymm1, %ymm6
5996 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm6, %zmm2
5997 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
5998 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm4[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
5999 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,0,0,4,4,4,4]
6000 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm0[0,0,2,1,4,4,6,5]
6001 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1,2],ymm6[3],ymm3[4,5],ymm6[6],ymm3[7,8,9,10],ymm6[11],ymm3[12,13],ymm6[14],ymm3[15]
6002 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm27
6003 ; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
6004 ; AVX512DQ-SLOW-NEXT: # ymm15 = mem[0,1,0,1]
6005 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm15, %ymm13, %ymm3
6006 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm14[2,2,2,2,6,6,6,6]
6007 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1],ymm6[2],ymm3[3,4],ymm6[5],ymm3[6,7,8,9],ymm6[10],ymm3[11,12],ymm6[13],ymm3[14,15]
6008 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm22
6009 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm14[3,3,3,3,7,7,7,7]
6010 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm6 = ymm13[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
6011 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[2,2,2,2,6,6,6,6]
6012 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm6[1],ymm3[2,3],ymm6[4],ymm3[5,6,7,8],ymm6[9],ymm3[10,11],ymm6[12],ymm3[13,14,15]
6013 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm17
6014 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm5[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
6015 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,2,2,2,6,6,6,6]
6016 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm11[2,2,2,2,6,6,6,6]
6017 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm6[0],ymm3[1],ymm6[2,3],ymm3[4],ymm6[5,6,7,8],ymm3[9],ymm6[10,11],ymm3[12],ymm6[13,14,15]
6018 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm16
6019 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm11[3,3,3,3,7,7,7,7]
6020 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm5 = ymm5[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
6021 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[2,2,2,2,6,6,6,6]
6022 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm13 = ymm3[0,1,2],ymm5[3],ymm3[4,5],ymm5[6],ymm3[7,8,9,10],ymm5[11],ymm3[12,13],ymm5[14],ymm3[15]
6023 ; AVX512DQ-SLOW-NEXT: vprold $16, %ymm4, %ymm3
6024 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm0[1,2,2,3,5,6,6,7]
6025 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm3[2],ymm5[3,4],ymm3[5],ymm5[6,7,8,9],ymm3[10],ymm5[11,12],ymm3[13],ymm5[14,15]
6026 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm24
6027 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[3,3,3,3,7,7,7,7]
6028 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm4[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
6029 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,2,2,3,6,6,6,7]
6030 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
6031 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm0, %ymm23
6032 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x8 {{.*#+}} zmm0 = [6,5,0,0,7,6,0,7,6,5,0,0,7,6,0,7]
6033 ; AVX512DQ-SLOW-NEXT: # zmm0 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
6034 ; AVX512DQ-SLOW-NEXT: vpermd %zmm1, %zmm0, %zmm29
6035 ; AVX512DQ-SLOW-NEXT: vmovdqu (%rsp), %ymm11 # 32-byte Reload
6036 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm11[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
6037 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
6038 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm30[3,3,3,3,7,7,7,7]
6039 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm9 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
6040 ; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
6041 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm14[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
6042 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
6043 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm31[3,3,3,3,7,7,7,7]
6044 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm8 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
6045 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdi), %xmm0
6046 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rsi), %xmm1
6047 ; AVX512DQ-SLOW-NEXT: vprold $16, %xmm1, %xmm3
6048 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[1,1,2,3]
6049 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm2 = xmm6[0,1],xmm3[2],xmm6[3,4],xmm3[5],xmm6[6,7]
6050 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm21
6051 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
6052 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
6053 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm0, %xmm20
6054 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rcx), %xmm1
6055 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm7, %xmm1, %xmm0
6056 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdx), %xmm6
6057 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[1,1,2,2]
6058 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm0 = xmm7[0],xmm0[1],xmm7[2,3],xmm0[4],xmm7[5,6],xmm0[7]
6059 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm0, %ymm19
6060 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm1[0],xmm6[1],xmm1[1],xmm6[2],xmm1[2],xmm6[3],xmm1[3]
6061 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm1[4],xmm6[5],xmm1[5],xmm6[6],xmm1[6],xmm6[7],xmm1[7]
6062 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm0, %xmm18
6063 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm10, %xmm12, %xmm2
6064 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm10, %xmm3, %xmm4
6065 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %xmm3
6066 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %xmm6
6067 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3]
6068 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm10, %xmm7, %xmm10
6069 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm3[4],xmm6[5],xmm3[5],xmm6[6],xmm3[6],xmm6[7],xmm3[7]
6070 ; AVX512DQ-SLOW-NEXT: vprold $16, %xmm6, %xmm6
6071 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[1,1,2,3]
6072 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm7 = xmm3[0,1],xmm6[2],xmm3[3,4],xmm6[5],xmm3[6,7]
6073 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm11[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
6074 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,0,0,4,4,4,4]
6075 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm30[0,1,1,3,4,5,5,7]
6076 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1],ymm3[2],ymm6[3,4],ymm3[5],ymm6[6,7,8,9],ymm3[10],ymm6[11,12],ymm3[13],ymm6[14,15]
6077 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm15, %ymm11, %ymm3
6078 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm30[2,2,2,2,6,6,6,6]
6079 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm15 = ymm3[0,1],ymm12[2],ymm3[3,4],ymm12[5],ymm3[6,7,8,9],ymm12[10],ymm3[11,12],ymm12[13],ymm3[14,15]
6080 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm14[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
6081 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
6082 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm31[1,1,1,1,5,5,5,5]
6083 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm12[2],ymm3[3,4],ymm12[5],ymm3[6,7,8,9],ymm12[10],ymm3[11,12],ymm12[13],ymm3[14,15]
6084 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm31[2,2,2,2,6,6,6,6]
6085 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm14 = ymm14[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
6086 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm14[2,2,2,2,6,6,6,6]
6087 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0],ymm14[1],ymm12[2,3],ymm14[4],ymm12[5,6,7,8],ymm14[9],ymm12[10,11],ymm14[12],ymm12[13,14,15]
6088 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm25, %xmm1
6089 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm1[0,2,3,3,4,5,6,7]
6090 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,2,1]
6091 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[2,1,2,3,4,5,6,7]
6092 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,5,4]
6093 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,1,3]
6094 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
6095 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm30 = ymm22[2,2,2,3]
6096 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm31 = ymm17[0,2,2,3]
6097 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm16[0,2,2,3]
6098 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,1,3,3]
6099 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
6100 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,3,2,4,5,6,7]
6101 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,0,1,1]
6102 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
6103 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
6104 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm31, %zmm30, %zmm30
6105 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm0, %zmm0
6106 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm13 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
6107 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm30, %zmm13, %zmm0
6108 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,1,3]
6109 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm9, %zmm5
6110 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm8, %zmm4
6111 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm5, %zmm13, %zmm4
6112 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm14, %zmm5 # 32-byte Folded Reload
6113 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm8 # 32-byte Folded Reload
6114 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm31 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
6115 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm5, %zmm31, %zmm8
6116 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm1 # 32-byte Folded Reload
6117 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm2[0,1,2,3],zmm1[4,5,6,7]
6118 ; AVX512DQ-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
6119 ; AVX512DQ-SLOW-NEXT: # ymm2 = mem[2,2,2,3]
6120 ; AVX512DQ-SLOW-NEXT: vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
6121 ; AVX512DQ-SLOW-NEXT: # ymm5 = mem[2,1,3,2]
6122 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm27[2,2,3,3]
6123 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm24[2,1,3,2]
6124 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm16 = ymm23[2,2,2,3]
6125 ; AVX512DQ-SLOW-NEXT: vpshufd $254, {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Folded Reload
6126 ; AVX512DQ-SLOW-NEXT: # ymm17 = mem[2,3,3,3,6,7,7,7]
6127 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm22 = ymm21[0,0,2,1]
6128 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm20, %xmm9
6129 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm11 = xmm9[2,1,2,3,4,5,6,7]
6130 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,5,5,4]
6131 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,1,3]
6132 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm30 = ymm19[0,0,1,1]
6133 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm18, %xmm9
6134 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm9[0,2,3,3,4,5,6,7]
6135 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
6136 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,1,1]
6137 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
6138 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,2]
6139 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
6140 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
6141 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,2,3]
6142 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
6143 ; AVX512DQ-SLOW-NEXT: vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm1
6144 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm1
6145 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
6146 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm8, %zmm2
6147 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
6148 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm8, %zmm5
6149 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm5
6150 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm0, %zmm2
6151 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
6152 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm8[0,1,2,3],zmm2[4,5,6,7]
6153 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
6154 ; AVX512DQ-SLOW-NEXT: vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm2
6155 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm2
6156 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm14, %zmm5
6157 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm5
6158 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm29
6159 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm17[2,1,3,2]
6160 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 32(%rax), %ymm5
6161 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm0, %zmm0
6162 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
6163 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm0
6164 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm0
6165 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm22, %zmm4
6166 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm30, %zmm5
6167 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm4, %zmm31, %zmm5
6168 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 36(%rax), %ymm4
6169 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 40(%rax), %ymm8
6170 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm4, %zmm4
6171 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
6172 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm4
6173 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm4
6174 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm10, %zmm5
6175 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
6176 ; AVX512DQ-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm5
6177 ; AVX512DQ-SLOW-NEXT: vpbroadcastd (%rax), %ymm7
6178 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 4(%rax), %ymm8
6179 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm7, %zmm7
6180 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm26, %zmm7
6181 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm7
6182 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm6, %zmm5
6183 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm3, %zmm3
6184 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm3
6185 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x8 {{.*#+}} zmm5 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
6186 ; AVX512DQ-SLOW-NEXT: # zmm5 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
6187 ; AVX512DQ-SLOW-NEXT: vpermd (%rax), %zmm5, %zmm5
6188 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm28, %zmm5
6189 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm5
6190 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6191 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm5, 128(%rax)
6192 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, (%rax)
6193 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 320(%rax)
6194 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, 256(%rax)
6195 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, 192(%rax)
6196 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, 64(%rax)
6197 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm29, 384(%rax)
6198 ; AVX512DQ-SLOW-NEXT: addq $648, %rsp # imm = 0x288
6199 ; AVX512DQ-SLOW-NEXT: vzeroupper
6200 ; AVX512DQ-SLOW-NEXT: retq
6201 ;
6202 ; AVX512DQ-FAST-LABEL: store_i16_stride7_vf32:
6203 ; AVX512DQ-FAST: # %bb.0:
6204 ; AVX512DQ-FAST-NEXT: subq $232, %rsp
6205 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %ymm1
6206 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
6207 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm1, %ymm2
6208 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm1, %ymm9
6209 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6210 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %ymm4
6211 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
6212 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm4, %ymm3
6213 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm4, %ymm11
6214 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6215 ; AVX512DQ-FAST-NEXT: vpor %ymm2, %ymm3, %ymm2
6216 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6217 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %ymm8
6218 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rsi), %ymm10
6219 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
6220 ; AVX512DQ-FAST-NEXT: vpshufb %ymm4, %ymm8, %ymm2
6221 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm8, (%rsp) # 32-byte Spill
6222 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %ymm6
6223 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
6224 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm6, %ymm3
6225 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm6, %ymm18
6226 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6227 ; AVX512DQ-FAST-NEXT: vpor %ymm2, %ymm3, %ymm2
6228 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6229 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %ymm13
6230 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
6231 ; AVX512DQ-FAST-NEXT: vpshufb %ymm2, %ymm13, %ymm6
6232 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %ymm15
6233 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
6234 ; AVX512DQ-FAST-NEXT: vpshufb %ymm3, %ymm15, %ymm7
6235 ; AVX512DQ-FAST-NEXT: vporq %ymm6, %ymm7, %ymm25
6236 ; AVX512DQ-FAST-NEXT: vpshufb %ymm4, %ymm10, %ymm4
6237 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdi), %ymm6
6238 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm6, %ymm5
6239 ; AVX512DQ-FAST-NEXT: vpor %ymm4, %ymm5, %ymm4
6240 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6241 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rcx), %ymm4
6242 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm4, %ymm0
6243 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm4, %ymm31
6244 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdx), %ymm12
6245 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm12, %ymm1
6246 ; AVX512DQ-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
6247 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6248 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r9), %ymm1
6249 ; AVX512DQ-FAST-NEXT: vpshufb %ymm2, %ymm1, %ymm0
6250 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm1, %ymm14
6251 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r8), %ymm4
6252 ; AVX512DQ-FAST-NEXT: vpshufb %ymm3, %ymm4, %ymm1
6253 ; AVX512DQ-FAST-NEXT: vporq %ymm0, %ymm1, %ymm21
6254 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
6255 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm10, %ymm1
6256 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm6[3,3,3,3,7,7,7,7]
6257 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7,8,9,10],ymm1[11],ymm2[12,13],ymm1[14],ymm2[15]
6258 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm10[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
6259 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm10, %ymm29
6260 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm6[2,2,2,2,6,6,6,6]
6261 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6,7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13,14,15]
6262 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = <u,2,2,3,10,u,11,u>
6263 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm2, %zmm22
6264 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm9[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
6265 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm11[3,3,3,3,7,7,7,7]
6266 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
6267 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rcx), %xmm10
6268 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdx), %xmm11
6269 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
6270 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
6271 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = <u,2,2,3,8,u,9,u>
6272 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm2, %zmm1, %zmm23
6273 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm8, %ymm0
6274 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm18[3,3,3,3,7,7,7,7]
6275 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
6276 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdi), %xmm0
6277 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
6278 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rsi), %xmm1
6279 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
6280 ; AVX512DQ-FAST-NEXT: vpshufb %xmm5, %xmm3, %xmm3
6281 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm5, %xmm8
6282 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm19 = [2,1,3,3,8,8,9,9]
6283 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm3, %zmm2, %zmm19
6284 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
6285 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm15[3,3,3,3,7,7,7,7]
6286 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7,8,9],ymm3[10],ymm2[11,12],ymm3[13],ymm2[14,15]
6287 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [2,2,2,3,8,8,8,9]
6288 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r9), %xmm3
6289 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6290 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r8), %xmm5
6291 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6292 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
6293 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
6294 ; AVX512DQ-FAST-NEXT: vpshufb %xmm3, %xmm7, %xmm5
6295 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm7, %xmm24
6296 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm5, %zmm2, %zmm20
6297 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
6298 ; AVX512DQ-FAST-NEXT: vprold $16, %xmm1, %xmm1
6299 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
6300 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2],xmm0[3,4],xmm1[5],xmm0[6,7]
6301 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm9 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
6302 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm2, %xmm1
6303 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm26 = <u,0,u,1,8,8,9,u>
6304 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm0, %zmm26
6305 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %xmm2
6306 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %xmm1
6307 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
6308 ; AVX512DQ-FAST-NEXT: vpshufb %xmm3, %xmm0, %xmm3
6309 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
6310 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm27 = [0,0,0,1,8,9,9,11]
6311 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm27
6312 ; AVX512DQ-FAST-NEXT: vprold $16, %ymm13, %ymm0
6313 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm15[1,2,2,3,5,6,6,7]
6314 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
6315 ; AVX512DQ-FAST-NEXT: vpbroadcastd {{.*#+}} ymm5 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
6316 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm13, %ymm3
6317 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm15[0,0,2,1,4,4,6,5]
6318 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1,2],ymm7[3],ymm3[4,5],ymm7[6],ymm3[7,8,9,10],ymm7[11],ymm3[12,13],ymm7[14],ymm3[15]
6319 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm28 = [2,2,3,3,10,9,11,10]
6320 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm0, %zmm3, %zmm28
6321 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %xmm15
6322 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %xmm0
6323 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm0[4],xmm15[4],xmm0[5],xmm15[5],xmm0[6],xmm15[6],xmm0[7],xmm15[7]
6324 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm3, %xmm9
6325 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
6326 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm25, %zmm0, %zmm2
6327 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm1, %xmm1
6328 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm8, %xmm18
6329 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm25 = <0,0,1,1,12,13,u,15>
6330 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm2, %zmm1, %zmm25
6331 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6332 ; AVX512DQ-FAST-NEXT: vpbroadcastd 8(%rax), %ymm1
6333 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
6334 ; AVX512DQ-FAST-NEXT: vpandn %ymm1, %ymm2, %ymm1
6335 ; AVX512DQ-FAST-NEXT: vmovdqa (%rax), %ymm3
6336 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
6337 ; AVX512DQ-FAST-NEXT: vpshufb %ymm2, %ymm3, %ymm7
6338 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm3, %ymm16
6339 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm1, %zmm30
6340 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm29, %ymm1
6341 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
6342 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[1,1,1,1,5,5,5,5]
6343 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm8 = ymm1[0,1],ymm6[2],ymm1[3,4],ymm6[5],ymm1[6,7,8,9],ymm6[10],ymm1[11,12],ymm6[13],ymm1[14,15]
6344 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm31, %ymm13
6345 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm13[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
6346 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm12[0,1,1,3,4,5,5,7]
6347 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm6[0,1],ymm1[2],ymm6[3,4],ymm1[5],ymm6[6,7,8,9],ymm1[10],ymm6[11,12],ymm1[13],ymm6[14,15]
6348 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm14, %ymm3
6349 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm14, %ymm1
6350 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm5 = ymm4[0,0,2,1,4,4,6,5]
6351 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm5[3],ymm1[4,5],ymm5[6],ymm1[7,8,9,10],ymm5[11],ymm1[12,13],ymm5[14],ymm1[15]
6352 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm29 = <0,1,u,3,10,10,11,11>
6353 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm1, %zmm21, %zmm29
6354 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,4,u,u,u,5,u>
6355 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rax), %ymm6
6356 ; AVX512DQ-FAST-NEXT: vpermd %ymm6, %ymm1, %ymm1
6357 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
6358 ; AVX512DQ-FAST-NEXT: vpandn %ymm1, %ymm5, %ymm1
6359 ; AVX512DQ-FAST-NEXT: vpshufb %ymm2, %ymm6, %ymm2
6360 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm14
6361 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm13[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
6362 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm5 = ymm12[3,3,3,3,7,7,7,7]
6363 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0],ymm2[1],ymm5[2,3],ymm2[4],ymm5[5,6,7,8],ymm2[9],ymm5[10,11],ymm2[12],ymm5[13,14,15]
6364 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
6365 ; AVX512DQ-FAST-NEXT: # ymm5 = mem[0,1,0,1]
6366 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm13, %ymm13
6367 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
6368 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm13 = ymm13[0,1],ymm12[2],ymm13[3,4],ymm12[5],ymm13[6,7,8,9],ymm12[10],ymm13[11,12],ymm12[13],ymm13[14,15]
6369 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = [2,2,2,3,8,10,10,11]
6370 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm21, %zmm13
6371 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
6372 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm4[3,3,3,3,7,7,7,7]
6373 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm12[2],ymm2[3,4],ymm12[5],ymm2[6,7,8,9],ymm12[10],ymm2[11,12],ymm12[13],ymm2[14,15]
6374 ; AVX512DQ-FAST-NEXT: vprold $16, %ymm3, %ymm12
6375 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[1,2,2,3,5,6,6,7]
6376 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm12 = ymm4[0,1],ymm12[2],ymm4[3,4],ymm12[5],ymm4[6,7,8,9],ymm12[10],ymm4[11,12],ymm12[13],ymm4[14,15]
6377 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm31 = [2,1,3,2,10,10,10,11]
6378 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm31, %zmm12
6379 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
6380 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm22, %zmm17, %zmm13
6381 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm12
6382 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rax), %zmm3
6383 ; AVX512DQ-FAST-NEXT: vbroadcasti32x8 {{.*#+}} zmm4 = [30,5,0,0,31,6,0,31,30,5,0,0,31,6,0,31]
6384 ; AVX512DQ-FAST-NEXT: # zmm4 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
6385 ; AVX512DQ-FAST-NEXT: vpermi2d %zmm3, %zmm6, %zmm4
6386 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm12, %zmm4
6387 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
6388 ; AVX512DQ-FAST-NEXT: vpbroadcastq {{.*#+}} xmm12 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
6389 ; AVX512DQ-FAST-NEXT: vpshufb %xmm12, %xmm10, %xmm10
6390 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[1,1,2,2]
6391 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm10 = xmm11[0],xmm10[1],xmm11[2,3],xmm10[4],xmm11[5,6],xmm10[7]
6392 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,2,3,3,4,5,6,7]
6393 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = [0,0,1,1,8,8,10,9]
6394 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm11, %zmm10
6395 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6396 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm6 # 16-byte Folded Reload
6397 ; AVX512DQ-FAST-NEXT: # xmm6 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
6398 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm24, %xmm1
6399 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm13 = xmm1[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
6400 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm18, %xmm1
6401 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm6, %xmm6
6402 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,1,1,3,8,8,9,9]
6403 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm22, %zmm13
6404 ; AVX512DQ-FAST-NEXT: vprold $16, %xmm0, %xmm6
6405 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm2 = xmm15[1,1,2,3]
6406 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm6[2],xmm2[3,4],xmm6[5],xmm2[6,7]
6407 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
6408 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
6409 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm11, %zmm0
6410 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %xmm2
6411 ; AVX512DQ-FAST-NEXT: vpshufb %xmm12, %xmm2, %xmm6
6412 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm11
6413 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm12 = xmm11[1,1,2,2]
6414 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm6 = xmm12[0],xmm6[1],xmm12[2,3],xmm6[4],xmm12[5,6],xmm6[7]
6415 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm11[4],xmm2[4],xmm11[5],xmm2[5],xmm11[6],xmm2[6],xmm11[7],xmm2[7]
6416 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm11[0],xmm2[0],xmm11[1],xmm2[1],xmm11[2],xmm2[2],xmm11[3],xmm2[3]
6417 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
6418 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm22, %zmm2
6419 ; AVX512DQ-FAST-NEXT: vpbroadcastd 36(%rax), %ymm6
6420 ; AVX512DQ-FAST-NEXT: vpbroadcastd 40(%rax), %ymm11
6421 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm6, %zmm6
6422 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm6
6423 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
6424 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm26, %zmm11, %zmm10
6425 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm6
6426 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
6427 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
6428 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm10, %ymm0
6429 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6430 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm5 = ymm1[2,2,2,2,6,6,6,6]
6431 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm5[2],ymm0[3,4],ymm5[5],ymm0[6,7,8,9],ymm5[10],ymm0[11,12],ymm5[13],ymm0[14,15]
6432 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm10[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
6433 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm10 = ymm1[0,1,1,3,4,5,5,7]
6434 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm5 = ymm10[0,1],ymm5[2],ymm10[3,4],ymm5[5],ymm10[6,7,8,9],ymm5[10],ymm10[11,12],ymm5[13],ymm10[14,15]
6435 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm10 = xmm12[0,2,3,3,4,5,6,7]
6436 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
6437 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,1,3]
6438 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
6439 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,3,2]
6440 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm31, %zmm5
6441 ; AVX512DQ-FAST-NEXT: vpbroadcastd (%rax), %ymm0
6442 ; AVX512DQ-FAST-NEXT: vpbroadcastd 4(%rax), %ymm12
6443 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm12, %zmm0, %zmm0
6444 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm27, %zmm0
6445 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
6446 ; AVX512DQ-FAST-NEXT: vmovdqu (%rsp), %ymm13 # 32-byte Reload
6447 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm13[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
6448 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6449 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm1[2,2,2,2,6,6,6,6]
6450 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm12[0],ymm2[1],ymm12[2,3],ymm2[4],ymm12[5,6,7,8],ymm2[9],ymm12[10,11],ymm2[12],ymm12[13,14,15]
6451 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
6452 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm13 = ymm1[1,1,1,1,5,5,5,5]
6453 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0,1],ymm13[2],ymm12[3,4],ymm13[5],ymm12[6,7,8,9],ymm13[10],ymm12[11,12],ymm13[13],ymm12[14,15]
6454 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm21, %zmm12
6455 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm12
6456 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <6,u,u,u,7,u,u,7>
6457 ; AVX512DQ-FAST-NEXT: vpermd %ymm16, %ymm2, %ymm2
6458 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm16, %zmm3, %zmm3
6459 ; AVX512DQ-FAST-NEXT: vbroadcasti32x8 {{.*#+}} zmm5 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
6460 ; AVX512DQ-FAST-NEXT: # zmm5 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
6461 ; AVX512DQ-FAST-NEXT: vpermd %zmm3, %zmm5, %zmm3
6462 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm28, %zmm3
6463 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm12, %zmm3
6464 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm5 # 32-byte Folded Reload
6465 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm9, %zmm9 # 32-byte Folded Reload
6466 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm5, %zmm11, %zmm9
6467 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm23, %zmm17, %zmm19
6468 ; AVX512DQ-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm25, %zmm30
6469 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm30
6470 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
6471 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm5, %zmm5
6472 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
6473 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm8, %zmm7
6474 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm7
6475 ; AVX512DQ-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm29, %zmm14
6476 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm14
6477 ; AVX512DQ-FAST-NEXT: vpbroadcastd 32(%rax), %ymm5
6478 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm2, %zmm2
6479 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm20, %zmm2
6480 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm19, %zmm2
6481 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6482 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, 128(%rax)
6483 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, (%rax)
6484 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, 320(%rax)
6485 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 256(%rax)
6486 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, 192(%rax)
6487 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm4, 384(%rax)
6488 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm30, 64(%rax)
6489 ; AVX512DQ-FAST-NEXT: addq $232, %rsp
6490 ; AVX512DQ-FAST-NEXT: vzeroupper
6491 ; AVX512DQ-FAST-NEXT: retq
6492 ;
6493 ; AVX512BW-LABEL: store_i16_stride7_vf32:
6494 ; AVX512BW: # %bb.0:
6495 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6496 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
6497 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm3
6498 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm4
6499 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm6
6500 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm7
6501 ; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm1
6502 ; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm2
6503 ; AVX512BW-NEXT: vmovdqa64 (%r10), %zmm0
6504 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [0,0,30,62,28,60,0,0,0,31,63,29,61,0,0,0,0,0,30,62,28,60,0,0,0,31,63,29,61,0,0,0]
6505 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
6506 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm5
6507 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [27,0,0,0,62,30,60,28,0,0,0,63,31,61,29,0,27,0,0,0,62,30,60,28,0,0,0,63,31,61,29,0]
6508 ; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
6509 ; AVX512BW-NEXT: vpermi2w %zmm6, %zmm7, %zmm8
6510 ; AVX512BW-NEXT: movl $101455920, %ecx # imm = 0x60C1830
6511 ; AVX512BW-NEXT: kmovd %ecx, %k1
6512 ; AVX512BW-NEXT: vmovdqu16 %zmm5, %zmm8 {%k1}
6513 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [61,27,59,0,0,0,30,62,28,60,0,0,0,31,63,29,61,27,59,0,0,0,30,62,28,60,0,0,0,31,63,29]
6514 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
6515 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm5
6516 ; AVX512BW-NEXT: movl $1623294726, %ecx # imm = 0x60C18306
6517 ; AVX512BW-NEXT: kmovd %ecx, %k2
6518 ; AVX512BW-NEXT: vmovdqu16 %zmm5, %zmm8 {%k2}
6519 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,1,2,59,4,5,6,7,8,9,60,11,12,13,14,15,16,61,18,19,20,21,22,23,62,25,26,27,28,29,30,63]
6520 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm8, %zmm5
6521 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [2,34,0,32,0,0,0,3,35,1,33,0,0,0,4,36,2,34,0,32,0,0,0,3,35,1,33,0,0,0,4,36]
6522 ; AVX512BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
6523 ; AVX512BW-NEXT: vpermi2w %zmm7, %zmm6, %zmm9
6524 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [0,32,0,0,0,3,35,1,33,0,0,0,4,36,2,34,0,32,0,0,0,3,35,1,33,0,0,0,4,36,2,34]
6525 ; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
6526 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm8
6527 ; AVX512BW-NEXT: movl $-1048377844, %ecx # imm = 0xC183060C
6528 ; AVX512BW-NEXT: kmovd %ecx, %k2
6529 ; AVX512BW-NEXT: vmovdqu16 %zmm9, %zmm8 {%k2}
6530 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,0,2,34,0,32,0,0,0,3,35,1,33,0,0,0,0,0,2,34,0,32,0,0,0,3,35,1,33,0,0,0]
6531 ; AVX512BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
6532 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm9
6533 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,u,u,4,5,32,u,u,u,u,11,12,33,u,u,u,u,18,19,34,u,u,u,u,25,26,35,u,u,u,u>
6534 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm9, %zmm10
6535 ; AVX512BW-NEXT: movl $236730480, %ecx # imm = 0xE1C3870
6536 ; AVX512BW-NEXT: kmovd %ecx, %k2
6537 ; AVX512BW-NEXT: vmovdqu16 %zmm10, %zmm8 {%k2}
6538 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [0,0,0,7,39,5,37,0,0,0,8,40,6,38,0,0,0,0,0,7,39,5,37,0,0,0,8,40,6,38,0,0]
6539 ; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
6540 ; AVX512BW-NEXT: vpermi2w %zmm7, %zmm6, %zmm10
6541 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm9 = [0,7,39,5,37,0,0,0,8,40,6,38,0,0,0,9,0,7,39,5,37,0,0,0,8,40,6,38,0,0,0,9]
6542 ; AVX512BW-NEXT: # zmm9 = mem[0,1,2,3,0,1,2,3]
6543 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm9
6544 ; AVX512BW-NEXT: movl $202911840, %ecx # imm = 0xC183060
6545 ; AVX512BW-NEXT: kmovd %ecx, %k2
6546 ; AVX512BW-NEXT: vmovdqu16 %zmm10, %zmm9 {%k2}
6547 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [4,36,0,0,0,7,39,5,37,0,0,0,8,40,6,38,4,36,0,0,0,7,39,5,37,0,0,0,8,40,6,38]
6548 ; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
6549 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm10
6550 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm11 = <0,1,36,u,u,u,u,7,8,37,u,u,u,u,14,15,38,u,u,u,u,21,22,39,u,u,u,u,28,29,40,u>
6551 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm10, %zmm11
6552 ; AVX512BW-NEXT: movl $1893843847, %ecx # imm = 0x70E1C387
6553 ; AVX512BW-NEXT: kmovd %ecx, %k3
6554 ; AVX512BW-NEXT: vmovdqu16 %zmm11, %zmm9 {%k3}
6555 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [9,0,0,0,44,12,42,10,0,0,0,45,13,43,11,0,9,0,0,0,44,12,42,10,0,0,0,45,13,43,11,0]
6556 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
6557 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm4, %zmm11
6558 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm10 = [43,9,41,0,0,0,12,44,10,42,0,0,0,13,45,11,43,9,41,0,0,0,12,44,10,42,0,0,0,13,45,11]
6559 ; AVX512BW-NEXT: # zmm10 = mem[0,1,2,3,0,1,2,3]
6560 ; AVX512BW-NEXT: vpermi2w %zmm7, %zmm6, %zmm10
6561 ; AVX512BW-NEXT: movl $405823681, %ecx # imm = 0x183060C1
6562 ; AVX512BW-NEXT: kmovd %ecx, %k3
6563 ; AVX512BW-NEXT: vmovdqu16 %zmm11, %zmm10 {%k3}
6564 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,11,43,9,41,0,0,0,12,44,10,42,0,0,0,13,0,11,43,9,41,0,0,0,12,44,10,42,0,0,0,13]
6565 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
6566 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm11
6567 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm12 = <u,u,u,3,4,41,u,u,u,u,10,11,42,u,u,u,u,17,18,43,u,u,u,u,24,25,44,u,u,u,u,31>
6568 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm11, %zmm12
6569 ; AVX512BW-NEXT: movl $-2029118408, %ecx # imm = 0x870E1C38
6570 ; AVX512BW-NEXT: kmovd %ecx, %k3
6571 ; AVX512BW-NEXT: vmovdqu16 %zmm12, %zmm10 {%k3}
6572 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,0,16,48,14,46,0,0,0,17,49,15,47,0,0,0,0,0,16,48,14,46,0,0,0,17,49,15,47,0,0,0]
6573 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
6574 ; AVX512BW-NEXT: vpermi2w %zmm7, %zmm6, %zmm11
6575 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [16,48,14,46,0,0,0,17,49,15,47,0,0,0,18,50,16,48,14,46,0,0,0,17,49,15,47,0,0,0,18,50]
6576 ; AVX512BW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
6577 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm12
6578 ; AVX512BW-NEXT: vmovdqu16 %zmm11, %zmm12 {%k1}
6579 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [13,0,0,0,48,16,46,14,0,0,0,49,17,47,15,0,13,0,0,0,48,16,46,14,0,0,0,49,17,47,15,0]
6580 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
6581 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm2, %zmm11
6582 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm13 = <0,45,u,u,u,u,6,7,46,u,u,u,u,13,14,47,u,u,u,u,20,21,48,u,u,u,u,27,28,49,u,u>
6583 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm11, %zmm13
6584 ; AVX512BW-NEXT: movl $946921923, %ecx # imm = 0x3870E1C3
6585 ; AVX512BW-NEXT: kmovd %ecx, %k1
6586 ; AVX512BW-NEXT: vmovdqu16 %zmm13, %zmm12 {%k1}
6587 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,0,0,21,53,19,51,0,0,0,22,54,20,52,0,0,0,0,0,21,53,19,51,0,0,0,22,54,20,52,0,0]
6588 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
6589 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm11
6590 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm13 = [18,50,0,0,0,21,53,19,51,0,0,0,22,54,20,52,18,50,0,0,0,21,53,19,51,0,0,0,22,54,20,52]
6591 ; AVX512BW-NEXT: # zmm13 = mem[0,1,2,3,0,1,2,3]
6592 ; AVX512BW-NEXT: vpermi2w %zmm7, %zmm6, %zmm13
6593 ; AVX512BW-NEXT: vmovdqu16 %zmm11, %zmm13 {%k2}
6594 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [20,52,18,50,0,0,0,21,53,19,51,0,0,0,22,54,20,52,18,50,0,0,0,21,53,19,51,0,0,0,22,54]
6595 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
6596 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm11
6597 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm14 = <u,u,2,3,50,u,u,u,u,9,10,51,u,u,u,u,16,17,52,u,u,u,u,23,24,53,u,u,u,u,30,31>
6598 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm11, %zmm14
6599 ; AVX512BW-NEXT: movl $-1014559204, %ecx # imm = 0xC3870E1C
6600 ; AVX512BW-NEXT: kmovd %ecx, %k1
6601 ; AVX512BW-NEXT: vmovdqu16 %zmm14, %zmm13 {%k1}
6602 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [0,25,57,23,55,0,0,0,26,58,24,56,0,0,0,27,0,25,57,23,55,0,0,0,26,58,24,56,0,0,0,27]
6603 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
6604 ; AVX512BW-NEXT: vpermi2w %zmm7, %zmm6, %zmm11
6605 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [57,23,55,0,0,0,26,58,24,56,0,0,0,27,59,25,57,23,55,0,0,0,26,58,24,56,0,0,0,27,59,25]
6606 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
6607 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm6
6608 ; AVX512BW-NEXT: movl $-2096755688, %ecx # imm = 0x83060C18
6609 ; AVX512BW-NEXT: kmovd %ecx, %k1
6610 ; AVX512BW-NEXT: vmovdqu16 %zmm11, %zmm6 {%k1}
6611 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,0,0,25,57,23,55,0,0,0,26,58,24,56,0,0,0,0,0,25,57,23,55,0,0,0,26,58,24,56,0,0]
6612 ; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
6613 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm3
6614 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <54,u,u,u,u,5,6,55,u,u,u,u,12,13,56,u,u,u,u,19,20,57,u,u,u,u,26,27,58,u,u,u>
6615 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm3, %zmm1
6616 ; AVX512BW-NEXT: movl $473460961, %ecx # imm = 0x1C3870E1
6617 ; AVX512BW-NEXT: kmovd %ecx, %k1
6618 ; AVX512BW-NEXT: vmovdqu16 %zmm1, %zmm6 {%k1}
6619 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 320(%rax)
6620 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 256(%rax)
6621 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 192(%rax)
6622 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 128(%rax)
6623 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 64(%rax)
6624 ; AVX512BW-NEXT: vmovdqa64 %zmm8, (%rax)
6625 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 384(%rax)
6626 ; AVX512BW-NEXT: vzeroupper
6627 ; AVX512BW-NEXT: retq
6628 %in.vec0 = load <32 x i16>, ptr %in.vecptr0, align 64
6629 %in.vec1 = load <32 x i16>, ptr %in.vecptr1, align 64
6630 %in.vec2 = load <32 x i16>, ptr %in.vecptr2, align 64
6631 %in.vec3 = load <32 x i16>, ptr %in.vecptr3, align 64
6632 %in.vec4 = load <32 x i16>, ptr %in.vecptr4, align 64
6633 %in.vec5 = load <32 x i16>, ptr %in.vecptr5, align 64
6634 %in.vec6 = load <32 x i16>, ptr %in.vecptr6, align 64
6635 %1 = shufflevector <32 x i16> %in.vec0, <32 x i16> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
6636 %2 = shufflevector <32 x i16> %in.vec2, <32 x i16> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
6637 %3 = shufflevector <32 x i16> %in.vec4, <32 x i16> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
6638 %4 = shufflevector <64 x i16> %1, <64 x i16> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
6639 %5 = shufflevector <32 x i16> %in.vec6, <32 x i16> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
6640 %6 = shufflevector <64 x i16> %3, <64 x i16> %5, <96 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95>
6641 %7 = shufflevector <96 x i16> %6, <96 x i16> poison, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
6642 %8 = shufflevector <128 x i16> %4, <128 x i16> %7, <224 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223>
6643 %interleaved.vec = shufflevector <224 x i16> %8, <224 x i16> poison, <224 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 192, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 193, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 194, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 195, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 196, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 197, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 198, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 199, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 200, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 201, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 202, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 203, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 204, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 205, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 206, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 207, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 208, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 209, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 210, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 211, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 212, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 213, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 214, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 215, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 216, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 217, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 218, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 219, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 220, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 221, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 222, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191, i32 223>
6644 store <224 x i16> %interleaved.vec, ptr %out.vec, align 64
6645 ret void
6646 }
6648 define void @store_i16_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
6649 ; SSE-LABEL: store_i16_stride7_vf64:
6650 ; SSE: # %bb.0:
6651 ; SSE-NEXT: subq $1640, %rsp # imm = 0x668
6652 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
6653 ; SSE-NEXT: movdqa 112(%rdi), %xmm15
6654 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6655 ; SSE-NEXT: movdqa 112(%rsi), %xmm2
6656 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6657 ; SSE-NEXT: movdqa 96(%rdx), %xmm5
6658 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6659 ; SSE-NEXT: movdqa 112(%rdx), %xmm1
6660 ; SSE-NEXT: movdqa 96(%rcx), %xmm12
6661 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6662 ; SSE-NEXT: movdqa 112(%rcx), %xmm6
6663 ; SSE-NEXT: movdqa 112(%r8), %xmm4
6664 ; SSE-NEXT: movdqa 112(%r9), %xmm8
6665 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6666 ; SSE-NEXT: movaps 112(%rax), %xmm7
6667 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,2,2,2]
6668 ; SSE-NEXT: movdqa %xmm1, %xmm10
6669 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6670 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [65535,65535,65535,65535,65535,65535,0,65535]
6671 ; SSE-NEXT: movdqa %xmm13, %xmm1
6672 ; SSE-NEXT: pandn %xmm0, %xmm1
6673 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm6[3,3,3,3,4,5,6,7]
6674 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6675 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
6676 ; SSE-NEXT: pand %xmm13, %xmm0
6677 ; SSE-NEXT: por %xmm1, %xmm0
6678 ; SSE-NEXT: punpckhwd {{.*#+}} xmm15 = xmm15[4],xmm2[4],xmm15[5],xmm2[5],xmm15[6],xmm2[6],xmm15[7],xmm2[7]
6679 ; SSE-NEXT: movdqa %xmm15, %xmm1
6680 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6681 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm0[3,0]
6682 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,0,65535,65535,65535,65535,65535,65535]
6683 ; SSE-NEXT: pand %xmm3, %xmm0
6684 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[1,1,1,1]
6685 ; SSE-NEXT: movdqa %xmm4, %xmm9
6686 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6687 ; SSE-NEXT: pandn %xmm2, %xmm3
6688 ; SSE-NEXT: por %xmm0, %xmm3
6689 ; SSE-NEXT: movdqa %xmm8, %xmm0
6690 ; SSE-NEXT: psrld $16, %xmm0
6691 ; SSE-NEXT: movdqa %xmm0, %xmm4
6692 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[1,0],xmm3[0,0]
6693 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,0],xmm1[0,2]
6694 ; SSE-NEXT: movaps {{.*#+}} xmm1 = [65535,65535,65535,0,65535,65535,65535,65535]
6695 ; SSE-NEXT: andps %xmm1, %xmm4
6696 ; SSE-NEXT: andnps %xmm7, %xmm1
6697 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6698 ; SSE-NEXT: orps %xmm4, %xmm1
6699 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6700 ; SSE-NEXT: movdqa %xmm12, %xmm1
6701 ; SSE-NEXT: psrlq $48, %xmm1
6702 ; SSE-NEXT: movdqa %xmm5, %xmm3
6703 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm3 = xmm3[1],xmm1[1]
6704 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,65535,65535,0,0,65535,65535,65535]
6705 ; SSE-NEXT: movdqa %xmm2, %xmm1
6706 ; SSE-NEXT: pandn %xmm3, %xmm1
6707 ; SSE-NEXT: movdqa 96(%rdi), %xmm4
6708 ; SSE-NEXT: movdqa %xmm4, (%rsp) # 16-byte Spill
6709 ; SSE-NEXT: movdqa 96(%rsi), %xmm3
6710 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6711 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
6712 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6713 ; SSE-NEXT: movdqa %xmm4, %xmm3
6714 ; SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
6715 ; SSE-NEXT: por %xmm1, %xmm3
6716 ; SSE-NEXT: movdqa 96(%r8), %xmm1
6717 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6718 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
6719 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,65535,65535,65535,65535,0,65535,65535]
6720 ; SSE-NEXT: movdqa %xmm5, %xmm4
6721 ; SSE-NEXT: movdqa %xmm5, %xmm14
6722 ; SSE-NEXT: pandn %xmm1, %xmm4
6723 ; SSE-NEXT: por %xmm3, %xmm4
6724 ; SSE-NEXT: movdqa 96(%r9), %xmm1
6725 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6726 ; SSE-NEXT: psrld $16, %xmm1
6727 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,0],xmm4[2,0]
6728 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm1[2,0]
6729 ; SSE-NEXT: movdqa 96(%rax), %xmm1
6730 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6731 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [0,65535,65535,65535,65535,65535,65535,0]
6732 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,3,3,3]
6733 ; SSE-NEXT: movdqa %xmm5, %xmm4
6734 ; SSE-NEXT: pandn %xmm1, %xmm4
6735 ; SSE-NEXT: andps %xmm5, %xmm3
6736 ; SSE-NEXT: por %xmm3, %xmm4
6737 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6738 ; SSE-NEXT: movdqa %xmm6, %xmm1
6739 ; SSE-NEXT: psrlq $48, %xmm1
6740 ; SSE-NEXT: movdqa %xmm10, %xmm3
6741 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm3 = xmm3[1],xmm1[1]
6742 ; SSE-NEXT: movdqa %xmm2, %xmm1
6743 ; SSE-NEXT: pandn %xmm3, %xmm1
6744 ; SSE-NEXT: movdqa %xmm15, %xmm3
6745 ; SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
6746 ; SSE-NEXT: por %xmm1, %xmm3
6747 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm9[2,2,3,3]
6748 ; SSE-NEXT: movdqa %xmm14, %xmm4
6749 ; SSE-NEXT: pandn %xmm1, %xmm4
6750 ; SSE-NEXT: por %xmm3, %xmm4
6751 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,0],xmm4[2,0]
6752 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm0[2,0]
6753 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[3,3,3,3]
6754 ; SSE-NEXT: movdqa %xmm5, %xmm1
6755 ; SSE-NEXT: pandn %xmm0, %xmm1
6756 ; SSE-NEXT: andps %xmm5, %xmm3
6757 ; SSE-NEXT: por %xmm3, %xmm1
6758 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6759 ; SSE-NEXT: movdqa (%rax), %xmm11
6760 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[0,1,0,1]
6761 ; SSE-NEXT: movdqa %xmm14, %xmm1
6762 ; SSE-NEXT: pandn %xmm0, %xmm1
6763 ; SSE-NEXT: movdqa (%r8), %xmm8
6764 ; SSE-NEXT: movdqa (%r9), %xmm7
6765 ; SSE-NEXT: movdqa %xmm8, %xmm3
6766 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6767 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm7[0],xmm3[1],xmm7[1],xmm3[2],xmm7[2],xmm3[3],xmm7[3]
6768 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6769 ; SSE-NEXT: movdqa %xmm3, %xmm0
6770 ; SSE-NEXT: movdqa %xmm3, %xmm6
6771 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6772 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
6773 ; SSE-NEXT: pand %xmm14, %xmm0
6774 ; SSE-NEXT: por %xmm1, %xmm0
6775 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,65535,65535,0,0,0,65535,65535]
6776 ; SSE-NEXT: movdqa %xmm2, %xmm1
6777 ; SSE-NEXT: movdqa %xmm2, %xmm10
6778 ; SSE-NEXT: pandn %xmm0, %xmm1
6779 ; SSE-NEXT: movdqa (%rcx), %xmm5
6780 ; SSE-NEXT: movdqa %xmm5, %xmm0
6781 ; SSE-NEXT: psrld $16, %xmm0
6782 ; SSE-NEXT: movdqa (%rdx), %xmm2
6783 ; SSE-NEXT: movdqa %xmm2, %xmm3
6784 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
6785 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,0,65535,65535,65535,65535,65535]
6786 ; SSE-NEXT: movdqa %xmm0, %xmm4
6787 ; SSE-NEXT: movdqa %xmm0, %xmm12
6788 ; SSE-NEXT: pandn %xmm3, %xmm4
6789 ; SSE-NEXT: movdqa (%rdi), %xmm9
6790 ; SSE-NEXT: movdqa (%rsi), %xmm0
6791 ; SSE-NEXT: movdqa %xmm0, %xmm3
6792 ; SSE-NEXT: movdqa %xmm0, %xmm15
6793 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6794 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm9[0],xmm3[1],xmm9[1],xmm3[2],xmm9[2],xmm3[3],xmm9[3]
6795 ; SSE-NEXT: movdqa %xmm9, %xmm0
6796 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6797 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[2,2,2,2,4,5,6,7]
6798 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,4]
6799 ; SSE-NEXT: pand %xmm12, %xmm3
6800 ; SSE-NEXT: movdqa %xmm12, %xmm9
6801 ; SSE-NEXT: por %xmm4, %xmm3
6802 ; SSE-NEXT: pand %xmm10, %xmm3
6803 ; SSE-NEXT: por %xmm1, %xmm3
6804 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6805 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,0,65535,65535,65535,65535]
6806 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6807 ; SSE-NEXT: pandn %xmm11, %xmm1
6808 ; SSE-NEXT: movdqa %xmm6, %xmm3
6809 ; SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
6810 ; SSE-NEXT: por %xmm1, %xmm3
6811 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,0,0,0,65535,65535,65535,65535]
6812 ; SSE-NEXT: movdqa %xmm12, %xmm1
6813 ; SSE-NEXT: pandn %xmm3, %xmm1
6814 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6815 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm2[2,2,2,2]
6816 ; SSE-NEXT: movdqa %xmm13, %xmm4
6817 ; SSE-NEXT: pandn %xmm3, %xmm4
6818 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm5[0,1,2,3,4,4,4,4]
6819 ; SSE-NEXT: pand %xmm13, %xmm3
6820 ; SSE-NEXT: por %xmm4, %xmm3
6821 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm15[4],xmm0[5],xmm15[5],xmm0[6],xmm15[6],xmm0[7],xmm15[7]
6822 ; SSE-NEXT: movdqa %xmm0, %xmm4
6823 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6824 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[3,3]
6825 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm5[3,3,3,3,4,5,6,7]
6826 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6827 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm4[0,2]
6828 ; SSE-NEXT: andps %xmm12, %xmm3
6829 ; SSE-NEXT: orps %xmm1, %xmm3
6830 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6831 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm11[2,3,2,3]
6832 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [65535,0,65535,65535,65535,65535,65535,65535]
6833 ; SSE-NEXT: movdqa %xmm13, %xmm3
6834 ; SSE-NEXT: pandn %xmm1, %xmm3
6835 ; SSE-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
6836 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm7[2,2,2,2,4,5,6,7]
6837 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,4]
6838 ; SSE-NEXT: pand %xmm13, %xmm1
6839 ; SSE-NEXT: movdqa %xmm13, %xmm15
6840 ; SSE-NEXT: por %xmm3, %xmm1
6841 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm5[4],xmm2[5],xmm5[5],xmm2[6],xmm5[6],xmm2[7],xmm5[7]
6842 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6843 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,2],xmm2[2,3]
6844 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,2],xmm1[0,3]
6845 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6846 ; SSE-NEXT: movdqa 16(%rax), %xmm3
6847 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,1,0,1]
6848 ; SSE-NEXT: movdqa %xmm3, %xmm11
6849 ; SSE-NEXT: movdqa %xmm14, %xmm2
6850 ; SSE-NEXT: movdqa %xmm14, %xmm3
6851 ; SSE-NEXT: pandn %xmm1, %xmm3
6852 ; SSE-NEXT: movdqa 16(%r8), %xmm14
6853 ; SSE-NEXT: movdqa 16(%r9), %xmm1
6854 ; SSE-NEXT: movdqa %xmm14, %xmm5
6855 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6856 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
6857 ; SSE-NEXT: movdqa %xmm1, %xmm13
6858 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6859 ; SSE-NEXT: movdqa %xmm5, %xmm1
6860 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6861 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
6862 ; SSE-NEXT: pand %xmm2, %xmm1
6863 ; SSE-NEXT: por %xmm3, %xmm1
6864 ; SSE-NEXT: movdqa %xmm10, %xmm3
6865 ; SSE-NEXT: pandn %xmm1, %xmm3
6866 ; SSE-NEXT: movdqa 16(%rcx), %xmm8
6867 ; SSE-NEXT: movdqa %xmm8, %xmm1
6868 ; SSE-NEXT: psrld $16, %xmm1
6869 ; SSE-NEXT: movdqa 16(%rdx), %xmm7
6870 ; SSE-NEXT: movdqa %xmm7, %xmm4
6871 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
6872 ; SSE-NEXT: movdqa %xmm9, %xmm2
6873 ; SSE-NEXT: movdqa %xmm9, %xmm1
6874 ; SSE-NEXT: pandn %xmm4, %xmm1
6875 ; SSE-NEXT: movdqa 16(%rdi), %xmm0
6876 ; SSE-NEXT: movdqa 16(%rsi), %xmm6
6877 ; SSE-NEXT: movdqa %xmm6, %xmm4
6878 ; SSE-NEXT: movdqa %xmm6, %xmm9
6879 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6880 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
6881 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6882 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[2,2,2,2,4,5,6,7]
6883 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,5,4]
6884 ; SSE-NEXT: pand %xmm2, %xmm4
6885 ; SSE-NEXT: por %xmm1, %xmm4
6886 ; SSE-NEXT: pand %xmm10, %xmm4
6887 ; SSE-NEXT: movdqa %xmm10, %xmm6
6888 ; SSE-NEXT: por %xmm3, %xmm4
6889 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6890 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [65535,65535,65535,0,65535,65535,65535,65535]
6891 ; SSE-NEXT: movdqa %xmm10, %xmm1
6892 ; SSE-NEXT: pandn %xmm11, %xmm1
6893 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6894 ; SSE-NEXT: psrldq {{.*#+}} xmm5 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
6895 ; SSE-NEXT: por %xmm1, %xmm5
6896 ; SSE-NEXT: movdqa %xmm12, %xmm1
6897 ; SSE-NEXT: pandn %xmm5, %xmm1
6898 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6899 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[2,2,2,2]
6900 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,65535,65535,65535,65535,65535,0,65535]
6901 ; SSE-NEXT: movdqa %xmm5, %xmm4
6902 ; SSE-NEXT: pandn %xmm3, %xmm4
6903 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm8[0,1,2,3,4,4,4,4]
6904 ; SSE-NEXT: pand %xmm5, %xmm3
6905 ; SSE-NEXT: por %xmm4, %xmm3
6906 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm9[4],xmm0[5],xmm9[5],xmm0[6],xmm9[6],xmm0[7],xmm9[7]
6907 ; SSE-NEXT: movdqa %xmm0, %xmm4
6908 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6909 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[3,3]
6910 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm8[3,3,3,3,4,5,6,7]
6911 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6912 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm4[0,2]
6913 ; SSE-NEXT: andps %xmm12, %xmm3
6914 ; SSE-NEXT: orps %xmm1, %xmm3
6915 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6916 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm11[2,3,2,3]
6917 ; SSE-NEXT: movdqa %xmm15, %xmm3
6918 ; SSE-NEXT: pandn %xmm1, %xmm3
6919 ; SSE-NEXT: movdqa %xmm13, %xmm1
6920 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm14[4],xmm1[5],xmm14[5],xmm1[6],xmm14[6],xmm1[7],xmm14[7]
6921 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,2,2,2,4,5,6,7]
6922 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,4]
6923 ; SSE-NEXT: pand %xmm15, %xmm1
6924 ; SSE-NEXT: por %xmm3, %xmm1
6925 ; SSE-NEXT: punpckhwd {{.*#+}} xmm7 = xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
6926 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6927 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,2],xmm7[2,3]
6928 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,2],xmm1[0,3]
6929 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6930 ; SSE-NEXT: movdqa 32(%rax), %xmm7
6931 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[0,1,0,1]
6932 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,65535,65535,65535,65535,0,65535,65535]
6933 ; SSE-NEXT: movdqa %xmm2, %xmm3
6934 ; SSE-NEXT: pandn %xmm1, %xmm3
6935 ; SSE-NEXT: movdqa 32(%r8), %xmm14
6936 ; SSE-NEXT: movdqa 32(%r9), %xmm4
6937 ; SSE-NEXT: movdqa %xmm14, %xmm0
6938 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6939 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
6940 ; SSE-NEXT: movdqa %xmm4, %xmm13
6941 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6942 ; SSE-NEXT: movdqa %xmm0, %xmm1
6943 ; SSE-NEXT: movdqa %xmm0, %xmm11
6944 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6945 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
6946 ; SSE-NEXT: pand %xmm2, %xmm1
6947 ; SSE-NEXT: por %xmm3, %xmm1
6948 ; SSE-NEXT: movdqa %xmm6, %xmm8
6949 ; SSE-NEXT: movdqa %xmm6, %xmm3
6950 ; SSE-NEXT: pandn %xmm1, %xmm3
6951 ; SSE-NEXT: movdqa 32(%rcx), %xmm2
6952 ; SSE-NEXT: movdqa %xmm2, %xmm1
6953 ; SSE-NEXT: psrld $16, %xmm1
6954 ; SSE-NEXT: movdqa 32(%rdx), %xmm5
6955 ; SSE-NEXT: movdqa %xmm5, %xmm4
6956 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
6957 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,0,0,65535,65535,65535,65535,65535]
6958 ; SSE-NEXT: movdqa %xmm12, %xmm1
6959 ; SSE-NEXT: pandn %xmm4, %xmm1
6960 ; SSE-NEXT: movdqa 32(%rdi), %xmm9
6961 ; SSE-NEXT: movdqa 32(%rsi), %xmm0
6962 ; SSE-NEXT: movdqa %xmm0, %xmm4
6963 ; SSE-NEXT: movdqa %xmm0, %xmm6
6964 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6965 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm9[0],xmm4[1],xmm9[1],xmm4[2],xmm9[2],xmm4[3],xmm9[3]
6966 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6967 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[2,2,2,2,4,5,6,7]
6968 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,4,5,5,4]
6969 ; SSE-NEXT: pand %xmm12, %xmm0
6970 ; SSE-NEXT: por %xmm1, %xmm0
6971 ; SSE-NEXT: pand %xmm8, %xmm0
6972 ; SSE-NEXT: por %xmm3, %xmm0
6973 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6974 ; SSE-NEXT: movdqa %xmm10, %xmm1
6975 ; SSE-NEXT: movdqa %xmm10, %xmm15
6976 ; SSE-NEXT: pandn %xmm7, %xmm1
6977 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6978 ; SSE-NEXT: movdqa %xmm11, %xmm3
6979 ; SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
6980 ; SSE-NEXT: por %xmm1, %xmm3
6981 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [65535,0,0,0,65535,65535,65535,65535]
6982 ; SSE-NEXT: movdqa %xmm10, %xmm1
6983 ; SSE-NEXT: pandn %xmm3, %xmm1
6984 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm5[2,2,2,2]
6985 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6986 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,65535,65535,65535,65535,65535,0,65535]
6987 ; SSE-NEXT: movdqa %xmm0, %xmm4
6988 ; SSE-NEXT: pandn %xmm3, %xmm4
6989 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm2[0,1,2,3,4,4,4,4]
6990 ; SSE-NEXT: pand %xmm0, %xmm3
6991 ; SSE-NEXT: por %xmm4, %xmm3
6992 ; SSE-NEXT: movdqa %xmm9, %xmm0
6993 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
6994 ; SSE-NEXT: movdqa %xmm0, %xmm4
6995 ; SSE-NEXT: movdqa %xmm0, %xmm11
6996 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6997 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[3,3]
6998 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[3,3,3,3,4,5,6,7]
6999 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7000 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm4[0,2]
7001 ; SSE-NEXT: andps %xmm10, %xmm0
7002 ; SSE-NEXT: orps %xmm1, %xmm0
7003 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7004 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[2,3,2,3]
7005 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,65535,65535,65535,65535]
7006 ; SSE-NEXT: movdqa %xmm0, %xmm3
7007 ; SSE-NEXT: pandn %xmm1, %xmm3
7008 ; SSE-NEXT: movdqa %xmm13, %xmm1
7009 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm14[4],xmm1[5],xmm14[5],xmm1[6],xmm14[6],xmm1[7],xmm14[7]
7010 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,2,2,2,4,5,6,7]
7011 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,4]
7012 ; SSE-NEXT: pand %xmm0, %xmm1
7013 ; SSE-NEXT: por %xmm3, %xmm1
7014 ; SSE-NEXT: punpckhwd {{.*#+}} xmm5 = xmm5[4],xmm2[4],xmm5[5],xmm2[5],xmm5[6],xmm2[6],xmm5[7],xmm2[7]
7015 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7016 ; SSE-NEXT: movdqa %xmm11, %xmm0
7017 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,2],xmm5[2,3]
7018 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,2],xmm1[0,3]
7019 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7020 ; SSE-NEXT: movdqa 48(%rax), %xmm0
7021 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm0[0,1,0,1]
7022 ; SSE-NEXT: movdqa %xmm0, %xmm5
7023 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [65535,65535,65535,65535,65535,0,65535,65535]
7024 ; SSE-NEXT: movdqa %xmm14, %xmm3
7025 ; SSE-NEXT: pandn %xmm1, %xmm3
7026 ; SSE-NEXT: movdqa 48(%r8), %xmm0
7027 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7028 ; SSE-NEXT: movdqa 48(%r9), %xmm12
7029 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm12[0],xmm0[1],xmm12[1],xmm0[2],xmm12[2],xmm0[3],xmm12[3]
7030 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7031 ; SSE-NEXT: movdqa %xmm0, %xmm1
7032 ; SSE-NEXT: movdqa %xmm0, %xmm11
7033 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7034 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7035 ; SSE-NEXT: pand %xmm14, %xmm1
7036 ; SSE-NEXT: por %xmm3, %xmm1
7037 ; SSE-NEXT: movdqa %xmm8, %xmm7
7038 ; SSE-NEXT: movdqa %xmm8, %xmm3
7039 ; SSE-NEXT: pandn %xmm1, %xmm3
7040 ; SSE-NEXT: movdqa 48(%rcx), %xmm0
7041 ; SSE-NEXT: movdqa %xmm0, %xmm1
7042 ; SSE-NEXT: movdqa %xmm0, %xmm9
7043 ; SSE-NEXT: psrld $16, %xmm1
7044 ; SSE-NEXT: movdqa 48(%rdx), %xmm0
7045 ; SSE-NEXT: movdqa %xmm0, %xmm4
7046 ; SSE-NEXT: movdqa %xmm0, %xmm10
7047 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
7048 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,0,0,65535,65535,65535,65535,65535]
7049 ; SSE-NEXT: movdqa %xmm2, %xmm1
7050 ; SSE-NEXT: pandn %xmm4, %xmm1
7051 ; SSE-NEXT: movdqa 48(%rdi), %xmm6
7052 ; SSE-NEXT: movdqa 48(%rsi), %xmm0
7053 ; SSE-NEXT: movdqa %xmm0, %xmm4
7054 ; SSE-NEXT: movdqa %xmm0, %xmm8
7055 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7056 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xmm6[3]
7057 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7058 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[2,2,2,2,4,5,6,7]
7059 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,4,5,5,4]
7060 ; SSE-NEXT: pand %xmm2, %xmm0
7061 ; SSE-NEXT: por %xmm1, %xmm0
7062 ; SSE-NEXT: pand %xmm7, %xmm0
7063 ; SSE-NEXT: por %xmm3, %xmm0
7064 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7065 ; SSE-NEXT: pandn %xmm5, %xmm15
7066 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7067 ; SSE-NEXT: movdqa %xmm11, %xmm3
7068 ; SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
7069 ; SSE-NEXT: por %xmm15, %xmm3
7070 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,0,0,0,65535,65535,65535,65535]
7071 ; SSE-NEXT: movdqa %xmm2, %xmm1
7072 ; SSE-NEXT: pandn %xmm3, %xmm1
7073 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7074 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm10[2,2,2,2]
7075 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,65535,65535,65535,65535,65535,0,65535]
7076 ; SSE-NEXT: movdqa %xmm0, %xmm4
7077 ; SSE-NEXT: pandn %xmm3, %xmm4
7078 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm9[0,1,2,3,4,4,4,4]
7079 ; SSE-NEXT: pand %xmm0, %xmm3
7080 ; SSE-NEXT: por %xmm4, %xmm3
7081 ; SSE-NEXT: movdqa %xmm6, %xmm0
7082 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm8[4],xmm0[5],xmm8[5],xmm0[6],xmm8[6],xmm0[7],xmm8[7]
7083 ; SSE-NEXT: movdqa %xmm0, %xmm4
7084 ; SSE-NEXT: movdqa %xmm0, %xmm6
7085 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7086 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[3,3]
7087 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[3,3,3,3,4,5,6,7]
7088 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7089 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm4[0,2]
7090 ; SSE-NEXT: andps %xmm2, %xmm0
7091 ; SSE-NEXT: orps %xmm1, %xmm0
7092 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7093 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[2,3,2,3]
7094 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,65535,65535,65535,65535]
7095 ; SSE-NEXT: movdqa %xmm0, %xmm3
7096 ; SSE-NEXT: pandn %xmm1, %xmm3
7097 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
7098 ; SSE-NEXT: # xmm12 = xmm12[4],mem[4],xmm12[5],mem[5],xmm12[6],mem[6],xmm12[7],mem[7]
7099 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm12[2,2,2,2,4,5,6,7]
7100 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,4]
7101 ; SSE-NEXT: pand %xmm0, %xmm1
7102 ; SSE-NEXT: por %xmm3, %xmm1
7103 ; SSE-NEXT: movdqa %xmm10, %xmm3
7104 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm9[4],xmm3[5],xmm9[5],xmm3[6],xmm9[6],xmm3[7],xmm9[7]
7105 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7106 ; SSE-NEXT: movdqa %xmm6, %xmm0
7107 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,2],xmm3[2,3]
7108 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,2],xmm1[0,3]
7109 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7110 ; SSE-NEXT: movdqa 64(%rax), %xmm0
7111 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm0[0,1,0,1]
7112 ; SSE-NEXT: movdqa %xmm0, %xmm5
7113 ; SSE-NEXT: movdqa %xmm14, %xmm3
7114 ; SSE-NEXT: pandn %xmm1, %xmm3
7115 ; SSE-NEXT: movdqa 64(%r8), %xmm1
7116 ; SSE-NEXT: movdqa 64(%r9), %xmm2
7117 ; SSE-NEXT: movdqa %xmm1, %xmm0
7118 ; SSE-NEXT: movdqa %xmm1, %xmm7
7119 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7120 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
7121 ; SSE-NEXT: movdqa %xmm2, %xmm10
7122 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7123 ; SSE-NEXT: movdqa %xmm0, %xmm1
7124 ; SSE-NEXT: movdqa %xmm0, %xmm2
7125 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7126 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7127 ; SSE-NEXT: pand %xmm14, %xmm1
7128 ; SSE-NEXT: por %xmm3, %xmm1
7129 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [65535,65535,65535,0,0,0,65535,65535]
7130 ; SSE-NEXT: movdqa %xmm14, %xmm3
7131 ; SSE-NEXT: pandn %xmm1, %xmm3
7132 ; SSE-NEXT: movdqa 64(%rcx), %xmm0
7133 ; SSE-NEXT: movdqa %xmm0, %xmm1
7134 ; SSE-NEXT: movdqa %xmm0, %xmm11
7135 ; SSE-NEXT: psrld $16, %xmm1
7136 ; SSE-NEXT: movdqa 64(%rdx), %xmm0
7137 ; SSE-NEXT: movdqa %xmm0, %xmm4
7138 ; SSE-NEXT: movdqa %xmm0, %xmm13
7139 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
7140 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,0,0,65535,65535,65535,65535,65535]
7141 ; SSE-NEXT: movdqa %xmm12, %xmm1
7142 ; SSE-NEXT: pandn %xmm4, %xmm1
7143 ; SSE-NEXT: movdqa 64(%rdi), %xmm8
7144 ; SSE-NEXT: movdqa 64(%rsi), %xmm0
7145 ; SSE-NEXT: movdqa %xmm0, %xmm4
7146 ; SSE-NEXT: movdqa %xmm0, %xmm6
7147 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7148 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm8[0],xmm4[1],xmm8[1],xmm4[2],xmm8[2],xmm4[3],xmm8[3]
7149 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7150 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[2,2,2,2,4,5,6,7]
7151 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,4,5,5,4]
7152 ; SSE-NEXT: pand %xmm12, %xmm0
7153 ; SSE-NEXT: por %xmm1, %xmm0
7154 ; SSE-NEXT: pand %xmm14, %xmm0
7155 ; SSE-NEXT: por %xmm3, %xmm0
7156 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7157 ; SSE-NEXT: movdqa {{.*#+}} xmm15 = [65535,65535,65535,0,65535,65535,65535,65535]
7158 ; SSE-NEXT: movdqa %xmm15, %xmm1
7159 ; SSE-NEXT: pandn %xmm5, %xmm1
7160 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7161 ; SSE-NEXT: movdqa %xmm2, %xmm3
7162 ; SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
7163 ; SSE-NEXT: por %xmm1, %xmm3
7164 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [65535,0,0,0,65535,65535,65535,65535]
7165 ; SSE-NEXT: movdqa %xmm9, %xmm1
7166 ; SSE-NEXT: pandn %xmm3, %xmm1
7167 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm13[2,2,2,2]
7168 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7169 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,65535,65535,65535,65535,65535,0,65535]
7170 ; SSE-NEXT: movdqa %xmm0, %xmm4
7171 ; SSE-NEXT: pandn %xmm3, %xmm4
7172 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm11[0,1,2,3,4,4,4,4]
7173 ; SSE-NEXT: pand %xmm0, %xmm3
7174 ; SSE-NEXT: por %xmm4, %xmm3
7175 ; SSE-NEXT: movdqa %xmm8, %xmm0
7176 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
7177 ; SSE-NEXT: movdqa %xmm0, %xmm4
7178 ; SSE-NEXT: movdqa %xmm0, %xmm2
7179 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7180 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[3,3]
7181 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[3,3,3,3,4,5,6,7]
7182 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7183 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm4[0,2]
7184 ; SSE-NEXT: andps %xmm9, %xmm0
7185 ; SSE-NEXT: orps %xmm1, %xmm0
7186 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7187 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[2,3,2,3]
7188 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,65535,65535,65535,65535]
7189 ; SSE-NEXT: movdqa %xmm0, %xmm3
7190 ; SSE-NEXT: pandn %xmm1, %xmm3
7191 ; SSE-NEXT: movdqa %xmm10, %xmm1
7192 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm7[4],xmm1[5],xmm7[5],xmm1[6],xmm7[6],xmm1[7],xmm7[7]
7193 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,2,2,2,4,5,6,7]
7194 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,4]
7195 ; SSE-NEXT: pand %xmm0, %xmm1
7196 ; SSE-NEXT: por %xmm3, %xmm1
7197 ; SSE-NEXT: movdqa %xmm13, %xmm3
7198 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm11[4],xmm3[5],xmm11[5],xmm3[6],xmm11[6],xmm3[7],xmm11[7]
7199 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7200 ; SSE-NEXT: movdqa %xmm2, %xmm0
7201 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,2],xmm3[2,3]
7202 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,2],xmm1[0,3]
7203 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7204 ; SSE-NEXT: movdqa 80(%rax), %xmm9
7205 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm9[0,1,0,1]
7206 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,65535,65535,65535,65535,0,65535,65535]
7207 ; SSE-NEXT: movdqa %xmm12, %xmm3
7208 ; SSE-NEXT: pandn %xmm1, %xmm3
7209 ; SSE-NEXT: movdqa 80(%r8), %xmm1
7210 ; SSE-NEXT: movdqa 80(%r9), %xmm2
7211 ; SSE-NEXT: movdqa %xmm1, %xmm8
7212 ; SSE-NEXT: movdqa %xmm1, %xmm10
7213 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7214 ; SSE-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm2[0],xmm8[1],xmm2[1],xmm8[2],xmm2[2],xmm8[3],xmm2[3]
7215 ; SSE-NEXT: movdqa %xmm2, %xmm11
7216 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7217 ; SSE-NEXT: movdqa %xmm8, %xmm1
7218 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7219 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7220 ; SSE-NEXT: pand %xmm12, %xmm1
7221 ; SSE-NEXT: por %xmm3, %xmm1
7222 ; SSE-NEXT: movdqa %xmm14, %xmm3
7223 ; SSE-NEXT: pandn %xmm1, %xmm3
7224 ; SSE-NEXT: movdqa 80(%rcx), %xmm0
7225 ; SSE-NEXT: movdqa %xmm0, %xmm1
7226 ; SSE-NEXT: movdqa %xmm0, %xmm13
7227 ; SSE-NEXT: psrld $16, %xmm1
7228 ; SSE-NEXT: movdqa 80(%rdx), %xmm2
7229 ; SSE-NEXT: movdqa %xmm2, %xmm4
7230 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
7231 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,0,0,65535,65535,65535,65535,65535]
7232 ; SSE-NEXT: movdqa %xmm5, %xmm1
7233 ; SSE-NEXT: pandn %xmm4, %xmm1
7234 ; SSE-NEXT: movdqa 80(%rdi), %xmm7
7235 ; SSE-NEXT: movdqa 80(%rsi), %xmm0
7236 ; SSE-NEXT: movdqa %xmm0, %xmm4
7237 ; SSE-NEXT: movdqa %xmm0, %xmm6
7238 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7239 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm7[0],xmm4[1],xmm7[1],xmm4[2],xmm7[2],xmm4[3],xmm7[3]
7240 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7241 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[2,2,2,2,4,5,6,7]
7242 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,4,5,5,4]
7243 ; SSE-NEXT: pand %xmm5, %xmm0
7244 ; SSE-NEXT: por %xmm1, %xmm0
7245 ; SSE-NEXT: pand %xmm14, %xmm0
7246 ; SSE-NEXT: por %xmm3, %xmm0
7247 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7248 ; SSE-NEXT: movdqa %xmm15, %xmm1
7249 ; SSE-NEXT: movdqa %xmm9, %xmm5
7250 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7251 ; SSE-NEXT: pandn %xmm9, %xmm1
7252 ; SSE-NEXT: psrldq {{.*#+}} xmm8 = xmm8[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
7253 ; SSE-NEXT: por %xmm1, %xmm8
7254 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [65535,0,0,0,65535,65535,65535,65535]
7255 ; SSE-NEXT: movdqa %xmm9, %xmm1
7256 ; SSE-NEXT: pandn %xmm8, %xmm1
7257 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm2[2,2,2,2]
7258 ; SSE-NEXT: movdqa %xmm2, %xmm8
7259 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7260 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,65535,65535,65535,65535,65535,0,65535]
7261 ; SSE-NEXT: movdqa %xmm0, %xmm4
7262 ; SSE-NEXT: pandn %xmm3, %xmm4
7263 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm13[0,1,2,3,4,4,4,4]
7264 ; SSE-NEXT: pand %xmm0, %xmm3
7265 ; SSE-NEXT: por %xmm4, %xmm3
7266 ; SSE-NEXT: movdqa %xmm7, %xmm0
7267 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
7268 ; SSE-NEXT: movdqa %xmm0, %xmm4
7269 ; SSE-NEXT: movdqa %xmm0, %xmm2
7270 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7271 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[3,3]
7272 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[3,3,3,3,4,5,6,7]
7273 ; SSE-NEXT: movdqa %xmm13, %xmm6
7274 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7275 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm4[0,2]
7276 ; SSE-NEXT: andps %xmm9, %xmm0
7277 ; SSE-NEXT: movaps %xmm9, %xmm13
7278 ; SSE-NEXT: orps %xmm1, %xmm0
7279 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7280 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[2,3,2,3]
7281 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,65535,65535,65535,65535]
7282 ; SSE-NEXT: movdqa %xmm0, %xmm3
7283 ; SSE-NEXT: pandn %xmm1, %xmm3
7284 ; SSE-NEXT: movdqa %xmm11, %xmm1
7285 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm10[4],xmm1[5],xmm10[5],xmm1[6],xmm10[6],xmm1[7],xmm10[7]
7286 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,2,2,2,4,5,6,7]
7287 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,4]
7288 ; SSE-NEXT: pand %xmm0, %xmm1
7289 ; SSE-NEXT: movdqa %xmm0, %xmm11
7290 ; SSE-NEXT: por %xmm3, %xmm1
7291 ; SSE-NEXT: punpckhwd {{.*#+}} xmm8 = xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
7292 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7293 ; SSE-NEXT: movdqa %xmm2, %xmm0
7294 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,2],xmm8[2,3]
7295 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,2],xmm1[0,3]
7296 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7297 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7298 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[0,1,0,1]
7299 ; SSE-NEXT: movdqa %xmm12, %xmm3
7300 ; SSE-NEXT: pandn %xmm1, %xmm3
7301 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
7302 ; SSE-NEXT: movdqa %xmm8, %xmm0
7303 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7304 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3]
7305 ; SSE-NEXT: movdqa %xmm0, %xmm1
7306 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7307 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7308 ; SSE-NEXT: pand %xmm12, %xmm1
7309 ; SSE-NEXT: por %xmm3, %xmm1
7310 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7311 ; SSE-NEXT: movdqa %xmm2, %xmm3
7312 ; SSE-NEXT: psrld $16, %xmm3
7313 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
7314 ; SSE-NEXT: movdqa %xmm7, %xmm4
7315 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
7316 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [65535,0,0,65535,65535,65535,65535,65535]
7317 ; SSE-NEXT: movdqa %xmm14, %xmm3
7318 ; SSE-NEXT: pandn %xmm4, %xmm3
7319 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7320 ; SSE-NEXT: punpcklwd (%rsp), %xmm4 # 16-byte Folded Reload
7321 ; SSE-NEXT: # xmm4 = xmm4[0],mem[0],xmm4[1],mem[1],xmm4[2],mem[2],xmm4[3],mem[3]
7322 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[2,2,2,2,4,5,6,7]
7323 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,5,4]
7324 ; SSE-NEXT: pand %xmm14, %xmm4
7325 ; SSE-NEXT: por %xmm3, %xmm4
7326 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [65535,65535,65535,0,0,0,65535,65535]
7327 ; SSE-NEXT: pand %xmm10, %xmm4
7328 ; SSE-NEXT: pandn %xmm1, %xmm10
7329 ; SSE-NEXT: por %xmm4, %xmm10
7330 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7331 ; SSE-NEXT: movdqa %xmm15, %xmm3
7332 ; SSE-NEXT: movdqa %xmm15, %xmm12
7333 ; SSE-NEXT: pandn %xmm5, %xmm3
7334 ; SSE-NEXT: movdqa %xmm5, %xmm10
7335 ; SSE-NEXT: movdqa %xmm0, %xmm1
7336 ; SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
7337 ; SSE-NEXT: por %xmm3, %xmm1
7338 ; SSE-NEXT: movdqa %xmm7, %xmm14
7339 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[2,2,2,2]
7340 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [65535,65535,65535,65535,65535,65535,0,65535]
7341 ; SSE-NEXT: movdqa %xmm9, %xmm4
7342 ; SSE-NEXT: pandn %xmm3, %xmm4
7343 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm2[0,1,2,3,4,4,4,4]
7344 ; SSE-NEXT: pand %xmm9, %xmm5
7345 ; SSE-NEXT: por %xmm4, %xmm5
7346 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7347 ; SSE-NEXT: movaps %xmm0, %xmm3
7348 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm5[3,3]
7349 ; SSE-NEXT: movdqa %xmm7, %xmm4
7350 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
7351 ; SSE-NEXT: movdqa %xmm4, %xmm5
7352 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7353 ; SSE-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm2[0],xmm14[1],xmm2[1],xmm14[2],xmm2[2],xmm14[3],xmm2[3]
7354 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7355 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm2[3,3,3,3,4,5,6,7]
7356 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[0,2]
7357 ; SSE-NEXT: movaps %xmm13, %xmm3
7358 ; SSE-NEXT: andps %xmm13, %xmm4
7359 ; SSE-NEXT: andnps %xmm1, %xmm3
7360 ; SSE-NEXT: orps %xmm4, %xmm3
7361 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7362 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm10[2,3,2,3]
7363 ; SSE-NEXT: movdqa %xmm11, %xmm1
7364 ; SSE-NEXT: pandn %xmm3, %xmm1
7365 ; SSE-NEXT: movdqa %xmm8, %xmm2
7366 ; SSE-NEXT: movdqa %xmm6, %xmm3
7367 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm6[4],xmm2[5],xmm6[5],xmm2[6],xmm6[6],xmm2[7],xmm6[7]
7368 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7369 ; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm8[4],xmm3[5],xmm8[5],xmm3[6],xmm8[6],xmm3[7],xmm8[7]
7370 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[2,2,2,2,4,5,6,7]
7371 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,4]
7372 ; SSE-NEXT: pand %xmm11, %xmm3
7373 ; SSE-NEXT: movdqa %xmm11, %xmm15
7374 ; SSE-NEXT: por %xmm1, %xmm3
7375 ; SSE-NEXT: movaps %xmm0, %xmm1
7376 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm5[2,3]
7377 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,2],xmm3[0,3]
7378 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7379 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
7380 ; SSE-NEXT: movdqa %xmm9, %xmm1
7381 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7382 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm4[4],xmm1[5],xmm4[5],xmm1[6],xmm4[6],xmm1[7],xmm4[7]
7383 ; SSE-NEXT: movdqa %xmm9, %xmm3
7384 ; SSE-NEXT: punpcklwd {{.*#+}} xmm9 = xmm9[0],xmm4[0],xmm9[1],xmm4[1],xmm9[2],xmm4[2],xmm9[3],xmm4[3]
7385 ; SSE-NEXT: psrld $16, %xmm4
7386 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1]
7387 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
7388 ; SSE-NEXT: movdqa %xmm13, %xmm4
7389 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
7390 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm10[0],xmm4[1],xmm10[1],xmm4[2],xmm10[2],xmm4[3],xmm10[3]
7391 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[2,2,2,2,4,5,6,7]
7392 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,5,4]
7393 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7394 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm6[0,0,0,0]
7395 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,5,5,5,5]
7396 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,1],xmm4[3,3]
7397 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,0,65535,65535,65535,65535,65535]
7398 ; SSE-NEXT: pand %xmm0, %xmm4
7399 ; SSE-NEXT: pandn %xmm3, %xmm0
7400 ; SSE-NEXT: por %xmm4, %xmm0
7401 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
7402 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm11[0,0,1,1]
7403 ; SSE-NEXT: movdqa %xmm12, %xmm4
7404 ; SSE-NEXT: pandn %xmm3, %xmm4
7405 ; SSE-NEXT: pand %xmm12, %xmm0
7406 ; SSE-NEXT: por %xmm0, %xmm4
7407 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm5[0,2]
7408 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7409 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,1,0,1]
7410 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,65535,65535,65535,65535,0,65535,65535]
7411 ; SSE-NEXT: movdqa %xmm5, %xmm3
7412 ; SSE-NEXT: pandn %xmm0, %xmm3
7413 ; SSE-NEXT: andps %xmm5, %xmm4
7414 ; SSE-NEXT: por %xmm4, %xmm3
7415 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7416 ; SSE-NEXT: movdqa %xmm11, %xmm4
7417 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm11[2,2,2,2]
7418 ; SSE-NEXT: movdqa %xmm11, %xmm0
7419 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7420 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,0],xmm1[2,0]
7421 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7422 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9]
7423 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,65535,65535,65535,65535,0,0,65535]
7424 ; SSE-NEXT: movdqa %xmm4, %xmm5
7425 ; SSE-NEXT: pandn %xmm1, %xmm5
7426 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7427 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7428 ; SSE-NEXT: pand %xmm4, %xmm1
7429 ; SSE-NEXT: por %xmm5, %xmm1
7430 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [0,65535,65535,65535,65535,65535,65535,0]
7431 ; SSE-NEXT: movdqa %xmm7, %xmm5
7432 ; SSE-NEXT: pandn %xmm3, %xmm5
7433 ; SSE-NEXT: pand %xmm7, %xmm1
7434 ; SSE-NEXT: por %xmm1, %xmm5
7435 ; SSE-NEXT: movdqa %xmm6, %xmm1
7436 ; SSE-NEXT: movdqa %xmm6, %xmm7
7437 ; SSE-NEXT: psrldq {{.*#+}} xmm1 = xmm1[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
7438 ; SSE-NEXT: movdqa %xmm15, %xmm3
7439 ; SSE-NEXT: pandn %xmm1, %xmm3
7440 ; SSE-NEXT: pand %xmm15, %xmm5
7441 ; SSE-NEXT: por %xmm5, %xmm3
7442 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,65535,0,65535,65535,65535,65535,65535]
7443 ; SSE-NEXT: pand %xmm5, %xmm3
7444 ; SSE-NEXT: movdqa %xmm2, %xmm6
7445 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
7446 ; SSE-NEXT: pandn %xmm1, %xmm5
7447 ; SSE-NEXT: por %xmm3, %xmm5
7448 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7449 ; SSE-NEXT: movdqa %xmm10, %xmm3
7450 ; SSE-NEXT: movdqa %xmm10, %xmm1
7451 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm10[1,1,1,1,4,5,6,7]
7452 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7453 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm13[0],xmm3[1],xmm13[1],xmm3[2],xmm13[2],xmm3[3],xmm13[3]
7454 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7455 ; SSE-NEXT: movdqa %xmm13, %xmm3
7456 ; SSE-NEXT: psrld $16, %xmm3
7457 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
7458 ; SSE-NEXT: movdqa %xmm4, %xmm3
7459 ; SSE-NEXT: pandn %xmm1, %xmm3
7460 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm9[0,1,2,3,4,5,6,6]
7461 ; SSE-NEXT: pand %xmm4, %xmm1
7462 ; SSE-NEXT: por %xmm3, %xmm1
7463 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm9[2,3]
7464 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm1[2,3]
7465 ; SSE-NEXT: andps %xmm12, %xmm0
7466 ; SSE-NEXT: movdqa %xmm7, %xmm1
7467 ; SSE-NEXT: pslld $16, %xmm1
7468 ; SSE-NEXT: pandn %xmm1, %xmm12
7469 ; SSE-NEXT: por %xmm0, %xmm12
7470 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,65535,0,65535,65535,65535]
7471 ; SSE-NEXT: pand %xmm1, %xmm12
7472 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,1,1,3]
7473 ; SSE-NEXT: pandn %xmm0, %xmm1
7474 ; SSE-NEXT: por %xmm12, %xmm1
7475 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7476 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7477 ; SSE-NEXT: movdqa %xmm1, %xmm0
7478 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7479 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
7480 ; SSE-NEXT: movdqa %xmm1, %xmm13
7481 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7482 ; SSE-NEXT: movdqa %xmm2, %xmm1
7483 ; SSE-NEXT: psrlq $48, %xmm1
7484 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm1[1]
7485 ; SSE-NEXT: movdqa {{.*#+}} xmm8 = [65535,65535,65535,0,0,65535,65535,65535]
7486 ; SSE-NEXT: movdqa %xmm8, %xmm1
7487 ; SSE-NEXT: pandn %xmm0, %xmm1
7488 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7489 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
7490 ; SSE-NEXT: por %xmm1, %xmm0
7491 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7492 ; SSE-NEXT: movdqa %xmm3, %xmm2
7493 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7494 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
7495 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7496 ; SSE-NEXT: psrld $16, %xmm1
7497 ; SSE-NEXT: movdqa %xmm3, %xmm2
7498 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7499 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
7500 ; SSE-NEXT: movdqa %xmm11, %xmm1
7501 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
7502 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,4,7]
7503 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,1]
7504 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,5,4]
7505 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [0,65535,65535,65535,65535,0,0,0]
7506 ; SSE-NEXT: movdqa %xmm5, %xmm2
7507 ; SSE-NEXT: pandn %xmm1, %xmm2
7508 ; SSE-NEXT: pand %xmm5, %xmm0
7509 ; SSE-NEXT: por %xmm0, %xmm2
7510 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7511 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7512 ; SSE-NEXT: movdqa %xmm6, %xmm0
7513 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7514 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm1[0],xmm6[1],xmm1[1],xmm6[2],xmm1[2],xmm6[3],xmm1[3]
7515 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7516 ; SSE-NEXT: psrlq $48, %xmm1
7517 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm1[1]
7518 ; SSE-NEXT: movdqa %xmm8, %xmm15
7519 ; SSE-NEXT: movdqa %xmm8, %xmm1
7520 ; SSE-NEXT: pandn %xmm0, %xmm1
7521 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7522 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
7523 ; SSE-NEXT: por %xmm1, %xmm0
7524 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7525 ; SSE-NEXT: movdqa %xmm3, %xmm2
7526 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7527 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
7528 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7529 ; SSE-NEXT: psrld $16, %xmm1
7530 ; SSE-NEXT: movdqa %xmm3, %xmm2
7531 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7532 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
7533 ; SSE-NEXT: movdqa %xmm8, %xmm1
7534 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
7535 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,4,7]
7536 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,1]
7537 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,5,4]
7538 ; SSE-NEXT: movdqa %xmm5, %xmm2
7539 ; SSE-NEXT: pandn %xmm1, %xmm2
7540 ; SSE-NEXT: pand %xmm5, %xmm0
7541 ; SSE-NEXT: por %xmm0, %xmm2
7542 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7543 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
7544 ; SSE-NEXT: movdqa %xmm7, %xmm0
7545 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7546 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm2[0],xmm7[1],xmm2[1],xmm7[2],xmm2[2],xmm7[3],xmm2[3]
7547 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7548 ; SSE-NEXT: movdqa %xmm2, %xmm1
7549 ; SSE-NEXT: psrlq $48, %xmm1
7550 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm1[1]
7551 ; SSE-NEXT: movdqa %xmm15, %xmm1
7552 ; SSE-NEXT: pandn %xmm0, %xmm1
7553 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7554 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
7555 ; SSE-NEXT: por %xmm1, %xmm0
7556 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7557 ; SSE-NEXT: movdqa %xmm3, %xmm2
7558 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7559 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
7560 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7561 ; SSE-NEXT: psrld $16, %xmm1
7562 ; SSE-NEXT: movdqa %xmm3, %xmm2
7563 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7564 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
7565 ; SSE-NEXT: movdqa %xmm14, %xmm1
7566 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
7567 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,4,7]
7568 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,1]
7569 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,5,4]
7570 ; SSE-NEXT: movdqa %xmm5, %xmm2
7571 ; SSE-NEXT: pandn %xmm1, %xmm2
7572 ; SSE-NEXT: pand %xmm5, %xmm0
7573 ; SSE-NEXT: por %xmm0, %xmm2
7574 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7575 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7576 ; SSE-NEXT: movdqa %xmm1, %xmm0
7577 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7578 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
7579 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7580 ; SSE-NEXT: movdqa %xmm2, %xmm1
7581 ; SSE-NEXT: psrlq $48, %xmm1
7582 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm1[1]
7583 ; SSE-NEXT: movdqa %xmm15, %xmm1
7584 ; SSE-NEXT: pandn %xmm0, %xmm1
7585 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7586 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
7587 ; SSE-NEXT: por %xmm1, %xmm0
7588 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7589 ; SSE-NEXT: movdqa %xmm3, %xmm2
7590 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7591 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
7592 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7593 ; SSE-NEXT: psrld $16, %xmm1
7594 ; SSE-NEXT: movdqa %xmm3, %xmm2
7595 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7596 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7597 ; SSE-NEXT: movdqa %xmm3, %xmm1
7598 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
7599 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,4,7]
7600 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,1]
7601 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,5,4]
7602 ; SSE-NEXT: movdqa %xmm5, %xmm2
7603 ; SSE-NEXT: pandn %xmm1, %xmm2
7604 ; SSE-NEXT: pand %xmm5, %xmm0
7605 ; SSE-NEXT: por %xmm0, %xmm2
7606 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7607 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7608 ; SSE-NEXT: movdqa %xmm1, %xmm0
7609 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7610 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
7611 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7612 ; SSE-NEXT: movdqa %xmm2, %xmm1
7613 ; SSE-NEXT: psrlq $48, %xmm1
7614 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm1[1]
7615 ; SSE-NEXT: movdqa %xmm15, %xmm1
7616 ; SSE-NEXT: pandn %xmm0, %xmm1
7617 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7618 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
7619 ; SSE-NEXT: por %xmm1, %xmm0
7620 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
7621 ; SSE-NEXT: movdqa %xmm10, %xmm2
7622 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7623 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
7624 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7625 ; SSE-NEXT: psrld $16, %xmm1
7626 ; SSE-NEXT: movdqa %xmm10, %xmm2
7627 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7628 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
7629 ; SSE-NEXT: movdqa %xmm10, %xmm1
7630 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
7631 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,4,7]
7632 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,1]
7633 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,5,4]
7634 ; SSE-NEXT: movdqa %xmm5, %xmm2
7635 ; SSE-NEXT: pandn %xmm1, %xmm2
7636 ; SSE-NEXT: pand %xmm5, %xmm0
7637 ; SSE-NEXT: por %xmm0, %xmm2
7638 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7639 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7640 ; SSE-NEXT: movdqa %xmm1, %xmm0
7641 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7642 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
7643 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7644 ; SSE-NEXT: movdqa %xmm2, %xmm1
7645 ; SSE-NEXT: psrlq $48, %xmm1
7646 ; SSE-NEXT: punpckhqdq {{.*#+}} xmm0 = xmm0[1],xmm1[1]
7647 ; SSE-NEXT: movdqa %xmm15, %xmm1
7648 ; SSE-NEXT: pandn %xmm0, %xmm1
7649 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7650 ; SSE-NEXT: psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
7651 ; SSE-NEXT: por %xmm1, %xmm0
7652 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
7653 ; SSE-NEXT: movdqa %xmm12, %xmm2
7654 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7655 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
7656 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7657 ; SSE-NEXT: psrld $16, %xmm1
7658 ; SSE-NEXT: movdqa %xmm12, %xmm2
7659 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7660 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
7661 ; SSE-NEXT: movdqa %xmm15, %xmm1
7662 ; SSE-NEXT: punpckhwd {{.*#+}} xmm1 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
7663 ; SSE-NEXT: pand %xmm5, %xmm0
7664 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,4,7]
7665 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,1]
7666 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,7,5,4]
7667 ; SSE-NEXT: pandn %xmm1, %xmm5
7668 ; SSE-NEXT: por %xmm0, %xmm5
7669 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7670 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7671 ; SSE-NEXT: movdqa %xmm1, %xmm0
7672 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[1,1,1,1,4,5,6,7]
7673 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7674 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7675 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
7676 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7677 ; SSE-NEXT: movdqa %xmm2, %xmm1
7678 ; SSE-NEXT: psrld $16, %xmm1
7679 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7680 ; SSE-NEXT: movdqa %xmm4, %xmm1
7681 ; SSE-NEXT: pandn %xmm0, %xmm1
7682 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm13[0,1,2,3,4,5,6,6]
7683 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
7684 ; SSE-NEXT: pand %xmm4, %xmm0
7685 ; SSE-NEXT: por %xmm1, %xmm0
7686 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7687 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm11[1,1]
7688 ; SSE-NEXT: movaps {{.*#+}} xmm5 = [65535,65535,0,0,0,65535,65535,65535]
7689 ; SSE-NEXT: movaps %xmm5, %xmm2
7690 ; SSE-NEXT: andnps %xmm1, %xmm2
7691 ; SSE-NEXT: pand %xmm5, %xmm0
7692 ; SSE-NEXT: orps %xmm0, %xmm2
7693 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7694 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7695 ; SSE-NEXT: movdqa %xmm1, %xmm0
7696 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[1,1,1,1,4,5,6,7]
7697 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7698 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7699 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
7700 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7701 ; SSE-NEXT: movdqa %xmm2, %xmm1
7702 ; SSE-NEXT: psrld $16, %xmm1
7703 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7704 ; SSE-NEXT: movdqa %xmm4, %xmm1
7705 ; SSE-NEXT: pandn %xmm0, %xmm1
7706 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm6[0,1,2,3,4,5,6,6]
7707 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
7708 ; SSE-NEXT: pand %xmm4, %xmm0
7709 ; SSE-NEXT: por %xmm1, %xmm0
7710 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7711 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm8[1,1]
7712 ; SSE-NEXT: movaps %xmm8, %xmm12
7713 ; SSE-NEXT: movaps %xmm5, %xmm2
7714 ; SSE-NEXT: andnps %xmm1, %xmm2
7715 ; SSE-NEXT: pand %xmm5, %xmm0
7716 ; SSE-NEXT: orps %xmm0, %xmm2
7717 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7718 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7719 ; SSE-NEXT: movdqa %xmm1, %xmm0
7720 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[1,1,1,1,4,5,6,7]
7721 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7722 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7723 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
7724 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7725 ; SSE-NEXT: movdqa %xmm2, %xmm1
7726 ; SSE-NEXT: psrld $16, %xmm1
7727 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7728 ; SSE-NEXT: movdqa %xmm4, %xmm1
7729 ; SSE-NEXT: pandn %xmm0, %xmm1
7730 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm7[0,1,2,3,4,5,6,6]
7731 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
7732 ; SSE-NEXT: pand %xmm4, %xmm0
7733 ; SSE-NEXT: por %xmm1, %xmm0
7734 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7735 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm14[1,1]
7736 ; SSE-NEXT: movaps %xmm14, %xmm11
7737 ; SSE-NEXT: movaps %xmm5, %xmm2
7738 ; SSE-NEXT: andnps %xmm1, %xmm2
7739 ; SSE-NEXT: pand %xmm5, %xmm0
7740 ; SSE-NEXT: orps %xmm0, %xmm2
7741 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7742 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7743 ; SSE-NEXT: movdqa %xmm1, %xmm0
7744 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[1,1,1,1,4,5,6,7]
7745 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7746 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7747 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
7748 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7749 ; SSE-NEXT: movdqa %xmm2, %xmm1
7750 ; SSE-NEXT: psrld $16, %xmm1
7751 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7752 ; SSE-NEXT: movdqa %xmm4, %xmm1
7753 ; SSE-NEXT: pandn %xmm0, %xmm1
7754 ; SSE-NEXT: pshufhw $164, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7755 ; SSE-NEXT: # xmm0 = mem[0,1,2,3,4,5,6,6]
7756 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
7757 ; SSE-NEXT: pand %xmm4, %xmm0
7758 ; SSE-NEXT: por %xmm1, %xmm0
7759 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7760 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm3[1,1]
7761 ; SSE-NEXT: movaps %xmm3, %xmm14
7762 ; SSE-NEXT: movaps %xmm5, %xmm2
7763 ; SSE-NEXT: andnps %xmm1, %xmm2
7764 ; SSE-NEXT: pand %xmm5, %xmm0
7765 ; SSE-NEXT: orps %xmm0, %xmm2
7766 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7767 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7768 ; SSE-NEXT: movdqa %xmm1, %xmm0
7769 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[1,1,1,1,4,5,6,7]
7770 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7771 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7772 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
7773 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7774 ; SSE-NEXT: movdqa %xmm2, %xmm1
7775 ; SSE-NEXT: psrld $16, %xmm1
7776 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7777 ; SSE-NEXT: movdqa %xmm4, %xmm1
7778 ; SSE-NEXT: pandn %xmm0, %xmm1
7779 ; SSE-NEXT: pshufhw $164, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7780 ; SSE-NEXT: # xmm0 = mem[0,1,2,3,4,5,6,6]
7781 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
7782 ; SSE-NEXT: pand %xmm4, %xmm0
7783 ; SSE-NEXT: por %xmm1, %xmm0
7784 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7785 ; SSE-NEXT: movdqa %xmm10, %xmm13
7786 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm10[1,1]
7787 ; SSE-NEXT: movaps %xmm5, %xmm2
7788 ; SSE-NEXT: andnps %xmm1, %xmm2
7789 ; SSE-NEXT: pand %xmm5, %xmm0
7790 ; SSE-NEXT: orps %xmm0, %xmm2
7791 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7792 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7793 ; SSE-NEXT: movdqa %xmm1, %xmm0
7794 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[1,1,1,1,4,5,6,7]
7795 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7796 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7797 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
7798 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7799 ; SSE-NEXT: movdqa %xmm2, %xmm1
7800 ; SSE-NEXT: psrld $16, %xmm1
7801 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7802 ; SSE-NEXT: movdqa %xmm4, %xmm1
7803 ; SSE-NEXT: pandn %xmm0, %xmm1
7804 ; SSE-NEXT: pshufhw $164, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7805 ; SSE-NEXT: # xmm0 = mem[0,1,2,3,4,5,6,6]
7806 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
7807 ; SSE-NEXT: pand %xmm4, %xmm0
7808 ; SSE-NEXT: por %xmm1, %xmm0
7809 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7810 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm15[1,1]
7811 ; SSE-NEXT: movaps %xmm5, %xmm2
7812 ; SSE-NEXT: andnps %xmm1, %xmm2
7813 ; SSE-NEXT: pand %xmm5, %xmm0
7814 ; SSE-NEXT: orps %xmm0, %xmm2
7815 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7816 ; SSE-NEXT: movdqa (%rsp), %xmm1 # 16-byte Reload
7817 ; SSE-NEXT: movdqa %xmm1, %xmm0
7818 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[1,1,1,1,4,5,6,7]
7819 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7820 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7821 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
7822 ; SSE-NEXT: movdqa %xmm1, (%rsp) # 16-byte Spill
7823 ; SSE-NEXT: movdqa %xmm2, %xmm1
7824 ; SSE-NEXT: psrld $16, %xmm1
7825 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7826 ; SSE-NEXT: movdqa %xmm4, %xmm1
7827 ; SSE-NEXT: pandn %xmm0, %xmm1
7828 ; SSE-NEXT: pshufhw $164, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7829 ; SSE-NEXT: # xmm0 = mem[0,1,2,3,4,5,6,6]
7830 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
7831 ; SSE-NEXT: pand %xmm4, %xmm0
7832 ; SSE-NEXT: por %xmm1, %xmm0
7833 ; SSE-NEXT: pand %xmm5, %xmm0
7834 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7835 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7836 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm3[1,1]
7837 ; SSE-NEXT: andnps %xmm1, %xmm5
7838 ; SSE-NEXT: orps %xmm0, %xmm5
7839 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7840 ; SSE-NEXT: unpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
7841 ; SSE-NEXT: # xmm9 = xmm9[0],mem[0]
7842 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7843 ; SSE-NEXT: shufps $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7844 ; SSE-NEXT: # xmm0 = xmm0[0,1],mem[1,3]
7845 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,0],xmm0[0,2]
7846 ; SSE-NEXT: movaps {{.*#+}} xmm2 = [65535,65535,65535,65535,65535,0,65535,65535]
7847 ; SSE-NEXT: andps %xmm2, %xmm9
7848 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7849 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,5,6,6,7]
7850 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm1[0,1,2,3,4,5]
7851 ; SSE-NEXT: andnps %xmm1, %xmm2
7852 ; SSE-NEXT: orps %xmm9, %xmm2
7853 ; SSE-NEXT: movaps {{.*#+}} xmm5 = [65535,65535,65535,65535,65535,65535,0,65535]
7854 ; SSE-NEXT: andps %xmm5, %xmm2
7855 ; SSE-NEXT: movaps %xmm2, %xmm7
7856 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7857 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,0,0,0]
7858 ; SSE-NEXT: andnps %xmm1, %xmm5
7859 ; SSE-NEXT: orps %xmm7, %xmm5
7860 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7861 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7862 ; SSE-NEXT: shufps $42, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
7863 ; SSE-NEXT: # xmm5 = xmm5[2,2],mem[2,0]
7864 ; SSE-NEXT: movaps {{.*#+}} xmm1 = [0,65535,65535,65535,65535,65535,65535,0]
7865 ; SSE-NEXT: andps %xmm1, %xmm5
7866 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
7867 ; SSE-NEXT: andnps %xmm0, %xmm1
7868 ; SSE-NEXT: orps %xmm5, %xmm1
7869 ; SSE-NEXT: movaps {{.*#+}} xmm5 = [65535,0,65535,65535,65535,65535,65535,65535]
7870 ; SSE-NEXT: andps %xmm5, %xmm1
7871 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[2,3,2,3]
7872 ; SSE-NEXT: andnps %xmm0, %xmm5
7873 ; SSE-NEXT: orps %xmm1, %xmm5
7874 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7875 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7876 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9]
7877 ; SSE-NEXT: movdqa %xmm4, %xmm0
7878 ; SSE-NEXT: pandn %xmm1, %xmm0
7879 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7880 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7881 ; SSE-NEXT: pand %xmm4, %xmm1
7882 ; SSE-NEXT: por %xmm0, %xmm1
7883 ; SSE-NEXT: movdqa %xmm1, %xmm0
7884 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,65535,65535,0,0,0,0,65535]
7885 ; SSE-NEXT: movdqa %xmm2, %xmm1
7886 ; SSE-NEXT: pandn %xmm0, %xmm1
7887 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
7888 ; SSE-NEXT: movapd %xmm10, %xmm5
7889 ; SSE-NEXT: shufpd $1, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
7890 ; SSE-NEXT: # xmm5 = xmm5[1],mem[0]
7891 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7892 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4,4,5,5,6,6,7,7]
7893 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,0],xmm0[2,1]
7894 ; SSE-NEXT: andps %xmm2, %xmm5
7895 ; SSE-NEXT: orps %xmm1, %xmm5
7896 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7897 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7898 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9]
7899 ; SSE-NEXT: movdqa %xmm4, %xmm1
7900 ; SSE-NEXT: pandn %xmm0, %xmm1
7901 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7902 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7903 ; SSE-NEXT: pand %xmm4, %xmm0
7904 ; SSE-NEXT: por %xmm1, %xmm0
7905 ; SSE-NEXT: movdqa %xmm2, %xmm1
7906 ; SSE-NEXT: pandn %xmm0, %xmm1
7907 ; SSE-NEXT: shufpd $1, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
7908 ; SSE-NEXT: # xmm12 = xmm12[1],mem[0]
7909 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7910 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4,4,5,5,6,6,7,7]
7911 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,0],xmm0[2,1]
7912 ; SSE-NEXT: andps %xmm2, %xmm12
7913 ; SSE-NEXT: orps %xmm1, %xmm12
7914 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7915 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9]
7916 ; SSE-NEXT: movdqa %xmm4, %xmm1
7917 ; SSE-NEXT: pandn %xmm0, %xmm1
7918 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7919 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7920 ; SSE-NEXT: pand %xmm4, %xmm0
7921 ; SSE-NEXT: por %xmm1, %xmm0
7922 ; SSE-NEXT: movdqa %xmm2, %xmm1
7923 ; SSE-NEXT: pandn %xmm0, %xmm1
7924 ; SSE-NEXT: movaps %xmm11, %xmm6
7925 ; SSE-NEXT: shufpd $1, {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
7926 ; SSE-NEXT: # xmm11 = xmm11[1],mem[0]
7927 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7928 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4,4,5,5,6,6,7,7]
7929 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0],xmm0[2,1]
7930 ; SSE-NEXT: andps %xmm2, %xmm11
7931 ; SSE-NEXT: orps %xmm1, %xmm11
7932 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7933 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9]
7934 ; SSE-NEXT: movdqa %xmm4, %xmm1
7935 ; SSE-NEXT: pandn %xmm0, %xmm1
7936 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7937 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7938 ; SSE-NEXT: pand %xmm4, %xmm0
7939 ; SSE-NEXT: por %xmm1, %xmm0
7940 ; SSE-NEXT: movdqa %xmm2, %xmm1
7941 ; SSE-NEXT: pandn %xmm0, %xmm1
7942 ; SSE-NEXT: movaps %xmm14, %xmm15
7943 ; SSE-NEXT: movaps %xmm14, %xmm9
7944 ; SSE-NEXT: shufpd $1, {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
7945 ; SSE-NEXT: # xmm9 = xmm9[1],mem[0]
7946 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7947 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4,4,5,5,6,6,7,7]
7948 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[2,0],xmm0[2,1]
7949 ; SSE-NEXT: andps %xmm2, %xmm9
7950 ; SSE-NEXT: orps %xmm1, %xmm9
7951 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7952 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9]
7953 ; SSE-NEXT: movdqa %xmm4, %xmm1
7954 ; SSE-NEXT: pandn %xmm0, %xmm1
7955 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7956 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7957 ; SSE-NEXT: pand %xmm4, %xmm0
7958 ; SSE-NEXT: por %xmm1, %xmm0
7959 ; SSE-NEXT: movdqa %xmm2, %xmm1
7960 ; SSE-NEXT: pandn %xmm0, %xmm1
7961 ; SSE-NEXT: movdqa %xmm13, %xmm7
7962 ; SSE-NEXT: shufpd $1, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
7963 ; SSE-NEXT: # xmm7 = xmm7[1],mem[0]
7964 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7965 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4,4,5,5,6,6,7,7]
7966 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,0],xmm0[2,1]
7967 ; SSE-NEXT: andps %xmm2, %xmm7
7968 ; SSE-NEXT: orps %xmm1, %xmm7
7969 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7970 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9]
7971 ; SSE-NEXT: movdqa %xmm4, %xmm1
7972 ; SSE-NEXT: pandn %xmm0, %xmm1
7973 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7974 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7975 ; SSE-NEXT: pand %xmm4, %xmm0
7976 ; SSE-NEXT: por %xmm1, %xmm0
7977 ; SSE-NEXT: movdqa %xmm2, %xmm1
7978 ; SSE-NEXT: pandn %xmm0, %xmm1
7979 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
7980 ; SSE-NEXT: movapd %xmm14, %xmm5
7981 ; SSE-NEXT: shufpd $1, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
7982 ; SSE-NEXT: # xmm5 = xmm5[1],mem[0]
7983 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7984 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4,4,5,5,6,6,7,7]
7985 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,0],xmm0[2,1]
7986 ; SSE-NEXT: andps %xmm2, %xmm5
7987 ; SSE-NEXT: orps %xmm1, %xmm5
7988 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7989 ; SSE-NEXT: pslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9]
7990 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7991 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
7992 ; SSE-NEXT: pand %xmm4, %xmm0
7993 ; SSE-NEXT: pandn %xmm1, %xmm4
7994 ; SSE-NEXT: por %xmm0, %xmm4
7995 ; SSE-NEXT: shufpd $1, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
7996 ; SSE-NEXT: # xmm3 = xmm3[1],mem[0]
7997 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7998 ; SSE-NEXT: punpckhwd {{.*#+}} xmm0 = xmm0[4,4,5,5,6,6,7,7]
7999 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[2,0],xmm0[2,1]
8000 ; SSE-NEXT: andps %xmm2, %xmm3
8001 ; SSE-NEXT: pandn %xmm4, %xmm2
8002 ; SSE-NEXT: por %xmm3, %xmm2
8003 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8004 ; SSE-NEXT: unpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
8005 ; SSE-NEXT: # xmm0 = xmm0[0],mem[0]
8006 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
8007 ; SSE-NEXT: # xmm0 = xmm0[2,0],mem[2,1]
8008 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
8009 ; SSE-NEXT: # xmm1 = mem[0,0,1,1]
8010 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm10[0,0,1,1]
8011 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
8012 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,65535,65535,65535,0,0,0,65535]
8013 ; SSE-NEXT: movdqa %xmm4, %xmm10
8014 ; SSE-NEXT: pandn %xmm1, %xmm10
8015 ; SSE-NEXT: andps %xmm4, %xmm0
8016 ; SSE-NEXT: por %xmm0, %xmm10
8017 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8018 ; SSE-NEXT: unpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
8019 ; SSE-NEXT: # xmm0 = xmm0[0],mem[0]
8020 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
8021 ; SSE-NEXT: # xmm0 = xmm0[2,0],mem[2,1]
8022 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
8023 ; SSE-NEXT: # xmm1 = mem[0,0,1,1]
8024 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm8[0,0,1,1]
8025 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
8026 ; SSE-NEXT: movdqa %xmm4, %xmm8
8027 ; SSE-NEXT: pandn %xmm1, %xmm8
8028 ; SSE-NEXT: andps %xmm4, %xmm0
8029 ; SSE-NEXT: por %xmm0, %xmm8
8030 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8031 ; SSE-NEXT: unpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
8032 ; SSE-NEXT: # xmm0 = xmm0[0],mem[0]
8033 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
8034 ; SSE-NEXT: # xmm0 = xmm0[2,0],mem[2,1]
8035 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
8036 ; SSE-NEXT: # xmm1 = mem[0,0,1,1]
8037 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm6[0,0,1,1]
8038 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
8039 ; SSE-NEXT: movdqa %xmm4, %xmm6
8040 ; SSE-NEXT: pandn %xmm1, %xmm6
8041 ; SSE-NEXT: andps %xmm4, %xmm0
8042 ; SSE-NEXT: por %xmm0, %xmm6
8043 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
8044 ; SSE-NEXT: unpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
8045 ; SSE-NEXT: # xmm13 = xmm13[0],mem[0]
8046 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
8047 ; SSE-NEXT: # xmm13 = xmm13[2,0],mem[2,1]
8048 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
8049 ; SSE-NEXT: # xmm0 = mem[0,0,1,1]
8050 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm15[0,0,1,1]
8051 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
8052 ; SSE-NEXT: movdqa %xmm4, %xmm3
8053 ; SSE-NEXT: pandn %xmm0, %xmm3
8054 ; SSE-NEXT: andps %xmm4, %xmm13
8055 ; SSE-NEXT: por %xmm13, %xmm3
8056 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8057 ; SSE-NEXT: unpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
8058 ; SSE-NEXT: # xmm1 = xmm1[0],mem[0]
8059 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
8060 ; SSE-NEXT: # xmm1 = xmm1[2,0],mem[2,1]
8061 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
8062 ; SSE-NEXT: # xmm0 = mem[0,0,1,1]
8063 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
8064 ; SSE-NEXT: # xmm13 = mem[0,0,1,1]
8065 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm13[0],xmm0[1],xmm13[1]
8066 ; SSE-NEXT: movdqa %xmm4, %xmm13
8067 ; SSE-NEXT: pandn %xmm0, %xmm13
8068 ; SSE-NEXT: andps %xmm4, %xmm1
8069 ; SSE-NEXT: por %xmm1, %xmm13
8070 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8071 ; SSE-NEXT: unpcklpd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
8072 ; SSE-NEXT: # xmm1 = xmm1[0],mem[0]
8073 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
8074 ; SSE-NEXT: # xmm1 = xmm1[2,0],mem[2,1]
8075 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
8076 ; SSE-NEXT: # xmm0 = mem[0,0,1,1]
8077 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm14[0,0,1,1]
8078 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm15[0],xmm0[1],xmm15[1]
8079 ; SSE-NEXT: movdqa %xmm4, %xmm15
8080 ; SSE-NEXT: pandn %xmm0, %xmm15
8081 ; SSE-NEXT: andps %xmm4, %xmm1
8082 ; SSE-NEXT: por %xmm1, %xmm15
8083 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8084 ; SSE-NEXT: unpcklpd (%rsp), %xmm1 # 16-byte Folded Reload
8085 ; SSE-NEXT: # xmm1 = xmm1[0],mem[0]
8086 ; SSE-NEXT: shufps $98, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
8087 ; SSE-NEXT: # xmm1 = xmm1[2,0],mem[2,1]
8088 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
8089 ; SSE-NEXT: # xmm0 = mem[0,0,1,1]
8090 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Folded Reload
8091 ; SSE-NEXT: # xmm14 = mem[0,0,1,1]
8092 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm14[0],xmm0[1],xmm14[1]
8093 ; SSE-NEXT: andps %xmm4, %xmm1
8094 ; SSE-NEXT: pandn %xmm0, %xmm4
8095 ; SSE-NEXT: por %xmm1, %xmm4
8096 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
8097 ; SSE-NEXT: movdqa %xmm4, 672(%rax)
8098 ; SSE-NEXT: movdqa %xmm15, 560(%rax)
8099 ; SSE-NEXT: movdqa %xmm13, 448(%rax)
8100 ; SSE-NEXT: movdqa %xmm3, 336(%rax)
8101 ; SSE-NEXT: movdqa %xmm6, 224(%rax)
8102 ; SSE-NEXT: movdqa %xmm8, 112(%rax)
8103 ; SSE-NEXT: movdqa %xmm10, (%rax)
8104 ; SSE-NEXT: movdqa %xmm2, 736(%rax)
8105 ; SSE-NEXT: movaps %xmm5, 624(%rax)
8106 ; SSE-NEXT: movaps %xmm7, 512(%rax)
8107 ; SSE-NEXT: movaps %xmm9, 400(%rax)
8108 ; SSE-NEXT: movaps %xmm11, 288(%rax)
8109 ; SSE-NEXT: movaps %xmm12, 176(%rax)
8110 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8111 ; SSE-NEXT: movaps %xmm0, 64(%rax)
8112 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8113 ; SSE-NEXT: movaps %xmm0, 864(%rax)
8114 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8115 ; SSE-NEXT: movaps %xmm0, 784(%rax)
8116 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8117 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
8118 ; SSE-NEXT: movaps %xmm0, 752(%rax)
8119 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8120 ; SSE-NEXT: movaps %xmm0, 720(%rax)
8121 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8122 ; SSE-NEXT: movaps %xmm0, 704(%rax)
8123 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8124 ; SSE-NEXT: movaps %xmm0, 688(%rax)
8125 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8126 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
8127 ; SSE-NEXT: movaps %xmm0, 640(%rax)
8128 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8129 ; SSE-NEXT: movaps %xmm0, 608(%rax)
8130 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8131 ; SSE-NEXT: movaps %xmm0, 592(%rax)
8132 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8133 ; SSE-NEXT: movaps %xmm0, 576(%rax)
8134 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8135 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
8136 ; SSE-NEXT: movaps %xmm0, 528(%rax)
8137 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8138 ; SSE-NEXT: movaps %xmm0, 496(%rax)
8139 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8140 ; SSE-NEXT: movaps %xmm0, 480(%rax)
8141 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8142 ; SSE-NEXT: movaps %xmm0, 464(%rax)
8143 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8144 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
8145 ; SSE-NEXT: movaps %xmm0, 416(%rax)
8146 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8147 ; SSE-NEXT: movaps %xmm0, 384(%rax)
8148 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8149 ; SSE-NEXT: movaps %xmm0, 368(%rax)
8150 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8151 ; SSE-NEXT: movaps %xmm0, 352(%rax)
8152 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8153 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
8154 ; SSE-NEXT: movaps %xmm0, 304(%rax)
8155 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8156 ; SSE-NEXT: movaps %xmm0, 272(%rax)
8157 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8158 ; SSE-NEXT: movaps %xmm0, 256(%rax)
8159 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8160 ; SSE-NEXT: movaps %xmm0, 240(%rax)
8161 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8162 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
8163 ; SSE-NEXT: movaps %xmm0, 192(%rax)
8164 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8165 ; SSE-NEXT: movaps %xmm0, 160(%rax)
8166 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8167 ; SSE-NEXT: movaps %xmm0, 144(%rax)
8168 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8169 ; SSE-NEXT: movaps %xmm0, 128(%rax)
8170 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8171 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0,1,3]
8172 ; SSE-NEXT: movaps %xmm0, 80(%rax)
8173 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8174 ; SSE-NEXT: movaps %xmm0, 48(%rax)
8175 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8176 ; SSE-NEXT: movaps %xmm0, 32(%rax)
8177 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8178 ; SSE-NEXT: movaps %xmm0, 16(%rax)
8179 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8180 ; SSE-NEXT: movaps %xmm0, 656(%rax)
8181 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8182 ; SSE-NEXT: movaps %xmm0, 544(%rax)
8183 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8184 ; SSE-NEXT: movaps %xmm0, 432(%rax)
8185 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8186 ; SSE-NEXT: movaps %xmm0, 320(%rax)
8187 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8188 ; SSE-NEXT: movaps %xmm0, 208(%rax)
8189 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8190 ; SSE-NEXT: movaps %xmm0, 96(%rax)
8191 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8192 ; SSE-NEXT: movaps %xmm0, 880(%rax)
8193 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8194 ; SSE-NEXT: movaps %xmm0, 816(%rax)
8195 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8196 ; SSE-NEXT: movaps %xmm0, 768(%rax)
8197 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8198 ; SSE-NEXT: movaps %xmm0, 848(%rax)
8199 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8200 ; SSE-NEXT: movaps %xmm0, 832(%rax)
8201 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8202 ; SSE-NEXT: movaps %xmm0, 800(%rax)
8203 ; SSE-NEXT: addq $1640, %rsp # imm = 0x668
8204 ; SSE-NEXT: retq
8205 ;
8206 ; AVX1-ONLY-LABEL: store_i16_stride7_vf64:
8207 ; AVX1-ONLY: # %bb.0:
8208 ; AVX1-ONLY-NEXT: subq $1496, %rsp # imm = 0x5D8
8209 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
8210 ; AVX1-ONLY-NEXT: vmovdqa 112(%rsi), %xmm1
8211 ; AVX1-ONLY-NEXT: vmovdqa 112(%rdi), %xmm4
8212 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
8213 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm5[0,1,0,1]
8214 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm2 = zero,zero,xmm5[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8215 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
8216 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm14 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
8217 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm14, %ymm0
8218 ; AVX1-ONLY-NEXT: vmovdqa 112(%rdx), %xmm6
8219 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm6[2,2,2,2]
8220 ; AVX1-ONLY-NEXT: vmovdqa 112(%rcx), %xmm8
8221 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm8[3,3,3,3,4,5,6,7]
8222 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
8223 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2,3,4,5],xmm2[6],xmm3[7]
8224 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm6[4],xmm8[4],xmm6[5],xmm8[5],xmm6[6],xmm8[6],xmm6[7],xmm8[7]
8225 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm3 = zero,zero,zero,zero,zero,zero,xmm9[0,1,2,3,4,5,6,7,8,9]
8226 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
8227 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm14, %ymm2
8228 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm2, %ymm7
8229 ; AVX1-ONLY-NEXT: vmovdqa 112(%r8), %xmm0
8230 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[1,1,1,1]
8231 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm7[0],xmm2[1],xmm7[2,3,4,5,6,7]
8232 ; AVX1-ONLY-NEXT: vmovdqa 112(%r9), %xmm2
8233 ; AVX1-ONLY-NEXT: vmovdqa 112(%rax), %xmm3
8234 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm7, %xmm7
8235 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm0[2,2,2,2]
8236 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm11[0],xmm7[1,2,3,4,5,6],xmm11[7]
8237 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm11 = xmm2[6,7,8,9,10,11,12,13,14,15],zero,zero,zero,zero,zero,zero
8238 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0],xmm11[1],xmm7[2,3,4,5,6,7]
8239 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm3[2,2,3,3]
8240 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1],xmm11[2],xmm7[3,4,5,6,7]
8241 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8242 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm8, %xmm7
8243 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm11 = xmm6[0],xmm7[0],xmm6[1],xmm7[1]
8244 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm6[0],xmm8[0],xmm6[1],xmm8[1],xmm6[2],xmm8[2],xmm6[3],xmm8[3]
8245 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm12 = xmm7[0,1,2,3,4,5,6,6]
8246 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,1,2,3]
8247 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm11, %ymm11
8248 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm1, %xmm12
8249 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm12 = xmm4[0],xmm12[0],xmm4[1],xmm12[1]
8250 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
8251 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[2,2,2,2,4,5,6,7]
8252 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,5,5,4]
8253 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm13, %ymm12
8254 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm13 = [65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0]
8255 ; AVX1-ONLY-NEXT: vandnps %ymm11, %ymm13, %ymm11
8256 ; AVX1-ONLY-NEXT: vandps %ymm13, %ymm12, %ymm12
8257 ; AVX1-ONLY-NEXT: vmovaps %ymm13, %ymm15
8258 ; AVX1-ONLY-NEXT: vorps %ymm11, %ymm12, %ymm11
8259 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm0[0,0,1,1]
8260 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm11[0,1,2],xmm12[3],xmm11[4,5,6,7]
8261 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm13 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
8262 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2,3],xmm13[4,5],xmm12[6,7]
8263 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm3[0,1,0,1]
8264 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2,3,4],xmm13[5],xmm12[6,7]
8265 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8266 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm11, %xmm11
8267 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1],xmm0[2,3],xmm11[4,5,6,7]
8268 ; AVX1-ONLY-NEXT: vpslld $16, %xmm2, %xmm12
8269 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2],xmm12[3],xmm11[4,5,6,7]
8270 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm3[0],zero,xmm3[1],zero
8271 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2,3],xmm12[4],xmm11[5,6,7]
8272 ; AVX1-ONLY-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8273 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm8, %xmm8
8274 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm8 = xmm6[1],xmm8[1]
8275 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm2, %xmm11
8276 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm10[0,1],xmm11[2,3],xmm10[4,5,6,7]
8277 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm9, %ymm8
8278 ; AVX1-ONLY-NEXT: vandnps %ymm8, %ymm14, %ymm8
8279 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm5[2,2,3,3]
8280 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm5 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8281 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm9, %ymm5
8282 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm14, %ymm5
8283 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm8, %ymm5
8284 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm5[0,1,2,3,4,5],xmm0[6,7]
8285 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm9 = xmm2[0,1,2,3,5,6,6,7]
8286 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,2,2]
8287 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm9[0],xmm8[1,2,3,4,5,6],xmm9[7]
8288 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm3[2,3,2,3]
8289 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0],xmm9[1],xmm8[2,3,4,5,6,7]
8290 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8291 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm5, %xmm5
8292 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[2,2,3,3]
8293 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4],xmm8[5],xmm5[6,7]
8294 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5],xmm11[6,7]
8295 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm3[3,3,3,3]
8296 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm8[0],xmm5[1,2,3,4,5,6],xmm8[7]
8297 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8298 ; AVX1-ONLY-NEXT: vmovdqa 96(%rdx), %xmm8
8299 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8300 ; AVX1-ONLY-NEXT: vmovdqa 96(%rcx), %xmm5
8301 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8302 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm5, %xmm5
8303 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm5 = xmm8[1],xmm5[1]
8304 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,0,1,1]
8305 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm5, %ymm5
8306 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
8307 ; AVX1-ONLY-NEXT: vmovdqa 96(%rsi), %xmm4
8308 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8309 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,2,2,4,5,6,7]
8310 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
8311 ; AVX1-ONLY-NEXT: vmovdqa 96(%rdi), %xmm7
8312 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8313 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm7[4],xmm4[4],xmm7[5],xmm4[5],xmm7[6],xmm4[6],xmm7[7],xmm4[7]
8314 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8315 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm4 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8316 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm4, %ymm1
8317 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm7 = [65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
8318 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm7, %ymm4
8319 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm1, %ymm1
8320 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm1, %ymm1
8321 ; AVX1-ONLY-NEXT: vmovdqa 96(%r8), %xmm4
8322 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8323 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
8324 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm1[0,1,2,3,4],xmm4[5],xmm1[6,7]
8325 ; AVX1-ONLY-NEXT: vmovdqa 96(%r9), %xmm5
8326 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8327 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm5, %xmm5
8328 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5],xmm5[6,7]
8329 ; AVX1-ONLY-NEXT: vmovdqa 96(%rax), %xmm5
8330 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8331 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[3,3,3,3]
8332 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0],xmm4[1,2,3,4,5,6],xmm5[7]
8333 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8334 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm1
8335 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
8336 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3],xmm0[4,5],xmm1[6,7]
8337 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5]
8338 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm1[5],xmm0[6,7]
8339 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm6[0,1,2],xmm3[3],xmm6[4,5,6,7]
8340 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8341 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[0,0,0,0]
8342 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5],xmm1[6],xmm0[7]
8343 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8344 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm2
8345 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm3
8346 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm3, %xmm0
8347 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
8348 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
8349 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, %xmm9
8350 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8351 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, %xmm10
8352 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8353 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,0,1,1]
8354 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm3
8355 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm1
8356 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm2
8357 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
8358 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,1,2,2,4,5,6,7]
8359 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[0,1,2,1]
8360 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
8361 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[2,2,2,2,4,5,6,7]
8362 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,5,4]
8363 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
8364 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm11 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
8365 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm11, %ymm3
8366 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm11, %ymm4
8367 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm4, %ymm5
8368 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm4
8369 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8370 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm3
8371 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8372 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
8373 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm6 = zero,zero,xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8374 ; AVX1-ONLY-NEXT: vmovdqa (%rax), %xmm4
8375 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm4[0,1,0,1]
8376 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2,3,4],xmm7[5],xmm6[6,7]
8377 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm3[0,1,0,1]
8378 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm4[0,0,0,0]
8379 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5],xmm8[6,7]
8380 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm7, %ymm6
8381 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm7 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
8382 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm5, %ymm5
8383 ; AVX1-ONLY-NEXT: vandnps %ymm6, %ymm7, %ymm6
8384 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm5, %ymm5
8385 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8386 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm1, %xmm5
8387 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm5 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
8388 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
8389 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8390 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
8391 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm5, %ymm1
8392 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm9[3,3,3,3,4,5,6,7]
8393 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
8394 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm10[2,2,2,2]
8395 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5],xmm5[6],xmm2[7]
8396 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,6,6]
8397 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
8398 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
8399 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm12 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535]
8400 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm12, %ymm1
8401 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm12, %ymm0
8402 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
8403 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm1 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8404 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8405 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm4[3],xmm1[4,5,6,7]
8406 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm3[0,2],xmm4[1,3]
8407 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
8408 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
8409 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm0, %ymm0
8410 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm4, %ymm1
8411 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
8412 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8413 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm5
8414 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm1
8415 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm1, %xmm0
8416 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm5[0],xmm0[0],xmm5[1],xmm0[1]
8417 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
8418 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8419 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, %xmm6
8420 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,5,6,6]
8421 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
8422 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
8423 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm3
8424 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm7
8425 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm3, %xmm1
8426 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm7[0],xmm1[0],xmm7[1],xmm1[1]
8427 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm7[0],xmm3[1],xmm7[1],xmm3[2],xmm7[2],xmm3[3],xmm7[3]
8428 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, %xmm9
8429 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8430 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, %xmm10
8431 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8432 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,2,2,2,4,5,6,7]
8433 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,4]
8434 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
8435 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm15, %ymm0
8436 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm15, %ymm1
8437 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
8438 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm1
8439 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm2
8440 ; AVX1-ONLY-NEXT: vmovdqa 16(%rax), %xmm3
8441 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
8442 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, %xmm7
8443 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8444 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, %xmm8
8445 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8446 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm1 = zero,zero,xmm13[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8447 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[0,1,0,1]
8448 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4],xmm2[5],xmm1[6,7]
8449 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm2 = xmm13[0,2],xmm3[1,3]
8450 ; AVX1-ONLY-NEXT: vmovaps %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8451 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
8452 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm14 = [65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535]
8453 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm14, %ymm0
8454 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm14, %ymm1
8455 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
8456 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8457 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
8458 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8459 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[0,1,0,1]
8460 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8461 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
8462 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8463 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm6[3,3,3,3,4,5,6,7]
8464 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
8465 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8466 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm5[2,2,2,2]
8467 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm2[6],xmm1[7]
8468 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
8469 ; AVX1-ONLY-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8470 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm2 = zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5,6,7,8,9]
8471 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
8472 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
8473 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm2, %ymm0
8474 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm1, %ymm1
8475 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
8476 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
8477 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,2,2,4,5,6,7]
8478 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
8479 ; AVX1-ONLY-NEXT: vmovaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8480 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[2,2,3,3]
8481 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm2[2,3],xmm1[4,5,6,7]
8482 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm2 = xmm13[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8483 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm3[3],xmm2[4,5,6,7]
8484 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
8485 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0]
8486 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm0, %ymm0
8487 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm2, %ymm1
8488 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
8489 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8490 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdx), %xmm2
8491 ; AVX1-ONLY-NEXT: vmovdqa 32(%rcx), %xmm5
8492 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm5, %xmm0
8493 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
8494 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
8495 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, %xmm13
8496 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8497 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, %xmm15
8498 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8499 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,0,1,1]
8500 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
8501 ; AVX1-ONLY-NEXT: vmovdqa 32(%rsi), %xmm2
8502 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm7
8503 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm7[0],xmm2[0],xmm7[1],xmm2[1],xmm7[2],xmm2[2],xmm7[3],xmm2[3]
8504 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,2,2,4,5,6,7]
8505 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,1,2,1]
8506 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm2[0],xmm7[0],xmm2[1],xmm7[1],xmm2[2],xmm7[2],xmm2[3],xmm7[3]
8507 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[2,2,2,2,4,5,6,7]
8508 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,5,4]
8509 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm5, %ymm5
8510 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm11, %ymm1
8511 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm11, %ymm5
8512 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm5, %ymm8
8513 ; AVX1-ONLY-NEXT: vmovdqa 32(%r9), %xmm6
8514 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8515 ; AVX1-ONLY-NEXT: vmovdqa 32(%r8), %xmm1
8516 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8517 ; AVX1-ONLY-NEXT: vmovdqa 32(%rax), %xmm5
8518 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
8519 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm9 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8520 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm5[0,1,0,1]
8521 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1,2,3,4],xmm10[5],xmm9[6,7]
8522 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm1[0,1,0,1]
8523 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm5[0,0,0,0]
8524 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,5],xmm11[6,7]
8525 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm10, %ymm9
8526 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
8527 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm8, %ymm8
8528 ; AVX1-ONLY-NEXT: vandnps %ymm9, %ymm3, %ymm9
8529 ; AVX1-ONLY-NEXT: vorps %ymm9, %ymm8, %ymm6
8530 ; AVX1-ONLY-NEXT: vmovups %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8531 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm2, %xmm8
8532 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm8 = xmm7[0],xmm8[0],xmm7[1],xmm8[1]
8533 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm7[4],xmm2[4],xmm7[5],xmm2[5],xmm7[6],xmm2[6],xmm7[7],xmm2[7]
8534 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8535 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,0,1]
8536 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm8, %ymm2
8537 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm13[3,3,3,3,4,5,6,7]
8538 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,4,4,4]
8539 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm15[2,2,2,2]
8540 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5],xmm8[6],xmm7[7]
8541 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,6,6]
8542 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
8543 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm0
8544 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm12, %ymm2
8545 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm12, %ymm0
8546 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm0, %ymm0
8547 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm2 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8548 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8549 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm5[3],xmm2[4,5,6,7]
8550 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm1 = xmm1[0,2],xmm5[1,3]
8551 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
8552 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm0, %ymm0
8553 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm4, %ymm1
8554 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
8555 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8556 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdx), %xmm6
8557 ; AVX1-ONLY-NEXT: vmovdqa 48(%rcx), %xmm1
8558 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm1, %xmm0
8559 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm6[0],xmm0[0],xmm6[1],xmm0[1]
8560 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm1[0],xmm6[1],xmm1[1],xmm6[2],xmm1[2],xmm6[3],xmm1[3]
8561 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8562 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, %xmm8
8563 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,5,6,6]
8564 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
8565 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
8566 ; AVX1-ONLY-NEXT: vmovdqa 48(%rsi), %xmm7
8567 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm9
8568 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm7, %xmm1
8569 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm9[0],xmm1[0],xmm9[1],xmm1[1]
8570 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm7[0],xmm9[0],xmm7[1],xmm9[1],xmm7[2],xmm9[2],xmm7[3],xmm9[3]
8571 ; AVX1-ONLY-NEXT: vmovdqa %xmm9, %xmm13
8572 ; AVX1-ONLY-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8573 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, %xmm3
8574 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8575 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,2,2,2,4,5,6,7]
8576 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,4]
8577 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
8578 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0]
8579 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm4, %ymm0
8580 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm1, %ymm1
8581 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
8582 ; AVX1-ONLY-NEXT: vmovdqa 48(%r9), %xmm1
8583 ; AVX1-ONLY-NEXT: vmovdqa 48(%r8), %xmm2
8584 ; AVX1-ONLY-NEXT: vmovdqa 48(%rax), %xmm11
8585 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
8586 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, %xmm9
8587 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8588 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, %xmm10
8589 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8590 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, %xmm2
8591 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm1 = zero,zero,xmm7[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8592 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm11[0,1,0,1]
8593 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4],xmm7[5],xmm1[6,7]
8594 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm2[0,2],xmm11[1,3]
8595 ; AVX1-ONLY-NEXT: vmovaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8596 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm1, %ymm1
8597 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm14, %ymm0
8598 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm14, %ymm1
8599 ; AVX1-ONLY-NEXT: vmovaps %ymm14, %ymm5
8600 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
8601 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8602 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm13[4],xmm3[4],xmm13[5],xmm3[5],xmm13[6],xmm3[6],xmm13[7],xmm3[7]
8603 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8604 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[0,1,0,1]
8605 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm1 = zero,zero,xmm1[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8606 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
8607 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8608 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm8[3,3,3,3,4,5,6,7]
8609 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
8610 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8611 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[2,2,2,2]
8612 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm7[6],xmm1[7]
8613 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm6[4],xmm8[4],xmm6[5],xmm8[5],xmm6[6],xmm8[6],xmm6[7],xmm8[7]
8614 ; AVX1-ONLY-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8615 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm7 = zero,zero,zero,zero,zero,zero,xmm3[0,1,2,3,4,5,6,7,8,9]
8616 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm1, %ymm1
8617 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
8618 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm3, %ymm0
8619 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm1, %ymm1
8620 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
8621 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
8622 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,2,2,4,5,6,7]
8623 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
8624 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm11[2,2,3,3]
8625 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm7[2,3],xmm1[4,5,6,7]
8626 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm7 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8627 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm11[3],xmm7[4,5,6,7]
8628 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm7, %ymm1
8629 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0]
8630 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm0, %ymm0
8631 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm2, %ymm1
8632 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
8633 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8634 ; AVX1-ONLY-NEXT: vmovdqa 64(%rdx), %xmm2
8635 ; AVX1-ONLY-NEXT: vmovdqa 64(%rcx), %xmm7
8636 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm7, %xmm0
8637 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
8638 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm7[0],xmm2[1],xmm7[1],xmm2[2],xmm7[2],xmm2[3],xmm7[3]
8639 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, %xmm0
8640 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8641 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8642 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm3[0,0,1,1]
8643 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm7, %ymm1
8644 ; AVX1-ONLY-NEXT: vmovdqa 64(%rsi), %xmm8
8645 ; AVX1-ONLY-NEXT: vmovdqa 64(%rdi), %xmm9
8646 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
8647 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[0,1,2,2,4,5,6,7]
8648 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,1,2,1]
8649 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
8650 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[2,2,2,2,4,5,6,7]
8651 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,5,5,4]
8652 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm7, %ymm7
8653 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm6 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
8654 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm6, %ymm1
8655 ; AVX1-ONLY-NEXT: vandps %ymm6, %ymm7, %ymm7
8656 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm7, %ymm10
8657 ; AVX1-ONLY-NEXT: vmovdqa 64(%r9), %xmm1
8658 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8659 ; AVX1-ONLY-NEXT: vmovdqa 64(%r8), %xmm6
8660 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8661 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm6[0],xmm1[0],xmm6[1],xmm1[1],xmm6[2],xmm1[2],xmm6[3],xmm1[3]
8662 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm13 = zero,zero,xmm7[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8663 ; AVX1-ONLY-NEXT: vmovdqa 64(%rax), %xmm6
8664 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm6[0,1,0,1]
8665 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2,3,4],xmm14[5],xmm13[6,7]
8666 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm7[0,1,0,1]
8667 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm6[0,0,0,0]
8668 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,5],xmm15[6,7]
8669 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm13
8670 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm1 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
8671 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm10, %ymm10
8672 ; AVX1-ONLY-NEXT: vandnps %ymm13, %ymm1, %ymm13
8673 ; AVX1-ONLY-NEXT: vorps %ymm13, %ymm10, %ymm1
8674 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8675 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm8, %xmm10
8676 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm10 = xmm9[0],xmm10[0],xmm9[1],xmm10[1]
8677 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
8678 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8679 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm1[0,1,0,1]
8680 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm10, %ymm8
8681 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm9 = xmm0[3,3,3,3,4,5,6,7]
8682 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,4,4,4]
8683 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm2[2,2,2,2]
8684 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5],xmm10[6],xmm9[7]
8685 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm0 = xmm3[0,1,2,3,4,5,6,6]
8686 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
8687 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm0
8688 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm1 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535]
8689 ; AVX1-ONLY-NEXT: vandnps %ymm8, %ymm1, %ymm8
8690 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm0, %ymm0
8691 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm8, %ymm0
8692 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm8 = xmm7[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8693 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8694 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1,2],xmm6[3],xmm8[4,5,6,7]
8695 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm7[0,2],xmm6[1,3]
8696 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm7, %ymm7
8697 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm1 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
8698 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm0, %ymm0
8699 ; AVX1-ONLY-NEXT: vandnps %ymm7, %ymm1, %ymm7
8700 ; AVX1-ONLY-NEXT: vorps %ymm7, %ymm0, %ymm0
8701 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8702 ; AVX1-ONLY-NEXT: vmovdqa 80(%rdx), %xmm1
8703 ; AVX1-ONLY-NEXT: vmovdqa 80(%rcx), %xmm9
8704 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm9, %xmm0
8705 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
8706 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm1[0],xmm9[0],xmm1[1],xmm9[1],xmm1[2],xmm9[2],xmm1[3],xmm9[3]
8707 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8708 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm7 = xmm2[0,1,2,3,4,5,6,6]
8709 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,1,2,3]
8710 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm0, %ymm0
8711 ; AVX1-ONLY-NEXT: vmovdqa 80(%rdi), %xmm2
8712 ; AVX1-ONLY-NEXT: vmovdqa 80(%rsi), %xmm3
8713 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm3, %xmm7
8714 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm7 = xmm2[0],xmm7[0],xmm2[1],xmm7[1]
8715 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
8716 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, %xmm12
8717 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8718 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, %xmm8
8719 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8720 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[2,2,2,2,4,5,6,7]
8721 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,5,5,4]
8722 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm10, %ymm7
8723 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm4, %ymm0
8724 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm7, %ymm7
8725 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm7, %ymm7
8726 ; AVX1-ONLY-NEXT: vmovdqa 80(%r9), %xmm0
8727 ; AVX1-ONLY-NEXT: vmovdqa 80(%r8), %xmm2
8728 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
8729 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, %xmm3
8730 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8731 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, %xmm4
8732 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8733 ; AVX1-ONLY-NEXT: vmovdqa 80(%rax), %xmm2
8734 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, %xmm0
8735 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm15 = zero,zero,xmm6[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8736 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm2[0,1,0,1]
8737 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm15[0,1,2,3,4],xmm6[5],xmm15[6,7]
8738 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm15 = xmm0[0,2],xmm2[1,3]
8739 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, %xmm10
8740 ; AVX1-ONLY-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8741 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm6, %ymm6
8742 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm7, %ymm7
8743 ; AVX1-ONLY-NEXT: vandnps %ymm6, %ymm5, %ymm6
8744 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm7, %ymm0
8745 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8746 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm8[4],xmm12[4],xmm8[5],xmm12[5],xmm8[6],xmm12[6],xmm8[7],xmm12[7]
8747 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8748 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[0,1,0,1]
8749 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm7 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8750 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm6, %ymm6
8751 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm9[3,3,3,3,4,5,6,7]
8752 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,4,4,4]
8753 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8754 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm1[2,2,2,2]
8755 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5],xmm15[6],xmm7[7]
8756 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm9[4],xmm1[5],xmm9[5],xmm1[6],xmm9[6],xmm1[7],xmm9[7]
8757 ; AVX1-ONLY-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8758 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm15 = zero,zero,zero,zero,zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9]
8759 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm7, %ymm7
8760 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm0 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
8761 ; AVX1-ONLY-NEXT: vandnps %ymm6, %ymm0, %ymm6
8762 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm7, %ymm7
8763 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm7, %ymm6
8764 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
8765 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[0,1,2,2,4,5,6,7]
8766 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,1,2,1]
8767 ; AVX1-ONLY-NEXT: vmovaps %xmm2, (%rsp) # 16-byte Spill
8768 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm2[2,2,3,3]
8769 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1],xmm15[2,3],xmm7[4,5,6,7]
8770 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm15 = xmm10[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8771 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm15 = xmm15[0,1,2],xmm2[3],xmm15[4,5,6,7]
8772 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm15, %ymm7
8773 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm0 = [65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0]
8774 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm6, %ymm6
8775 ; AVX1-ONLY-NEXT: vandnps %ymm7, %ymm0, %ymm7
8776 ; AVX1-ONLY-NEXT: vorps %ymm7, %ymm6, %ymm0
8777 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8778 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
8779 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm13, %xmm6
8780 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
8781 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm7 = xmm2[0],xmm6[0],xmm2[1],xmm6[1]
8782 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm2[0],xmm13[0],xmm2[1],xmm13[1],xmm2[2],xmm13[2],xmm2[3],xmm13[3]
8783 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, %xmm8
8784 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm6[0,0,1,1]
8785 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm15, %ymm7
8786 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8787 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8788 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
8789 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm15 = xmm15[0,1,2,2,4,5,6,7]
8790 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[0,1,2,1]
8791 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
8792 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, %xmm1
8793 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm14 = xmm14[2,2,2,2,4,5,6,7]
8794 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,5,5,4]
8795 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm15, %ymm14
8796 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm0 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
8797 ; AVX1-ONLY-NEXT: vandnps %ymm7, %ymm0, %ymm7
8798 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm14, %ymm14
8799 ; AVX1-ONLY-NEXT: vorps %ymm7, %ymm14, %ymm14
8800 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8801 ; AVX1-ONLY-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm7, %xmm7 # 16-byte Folded Reload
8802 ; AVX1-ONLY-NEXT: # xmm7 = xmm7[0],mem[0],xmm7[1],mem[1],xmm7[2],mem[2],xmm7[3],mem[3]
8803 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm15 = zero,zero,xmm7[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8804 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8805 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm0[0,1,0,1]
8806 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm15[0,1,2,3,4],xmm13[5],xmm15[6,7]
8807 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm7[0,1,0,1]
8808 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm0[0,0,0,0]
8809 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm15[0,1,2,3,4,5],xmm12[6,7]
8810 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm12, %ymm12
8811 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
8812 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm14, %ymm13
8813 ; AVX1-ONLY-NEXT: vandnps %ymm12, %ymm3, %ymm12
8814 ; AVX1-ONLY-NEXT: vorps %ymm12, %ymm13, %ymm3
8815 ; AVX1-ONLY-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8816 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm1, %xmm12
8817 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm12 = xmm4[0],xmm12[0],xmm4[1],xmm12[1]
8818 ; AVX1-ONLY-NEXT: vpermilps $68, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
8819 ; AVX1-ONLY-NEXT: # xmm13 = mem[0,1,0,1]
8820 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm12, %ymm12
8821 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm13 = xmm8[3,3,3,3,4,5,6,7]
8822 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,4,4,4]
8823 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm2[2,2,2,2]
8824 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,5],xmm14[6],xmm13[7]
8825 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,5,6,6]
8826 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,1,2,3]
8827 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm6, %ymm6
8828 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm10 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535]
8829 ; AVX1-ONLY-NEXT: vandnps %ymm12, %ymm10, %ymm12
8830 ; AVX1-ONLY-NEXT: vandps %ymm6, %ymm10, %ymm6
8831 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm12, %ymm6
8832 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm12 = xmm7[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8833 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2],xmm0[3],xmm12[4,5,6,7]
8834 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm7 = xmm7[0,2],xmm0[1,3]
8835 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm7, %ymm7
8836 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm0 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
8837 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm6, %ymm6
8838 ; AVX1-ONLY-NEXT: vandnps %ymm7, %ymm0, %ymm7
8839 ; AVX1-ONLY-NEXT: vorps %ymm7, %ymm6, %ymm0
8840 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8841 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8842 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
8843 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm14[4],xmm15[4],xmm14[5],xmm15[5],xmm14[6],xmm15[6],xmm14[7],xmm15[7]
8844 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm7 = zero,zero,zero,zero,zero,zero,xmm6[0,1,2,3,4,5,6,7,8,9]
8845 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm7, %ymm6
8846 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8847 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm7 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8848 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm0[2,2,3,3]
8849 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, %xmm2
8850 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm7, %ymm7
8851 ; AVX1-ONLY-NEXT: vandnps %ymm6, %ymm10, %ymm6
8852 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm10, %ymm7
8853 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm7, %ymm6
8854 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
8855 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8856 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm13[4],xmm1[4],xmm13[5],xmm1[5],xmm13[6],xmm1[6],xmm13[7],xmm1[7]
8857 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[2,2,2,2,4,5,6,7]
8858 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5,5,4]
8859 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
8860 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm3[2,3,2,3]
8861 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0],xmm12[1],xmm7[2,3,4,5,6,7]
8862 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm1[4],xmm13[4],xmm1[5],xmm13[5],xmm1[6],xmm13[6],xmm1[7],xmm13[7]
8863 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, %xmm0
8864 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[0,1,2,2,4,5,6,7]
8865 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[0,1,2,1]
8866 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm3[2,2,3,3]
8867 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1],xmm13[2,3],xmm12[4,5,6,7]
8868 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm12, %ymm12
8869 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm5 = [65535,65535,65535,0,0,0,0,65535,65535,65535,0,0,0,0,65535,65535]
8870 ; AVX1-ONLY-NEXT: vandnps %ymm6, %ymm5, %ymm6
8871 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm12, %ymm12
8872 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm12, %ymm4
8873 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8874 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm15, %xmm6
8875 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm6 = xmm14[1],xmm6[1]
8876 ; AVX1-ONLY-NEXT: vpermilps $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
8877 ; AVX1-ONLY-NEXT: # xmm12 = mem[0,0,1,1]
8878 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm6, %ymm6
8879 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8880 ; AVX1-ONLY-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm7, %xmm12 # 16-byte Folded Reload
8881 ; AVX1-ONLY-NEXT: # xmm12 = xmm7[0],mem[0],xmm7[1],mem[1],xmm7[2],mem[2],xmm7[3],mem[3]
8882 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm13 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8883 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[0,1,2,2,4,5,6,7]
8884 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[0,1,2,1]
8885 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm13, %ymm12
8886 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
8887 ; AVX1-ONLY-NEXT: vandnps %ymm6, %ymm2, %ymm6
8888 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm12, %ymm12
8889 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm12, %ymm12
8890 ; AVX1-ONLY-NEXT: vpshufd $68, {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Folded Reload
8891 ; AVX1-ONLY-NEXT: # xmm6 = mem[0,1,0,1]
8892 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
8893 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm2[0,0,0,0]
8894 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm6[0,1,2,3,4,5],xmm13[6,7]
8895 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm0, %xmm6
8896 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm6 = xmm1[2],xmm6[2],xmm1[3],xmm6[3]
8897 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm6[4],xmm3[5],xmm6[5],xmm3[6],xmm6[6],xmm3[7],xmm6[7]
8898 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm0 = [8,9,8,9,8,9,8,9,12,13,6,7,10,11,12,13]
8899 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm4, %xmm4
8900 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, %xmm6
8901 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm4, %ymm13
8902 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm8 = [0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
8903 ; AVX1-ONLY-NEXT: vandps %ymm8, %ymm12, %ymm12
8904 ; AVX1-ONLY-NEXT: vandnps %ymm13, %ymm8, %ymm13
8905 ; AVX1-ONLY-NEXT: vorps %ymm13, %ymm12, %ymm0
8906 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8907 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8908 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm0, %xmm12
8909 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8910 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm12 = xmm0[1],xmm12[1]
8911 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8912 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm0, %ymm12
8913 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8914 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm0[2,2,3,3]
8915 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm14 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8916 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm13, %ymm13
8917 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm0 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
8918 ; AVX1-ONLY-NEXT: vandnps %ymm12, %ymm0, %ymm12
8919 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm13, %ymm13
8920 ; AVX1-ONLY-NEXT: vorps %ymm12, %ymm13, %ymm12
8921 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8922 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm0, %xmm13
8923 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8924 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm13 = xmm1[2],xmm13[2],xmm1[3],xmm13[3]
8925 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm14 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
8926 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm2[4],xmm13[4],xmm2[5],xmm13[5],xmm2[6],xmm13[6],xmm2[7],xmm13[7]
8927 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm14 = xmm14[2,2,2,2,4,5,6,7]
8928 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,5,5,4]
8929 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[2,3,2,3]
8930 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm14[0],xmm3[1],xmm14[2,3,4,5,6,7]
8931 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm13, %xmm13
8932 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm3, %ymm13
8933 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm7 = [0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0]
8934 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm12, %ymm12
8935 ; AVX1-ONLY-NEXT: vandnps %ymm13, %ymm7, %ymm13
8936 ; AVX1-ONLY-NEXT: vorps %ymm13, %ymm12, %ymm0
8937 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8938 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
8939 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
8940 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
8941 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm13 = zero,zero,zero,zero,zero,zero,xmm12[0,1,2,3,4,5,6,7,8,9]
8942 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm13, %ymm12
8943 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8944 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm13 = zero,zero,xmm4[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
8945 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm4[2,2,3,3]
8946 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm13, %ymm13
8947 ; AVX1-ONLY-NEXT: vandnps %ymm12, %ymm10, %ymm12
8948 ; AVX1-ONLY-NEXT: vandps %ymm10, %ymm13, %ymm13
8949 ; AVX1-ONLY-NEXT: vorps %ymm12, %ymm13, %ymm12
8950 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8951 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8952 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
8953 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[2,2,2,2,4,5,6,7]
8954 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,5,5,4]
8955 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8956 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm6[2,3,2,3]
8957 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0],xmm14[1],xmm13[2,3,4,5,6,7]
8958 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm14 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
8959 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm14 = xmm14[0,1,2,2,4,5,6,7]
8960 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm14[0,1,2,1]
8961 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm6[2,2,3,3]
8962 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm14 = xmm14[0,1],xmm15[2,3],xmm14[4,5,6,7]
8963 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm13
8964 ; AVX1-ONLY-NEXT: vandnps %ymm12, %ymm5, %ymm12
8965 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm13, %ymm13
8966 ; AVX1-ONLY-NEXT: vorps %ymm12, %ymm13, %ymm5
8967 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8968 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm2, %xmm12
8969 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm12 = xmm3[1],xmm12[1]
8970 ; AVX1-ONLY-NEXT: vpermilps $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
8971 ; AVX1-ONLY-NEXT: # xmm13 = mem[0,0,1,1]
8972 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm12, %ymm12
8973 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
8974 ; AVX1-ONLY-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm13 # 16-byte Folded Reload
8975 ; AVX1-ONLY-NEXT: # xmm13 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
8976 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm14 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
8977 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[0,1,2,2,4,5,6,7]
8978 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[0,1,2,1]
8979 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm13
8980 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
8981 ; AVX1-ONLY-NEXT: vandnps %ymm12, %ymm2, %ymm12
8982 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm13, %ymm13
8983 ; AVX1-ONLY-NEXT: vorps %ymm12, %ymm13, %ymm12
8984 ; AVX1-ONLY-NEXT: vpermilps $68, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
8985 ; AVX1-ONLY-NEXT: # xmm13 = mem[0,1,0,1]
8986 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} xmm14 = xmm11[0,0,0,0]
8987 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} xmm13 = xmm13[0,1,2],xmm14[3]
8988 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm0, %xmm14
8989 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm14 = xmm1[2],xmm14[2],xmm1[3],xmm14[3]
8990 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm6[4],xmm14[4],xmm6[5],xmm14[5],xmm6[6],xmm14[6],xmm6[7],xmm14[7]
8991 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm3 = [8,9,8,9,8,9,8,9,12,13,6,7,10,11,12,13]
8992 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm5
8993 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm5, %ymm5
8994 ; AVX1-ONLY-NEXT: vandps %ymm8, %ymm12, %ymm12
8995 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm8, %ymm5
8996 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm12, %ymm0
8997 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8998 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8999 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm0, %xmm5
9000 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9001 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm5 = xmm0[1],xmm5[1]
9002 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9003 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
9004 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9005 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm0[2,2,3,3]
9006 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm13 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
9007 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm12, %ymm12
9008 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm0 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
9009 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm0, %ymm5
9010 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm12, %ymm12
9011 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm12, %ymm5
9012 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9013 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm1, %xmm12
9014 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9015 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm12 = xmm0[2],xmm12[2],xmm0[3],xmm12[3]
9016 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
9017 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
9018 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[2,2,2,2,4,5,6,7]
9019 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,5,5,4]
9020 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm11[2,3,2,3]
9021 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm13[0],xmm2[1],xmm13[2,3,4,5,6,7]
9022 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm12, %xmm12
9023 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, %xmm15
9024 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm2, %ymm2
9025 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm5, %ymm5
9026 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm7, %ymm2
9027 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm5, %ymm0
9028 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9029 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9030 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9031 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
9032 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm12 = zero,zero,zero,zero,zero,zero,xmm2[0,1,2,3,4,5,6,7,8,9]
9033 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm12, %ymm2
9034 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9035 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm12 = zero,zero,xmm5[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
9036 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm5[2,2,3,3]
9037 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm12, %ymm12
9038 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm10, %ymm2
9039 ; AVX1-ONLY-NEXT: vandps %ymm10, %ymm12, %ymm12
9040 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm12, %ymm2
9041 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9042 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9043 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm12 = xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
9044 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[2,2,2,2,4,5,6,7]
9045 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,5,5,4]
9046 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
9047 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm11[2,3,2,3]
9048 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0],xmm13[1],xmm12[2,3,4,5,6,7]
9049 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm13 = xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
9050 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, %xmm1
9051 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[0,1,2,2,4,5,6,7]
9052 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[0,1,2,1]
9053 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm11[2,2,3,3]
9054 ; AVX1-ONLY-NEXT: vmovdqa %xmm11, %xmm0
9055 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1],xmm14[2,3],xmm13[4,5,6,7]
9056 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm13, %ymm12
9057 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm14 = [65535,65535,65535,0,0,0,0,65535,65535,65535,0,0,0,0,65535,65535]
9058 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm14, %ymm2
9059 ; AVX1-ONLY-NEXT: vandps %ymm14, %ymm12, %ymm12
9060 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm12, %ymm2
9061 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm4, %xmm11
9062 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm11 = xmm3[1],xmm11[1]
9063 ; AVX1-ONLY-NEXT: vpermilps $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
9064 ; AVX1-ONLY-NEXT: # xmm12 = mem[0,0,1,1]
9065 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm11, %ymm11
9066 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9067 ; AVX1-ONLY-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm12 # 16-byte Folded Reload
9068 ; AVX1-ONLY-NEXT: # xmm12 = xmm3[0],mem[0],xmm3[1],mem[1],xmm3[2],mem[2],xmm3[3],mem[3]
9069 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm13 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
9070 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[0,1,2,2,4,5,6,7]
9071 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[0,1,2,1]
9072 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm13, %ymm12
9073 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
9074 ; AVX1-ONLY-NEXT: vandnps %ymm11, %ymm3, %ymm11
9075 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm12, %ymm12
9076 ; AVX1-ONLY-NEXT: vorps %ymm11, %ymm12, %ymm11
9077 ; AVX1-ONLY-NEXT: vpshufd $68, {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Folded Reload
9078 ; AVX1-ONLY-NEXT: # xmm10 = mem[0,1,0,1]
9079 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm5 # 16-byte Reload
9080 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm5[0,0,0,0]
9081 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,5],xmm12[6,7]
9082 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm1, %xmm12
9083 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm12 = xmm6[2],xmm12[2],xmm6[3],xmm12[3]
9084 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm0[4],xmm12[4],xmm0[5],xmm12[5],xmm0[6],xmm12[6],xmm0[7],xmm12[7]
9085 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm1, %xmm1
9086 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm1, %ymm1
9087 ; AVX1-ONLY-NEXT: vandps %ymm8, %ymm11, %ymm10
9088 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm8, %ymm1
9089 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm10, %ymm1
9090 ; AVX1-ONLY-NEXT: vpsrlq $48, %xmm9, %xmm4
9091 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9092 ; AVX1-ONLY-NEXT: vpunpckhqdq {{.*#+}} xmm4 = xmm0[1],xmm4[1]
9093 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9094 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
9095 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9096 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[2,2,3,3]
9097 ; AVX1-ONLY-NEXT: vpsrldq {{.*#+}} xmm9 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
9098 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm8, %ymm8
9099 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm0 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
9100 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm0, %ymm4
9101 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm8, %ymm8
9102 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm8, %ymm4
9103 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9104 ; AVX1-ONLY-NEXT: vpsrld $16, %xmm3, %xmm8
9105 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9106 ; AVX1-ONLY-NEXT: vpunpckhdq {{.*#+}} xmm8 = xmm0[2],xmm8[2],xmm0[3],xmm8[3]
9107 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm5[4],xmm8[4],xmm5[5],xmm8[5],xmm5[6],xmm8[6],xmm5[7],xmm8[7]
9108 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm8, %xmm6
9109 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
9110 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[2,2,2,2,4,5,6,7]
9111 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,5,4]
9112 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm5[2,3,2,3]
9113 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm8[0],xmm0[1],xmm8[2,3,4,5,6,7]
9114 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm0
9115 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm4, %ymm4
9116 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm7, %ymm0
9117 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm4, %ymm0
9118 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9119 ; AVX1-ONLY-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
9120 ; AVX1-ONLY-NEXT: # xmm3 = xmm3[4],mem[4],xmm3[5],mem[5],xmm3[6],mem[6],xmm3[7],mem[7]
9121 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm4 = zero,zero,zero,zero,zero,zero,xmm3[0,1,2,3,4,5,6,7,8,9]
9122 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
9123 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9124 ; AVX1-ONLY-NEXT: vpslldq {{.*#+}} xmm4 = zero,zero,xmm6[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
9125 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
9126 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm4
9127 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm5 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535]
9128 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm5, %ymm3
9129 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm4, %ymm4
9130 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm4, %ymm3
9131 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
9132 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
9133 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
9134 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,2,2,2,4,5,6,7]
9135 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,5,4]
9136 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9137 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[2,3,2,3]
9138 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm6[1],xmm4[2,3,4,5,6,7]
9139 ; AVX1-ONLY-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm9[4],xmm8[5],xmm9[5],xmm8[6],xmm9[6],xmm8[7],xmm9[7]
9140 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,1,2,2,4,5,6,7]
9141 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[0,1,2,1]
9142 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm5[2,2,3,3]
9143 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1],xmm8[2,3],xmm6[4,5,6,7]
9144 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm4
9145 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm14, %ymm3
9146 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm14, %ymm4
9147 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm4, %ymm3
9148 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
9149 ; AVX1-ONLY-NEXT: vmovaps %ymm3, 736(%rax)
9150 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 640(%rax)
9151 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 544(%rax)
9152 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 512(%rax)
9153 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9154 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
9155 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9156 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
9157 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9158 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
9159 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9160 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
9161 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9162 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
9163 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9164 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
9165 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9166 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 704(%rax)
9167 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9168 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 672(%rax)
9169 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9170 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 608(%rax)
9171 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9172 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 576(%rax)
9173 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9174 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
9175 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9176 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
9177 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9178 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
9179 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9180 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
9181 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9182 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
9183 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9184 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 224(%rax)
9185 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9186 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
9187 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9188 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
9189 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9190 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
9191 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9192 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
9193 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9194 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 784(%rax)
9195 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9196 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 768(%rax)
9197 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9198 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 880(%rax)
9199 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9200 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 864(%rax)
9201 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9202 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 816(%rax)
9203 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9204 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 800(%rax)
9205 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9206 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 848(%rax)
9207 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9208 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 832(%rax)
9209 ; AVX1-ONLY-NEXT: addq $1496, %rsp # imm = 0x5D8
9210 ; AVX1-ONLY-NEXT: vzeroupper
9211 ; AVX1-ONLY-NEXT: retq
9212 ;
9213 ; AVX2-SLOW-LABEL: store_i16_stride7_vf64:
9214 ; AVX2-SLOW: # %bb.0:
9215 ; AVX2-SLOW-NEXT: subq $1688, %rsp # imm = 0x698
9216 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
9217 ; AVX2-SLOW-NEXT: vmovdqa 64(%r8), %ymm4
9218 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9219 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %ymm3
9220 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9221 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm0
9222 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9223 ; AVX2-SLOW-NEXT: vmovdqa 64(%r9), %ymm5
9224 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9225 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %ymm8
9226 ; AVX2-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9227 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm1
9228 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9229 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %ymm9
9230 ; AVX2-SLOW-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9231 ; AVX2-SLOW-NEXT: vmovdqa 32(%rax), %ymm7
9232 ; AVX2-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9233 ; AVX2-SLOW-NEXT: vmovdqa 64(%rax), %ymm6
9234 ; AVX2-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9235 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,3,u,u,u,4,u>
9236 ; AVX2-SLOW-NEXT: vpermd %ymm0, %ymm2, %ymm0
9237 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, %ymm11
9238 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,1,0,3,4,5,4,7]
9239 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm1[0,1,2,3,4,4,7,7,8,9,10,11,12,12,15,15]
9240 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u>
9241 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm0, %ymm2, %ymm0
9242 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = <3,u,u,3,u,u,u,4>
9243 ; AVX2-SLOW-NEXT: vpermd %ymm3, %ymm11, %ymm2
9244 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm8[0,1,0,3,4,5,4,7]
9245 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,4,4,7,7,8,9,10,11,12,12,15,15]
9246 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm2, %ymm3, %ymm2
9247 ; AVX2-SLOW-NEXT: vpermd %ymm4, %ymm11, %ymm3
9248 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm5[0,1,0,3,4,5,4,7]
9249 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm4 = ymm4[0,1,2,3,4,4,7,7,8,9,10,11,12,12,15,15]
9250 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm3, %ymm4, %ymm3
9251 ; AVX2-SLOW-NEXT: vpermd %ymm9, %ymm10, %ymm1
9252 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u>
9253 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm5
9254 ; AVX2-SLOW-NEXT: vpermd %ymm7, %ymm10, %ymm0
9255 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm2, %ymm0, %ymm1
9256 ; AVX2-SLOW-NEXT: vpermd %ymm6, %ymm10, %ymm0
9257 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm3, %ymm0, %ymm0
9258 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm2
9259 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9260 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,3,2,3,4,7,6,7]
9261 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
9262 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm3
9263 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9264 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <3,u,u,u,4,u,u,4>
9265 ; AVX2-SLOW-NEXT: vpermd %ymm3, %ymm4, %ymm3
9266 ; AVX2-SLOW-NEXT: vmovdqa %ymm4, %ymm7
9267 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255>
9268 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm3, %ymm2, %ymm2
9269 ; AVX2-SLOW-NEXT: vmovdqa %ymm4, %ymm6
9270 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm3
9271 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9272 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[0,0,0,0,4,5,6,7,8,8,8,8,12,13,14,15]
9273 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,7,7,7,7,8,9,10,11,15,15,15,15]
9274 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm4
9275 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9276 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = <u,3,u,u,u,4,u,u>
9277 ; AVX2-SLOW-NEXT: vpermd %ymm4, %ymm8, %ymm4
9278 ; AVX2-SLOW-NEXT: vmovdqa %ymm8, %ymm9
9279 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u>
9280 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm4, %ymm3, %ymm3
9281 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255>
9282 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
9283 ; AVX2-SLOW-NEXT: vmovdqa %ymm4, %ymm10
9284 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = [0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255]
9285 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm2, %ymm5, %ymm2
9286 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9287 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %ymm2
9288 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9289 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,3,2,3,4,7,6,7]
9290 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
9291 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %ymm4
9292 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9293 ; AVX2-SLOW-NEXT: vpermd %ymm4, %ymm7, %ymm4
9294 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm4, %ymm2, %ymm2
9295 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %ymm4
9296 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9297 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm4 = ymm4[0,0,0,0,4,5,6,7,8,8,8,8,12,13,14,15]
9298 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm4 = ymm4[0,1,2,3,7,7,7,7,8,9,10,11,15,15,15,15]
9299 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %ymm5
9300 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9301 ; AVX2-SLOW-NEXT: vpermd %ymm5, %ymm9, %ymm5
9302 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm5, %ymm4, %ymm4
9303 ; AVX2-SLOW-NEXT: vpblendvb %ymm10, %ymm2, %ymm4, %ymm2
9304 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm2, %ymm1, %ymm1
9305 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9306 ; AVX2-SLOW-NEXT: vmovdqa 64(%rsi), %ymm1
9307 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9308 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,3,2,3,4,7,6,7]
9309 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
9310 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdi), %ymm2
9311 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9312 ; AVX2-SLOW-NEXT: vpermd %ymm2, %ymm7, %ymm2
9313 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm2, %ymm1, %ymm1
9314 ; AVX2-SLOW-NEXT: vmovdqa 64(%rcx), %ymm2
9315 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9316 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,0,0,0,4,5,6,7,8,8,8,8,12,13,14,15]
9317 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,7,7,7,7,8,9,10,11,15,15,15,15]
9318 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdx), %ymm4
9319 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9320 ; AVX2-SLOW-NEXT: vpermd %ymm4, %ymm9, %ymm4
9321 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm4, %ymm2, %ymm2
9322 ; AVX2-SLOW-NEXT: vpblendvb %ymm10, %ymm1, %ymm2, %ymm1
9323 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm1, %ymm0, %ymm0
9324 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9325 ; AVX2-SLOW-NEXT: vmovdqa 96(%rsi), %ymm1
9326 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm1[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
9327 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, %ymm3
9328 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9329 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
9330 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdi), %ymm2
9331 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm2[1,1,1,1,5,5,5,5]
9332 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, %ymm5
9333 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
9334 ; AVX2-SLOW-NEXT: vmovdqa 96(%rcx), %ymm2
9335 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
9336 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, %ymm4
9337 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,0,0,4,4,4,4]
9338 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdx), %ymm6
9339 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm6[0,1,1,3,4,5,5,7]
9340 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
9341 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
9342 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
9343 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
9344 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9345 ; AVX2-SLOW-NEXT: vmovdqa 96(%r8), %ymm2
9346 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm2[0,0,2,1,4,4,6,5]
9347 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, %ymm7
9348 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9349 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
9350 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255>
9351 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9352 ; AVX2-SLOW-NEXT: vmovdqa 96(%r9), %ymm2
9353 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm2[1,1,2,2,4,5,6,7,9,9,10,10,12,13,14,15]
9354 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, %ymm8
9355 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
9356 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255>
9357 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9358 ; AVX2-SLOW-NEXT: vmovdqa 96(%rax), %ymm2
9359 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm2[0,1,1,3,4,5,5,7]
9360 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, %ymm9
9361 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
9362 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255]
9363 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9364 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9365 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
9366 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9367 ; AVX2-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9368 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm6[2,2,2,2,6,6,6,6]
9369 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
9370 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm3[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
9371 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
9372 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9373 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm5[2,2,2,2,6,6,6,6]
9374 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
9375 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
9376 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
9377 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
9378 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9379 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm7[1,2,2,3,5,6,6,7]
9380 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
9381 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0>
9382 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9383 ; AVX2-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9384 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm8[2,1,2,3,6,5,6,7]
9385 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
9386 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
9387 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255>
9388 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9389 ; AVX2-SLOW-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9390 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm9[0,1,2,2,4,5,6,6]
9391 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
9392 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255]
9393 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9394 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9395 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm3[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
9396 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
9397 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm5[3,3,3,3,7,7,7,7]
9398 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
9399 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm4[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
9400 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
9401 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm6[3,3,3,3,7,7,7,7]
9402 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
9403 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
9404 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
9405 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
9406 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9407 ; AVX2-SLOW-NEXT: vpbroadcastd 124(%r8), %ymm1
9408 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u>
9409 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9410 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm8[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
9411 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
9412 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u>
9413 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9414 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm9[2,3,3,3,6,7,7,7]
9415 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
9416 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0]
9417 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
9418 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9419 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm1
9420 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9421 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm0
9422 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9423 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
9424 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
9425 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm0, %xmm0
9426 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, %xmm3
9427 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,1,1]
9428 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm0
9429 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9430 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm2
9431 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, (%rsp) # 16-byte Spill
9432 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
9433 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,2,4,5,6,7]
9434 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
9435 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm0[0,1,1,3]
9436 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255>
9437 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm13
9438 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %xmm2
9439 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9440 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm1
9441 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9442 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
9443 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm1, %xmm1
9444 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
9445 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %xmm2
9446 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9447 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %xmm11
9448 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm11[0],xmm2[0],xmm11[1],xmm2[1],xmm11[2],xmm2[2],xmm11[3],xmm2[3]
9449 ; AVX2-SLOW-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9450 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,2,4,5,6,7]
9451 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,0,1,1]
9452 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
9453 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
9454 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9455 ; AVX2-SLOW-NEXT: vmovdqa 64(%rsi), %xmm2
9456 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9457 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdi), %xmm1
9458 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9459 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
9460 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm1, %xmm1
9461 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
9462 ; AVX2-SLOW-NEXT: vmovdqa 64(%rcx), %xmm2
9463 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9464 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdx), %xmm9
9465 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm9[0],xmm2[0],xmm9[1],xmm2[1],xmm9[2],xmm2[2],xmm9[3],xmm2[3]
9466 ; AVX2-SLOW-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9467 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,2,4,5,6,7]
9468 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,0,1,1]
9469 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
9470 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
9471 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9472 ; AVX2-SLOW-NEXT: vmovdqa 96(%rsi), %xmm2
9473 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9474 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdi), %xmm1
9475 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9476 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
9477 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm1, %xmm1
9478 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
9479 ; AVX2-SLOW-NEXT: vmovdqa 96(%rcx), %xmm5
9480 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdx), %xmm8
9481 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm8[0],xmm5[0],xmm8[1],xmm5[1],xmm8[2],xmm5[2],xmm8[3],xmm5[3]
9482 ; AVX2-SLOW-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9483 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9484 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,2,4,5,6,7]
9485 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,0,1,1]
9486 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
9487 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm4
9488 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm0
9489 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9490 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm1
9491 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9492 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
9493 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9494 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,1,3,2,4,5,6,7]
9495 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
9496 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
9497 ; AVX2-SLOW-NEXT: vpbroadcastd (%rax), %ymm2
9498 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
9499 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm1, %ymm2, %ymm1
9500 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %xmm0
9501 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9502 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %xmm2
9503 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9504 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
9505 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9506 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm0[0,1,3,2,4,5,6,7]
9507 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
9508 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
9509 ; AVX2-SLOW-NEXT: vpbroadcastd 32(%rax), %ymm3
9510 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm2, %ymm3, %ymm7
9511 ; AVX2-SLOW-NEXT: vmovdqa 64(%r9), %xmm0
9512 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9513 ; AVX2-SLOW-NEXT: vmovdqa 64(%r8), %xmm2
9514 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9515 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
9516 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm3[0,1,3,2,4,5,6,7]
9517 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
9518 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
9519 ; AVX2-SLOW-NEXT: vpbroadcastd 64(%rax), %ymm12
9520 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm2, %ymm12, %ymm0
9521 ; AVX2-SLOW-NEXT: vmovdqa 96(%r9), %xmm10
9522 ; AVX2-SLOW-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9523 ; AVX2-SLOW-NEXT: vmovdqa 96(%r8), %xmm2
9524 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9525 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm10[0],xmm2[1],xmm10[1],xmm2[2],xmm10[2],xmm2[3],xmm10[3]
9526 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm2[0,1,3,2,4,5,6,7]
9527 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[0,1,1,3]
9528 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,0,1]
9529 ; AVX2-SLOW-NEXT: vpbroadcastd 96(%rax), %ymm14
9530 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm15, %ymm14, %ymm6
9531 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm14 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
9532 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm13, %ymm1, %ymm1
9533 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9534 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm1 # 32-byte Folded Reload
9535 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9536 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
9537 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9538 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm4, %ymm6, %ymm0
9539 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9540 ; AVX2-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm4 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
9541 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9542 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm0, %xmm0
9543 ; AVX2-SLOW-NEXT: vpshufd $165, (%rsp), %xmm1 # 16-byte Folded Reload
9544 ; AVX2-SLOW-NEXT: # xmm1 = mem[1,1,2,2]
9545 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1],xmm1[2,3],xmm0[4],xmm1[5,6],xmm0[7]
9546 ; AVX2-SLOW-NEXT: vpshuflw $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
9547 ; AVX2-SLOW-NEXT: # xmm1 = mem[3,3,3,3,4,5,6,7]
9548 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
9549 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
9550 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm12[1,1,2,3]
9551 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm1 = xmm6[0,1],xmm1[2],xmm6[3,4],xmm1[5],xmm6[6,7]
9552 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm0[0,0,1,1]
9553 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
9554 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255>
9555 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm6, %ymm1, %ymm1
9556 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9557 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9558 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm1, %xmm1
9559 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm11[1,1,2,2]
9560 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm1 = xmm6[0],xmm1[1],xmm6[2,3],xmm1[4],xmm6[5,6],xmm1[7]
9561 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
9562 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm13[3,3,3,3,4,5,6,7]
9563 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,4,4,4]
9564 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9565 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm10[1,1,2,3]
9566 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm6 = xmm14[0,1],xmm6[2],xmm14[3,4],xmm6[5],xmm14[6,7]
9567 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
9568 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
9569 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm6, %ymm1
9570 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9571 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9572 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm1, %xmm1
9573 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm9[1,1,2,2]
9574 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm1 = xmm14[0],xmm1[1],xmm14[2,3],xmm1[4],xmm14[5,6],xmm1[7]
9575 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
9576 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm11[3,3,3,3,4,5,6,7]
9577 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,4,4,4]
9578 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
9579 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm7[1,1,2,3]
9580 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm14 = xmm15[0,1],xmm14[2],xmm15[3,4],xmm14[5],xmm15[6,7]
9581 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
9582 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,2,1]
9583 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm14, %ymm1
9584 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm5, %xmm4
9585 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm8[1,1,2,2]
9586 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm4 = xmm14[0],xmm4[1],xmm14[2,3],xmm4[4],xmm14[5,6],xmm4[7]
9587 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
9588 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm9[3,3,3,3,4,5,6,7]
9589 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,4,4,4]
9590 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9591 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm6[1,1,2,3]
9592 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} xmm14 = xmm15[0,1],xmm14[2],xmm15[3,4],xmm14[5],xmm15[6,7]
9593 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
9594 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,2,1]
9595 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm4, %ymm14, %ymm0
9596 ; AVX2-SLOW-NEXT: vpshufhw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
9597 ; AVX2-SLOW-NEXT: # xmm4 = mem[0,1,2,3,4,5,7,6]
9598 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[0,2,3,3]
9599 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,1,3]
9600 ; AVX2-SLOW-NEXT: vpbroadcastd 4(%rax), %ymm8
9601 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
9602 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm4, %ymm8, %ymm4
9603 ; AVX2-SLOW-NEXT: vpshufhw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
9604 ; AVX2-SLOW-NEXT: # xmm5 = mem[0,1,2,3,4,5,7,6]
9605 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,2,3,3]
9606 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,1,3]
9607 ; AVX2-SLOW-NEXT: vpbroadcastd 36(%rax), %ymm8
9608 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm5, %ymm8, %ymm5
9609 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,7,6]
9610 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,2,3,3]
9611 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
9612 ; AVX2-SLOW-NEXT: vpbroadcastd 68(%rax), %ymm8
9613 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm3, %ymm8, %ymm3
9614 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,7,6]
9615 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,2,3,3]
9616 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
9617 ; AVX2-SLOW-NEXT: vpbroadcastd 100(%rax), %ymm8
9618 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm2, %ymm8, %ymm2
9619 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
9620 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
9621 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9622 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm4 # 32-byte Folded Reload
9623 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9624 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm1, %ymm3, %ymm1
9625 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9626 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm0, %ymm2, %ymm0
9627 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9628 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm0 # 16-byte Reload
9629 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
9630 ; AVX2-SLOW-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
9631 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9632 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm12[4],xmm1[5],xmm12[5],xmm1[6],xmm12[6],xmm1[7],xmm12[7]
9633 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,3,3,4,5,6,7]
9634 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
9635 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
9636 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,4]
9637 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,3]
9638 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u>
9639 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm1, %ymm0, %ymm0
9640 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9641 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
9642 ; AVX2-SLOW-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
9643 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm13[4],xmm10[4],xmm13[5],xmm10[5],xmm13[6],xmm10[6],xmm13[7],xmm10[7]
9644 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,3,3,4,5,6,7]
9645 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
9646 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,1,2,3,4,5,6,7]
9647 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,4]
9648 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,3]
9649 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm2, %ymm1, %ymm1
9650 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
9651 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
9652 ; AVX2-SLOW-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
9653 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm11[4],xmm7[4],xmm11[5],xmm7[5],xmm11[6],xmm7[6],xmm11[7],xmm7[7]
9654 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,2,3,3,4,5,6,7]
9655 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
9656 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,1,2,3,4,5,6,7]
9657 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,5,4]
9658 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,1,3]
9659 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm4, %ymm2, %ymm2
9660 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9661 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm4 # 16-byte Folded Reload
9662 ; AVX2-SLOW-NEXT: # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
9663 ; AVX2-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm9[4],xmm6[4],xmm9[5],xmm6[5],xmm9[6],xmm6[6],xmm9[7],xmm6[7]
9664 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,2,3,3,4,5,6,7]
9665 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
9666 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[2,1,2,3,4,5,6,7]
9667 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,5,4]
9668 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,1,3]
9669 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm5, %ymm4, %ymm3
9670 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9671 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm4 # 16-byte Folded Reload
9672 ; AVX2-SLOW-NEXT: # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
9673 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm8 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
9674 ; AVX2-SLOW-NEXT: vpshufb %xmm8, %xmm4, %xmm4
9675 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
9676 ; AVX2-SLOW-NEXT: vpbroadcastd 8(%rax), %ymm5
9677 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
9678 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
9679 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9680 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
9681 ; AVX2-SLOW-NEXT: # xmm5 = xmm5[4],mem[4],xmm5[5],mem[5],xmm5[6],mem[6],xmm5[7],mem[7]
9682 ; AVX2-SLOW-NEXT: vpshufb %xmm8, %xmm5, %xmm5
9683 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,1,1]
9684 ; AVX2-SLOW-NEXT: vpbroadcastd 40(%rax), %ymm7
9685 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm5, %ymm7, %ymm5
9686 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
9687 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm7, %xmm7 # 16-byte Folded Reload
9688 ; AVX2-SLOW-NEXT: # xmm7 = xmm7[4],mem[4],xmm7[5],mem[5],xmm7[6],mem[6],xmm7[7],mem[7]
9689 ; AVX2-SLOW-NEXT: vpshufb %xmm8, %xmm7, %xmm7
9690 ; AVX2-SLOW-NEXT: vmovdqa %xmm8, %xmm9
9691 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,1,1]
9692 ; AVX2-SLOW-NEXT: vpbroadcastd 72(%rax), %ymm8
9693 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm7, %ymm8, %ymm7
9694 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
9695 ; AVX2-SLOW-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm8, %xmm8 # 16-byte Folded Reload
9696 ; AVX2-SLOW-NEXT: # xmm8 = xmm8[4],mem[4],xmm8[5],mem[5],xmm8[6],mem[6],xmm8[7],mem[7]
9697 ; AVX2-SLOW-NEXT: vpshufb %xmm9, %xmm8, %xmm8
9698 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,1,1]
9699 ; AVX2-SLOW-NEXT: vpbroadcastd 104(%rax), %ymm9
9700 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm8, %ymm9, %ymm6
9701 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
9702 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm4, %ymm0, %ymm0
9703 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9704 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm5, %ymm1, %ymm0
9705 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9706 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm7, %ymm2, %ymm0
9707 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9708 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm6, %ymm3, %ymm0
9709 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9710 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <3,u,u,u,4,u,u,4>
9711 ; AVX2-SLOW-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm1 # 32-byte Folded Reload
9712 ; AVX2-SLOW-NEXT: vpshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
9713 ; AVX2-SLOW-NEXT: # ymm2 = mem[0,3,2,3,4,7,6,7]
9714 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
9715 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255>
9716 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
9717 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <u,3,u,u,u,4,u,u>
9718 ; AVX2-SLOW-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
9719 ; AVX2-SLOW-NEXT: vpshuflw $0, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
9720 ; AVX2-SLOW-NEXT: # ymm3 = mem[0,0,0,0,4,5,6,7,8,8,8,8,12,13,14,15]
9721 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,7,7,7,7,8,9,10,11,15,15,15,15]
9722 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u>
9723 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm2
9724 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255>
9725 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
9726 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,3,u,u,u,4,u>
9727 ; AVX2-SLOW-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
9728 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255>
9729 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
9730 ; AVX2-SLOW-NEXT: vpshufd $196, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
9731 ; AVX2-SLOW-NEXT: # ymm2 = mem[0,1,0,3,4,5,4,7]
9732 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,4,7,7,8,9,10,11,12,12,15,15]
9733 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255>
9734 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
9735 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <3,u,u,3,u,u,u,4>
9736 ; AVX2-SLOW-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
9737 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = [0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255]
9738 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm0
9739 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
9740 ; AVX2-SLOW-NEXT: vpshuflw $249, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
9741 ; AVX2-SLOW-NEXT: # ymm2 = mem[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
9742 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,2,1,4,4,6,5]
9743 ; AVX2-SLOW-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
9744 ; AVX2-SLOW-NEXT: # ymm3 = mem[1,1,1,1,5,5,5,5]
9745 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7,8,9],ymm3[10],ymm2[11,12],ymm3[13],ymm2[14,15]
9746 ; AVX2-SLOW-NEXT: vpshuflw $233, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
9747 ; AVX2-SLOW-NEXT: # ymm3 = mem[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
9748 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,0,0,4,4,4,4]
9749 ; AVX2-SLOW-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
9750 ; AVX2-SLOW-NEXT: # ymm4 = mem[0,1,1,3,4,5,5,7]
9751 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7,8,9],ymm3[10],ymm4[11,12],ymm3[13],ymm4[14,15]
9752 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
9753 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,2]
9754 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
9755 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm1
9756 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9757 ; AVX2-SLOW-NEXT: vpshuflw $249, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
9758 ; AVX2-SLOW-NEXT: # ymm3 = mem[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
9759 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
9760 ; AVX2-SLOW-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
9761 ; AVX2-SLOW-NEXT: # ymm4 = mem[1,1,1,1,5,5,5,5]
9762 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7,8,9],ymm4[10],ymm3[11,12],ymm4[13],ymm3[14,15]
9763 ; AVX2-SLOW-NEXT: vpshuflw $233, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
9764 ; AVX2-SLOW-NEXT: # ymm4 = mem[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
9765 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,0,0,0,4,4,4,4]
9766 ; AVX2-SLOW-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
9767 ; AVX2-SLOW-NEXT: # ymm5 = mem[0,1,1,3,4,5,5,7]
9768 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7,8,9],ymm4[10],ymm5[11,12],ymm4[13],ymm5[14,15]
9769 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
9770 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,2]
9771 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm3, %ymm4, %ymm1
9772 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9773 ; AVX2-SLOW-NEXT: vpshuflw $249, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
9774 ; AVX2-SLOW-NEXT: # ymm4 = mem[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
9775 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
9776 ; AVX2-SLOW-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
9777 ; AVX2-SLOW-NEXT: # ymm5 = mem[1,1,1,1,5,5,5,5]
9778 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7,8,9],ymm5[10],ymm4[11,12],ymm5[13],ymm4[14,15]
9779 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
9780 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
9781 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,0,0,0,4,4,4,4]
9782 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9783 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm1[0,1,1,3,4,5,5,7]
9784 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm5 = ymm6[0,1],ymm5[2],ymm6[3,4],ymm5[5],ymm6[6,7,8,9],ymm5[10],ymm6[11,12],ymm5[13],ymm6[14,15]
9785 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
9786 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,2]
9787 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm4, %ymm5, %ymm0
9788 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9789 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9790 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm0[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
9791 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,0,0,0,4,4,4,4]
9792 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
9793 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm15[0,0,2,1,4,4,6,5]
9794 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0,1,2],ymm6[3],ymm5[4,5],ymm6[6],ymm5[7,8,9,10],ymm6[11],ymm5[12,13],ymm6[14],ymm5[15]
9795 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,3,3]
9796 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
9797 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm10[0,1,1,3,4,5,5,7]
9798 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
9799 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u>
9800 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm5, %ymm6, %ymm5
9801 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
9802 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm6 = ymm11[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
9803 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[0,0,0,0,4,4,4,4]
9804 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
9805 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm4[0,0,2,1,4,4,6,5]
9806 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1,2],ymm8[3],ymm6[4,5],ymm8[6],ymm6[7,8,9,10],ymm8[11],ymm6[12,13],ymm8[14],ymm6[15]
9807 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,3,3]
9808 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
9809 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm12[0,1,1,3,4,5,5,7]
9810 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,2,3]
9811 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm6, %ymm8, %ymm6
9812 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
9813 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm8 = ymm3[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
9814 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[0,0,0,0,4,4,4,4]
9815 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
9816 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm13[0,0,2,1,4,4,6,5]
9817 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3],ymm8[4,5],ymm9[6],ymm8[7,8,9,10],ymm9[11],ymm8[12,13],ymm9[14],ymm8[15]
9818 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,3,3]
9819 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
9820 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm14[0,1,1,3,4,5,5,7]
9821 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
9822 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm8, %ymm9, %ymm7
9823 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255]
9824 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm0 # 32-byte Folded Reload
9825 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9826 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm0 # 32-byte Folded Reload
9827 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9828 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm0 # 32-byte Folded Reload
9829 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9830 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
9831 ; AVX2-SLOW-NEXT: # ymm8 = mem[0,1,0,1]
9832 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9833 ; AVX2-SLOW-NEXT: vpshufb %ymm8, %ymm0, %ymm5
9834 ; AVX2-SLOW-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
9835 ; AVX2-SLOW-NEXT: # ymm6 = mem[2,2,2,2,6,6,6,6]
9836 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0,1],ymm6[2],ymm5[3,4],ymm6[5],ymm5[6,7,8,9],ymm6[10],ymm5[11,12],ymm6[13],ymm5[14,15]
9837 ; AVX2-SLOW-NEXT: vpshufhw $225, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
9838 ; AVX2-SLOW-NEXT: # ymm6 = mem[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
9839 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[2,2,2,2,6,6,6,6]
9840 ; AVX2-SLOW-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
9841 ; AVX2-SLOW-NEXT: # ymm7 = mem[2,2,2,2,6,6,6,6]
9842 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm7[0],ymm6[1],ymm7[2,3],ymm6[4],ymm7[5,6,7,8],ymm6[9],ymm7[10,11],ymm6[12],ymm7[13,14,15]
9843 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
9844 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
9845 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
9846 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm5, %ymm6, %ymm5
9847 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9848 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
9849 ; AVX2-SLOW-NEXT: vpshufb %ymm8, %ymm5, %ymm6
9850 ; AVX2-SLOW-NEXT: vmovdqa %ymm8, %ymm9
9851 ; AVX2-SLOW-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
9852 ; AVX2-SLOW-NEXT: # ymm7 = mem[2,2,2,2,6,6,6,6]
9853 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1],ymm7[2],ymm6[3,4],ymm7[5],ymm6[6,7,8,9],ymm7[10],ymm6[11,12],ymm7[13],ymm6[14,15]
9854 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
9855 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm7 = ymm5[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
9856 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[2,2,2,2,6,6,6,6]
9857 ; AVX2-SLOW-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
9858 ; AVX2-SLOW-NEXT: # ymm8 = mem[2,2,2,2,6,6,6,6]
9859 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm7 = ymm8[0],ymm7[1],ymm8[2,3],ymm7[4],ymm8[5,6,7,8],ymm7[9],ymm8[10,11],ymm7[12],ymm8[13,14,15]
9860 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
9861 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,2,2,3]
9862 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm6, %ymm7, %ymm6
9863 ; AVX2-SLOW-NEXT: vpshufb %ymm9, %ymm2, %ymm7
9864 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm1[2,2,2,2,6,6,6,6]
9865 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm7 = ymm7[0,1],ymm8[2],ymm7[3,4],ymm8[5],ymm7[6,7,8,9],ymm8[10],ymm7[11,12],ymm8[13],ymm7[14,15]
9866 ; AVX2-SLOW-NEXT: vpshufhw $225, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
9867 ; AVX2-SLOW-NEXT: # ymm8 = mem[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
9868 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[2,2,2,2,6,6,6,6]
9869 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9870 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm0[2,2,2,2,6,6,6,6]
9871 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2,3],ymm8[4],ymm9[5,6,7,8],ymm8[9],ymm9[10,11],ymm8[12],ymm9[13,14,15]
9872 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
9873 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,2,3]
9874 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm1 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
9875 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm7, %ymm8, %ymm7
9876 ; AVX2-SLOW-NEXT: vpshuflw $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
9877 ; AVX2-SLOW-NEXT: # ymm8 = mem[3,3,3,3,4,5,6,7,11,11,11,11,12,13,14,15]
9878 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm8 = ymm8[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
9879 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm15[1,2,2,3,5,6,6,7]
9880 ; AVX2-SLOW-NEXT: vmovdqa %ymm15, %ymm2
9881 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1],ymm8[2],ymm9[3,4],ymm8[5],ymm9[6,7,8,9],ymm8[10],ymm9[11,12],ymm8[13],ymm9[14,15]
9882 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,2]
9883 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm10[0,1,2,2,4,5,6,6]
9884 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,1,3,3]
9885 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255>
9886 ; AVX2-SLOW-NEXT: vpblendvb %ymm10, %ymm8, %ymm9, %ymm8
9887 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm9 = ymm11[3,3,3,3,4,5,6,7,11,11,11,11,12,13,14,15]
9888 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm9 = ymm9[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
9889 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm4[1,2,2,3,5,6,6,7]
9890 ; AVX2-SLOW-NEXT: vmovdqa %ymm4, %ymm1
9891 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm9 = ymm11[0,1],ymm9[2],ymm11[3,4],ymm9[5],ymm11[6,7,8,9],ymm9[10],ymm11[11,12],ymm9[13],ymm11[14,15]
9892 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,1,3,2]
9893 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm12[0,1,2,2,4,5,6,6]
9894 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,1,3,3]
9895 ; AVX2-SLOW-NEXT: vpblendvb %ymm10, %ymm9, %ymm11, %ymm9
9896 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm11 = ymm3[3,3,3,3,4,5,6,7,11,11,11,11,12,13,14,15]
9897 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm11 = ymm11[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
9898 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm13[1,2,2,3,5,6,6,7]
9899 ; AVX2-SLOW-NEXT: vmovdqa %ymm13, %ymm15
9900 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm11 = ymm12[0,1],ymm11[2],ymm12[3,4],ymm11[5],ymm12[6,7,8,9],ymm11[10],ymm12[11,12],ymm11[13],ymm12[14,15]
9901 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,1,3,2]
9902 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm14[0,1,2,2,4,5,6,6]
9903 ; AVX2-SLOW-NEXT: vmovdqa %ymm14, %ymm3
9904 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,1,3,3]
9905 ; AVX2-SLOW-NEXT: vpblendvb %ymm10, %ymm11, %ymm12, %ymm10
9906 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = [255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0]
9907 ; AVX2-SLOW-NEXT: vpblendvb %ymm11, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm4 # 32-byte Folded Reload
9908 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9909 ; AVX2-SLOW-NEXT: vpblendvb %ymm11, %ymm6, %ymm9, %ymm6
9910 ; AVX2-SLOW-NEXT: vpblendvb %ymm11, %ymm7, %ymm10, %ymm7
9911 ; AVX2-SLOW-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
9912 ; AVX2-SLOW-NEXT: # ymm8 = mem[3,3,3,3,7,7,7,7]
9913 ; AVX2-SLOW-NEXT: vpshufhw $235, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Folded Reload
9914 ; AVX2-SLOW-NEXT: # ymm9 = mem[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
9915 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[2,2,2,2,6,6,6,6]
9916 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3],ymm8[4,5],ymm9[6],ymm8[7,8,9,10],ymm9[11],ymm8[12,13],ymm9[14],ymm8[15]
9917 ; AVX2-SLOW-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Folded Reload
9918 ; AVX2-SLOW-NEXT: # ymm9 = mem[3,3,3,3,7,7,7,7]
9919 ; AVX2-SLOW-NEXT: vpshufhw $235, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Folded Reload
9920 ; AVX2-SLOW-NEXT: # ymm10 = mem[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
9921 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm10[2,2,2,2,6,6,6,6]
9922 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm9 = ymm9[0],ymm10[1],ymm9[2,3],ymm10[4],ymm9[5,6,7,8],ymm10[9],ymm9[10,11],ymm10[12],ymm9[13,14,15]
9923 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
9924 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
9925 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
9926 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm8, %ymm9, %ymm8
9927 ; AVX2-SLOW-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Folded Reload
9928 ; AVX2-SLOW-NEXT: # ymm9 = mem[3,3,3,3,7,7,7,7]
9929 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm10 = ymm5[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
9930 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm10[2,2,2,2,6,6,6,6]
9931 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3],ymm9[4,5],ymm10[6],ymm9[7,8,9,10],ymm10[11],ymm9[12,13],ymm10[14],ymm9[15]
9932 ; AVX2-SLOW-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Folded Reload
9933 ; AVX2-SLOW-NEXT: # ymm10 = mem[3,3,3,3,7,7,7,7]
9934 ; AVX2-SLOW-NEXT: vpshufhw $235, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
9935 ; AVX2-SLOW-NEXT: # ymm11 = mem[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
9936 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm11[2,2,2,2,6,6,6,6]
9937 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm10 = ymm10[0],ymm11[1],ymm10[2,3],ymm11[4],ymm10[5,6,7,8],ymm11[9],ymm10[10,11],ymm11[12],ymm10[13,14,15]
9938 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,1,3,3]
9939 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,2,2,3]
9940 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm9, %ymm10, %ymm9
9941 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm0[3,3,3,3,7,7,7,7]
9942 ; AVX2-SLOW-NEXT: vpshufhw $235, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
9943 ; AVX2-SLOW-NEXT: # ymm11 = mem[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
9944 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm11[2,2,2,2,6,6,6,6]
9945 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm10 = ymm10[0,1,2],ymm11[3],ymm10[4,5],ymm11[6],ymm10[7,8,9,10],ymm11[11],ymm10[12,13],ymm11[14],ymm10[15]
9946 ; AVX2-SLOW-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
9947 ; AVX2-SLOW-NEXT: # ymm11 = mem[3,3,3,3,7,7,7,7]
9948 ; AVX2-SLOW-NEXT: vpshufhw $235, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
9949 ; AVX2-SLOW-NEXT: # ymm12 = mem[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
9950 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
9951 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm11 = ymm11[0],ymm12[1],ymm11[2,3],ymm12[4],ymm11[5,6,7,8],ymm12[9],ymm11[10,11],ymm12[12],ymm11[13,14,15]
9952 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,1,3,3]
9953 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,2,2,3]
9954 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm10, %ymm11, %ymm10
9955 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm2[3,3,3,3,7,7,7,7]
9956 ; AVX2-SLOW-NEXT: vpshufhw $249, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
9957 ; AVX2-SLOW-NEXT: # ymm12 = mem[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
9958 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,3,6,6,6,7]
9959 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm11 = ymm12[0,1],ymm11[2],ymm12[3,4],ymm11[5],ymm12[6,7,8,9],ymm11[10],ymm12[11,12],ymm11[13],ymm12[14,15]
9960 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
9961 ; AVX2-SLOW-NEXT: vpshufd $254, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
9962 ; AVX2-SLOW-NEXT: # ymm12 = mem[2,3,3,3,6,7,7,7]
9963 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,1,3,2]
9964 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = <255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0>
9965 ; AVX2-SLOW-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
9966 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm1[3,3,3,3,7,7,7,7]
9967 ; AVX2-SLOW-NEXT: vpshufhw $249, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Folded Reload
9968 ; AVX2-SLOW-NEXT: # ymm14 = mem[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
9969 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm14[2,2,2,3,6,6,6,7]
9970 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm14[0,1],ymm12[2],ymm14[3,4],ymm12[5],ymm14[6,7,8,9],ymm12[10],ymm14[11,12],ymm12[13],ymm14[14,15]
9971 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,2,2,3]
9972 ; AVX2-SLOW-NEXT: vpshufd $254, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Folded Reload
9973 ; AVX2-SLOW-NEXT: # ymm14 = mem[2,3,3,3,6,7,7,7]
9974 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,3,2]
9975 ; AVX2-SLOW-NEXT: vpblendvb %ymm13, %ymm12, %ymm14, %ymm12
9976 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm15[3,3,3,3,7,7,7,7]
9977 ; AVX2-SLOW-NEXT: vpshufhw $249, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
9978 ; AVX2-SLOW-NEXT: # ymm15 = mem[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
9979 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm15[2,2,2,3,6,6,6,7]
9980 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm14 = ymm15[0,1],ymm14[2],ymm15[3,4],ymm14[5],ymm15[6,7,8,9],ymm14[10],ymm15[11,12],ymm14[13],ymm15[14,15]
9981 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
9982 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm3[2,3,3,3,6,7,7,7]
9983 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,3,2]
9984 ; AVX2-SLOW-NEXT: vpblendvb %ymm13, %ymm14, %ymm15, %ymm13
9985 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm14 = [0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0]
9986 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm8, %ymm11, %ymm8
9987 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm9, %ymm12, %ymm9
9988 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm10, %ymm13, %ymm10
9989 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
9990 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9991 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 544(%rax)
9992 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9993 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 320(%rax)
9994 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9995 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 96(%rax)
9996 ; AVX2-SLOW-NEXT: vmovdqa %ymm10, 640(%rax)
9997 ; AVX2-SLOW-NEXT: vmovdqa %ymm7, 608(%rax)
9998 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9999 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 576(%rax)
10000 ; AVX2-SLOW-NEXT: vmovdqa %ymm9, 416(%rax)
10001 ; AVX2-SLOW-NEXT: vmovdqa %ymm6, 384(%rax)
10002 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10003 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 352(%rax)
10004 ; AVX2-SLOW-NEXT: vmovdqa %ymm8, 192(%rax)
10005 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10006 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 160(%rax)
10007 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10008 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 128(%rax)
10009 ; AVX2-SLOW-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
10010 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 768(%rax)
10011 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10012 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 736(%rax)
10013 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10014 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 704(%rax)
10015 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10016 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 672(%rax)
10017 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10018 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 512(%rax)
10019 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10020 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 480(%rax)
10021 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10022 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 448(%rax)
10023 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10024 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 288(%rax)
10025 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10026 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 256(%rax)
10027 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10028 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 224(%rax)
10029 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10030 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 64(%rax)
10031 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10032 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
10033 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10034 ; AVX2-SLOW-NEXT: vmovaps %ymm0, (%rax)
10035 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10036 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 864(%rax)
10037 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10038 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 832(%rax)
10039 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10040 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 800(%rax)
10041 ; AVX2-SLOW-NEXT: addq $1688, %rsp # imm = 0x698
10042 ; AVX2-SLOW-NEXT: vzeroupper
10043 ; AVX2-SLOW-NEXT: retq
10044 ;
10045 ; AVX2-FAST-LABEL: store_i16_stride7_vf64:
10046 ; AVX2-FAST: # %bb.0:
10047 ; AVX2-FAST-NEXT: subq $1256, %rsp # imm = 0x4E8
10048 ; AVX2-FAST-NEXT: vmovdqa 96(%rdi), %ymm0
10049 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10050 ; AVX2-FAST-NEXT: vmovdqa 96(%rsi), %ymm1
10051 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10052 ; AVX2-FAST-NEXT: vmovdqa 96(%rdx), %ymm2
10053 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10054 ; AVX2-FAST-NEXT: vmovdqa 96(%rcx), %ymm4
10055 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10056 ; AVX2-FAST-NEXT: vmovdqa 96(%r8), %ymm3
10057 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10058 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <3,u,u,u,4,u,u,4>
10059 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm5, %ymm0
10060 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[0,1,0,1,14,15,14,15,8,9,10,11,12,13,14,15,16,17,16,17,30,31,30,31,24,25,26,27,28,29,30,31]
10061 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255>
10062 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm0, %ymm1, %ymm0
10063 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,3,u,u,u,4,u,u>
10064 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm1
10065 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm4[0,1,0,1,0,1,0,1,14,15,14,15,14,15,14,15,16,17,16,17,16,17,16,17,30,31,30,31,30,31,30,31]
10066 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u>
10067 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm1, %ymm2, %ymm1
10068 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255>
10069 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10070 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,3,u,u,u,4,u>
10071 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm1, %ymm1
10072 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255>
10073 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10074 ; AVX2-FAST-NEXT: vmovdqa 96(%r9), %ymm1
10075 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10076 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5,6,7,0,1,0,1,14,15,14,15,16,17,18,19,20,21,22,23,16,17,16,17,30,31,30,31]
10077 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255>
10078 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10079 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
10080 ; AVX2-FAST-NEXT: vmovdqa 96(%rax), %ymm1
10081 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10082 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <3,u,u,3,u,u,u,4>
10083 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm2, %ymm1
10084 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255]
10085 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10086 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10087 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm3
10088 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm2
10089 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
10090 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm2, %ymm0
10091 ; AVX2-FAST-NEXT: vmovdqa %ymm2, %ymm10
10092 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm3[0,0,2,1,4,4,6,5]
10093 ; AVX2-FAST-NEXT: vmovdqa %ymm3, %ymm11
10094 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm2[3],ymm0[4,5],ymm2[6],ymm0[7,8,9,10],ymm2[11],ymm0[12,13],ymm2[14],ymm0[15]
10095 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,3,3]
10096 ; AVX2-FAST-NEXT: vmovdqa (%rax), %ymm4
10097 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,1,4,5,4,5,5,7]
10098 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm2, %ymm3
10099 ; AVX2-FAST-NEXT: vmovdqa %ymm4, %ymm12
10100 ; AVX2-FAST-NEXT: vmovdqa %ymm2, %ymm5
10101 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u>
10102 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm3, %ymm3
10103 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %ymm6
10104 ; AVX2-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10105 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %ymm4
10106 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm4, %ymm0
10107 ; AVX2-FAST-NEXT: vmovdqa %ymm4, %ymm7
10108 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10109 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm6[0,0,2,1,4,4,6,5]
10110 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm4[3],ymm0[4,5],ymm4[6],ymm0[7,8,9,10],ymm4[11],ymm0[12,13],ymm4[14],ymm0[15]
10111 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,3,3]
10112 ; AVX2-FAST-NEXT: vmovdqa 32(%rax), %ymm4
10113 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10114 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm5, %ymm4
10115 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm4, %ymm6
10116 ; AVX2-FAST-NEXT: vmovdqa 64(%r9), %ymm0
10117 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10118 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm0
10119 ; AVX2-FAST-NEXT: vmovdqa 64(%r8), %ymm1
10120 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10121 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
10122 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7,8,9,10],ymm1[11],ymm0[12,13],ymm1[14],ymm0[15]
10123 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,3,3]
10124 ; AVX2-FAST-NEXT: vmovdqa 64(%rax), %ymm1
10125 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10126 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm5, %ymm1
10127 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm4
10128 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm5
10129 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10130 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm1
10131 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10132 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
10133 ; AVX2-FAST-NEXT: # ymm0 = mem[0,1,0,1]
10134 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm1, %ymm1
10135 ; AVX2-FAST-NEXT: vmovdqa %ymm0, %ymm8
10136 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm5[1,1,1,1,5,5,5,5]
10137 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
10138 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm15
10139 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm13
10140 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm2 = ymm13[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
10141 ; AVX2-FAST-NEXT: vmovdqu %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10142 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm5 = ymm15[0,1,1,3,4,5,5,7]
10143 ; AVX2-FAST-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10144 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm2[2],ymm5[3,4],ymm2[5],ymm5[6,7,8,9],ymm2[10],ymm5[11,12],ymm2[13],ymm5[14,15]
10145 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
10146 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,2]
10147 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
10148 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
10149 ; AVX2-FAST-NEXT: vmovdqa %ymm0, %ymm5
10150 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255]
10151 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm1, %ymm3, %ymm0
10152 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10153 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %ymm0
10154 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10155 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %ymm1
10156 ; AVX2-FAST-NEXT: vmovdqu %ymm1, (%rsp) # 32-byte Spill
10157 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm1, %ymm1
10158 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm0[1,1,1,1,5,5,5,5]
10159 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm3[2],ymm1[3,4],ymm3[5],ymm1[6,7,8,9],ymm3[10],ymm1[11,12],ymm3[13],ymm1[14,15]
10160 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %ymm0
10161 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10162 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %ymm3
10163 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10164 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm3[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
10165 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm9 = ymm0[0,1,1,3,4,5,5,7]
10166 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm9[0,1],ymm3[2],ymm9[3,4],ymm3[5],ymm9[6,7,8,9],ymm3[10],ymm9[11,12],ymm3[13],ymm9[14,15]
10167 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
10168 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,2]
10169 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm1, %ymm3, %ymm1
10170 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm1, %ymm6, %ymm0
10171 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10172 ; AVX2-FAST-NEXT: vmovdqa 64(%rdi), %ymm0
10173 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10174 ; AVX2-FAST-NEXT: vmovdqa 64(%rsi), %ymm1
10175 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10176 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm1, %ymm1
10177 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm9 = ymm0[1,1,1,1,5,5,5,5]
10178 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm9[2],ymm1[3,4],ymm9[5],ymm1[6,7,8,9],ymm9[10],ymm1[11,12],ymm9[13],ymm1[14,15]
10179 ; AVX2-FAST-NEXT: vmovdqa 64(%rdx), %ymm3
10180 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10181 ; AVX2-FAST-NEXT: vmovdqa 64(%rcx), %ymm0
10182 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10183 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm0 = ymm0[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
10184 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm14 = ymm3[0,1,1,3,4,5,5,7]
10185 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm14[0,1],ymm0[2],ymm14[3,4],ymm0[5],ymm14[6,7,8,9],ymm0[10],ymm14[11,12],ymm0[13],ymm14[14,15]
10186 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
10187 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,2]
10188 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm1, %ymm0, %ymm0
10189 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm4, %ymm0
10190 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10191 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,u,u,u,u,u,u,u,u,8,9,u,u,u,u,u,u,u,u,22,23,u,u,u,u,24,25,u,u,u,u>
10192 ; AVX2-FAST-NEXT: vmovdqa %ymm10, %ymm5
10193 ; AVX2-FAST-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10194 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm10, %ymm1
10195 ; AVX2-FAST-NEXT: vmovdqa %ymm11, %ymm6
10196 ; AVX2-FAST-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10197 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm11[1,2,2,3,5,6,6,7]
10198 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
10199 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
10200 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [4,5,2,2,6,6,6,6]
10201 ; AVX2-FAST-NEXT: vmovdqa %ymm12, %ymm8
10202 ; AVX2-FAST-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10203 ; AVX2-FAST-NEXT: vpermd %ymm12, %ymm9, %ymm2
10204 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255>
10205 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm1, %ymm2, %ymm12
10206 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm7, %ymm2
10207 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
10208 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm14 = ymm3[1,2,2,3,5,6,6,7]
10209 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm14[0,1],ymm2[2],ymm14[3,4],ymm2[5],ymm14[6,7,8,9],ymm2[10],ymm14[11,12],ymm2[13],ymm14[14,15]
10210 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,1,3,2]
10211 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
10212 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm9, %ymm14
10213 ; AVX2-FAST-NEXT: vmovdqa %ymm9, %ymm1
10214 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm2, %ymm14, %ymm2
10215 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
10216 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm10, %ymm0
10217 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
10218 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm14 = ymm11[1,2,2,3,5,6,6,7]
10219 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm14[0,1],ymm0[2],ymm14[3,4],ymm0[5],ymm14[6,7,8,9],ymm0[10],ymm14[11,12],ymm0[13],ymm14[14,15]
10220 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,2]
10221 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
10222 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm1, %ymm14
10223 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm0, %ymm14, %ymm0
10224 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10225 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
10226 ; AVX2-FAST-NEXT: # ymm1 = mem[0,1,0,1]
10227 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm13, %ymm4
10228 ; AVX2-FAST-NEXT: vmovdqa %ymm1, %ymm13
10229 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm14 = ymm15[2,2,2,2,6,6,6,6]
10230 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm14[2],ymm4[3,4],ymm14[5],ymm4[6,7,8,9],ymm14[10],ymm4[11,12],ymm14[13],ymm4[14,15]
10231 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u>
10232 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10233 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm14
10234 ; AVX2-FAST-NEXT: vmovdqa %ymm1, %ymm0
10235 ; AVX2-FAST-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
10236 ; AVX2-FAST-NEXT: # ymm15 = mem[2,2,2,2,6,6,6,6]
10237 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm14 = ymm15[0],ymm14[1],ymm15[2,3],ymm14[4],ymm15[5,6,7,8],ymm14[9],ymm15[10,11],ymm14[12],ymm15[13,14,15]
10238 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
10239 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,2,2,3]
10240 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
10241 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm4, %ymm14, %ymm4
10242 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = [255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0]
10243 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm4, %ymm12, %ymm1
10244 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10245 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
10246 ; AVX2-FAST-NEXT: vpshufb %ymm13, %ymm12, %ymm1
10247 ; AVX2-FAST-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
10248 ; AVX2-FAST-NEXT: # ymm4 = mem[2,2,2,2,6,6,6,6]
10249 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm4[2],ymm1[3,4],ymm4[5],ymm1[6,7,8,9],ymm4[10],ymm1[11,12],ymm4[13],ymm1[14,15]
10250 ; AVX2-FAST-NEXT: vmovdqu (%rsp), %ymm4 # 32-byte Reload
10251 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm4, %ymm4
10252 ; AVX2-FAST-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
10253 ; AVX2-FAST-NEXT: # ymm15 = mem[2,2,2,2,6,6,6,6]
10254 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm15[0],ymm4[1],ymm15[2,3],ymm4[4],ymm15[5,6,7,8],ymm4[9],ymm15[10,11],ymm4[12],ymm15[13,14,15]
10255 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
10256 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
10257 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
10258 ; AVX2-FAST-NEXT: vpblendvb %ymm15, %ymm1, %ymm4, %ymm1
10259 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm1, %ymm2, %ymm1
10260 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10261 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10262 ; AVX2-FAST-NEXT: vpshufb %ymm13, %ymm1, %ymm1
10263 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
10264 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm13[2,2,2,2,6,6,6,6]
10265 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
10266 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
10267 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm2, %ymm2
10268 ; AVX2-FAST-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
10269 ; AVX2-FAST-NEXT: # ymm4 = mem[2,2,2,2,6,6,6,6]
10270 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2,3],ymm2[4],ymm4[5,6,7,8],ymm2[9],ymm4[10,11],ymm2[12],ymm4[13,14,15]
10271 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
10272 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,2,3]
10273 ; AVX2-FAST-NEXT: vpblendvb %ymm15, %ymm1, %ymm2, %ymm1
10274 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10275 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm1, %ymm0, %ymm0
10276 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10277 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
10278 ; AVX2-FAST-NEXT: # ymm2 = mem[0,1,0,1]
10279 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm5, %ymm0
10280 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm6[3,3,3,3,7,7,7,7]
10281 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
10282 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
10283 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [6,7,3,3,7,7,6,7]
10284 ; AVX2-FAST-NEXT: vpermd %ymm8, %ymm5, %ymm1
10285 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0>
10286 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm1
10287 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10288 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm0, %ymm0
10289 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm14 = ymm3[3,3,3,3,7,7,7,7]
10290 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm14[2],ymm0[3,4],ymm14[5],ymm0[6,7,8,9],ymm14[10],ymm0[11,12],ymm14[13],ymm0[14,15]
10291 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
10292 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm5, %ymm14
10293 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm0, %ymm14, %ymm0
10294 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm10, %ymm2
10295 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm14 = ymm11[3,3,3,3,7,7,7,7]
10296 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm14[2],ymm2[3,4],ymm14[5],ymm2[6,7,8,9],ymm14[10],ymm2[11,12],ymm14[13],ymm2[14,15]
10297 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
10298 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm5, %ymm14
10299 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm2, %ymm14, %ymm2
10300 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10301 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
10302 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
10303 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm3, %ymm4
10304 ; AVX2-FAST-NEXT: vmovdqa %ymm5, %ymm6
10305 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
10306 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm14 = ymm8[3,3,3,3,7,7,7,7]
10307 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm14[0,1,2],ymm4[3],ymm14[4,5],ymm4[6],ymm14[7,8,9,10],ymm4[11],ymm14[12,13],ymm4[14],ymm14[15]
10308 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u>
10309 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
10310 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm14
10311 ; AVX2-FAST-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
10312 ; AVX2-FAST-NEXT: # ymm15 = mem[3,3,3,3,7,7,7,7]
10313 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm14 = ymm15[0],ymm14[1],ymm15[2,3],ymm14[4],ymm15[5,6,7,8],ymm14[9],ymm15[10,11],ymm14[12],ymm15[13,14,15]
10314 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,1,3,3]
10315 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,2,2,3]
10316 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
10317 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm4, %ymm14, %ymm4
10318 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0]
10319 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm4, %ymm1, %ymm1
10320 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10321 ; AVX2-FAST-NEXT: vmovdqu (%rsp), %ymm5 # 32-byte Reload
10322 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm5, %ymm1
10323 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
10324 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm14[3,3,3,3,7,7,7,7]
10325 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm4[0,1,2],ymm1[3],ymm4[4,5],ymm1[6],ymm4[7,8,9,10],ymm1[11],ymm4[12,13],ymm1[14],ymm4[15]
10326 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm12, %ymm4
10327 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
10328 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm15 = ymm11[3,3,3,3,7,7,7,7]
10329 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm15[0],ymm4[1],ymm15[2,3],ymm4[4],ymm15[5,6,7,8],ymm4[9],ymm15[10,11],ymm4[12],ymm15[13,14,15]
10330 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
10331 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
10332 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm1, %ymm4, %ymm1
10333 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm1, %ymm0, %ymm0
10334 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10335 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
10336 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm7, %ymm0
10337 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
10338 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm6[3,3,3,3,7,7,7,7]
10339 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
10340 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
10341 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm15, %ymm1
10342 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm13[3,3,3,3,7,7,7,7]
10343 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm4[0],ymm1[1],ymm4[2,3],ymm1[4],ymm4[5,6,7,8],ymm1[9],ymm4[10,11],ymm1[12],ymm4[13,14,15]
10344 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
10345 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
10346 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10347 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10348 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
10349 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10350 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <3,u,u,u,4,u,u,4>
10351 ; AVX2-FAST-NEXT: vpermd %ymm8, %ymm2, %ymm0
10352 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,1,0,1,14,15,14,15,8,9,10,11,12,13,14,15,16,17,16,17,30,31,30,31,24,25,26,27,28,29,30,31]
10353 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm3, %ymm1
10354 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255>
10355 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
10356 ; AVX2-FAST-NEXT: vpermd %ymm14, %ymm2, %ymm1
10357 ; AVX2-FAST-NEXT: vmovdqa %ymm2, %ymm4
10358 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm5, %ymm2
10359 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm1, %ymm2, %ymm1
10360 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm4, %ymm2
10361 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm7, %ymm3
10362 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm2, %ymm3, %ymm2
10363 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <u,3,u,u,u,4,u,u>
10364 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm3 # 32-byte Folded Reload
10365 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [0,1,0,1,0,1,0,1,14,15,14,15,14,15,14,15,16,17,16,17,16,17,16,17,30,31,30,31,30,31,30,31]
10366 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
10367 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm4, %ymm4
10368 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u>
10369 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm3, %ymm4, %ymm3
10370 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm5, %ymm4
10371 ; AVX2-FAST-NEXT: vmovdqa %ymm5, %ymm6
10372 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm12, %ymm5
10373 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm4, %ymm5, %ymm4
10374 ; AVX2-FAST-NEXT: vpermd %ymm13, %ymm6, %ymm5
10375 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm15, %ymm6
10376 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm5, %ymm6, %ymm5
10377 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255>
10378 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm0
10379 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm1, %ymm4, %ymm1
10380 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm2, %ymm5, %ymm2
10381 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,3,u,u,u,4,u>
10382 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm3 # 32-byte Folded Reload
10383 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,1,2,3,4,5,6,7,0,1,0,1,14,15,14,15,16,17,18,19,20,21,22,23,16,17,16,17,30,31,30,31]
10384 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
10385 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm4, %ymm4
10386 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u>
10387 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
10388 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm4 # 32-byte Folded Reload
10389 ; AVX2-FAST-NEXT: vmovdqa %ymm6, %ymm7
10390 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
10391 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm6, %ymm6
10392 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm4, %ymm6, %ymm4
10393 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm6 # 32-byte Folded Reload
10394 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
10395 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm7, %ymm7
10396 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm6, %ymm7, %ymm5
10397 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = <3,u,u,3,u,u,u,4>
10398 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm6 # 32-byte Folded Reload
10399 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u>
10400 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm3, %ymm6, %ymm3
10401 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm6 # 32-byte Folded Reload
10402 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm4, %ymm6, %ymm4
10403 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm6 # 32-byte Folded Reload
10404 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm5, %ymm6, %ymm5
10405 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255]
10406 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm0
10407 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10408 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm1, %ymm4, %ymm0
10409 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10410 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm2, %ymm5, %ymm0
10411 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10412 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10413 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
10414 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
10415 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[1,1,1,1,5,5,5,5]
10416 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
10417 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
10418 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm6[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
10419 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
10420 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm4[0,1,1,3,4,5,5,7]
10421 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
10422 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
10423 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
10424 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
10425 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10426 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,2,1,6,5,6,5]
10427 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
10428 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm1, %ymm1
10429 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255>
10430 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10431 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
10432 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm1 = ymm8[1,1,2,2,4,5,6,7,9,9,10,10,12,13,14,15]
10433 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
10434 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255>
10435 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10436 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
10437 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,4,5,4,5,5,7]
10438 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm1, %ymm1
10439 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255]
10440 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10441 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10442 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
10443 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm4[2,2,2,2,6,6,6,6]
10444 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
10445 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm5[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
10446 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm3[2,2,2,2,6,6,6,6]
10447 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
10448 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
10449 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
10450 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
10451 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10452 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [5,6,2,3,6,7,5,6]
10453 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm1, %ymm1
10454 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0>
10455 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10456 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,u,u,u,u,u,u,u,u]
10457 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
10458 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255>
10459 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10460 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [4,5,2,2,6,6,6,6]
10461 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm1, %ymm1
10462 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255]
10463 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10464 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10465 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29]
10466 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[3,3,3,3,7,7,7,7]
10467 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
10468 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm6[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
10469 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm4[3,3,3,3,7,7,7,7]
10470 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
10471 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
10472 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
10473 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
10474 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10475 ; AVX2-FAST-NEXT: vpbroadcastd 124(%r8), %ymm1
10476 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u>
10477 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10478 ; AVX2-FAST-NEXT: vpshufhw {{.*#+}} ymm1 = ymm8[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
10479 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
10480 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u>
10481 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10482 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [6,7,3,3,7,7,6,7]
10483 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm1, %ymm1
10484 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0]
10485 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10486 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10487 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm1
10488 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm0
10489 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10490 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
10491 ; AVX2-FAST-NEXT: vmovdqa %xmm1, %xmm15
10492 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10493 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
10494 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
10495 ; AVX2-FAST-NEXT: vmovdqa %xmm1, %xmm4
10496 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm0[0,0,1,1]
10497 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm0
10498 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm1
10499 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10500 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
10501 ; AVX2-FAST-NEXT: vmovdqa %xmm0, %xmm9
10502 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10503 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
10504 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm1, %xmm1
10505 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,1,1,3]
10506 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255>
10507 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm2, %ymm3, %ymm10
10508 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %xmm3
10509 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10510 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm2
10511 ; AVX2-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10512 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
10513 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm2
10514 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
10515 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %xmm5
10516 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %xmm3
10517 ; AVX2-FAST-NEXT: vmovdqa %xmm3, (%rsp) # 16-byte Spill
10518 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
10519 ; AVX2-FAST-NEXT: vmovdqa %xmm5, %xmm13
10520 ; AVX2-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10521 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm3, %xmm3
10522 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
10523 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm2, %ymm3, %ymm2
10524 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10525 ; AVX2-FAST-NEXT: vmovdqa 64(%rsi), %xmm2
10526 ; AVX2-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10527 ; AVX2-FAST-NEXT: vmovdqa 64(%rdi), %xmm3
10528 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10529 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
10530 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm2
10531 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
10532 ; AVX2-FAST-NEXT: vmovdqa 64(%rcx), %xmm3
10533 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10534 ; AVX2-FAST-NEXT: vmovdqa 64(%rdx), %xmm8
10535 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3]
10536 ; AVX2-FAST-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10537 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm3, %xmm3
10538 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
10539 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm2, %ymm3, %ymm2
10540 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10541 ; AVX2-FAST-NEXT: vmovdqa 96(%rcx), %xmm2
10542 ; AVX2-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10543 ; AVX2-FAST-NEXT: vmovdqa 96(%rdx), %xmm3
10544 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10545 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
10546 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm2, %xmm0
10547 ; AVX2-FAST-NEXT: vmovdqa 96(%rsi), %xmm3
10548 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10549 ; AVX2-FAST-NEXT: vmovdqa 96(%rdi), %xmm2
10550 ; AVX2-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10551 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
10552 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm2
10553 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
10554 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
10555 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm2, %ymm0, %ymm7
10556 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm0
10557 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10558 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm1
10559 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10560 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
10561 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10562 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
10563 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm0, %xmm1
10564 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
10565 ; AVX2-FAST-NEXT: vpbroadcastd (%rax), %ymm3
10566 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
10567 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm1, %ymm3, %ymm6
10568 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %xmm0
10569 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10570 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %xmm1
10571 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10572 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
10573 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm12, %xmm3
10574 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
10575 ; AVX2-FAST-NEXT: vpbroadcastd 32(%rax), %ymm5
10576 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm3, %ymm5, %ymm1
10577 ; AVX2-FAST-NEXT: vmovdqa 64(%r9), %xmm0
10578 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10579 ; AVX2-FAST-NEXT: vmovdqa 64(%r8), %xmm3
10580 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10581 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
10582 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm5, %xmm3
10583 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
10584 ; AVX2-FAST-NEXT: vpbroadcastd 64(%rax), %ymm11
10585 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm3, %ymm11, %ymm0
10586 ; AVX2-FAST-NEXT: vmovdqa 96(%r9), %xmm11
10587 ; AVX2-FAST-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10588 ; AVX2-FAST-NEXT: vmovdqa 96(%r8), %xmm3
10589 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10590 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm11[0],xmm3[1],xmm11[1],xmm3[2],xmm11[2],xmm3[3],xmm11[3]
10591 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm3, %xmm2
10592 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
10593 ; AVX2-FAST-NEXT: vpbroadcastd 96(%rax), %ymm14
10594 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm2, %ymm14, %ymm2
10595 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
10596 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm10, %ymm6, %ymm6
10597 ; AVX2-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10598 ; AVX2-FAST-NEXT: vpblendvb %ymm4, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
10599 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10600 ; AVX2-FAST-NEXT: vpblendvb %ymm4, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
10601 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10602 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm7, %ymm2, %ymm0
10603 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10604 ; AVX2-FAST-NEXT: vpbroadcastq {{.*#+}} xmm1 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
10605 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm9, %xmm0
10606 ; AVX2-FAST-NEXT: vpshufd $165, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
10607 ; AVX2-FAST-NEXT: # xmm2 = mem[1,1,2,2]
10608 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0],xmm0[1],xmm2[2,3],xmm0[4],xmm2[5,6],xmm0[7]
10609 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} xmm6 = [6,7,8,9,6,7,8,9,6,7,8,9,6,7,8,9]
10610 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm15, %xmm2
10611 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10612 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm4 = xmm15[1,1,2,3]
10613 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm2 = xmm4[0,1],xmm2[2],xmm4[3,4],xmm2[5],xmm4[6,7]
10614 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
10615 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
10616 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255>
10617 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm0, %ymm2, %ymm0
10618 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10619 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm13, %xmm0
10620 ; AVX2-FAST-NEXT: vpshufd $165, (%rsp), %xmm2 # 16-byte Folded Reload
10621 ; AVX2-FAST-NEXT: # xmm2 = mem[1,1,2,2]
10622 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0],xmm0[1],xmm2[2,3],xmm0[4],xmm2[5,6],xmm0[7]
10623 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
10624 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm13, %xmm2
10625 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10626 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm14 = xmm10[1,1,2,3]
10627 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm2 = xmm14[0,1],xmm2[2],xmm14[3,4],xmm2[5],xmm14[6,7]
10628 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
10629 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
10630 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm0, %ymm2, %ymm4
10631 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10632 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
10633 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm14 = xmm8[1,1,2,2]
10634 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm14[0],xmm0[1],xmm14[2,3],xmm0[4],xmm14[5,6],xmm0[7]
10635 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
10636 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm8, %xmm14
10637 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
10638 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm11 = xmm9[1,1,2,3]
10639 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1],xmm14[2],xmm11[3,4],xmm14[5],xmm11[6,7]
10640 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
10641 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,2,1]
10642 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm0, %ymm11, %ymm2
10643 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10644 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm1
10645 ; AVX2-FAST-NEXT: vpshufd $165, {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
10646 ; AVX2-FAST-NEXT: # xmm11 = mem[1,1,2,2]
10647 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm1 = xmm11[0],xmm1[1],xmm11[2,3],xmm1[4],xmm11[5,6],xmm1[7]
10648 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
10649 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm8, %xmm6
10650 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10651 ; AVX2-FAST-NEXT: vpshufd {{.*#+}} xmm11 = xmm0[1,1,2,3]
10652 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} xmm6 = xmm11[0,1],xmm6[2],xmm11[3,4],xmm6[5],xmm11[6,7]
10653 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
10654 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
10655 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm1, %ymm6, %ymm1
10656 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
10657 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
10658 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm7, %xmm7
10659 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,1,3]
10660 ; AVX2-FAST-NEXT: vpbroadcastd 4(%rax), %ymm11
10661 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
10662 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm7, %ymm11, %ymm7
10663 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm12, %xmm11
10664 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,1,1,3]
10665 ; AVX2-FAST-NEXT: vpbroadcastd 36(%rax), %ymm12
10666 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm11, %ymm12, %ymm11
10667 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm5, %xmm5
10668 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,1,3]
10669 ; AVX2-FAST-NEXT: vpbroadcastd 68(%rax), %ymm12
10670 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm5, %ymm12, %ymm5
10671 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm3, %xmm3
10672 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
10673 ; AVX2-FAST-NEXT: vpbroadcastd 100(%rax), %ymm6
10674 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm3, %ymm6, %ymm3
10675 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
10676 ; AVX2-FAST-NEXT: vpblendvb %ymm6, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm7 # 32-byte Folded Reload
10677 ; AVX2-FAST-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10678 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm4, %ymm11, %ymm11
10679 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm2, %ymm5, %ymm12
10680 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm1, %ymm3, %ymm14
10681 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10682 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm15[4],xmm1[5],xmm15[5],xmm1[6],xmm15[6],xmm1[7],xmm15[7]
10683 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
10684 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
10685 ; AVX2-FAST-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
10686 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm4 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
10687 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm1, %xmm1
10688 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,3]
10689 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,2,3,3,4,5,6,7]
10690 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
10691 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u>
10692 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm1, %ymm2, %ymm1
10693 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm13[4],xmm10[4],xmm13[5],xmm10[5],xmm13[6],xmm10[6],xmm13[7],xmm10[7]
10694 ; AVX2-FAST-NEXT: vmovdqa (%rsp), %xmm3 # 16-byte Reload
10695 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
10696 ; AVX2-FAST-NEXT: # xmm3 = xmm3[4],mem[4],xmm3[5],mem[5],xmm3[6],mem[6],xmm3[7],mem[7]
10697 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm2
10698 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,3]
10699 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,2,3,3,4,5,6,7]
10700 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
10701 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm2, %ymm3, %ymm2
10702 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
10703 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm9[4],xmm3[5],xmm9[5],xmm3[6],xmm9[6],xmm3[7],xmm9[7]
10704 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10705 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm6 # 16-byte Folded Reload
10706 ; AVX2-FAST-NEXT: # xmm6 = xmm6[4],mem[4],xmm6[5],mem[5],xmm6[6],mem[6],xmm6[7],mem[7]
10707 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm3, %xmm3
10708 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,3]
10709 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,2,3,3,4,5,6,7]
10710 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
10711 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm3, %ymm6, %ymm3
10712 ; AVX2-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm0[4],xmm8[5],xmm0[5],xmm8[6],xmm0[6],xmm8[7],xmm0[7]
10713 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm6, %xmm4
10714 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10715 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm6 # 16-byte Folded Reload
10716 ; AVX2-FAST-NEXT: # xmm6 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
10717 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,1,3]
10718 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,2,3,3,4,5,6,7]
10719 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,2,1]
10720 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm4, %ymm6, %ymm4
10721 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10722 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
10723 ; AVX2-FAST-NEXT: # xmm5 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
10724 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
10725 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm5, %xmm5
10726 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,1,1]
10727 ; AVX2-FAST-NEXT: vpbroadcastd 8(%rax), %ymm6
10728 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
10729 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm5, %ymm6, %ymm5
10730 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10731 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm6 # 16-byte Folded Reload
10732 ; AVX2-FAST-NEXT: # xmm6 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
10733 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm6, %xmm6
10734 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,1,1]
10735 ; AVX2-FAST-NEXT: vpbroadcastd 40(%rax), %ymm8
10736 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm6, %ymm8, %ymm6
10737 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10738 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm8 # 16-byte Folded Reload
10739 ; AVX2-FAST-NEXT: # xmm8 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
10740 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm8, %xmm8
10741 ; AVX2-FAST-NEXT: vmovdqa %xmm9, %xmm10
10742 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,1,1]
10743 ; AVX2-FAST-NEXT: vpbroadcastd 72(%rax), %ymm9
10744 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm8, %ymm9, %ymm8
10745 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10746 ; AVX2-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm9 # 16-byte Folded Reload
10747 ; AVX2-FAST-NEXT: # xmm9 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
10748 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm9, %xmm9
10749 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,1,1]
10750 ; AVX2-FAST-NEXT: vpbroadcastd 104(%rax), %ymm10
10751 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm9, %ymm10, %ymm7
10752 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
10753 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm5, %ymm1, %ymm1
10754 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm6, %ymm2, %ymm2
10755 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm8, %ymm3, %ymm3
10756 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm7, %ymm4, %ymm4
10757 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
10758 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10759 ; AVX2-FAST-NEXT: vmovaps %ymm5, 544(%rax)
10760 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10761 ; AVX2-FAST-NEXT: vmovaps %ymm5, 320(%rax)
10762 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10763 ; AVX2-FAST-NEXT: vmovaps %ymm5, 96(%rax)
10764 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10765 ; AVX2-FAST-NEXT: vmovaps %ymm5, 640(%rax)
10766 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10767 ; AVX2-FAST-NEXT: vmovaps %ymm5, 608(%rax)
10768 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10769 ; AVX2-FAST-NEXT: vmovaps %ymm5, 576(%rax)
10770 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10771 ; AVX2-FAST-NEXT: vmovaps %ymm5, 416(%rax)
10772 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10773 ; AVX2-FAST-NEXT: vmovaps %ymm5, 384(%rax)
10774 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10775 ; AVX2-FAST-NEXT: vmovaps %ymm5, 352(%rax)
10776 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10777 ; AVX2-FAST-NEXT: vmovaps %ymm5, 192(%rax)
10778 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10779 ; AVX2-FAST-NEXT: vmovaps %ymm5, 160(%rax)
10780 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10781 ; AVX2-FAST-NEXT: vmovaps %ymm5, 128(%rax)
10782 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
10783 ; AVX2-FAST-NEXT: vmovaps %ymm5, 768(%rax)
10784 ; AVX2-FAST-NEXT: vmovdqa %ymm4, 736(%rax)
10785 ; AVX2-FAST-NEXT: vmovdqa %ymm14, 704(%rax)
10786 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10787 ; AVX2-FAST-NEXT: vmovaps %ymm0, 672(%rax)
10788 ; AVX2-FAST-NEXT: vmovdqa %ymm3, 512(%rax)
10789 ; AVX2-FAST-NEXT: vmovdqa %ymm12, 480(%rax)
10790 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10791 ; AVX2-FAST-NEXT: vmovaps %ymm0, 448(%rax)
10792 ; AVX2-FAST-NEXT: vmovdqa %ymm2, 288(%rax)
10793 ; AVX2-FAST-NEXT: vmovdqa %ymm11, 256(%rax)
10794 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10795 ; AVX2-FAST-NEXT: vmovaps %ymm0, 224(%rax)
10796 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 64(%rax)
10797 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10798 ; AVX2-FAST-NEXT: vmovaps %ymm0, 32(%rax)
10799 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10800 ; AVX2-FAST-NEXT: vmovaps %ymm0, (%rax)
10801 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10802 ; AVX2-FAST-NEXT: vmovaps %ymm0, 864(%rax)
10803 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10804 ; AVX2-FAST-NEXT: vmovaps %ymm0, 832(%rax)
10805 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
10806 ; AVX2-FAST-NEXT: vmovaps %ymm0, 800(%rax)
10807 ; AVX2-FAST-NEXT: addq $1256, %rsp # imm = 0x4E8
10808 ; AVX2-FAST-NEXT: vzeroupper
10809 ; AVX2-FAST-NEXT: retq
10811 ; AVX2-FAST-PERLANE-LABEL: store_i16_stride7_vf64:
10812 ; AVX2-FAST-PERLANE: # %bb.0:
10813 ; AVX2-FAST-PERLANE-NEXT: subq $1544, %rsp # imm = 0x608
10814 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdi), %ymm0
10815 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10816 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rsi), %ymm2
10817 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10818 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdx), %ymm3
10819 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, (%rsp) # 32-byte Spill
10820 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rcx), %ymm5
10821 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r8), %ymm7
10822 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10823 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm14 = <3,u,u,u,4,u,u,4>
10824 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm0, %ymm14, %ymm1
10825 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = [0,1,0,1,14,15,14,15,8,9,10,11,12,13,14,15,16,17,16,17,30,31,30,31,24,25,26,27,28,29,30,31]
10826 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm15, %ymm2, %ymm4
10827 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255>
10828 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm1, %ymm4, %ymm1
10829 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = <u,3,u,u,u,4,u,u>
10830 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm3, %ymm10, %ymm4
10831 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = [0,1,0,1,0,1,0,1,14,15,14,15,14,15,14,15,16,17,16,17,16,17,16,17,30,31,30,31,30,31,30,31]
10832 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm5, %ymm6
10833 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u>
10834 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm4, %ymm6, %ymm4
10835 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255>
10836 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm1, %ymm4, %ymm1
10837 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,3,u,u,u,4,u>
10838 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm7, %ymm0, %ymm4
10839 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255>
10840 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm1, %ymm4, %ymm1
10841 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r9), %ymm4
10842 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = [0,1,2,3,4,5,6,7,0,1,0,1,14,15,14,15,16,17,18,19,20,21,22,23,16,17,16,17,30,31,30,31]
10843 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm4, %ymm6
10844 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255>
10845 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm1, %ymm6, %ymm1
10846 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
10847 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rax), %ymm6
10848 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <3,u,u,3,u,u,u,4>
10849 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm6, %ymm0, %ymm7
10850 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm8 = [0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255]
10851 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm1, %ymm7, %ymm1
10852 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10853 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm0
10854 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10855 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm3
10856 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10857 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm0, %ymm14, %ymm1
10858 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm15, %ymm3, %ymm7
10859 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm1, %ymm7, %ymm3
10860 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %ymm1
10861 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10862 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %ymm0
10863 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10864 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm1, %ymm14, %ymm7
10865 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm15, %ymm0, %ymm8
10866 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm7, %ymm8, %ymm7
10867 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdi), %ymm0
10868 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10869 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm0, %ymm14, %ymm8
10870 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rsi), %ymm0
10871 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10872 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm15, %ymm0, %ymm14
10873 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm8, %ymm14, %ymm1
10874 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm0
10875 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10876 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm2
10877 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10878 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm0, %ymm10, %ymm8
10879 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm2, %ymm14
10880 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm8, %ymm14, %ymm8
10881 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %ymm2
10882 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10883 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %ymm0
10884 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10885 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm2, %ymm10, %ymm14
10886 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm0, %ymm15
10887 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm14, %ymm15, %ymm14
10888 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdx), %ymm0
10889 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10890 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm0, %ymm10, %ymm10
10891 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rcx), %ymm0
10892 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10893 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm0, %ymm11
10894 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm10, %ymm11, %ymm10
10895 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm3, %ymm8, %ymm3
10896 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm7, %ymm14, %ymm7
10897 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm1, %ymm10, %ymm0
10898 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm1
10899 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10900 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm2
10901 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10902 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,3,u,u,u,4,u>
10903 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm1, %ymm12, %ymm8
10904 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm2, %ymm10
10905 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u,u,u,u,u,u,u,255,255,0,0,u,u,u,u>
10906 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm8, %ymm10, %ymm8
10907 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %ymm2
10908 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10909 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %ymm1
10910 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10911 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm2, %ymm12, %ymm10
10912 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm12, %ymm2
10913 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm1, %ymm12
10914 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm10, %ymm12, %ymm10
10915 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r8), %ymm1
10916 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10917 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm1, %ymm2, %ymm12
10918 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r9), %ymm1
10919 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10920 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm1, %ymm9
10921 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm12, %ymm9, %ymm9
10922 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %ymm1
10923 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10924 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <3,u,u,3,u,u,u,4>
10925 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm1, %ymm2, %ymm11
10926 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = <0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u>
10927 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm8, %ymm11, %ymm8
10928 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rax), %ymm1
10929 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10930 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm1, %ymm2, %ymm11
10931 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm10, %ymm11, %ymm10
10932 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rax), %ymm1
10933 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10934 ; AVX2-FAST-PERLANE-NEXT: vpermd %ymm1, %ymm2, %ymm11
10935 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm9, %ymm11, %ymm9
10936 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = [0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255]
10937 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm3, %ymm8, %ymm1
10938 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10939 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm7, %ymm10, %ymm1
10940 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10941 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm0, %ymm9, %ymm0
10942 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10943 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
10944 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
10945 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
10946 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[1,1,1,1,5,5,5,5]
10947 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
10948 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm1 = ymm5[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
10949 ; AVX2-FAST-PERLANE-NEXT: vmovdqu (%rsp), %ymm8 # 32-byte Reload
10950 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm7 = ymm8[0,1,1,3,4,5,5,7]
10951 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm7[0,1],ymm1[2],ymm7[3,4],ymm1[5],ymm7[6,7,8,9],ymm1[10],ymm7[11,12],ymm1[13],ymm7[14,15]
10952 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
10953 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
10954 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
10955 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10956 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
10957 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm10[0,0,2,1,4,4,6,5]
10958 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
10959 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = <255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255>
10960 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm0, %ymm1, %ymm0
10961 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} ymm1 = ymm4[1,1,2,2,4,5,6,7,9,9,10,10,12,13,14,15]
10962 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
10963 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = <255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255>
10964 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm0, %ymm1, %ymm0
10965 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm6[0,1,1,3,4,5,5,7]
10966 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
10967 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = [255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255]
10968 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm0, %ymm1, %ymm0
10969 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10970 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
10971 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm8[2,2,2,2,6,6,6,6]
10972 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
10973 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm9[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
10974 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm7 = ymm3[2,2,2,2,6,6,6,6]
10975 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm7[0],ymm1[1],ymm7[2,3],ymm1[4],ymm7[5,6,7,8],ymm1[9],ymm7[10,11],ymm1[12],ymm7[13,14,15]
10976 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
10977 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
10978 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
10979 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
10980 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm10[1,2,2,3,5,6,6,7]
10981 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
10982 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = <255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0>
10983 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm0, %ymm1, %ymm0
10984 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,u,u,u,u,u,u,u,u]
10985 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
10986 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = <255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255>
10987 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm0, %ymm1, %ymm0
10988 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm6[0,1,2,2,4,5,6,6]
10989 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,3]
10990 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = [255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255]
10991 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm0, %ymm1, %ymm0
10992 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10993 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29]
10994 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[3,3,3,3,7,7,7,7]
10995 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
10996 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm5[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
10997 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm2 = ymm8[3,3,3,3,7,7,7,7]
10998 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
10999 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,1,3,3]
11000 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,2,3]
11001 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
11002 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
11003 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 124(%r8), %ymm1
11004 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u,255,255,255,255,255,255,255,255,0,0,u,u,u,u>
11005 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
11006 ; AVX2-FAST-PERLANE-NEXT: vpshufhw {{.*#+}} ymm1 = ymm4[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
11007 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[3,3,3,3]
11008 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u,255,255,255,255,255,255,255,255,255,255,0,0,u,u>
11009 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
11010 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm1 = ymm6[2,3,3,3,6,7,7,7]
11011 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
11012 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255,255,0,0]
11013 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
11014 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11015 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm1
11016 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, (%rsp) # 16-byte Spill
11017 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm0
11018 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11019 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
11020 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
11021 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm0, %xmm0
11022 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, %xmm4
11023 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm0[0,0,1,1]
11024 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm12
11025 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm1
11026 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11027 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm12[0],xmm1[1],xmm12[1],xmm1[2],xmm12[2],xmm1[3],xmm12[3]
11028 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11029 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
11030 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm1, %xmm1
11031 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,1,1,3]
11032 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm1 = <255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255>
11033 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm2, %ymm3, %ymm2
11034 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11035 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %xmm3
11036 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11037 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm2
11038 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11039 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
11040 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm2, %xmm2
11041 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
11042 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %xmm14
11043 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %xmm11
11044 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm11[0],xmm14[0],xmm11[1],xmm14[1],xmm11[2],xmm14[2],xmm11[3],xmm14[3]
11045 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11046 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11047 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm3, %xmm3
11048 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
11049 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm2, %ymm3, %ymm2
11050 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11051 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rsi), %xmm2
11052 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11053 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdi), %xmm3
11054 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11055 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
11056 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm2, %xmm2
11057 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
11058 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rcx), %xmm3
11059 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11060 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdx), %xmm10
11061 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm10[0],xmm3[0],xmm10[1],xmm3[1],xmm10[2],xmm3[2],xmm10[3],xmm3[3]
11062 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11063 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm3, %xmm3
11064 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
11065 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm2, %ymm3, %ymm2
11066 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11067 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rcx), %xmm2
11068 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11069 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdx), %xmm3
11070 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11071 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
11072 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm2, %xmm0
11073 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rsi), %xmm3
11074 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11075 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdi), %xmm2
11076 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11077 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
11078 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm2, %xmm2
11079 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,1]
11080 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
11081 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm2, %ymm0, %ymm7
11082 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm0
11083 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11084 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm1
11085 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11086 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
11087 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
11088 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm9, %xmm1
11089 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
11090 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd (%rax), %ymm2
11091 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u>
11092 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm1, %ymm2, %ymm5
11093 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %xmm0
11094 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11095 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %xmm1
11096 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11097 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
11098 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm8, %xmm2
11099 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
11100 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 32(%rax), %ymm3
11101 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm2, %ymm3, %ymm1
11102 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r9), %xmm0
11103 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11104 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%r8), %xmm2
11105 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11106 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
11107 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm3, %xmm2
11108 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
11109 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 64(%rax), %ymm13
11110 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm2, %ymm13, %ymm0
11111 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r8), %xmm13
11112 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11113 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%r9), %xmm2
11114 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11115 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm13[0],xmm2[0],xmm13[1],xmm2[1],xmm13[2],xmm2[2],xmm13[3],xmm2[3]
11116 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm2, %xmm4
11117 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
11118 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 96(%rax), %ymm15
11119 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm4, %ymm15, %ymm4
11120 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255]
11121 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm5 # 32-byte Folded Reload
11122 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11123 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
11124 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11125 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11126 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11127 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm7, %ymm4, %ymm0
11128 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11129 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq {{.*#+}} xmm1 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
11130 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm12, %xmm0
11131 ; AVX2-FAST-PERLANE-NEXT: vpshufd $165, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
11132 ; AVX2-FAST-PERLANE-NEXT: # xmm4 = mem[1,1,2,2]
11133 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm0 = xmm4[0],xmm0[1],xmm4[2,3],xmm0[4],xmm4[5,6],xmm0[7]
11134 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd {{.*#+}} xmm5 = [6,7,8,9,6,7,8,9,6,7,8,9,6,7,8,9]
11135 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm4 # 16-byte Reload
11136 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm4, %xmm4
11137 ; AVX2-FAST-PERLANE-NEXT: vpshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Folded Reload
11138 ; AVX2-FAST-PERLANE-NEXT: # xmm6 = mem[1,1,2,3]
11139 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm4 = xmm6[0,1],xmm4[2],xmm6[3,4],xmm4[5],xmm6[6,7]
11140 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
11141 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
11142 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = <255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255>
11143 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm0, %ymm4, %ymm6
11144 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm14, %xmm0
11145 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm4 = xmm11[1,1,2,2]
11146 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm0 = xmm4[0],xmm0[1],xmm4[2,3],xmm0[4],xmm4[5,6],xmm0[7]
11147 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
11148 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm4, %xmm4
11149 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
11150 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm15 = xmm14[1,1,2,3]
11151 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm4 = xmm15[0,1],xmm4[2],xmm15[3,4],xmm4[5],xmm15[6,7]
11152 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
11153 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
11154 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm0, %ymm4, %ymm4
11155 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11156 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm0, %xmm0
11157 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm15 = xmm10[1,1,2,2]
11158 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm0 = xmm15[0],xmm0[1],xmm15[2,3],xmm0[4],xmm15[5,6],xmm0[7]
11159 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
11160 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm10, %xmm15
11161 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
11162 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm13 = xmm12[1,1,2,3]
11163 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1],xmm15[2],xmm13[3,4],xmm15[5],xmm13[6,7]
11164 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
11165 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,0,2,1]
11166 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm0, %ymm13, %ymm0
11167 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
11168 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm13, %xmm1
11169 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11170 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm13 = xmm15[1,1,2,2]
11171 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm1 = xmm13[0],xmm1[1],xmm13[2,3],xmm1[4],xmm13[5,6],xmm1[7]
11172 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
11173 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm11, %xmm5
11174 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
11175 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} xmm13 = xmm10[1,1,2,3]
11176 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} xmm5 = xmm13[0,1],xmm5[2],xmm13[3,4],xmm5[5],xmm13[6,7]
11177 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
11178 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,2,1]
11179 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm1, %ymm5, %ymm1
11180 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
11181 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm9, %xmm7
11182 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,1,3]
11183 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 4(%rax), %ymm9
11184 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u>
11185 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm7, %ymm9, %ymm7
11186 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm8, %xmm8
11187 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,1,1,3]
11188 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 36(%rax), %ymm9
11189 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm8, %ymm9, %ymm8
11190 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm3, %xmm3
11191 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
11192 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 68(%rax), %ymm9
11193 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm3, %ymm9, %ymm3
11194 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm2, %xmm2
11195 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,1,3]
11196 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 100(%rax), %ymm5
11197 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm2, %ymm5, %ymm2
11198 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = [255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255]
11199 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm6, %ymm7, %ymm6
11200 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11201 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm4, %ymm8, %ymm4
11202 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11203 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm0, %ymm3, %ymm0
11204 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11205 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm1, %ymm2, %ymm0
11206 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11207 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm0 # 16-byte Reload
11208 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
11209 ; AVX2-FAST-PERLANE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
11210 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11211 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
11212 ; AVX2-FAST-PERLANE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
11213 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
11214 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm0
11215 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,3]
11216 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,3,3,4,5,6,7]
11217 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
11218 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u>
11219 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm0
11220 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11221 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm14[4],xmm1[5],xmm14[5],xmm1[6],xmm14[6],xmm1[7],xmm14[7]
11222 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
11223 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
11224 ; AVX2-FAST-PERLANE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
11225 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm1, %xmm1
11226 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,3]
11227 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,2,3,3,4,5,6,7]
11228 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
11229 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm1, %ymm2, %ymm1
11230 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
11231 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm12[4],xmm2[5],xmm12[5],xmm2[6],xmm12[6],xmm2[7],xmm12[7]
11232 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
11233 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
11234 ; AVX2-FAST-PERLANE-NEXT: # xmm5 = xmm5[4],mem[4],xmm5[5],mem[5],xmm5[6],mem[6],xmm5[7],mem[7]
11235 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm2, %xmm2
11236 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,3]
11237 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,2,3,3,4,5,6,7]
11238 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,2,1]
11239 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm2, %ymm5, %ymm2
11240 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
11241 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm3
11242 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm5 # 16-byte Folded Reload
11243 ; AVX2-FAST-PERLANE-NEXT: # xmm5 = xmm15[4],mem[4],xmm15[5],mem[5],xmm15[6],mem[6],xmm15[7],mem[7]
11244 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,3]
11245 ; AVX2-FAST-PERLANE-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,2,3,3,4,5,6,7]
11246 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,2,1]
11247 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm3, %ymm5, %ymm3
11248 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
11249 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm4 # 16-byte Folded Reload
11250 ; AVX2-FAST-PERLANE-NEXT: # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
11251 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm8 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
11252 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm8, %xmm4, %xmm4
11253 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
11254 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 8(%rax), %ymm5
11255 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = <255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255>
11256 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
11257 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
11258 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
11259 ; AVX2-FAST-PERLANE-NEXT: # xmm5 = xmm5[4],mem[4],xmm5[5],mem[5],xmm5[6],mem[6],xmm5[7],mem[7]
11260 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm8, %xmm5, %xmm5
11261 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,1,1]
11262 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 40(%rax), %ymm7
11263 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm5, %ymm7, %ymm5
11264 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
11265 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm7, %xmm7 # 16-byte Folded Reload
11266 ; AVX2-FAST-PERLANE-NEXT: # xmm7 = xmm7[4],mem[4],xmm7[5],mem[5],xmm7[6],mem[6],xmm7[7],mem[7]
11267 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm8, %xmm7, %xmm7
11268 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm8, %xmm9
11269 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,1,1]
11270 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 72(%rax), %ymm8
11271 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm7, %ymm8, %ymm7
11272 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
11273 ; AVX2-FAST-PERLANE-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm8, %xmm8 # 16-byte Folded Reload
11274 ; AVX2-FAST-PERLANE-NEXT: # xmm8 = xmm8[4],mem[4],xmm8[5],mem[5],xmm8[6],mem[6],xmm8[7],mem[7]
11275 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm9, %xmm8, %xmm8
11276 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,1,1]
11277 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd 104(%rax), %ymm9
11278 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm8, %ymm9, %ymm6
11279 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm8 = [255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255,255,255,0,0,0,0,0,0,0,0,255,255,255,255]
11280 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm4, %ymm0, %ymm0
11281 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11282 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm5, %ymm1, %ymm0
11283 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
11284 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm7, %ymm2, %ymm0
11285 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11286 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm6, %ymm3, %ymm0
11287 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11288 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11289 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
11290 ; AVX2-FAST-PERLANE-NEXT: # ymm1 = mem[0,1,0,1]
11291 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm0, %ymm4
11292 ; AVX2-FAST-PERLANE-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
11293 ; AVX2-FAST-PERLANE-NEXT: # ymm5 = mem[1,1,1,1,5,5,5,5]
11294 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7,8,9],ymm5[10],ymm4[11,12],ymm5[13],ymm4[14,15]
11295 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11296 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm5 = ymm0[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
11297 ; AVX2-FAST-PERLANE-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
11298 ; AVX2-FAST-PERLANE-NEXT: # ymm6 = mem[0,1,1,3,4,5,5,7]
11299 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm5 = ymm6[0,1],ymm5[2],ymm6[3,4],ymm5[5],ymm6[6,7,8,9],ymm5[10],ymm6[11,12],ymm5[13],ymm6[14,15]
11300 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
11301 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,1,3,2]
11302 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0>
11303 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm4, %ymm5, %ymm0
11304 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11305 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11306 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm0, %ymm5
11307 ; AVX2-FAST-PERLANE-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
11308 ; AVX2-FAST-PERLANE-NEXT: # ymm6 = mem[1,1,1,1,5,5,5,5]
11309 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0,1],ymm6[2],ymm5[3,4],ymm6[5],ymm5[6,7,8,9],ymm6[10],ymm5[11,12],ymm6[13],ymm5[14,15]
11310 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11311 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm6 = ymm0[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
11312 ; AVX2-FAST-PERLANE-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
11313 ; AVX2-FAST-PERLANE-NEXT: # ymm7 = mem[0,1,1,3,4,5,5,7]
11314 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm6 = ymm7[0,1],ymm6[2],ymm7[3,4],ymm6[5],ymm7[6,7,8,9],ymm6[10],ymm7[11,12],ymm6[13],ymm7[14,15]
11315 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
11316 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,2]
11317 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm5, %ymm6, %ymm0
11318 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11319 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, %ymm3
11320 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
11321 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm12, %ymm6
11322 ; AVX2-FAST-PERLANE-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
11323 ; AVX2-FAST-PERLANE-NEXT: # ymm7 = mem[1,1,1,1,5,5,5,5]
11324 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1],ymm7[2],ymm6[3,4],ymm7[5],ymm6[6,7,8,9],ymm7[10],ymm6[11,12],ymm7[13],ymm6[14,15]
11325 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
11326 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm2[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
11327 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
11328 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm8 = ymm4[0,1,1,3,4,5,5,7]
11329 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm7 = ymm8[0,1],ymm7[2],ymm8[3,4],ymm7[5],ymm8[6,7,8,9],ymm7[10],ymm8[11,12],ymm7[13],ymm8[14,15]
11330 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
11331 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,3,2]
11332 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm6, %ymm7, %ymm0
11333 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastd {{.*#+}} ymm7 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
11334 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
11335 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm6, %ymm8
11336 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
11337 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm9 = ymm14[0,0,2,1,4,4,6,5]
11338 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3],ymm8[4,5],ymm9[6],ymm8[7,8,9,10],ymm9[11],ymm8[12,13],ymm9[14],ymm8[15]
11339 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,3,3]
11340 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
11341 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm9 = ymm13[0,1,1,3,4,5,5,7]
11342 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
11343 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u>
11344 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm8, %ymm9, %ymm8
11345 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11346 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm1, %ymm9
11347 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
11348 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm11 = ymm5[0,0,2,1,4,4,6,5]
11349 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm9 = ymm9[0,1,2],ymm11[3],ymm9[4,5],ymm11[6],ymm9[7,8,9,10],ymm11[11],ymm9[12,13],ymm11[14],ymm9[15]
11350 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,3,3]
11351 ; AVX2-FAST-PERLANE-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
11352 ; AVX2-FAST-PERLANE-NEXT: # ymm11 = mem[0,1,1,3,4,5,5,7]
11353 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,2,2,3]
11354 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm9, %ymm11, %ymm9
11355 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
11356 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm15, %ymm7
11357 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
11358 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm11 = ymm3[0,0,2,1,4,4,6,5]
11359 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm7 = ymm7[0,1,2],ymm11[3],ymm7[4,5],ymm11[6],ymm7[7,8,9,10],ymm11[11],ymm7[12,13],ymm11[14],ymm7[15]
11360 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,2,3,3]
11361 ; AVX2-FAST-PERLANE-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
11362 ; AVX2-FAST-PERLANE-NEXT: # ymm11 = mem[0,1,1,3,4,5,5,7]
11363 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,2,2,3]
11364 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm7, %ymm11, %ymm7
11365 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = [255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255]
11366 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm1 # 32-byte Folded Reload
11367 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11368 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm1 # 32-byte Folded Reload
11369 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11370 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm0, %ymm7, %ymm0
11371 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11372 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
11373 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
11374 ; AVX2-FAST-PERLANE-NEXT: # ymm10 = mem[0,1,0,1]
11375 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm7, %ymm7
11376 ; AVX2-FAST-PERLANE-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
11377 ; AVX2-FAST-PERLANE-NEXT: # ymm8 = mem[2,2,2,2,6,6,6,6]
11378 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm7 = ymm7[0,1],ymm8[2],ymm7[3,4],ymm8[5],ymm7[6,7,8,9],ymm8[10],ymm7[11,12],ymm8[13],ymm7[14,15]
11379 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u>
11380 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11381 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm1, %ymm8
11382 ; AVX2-FAST-PERLANE-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Folded Reload
11383 ; AVX2-FAST-PERLANE-NEXT: # ymm9 = mem[2,2,2,2,6,6,6,6]
11384 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2,3],ymm8[4],ymm9[5,6,7,8],ymm8[9],ymm9[10,11],ymm8[12],ymm9[13,14,15]
11385 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
11386 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,2,2,3]
11387 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm1 = <255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u,u,u,u,u,0,0,0,0,255,255,255,255,u,u>
11388 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm7, %ymm8, %ymm7
11389 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11390 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
11391 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm7, %ymm8
11392 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm10, %ymm11
11393 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
11394 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm9 = ymm7[2,2,2,2,6,6,6,6]
11395 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm8 = ymm8[0,1],ymm9[2],ymm8[3,4],ymm9[5],ymm8[6,7,8,9],ymm9[10],ymm8[11,12],ymm9[13],ymm8[14,15]
11396 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
11397 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm9, %ymm9
11398 ; AVX2-FAST-PERLANE-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Folded Reload
11399 ; AVX2-FAST-PERLANE-NEXT: # ymm10 = mem[2,2,2,2,6,6,6,6]
11400 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm9 = ymm10[0],ymm9[1],ymm10[2,3],ymm9[4],ymm10[5,6,7,8],ymm9[9],ymm10[10,11],ymm9[12],ymm10[13,14,15]
11401 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
11402 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
11403 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm8, %ymm9, %ymm8
11404 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm2, %ymm9
11405 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm10 = ymm4[2,2,2,2,6,6,6,6]
11406 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm9 = ymm9[0,1],ymm10[2],ymm9[3,4],ymm10[5],ymm9[6,7,8,9],ymm10[10],ymm9[11,12],ymm10[13],ymm9[14,15]
11407 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm12, %ymm10
11408 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm12, %ymm0
11409 ; AVX2-FAST-PERLANE-NEXT: vpshufd $170, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
11410 ; AVX2-FAST-PERLANE-NEXT: # ymm11 = mem[2,2,2,2,6,6,6,6]
11411 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm10 = ymm11[0],ymm10[1],ymm11[2,3],ymm10[4],ymm11[5,6,7,8],ymm10[9],ymm11[10,11],ymm10[12],ymm11[13,14,15]
11412 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
11413 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,2,2,3]
11414 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm9, %ymm10, %ymm9
11415 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,u,u,u,u,8,9,u,u,u,u,u,u,u,u,22,23,u,u,u,u,24,25,u,u,u,u>
11416 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm6, %ymm11
11417 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm12 = ymm14[1,2,2,3,5,6,6,7]
11418 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm11 = ymm12[0,1],ymm11[2],ymm12[3,4],ymm11[5],ymm12[6,7,8,9],ymm11[10],ymm12[11,12],ymm11[13],ymm12[14,15]
11419 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,1,3,2]
11420 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm12 = ymm13[0,1,2,2,4,5,6,6]
11421 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,1,3,3]
11422 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255>
11423 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
11424 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
11425 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm4, %ymm12
11426 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm14 = ymm5[1,2,2,3,5,6,6,7]
11427 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm12 = ymm14[0,1],ymm12[2],ymm14[3,4],ymm12[5],ymm14[6,7,8,9],ymm12[10],ymm14[11,12],ymm12[13],ymm14[14,15]
11428 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,1,3,2]
11429 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
11430 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm14 = ymm5[0,1,2,2,4,5,6,6]
11431 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,3,3]
11432 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm12, %ymm14, %ymm12
11433 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm15, %ymm10
11434 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm14 = ymm3[1,2,2,3,5,6,6,7]
11435 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm10 = ymm14[0,1],ymm10[2],ymm14[3,4],ymm10[5],ymm14[6,7,8,9],ymm10[10],ymm14[11,12],ymm10[13],ymm14[14,15]
11436 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,1,3,2]
11437 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
11438 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm14 = ymm2[0,1,2,2,4,5,6,6]
11439 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,1,3,3]
11440 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm10, %ymm14, %ymm10
11441 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = [255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0]
11442 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm1 # 32-byte Folded Reload
11443 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11444 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm8, %ymm12, %ymm8
11445 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm9, %ymm10, %ymm9
11446 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
11447 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11448 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm14, %ymm1, %ymm10
11449 ; AVX2-FAST-PERLANE-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
11450 ; AVX2-FAST-PERLANE-NEXT: # ymm11 = mem[3,3,3,3,7,7,7,7]
11451 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm10 = ymm11[0,1,2],ymm10[3],ymm11[4,5],ymm10[6],ymm11[7,8,9,10],ymm10[11],ymm11[12,13],ymm10[14],ymm11[15]
11452 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11453 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = <14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u>
11454 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm13, %ymm1, %ymm11
11455 ; AVX2-FAST-PERLANE-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
11456 ; AVX2-FAST-PERLANE-NEXT: # ymm12 = mem[3,3,3,3,7,7,7,7]
11457 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm11 = ymm12[0],ymm11[1],ymm12[2,3],ymm11[4],ymm12[5,6,7,8],ymm11[9],ymm12[10,11],ymm11[12],ymm12[13,14,15]
11458 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,1,3,3]
11459 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,2,2,3]
11460 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u,255,255,255,255,0,0,0,0,u,u,u,u,u,u>
11461 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm10, %ymm11, %ymm10
11462 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11463 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm14, %ymm1, %ymm11
11464 ; AVX2-FAST-PERLANE-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
11465 ; AVX2-FAST-PERLANE-NEXT: # ymm12 = mem[3,3,3,3,7,7,7,7]
11466 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm11 = ymm12[0,1,2],ymm11[3],ymm12[4,5],ymm11[6],ymm12[7,8,9,10],ymm11[11],ymm12[12,13],ymm11[14],ymm12[15]
11467 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
11468 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm13, %ymm1, %ymm12
11469 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm13, %ymm1
11470 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm13 = ymm7[3,3,3,3,7,7,7,7]
11471 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm12 = ymm13[0],ymm12[1],ymm13[2,3],ymm12[4],ymm13[5,6,7,8],ymm12[9],ymm13[10,11],ymm12[12],ymm13[13,14,15]
11472 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,1,3,3]
11473 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,2,2,3]
11474 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm11, %ymm12, %ymm11
11475 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm14, %ymm0, %ymm12
11476 ; AVX2-FAST-PERLANE-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Folded Reload
11477 ; AVX2-FAST-PERLANE-NEXT: # ymm13 = mem[3,3,3,3,7,7,7,7]
11478 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm12 = ymm13[0,1,2],ymm12[3],ymm13[4,5],ymm12[6],ymm13[7,8,9,10],ymm12[11],ymm13[12,13],ymm12[14],ymm13[15]
11479 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11480 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm0, %ymm13
11481 ; AVX2-FAST-PERLANE-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Folded Reload
11482 ; AVX2-FAST-PERLANE-NEXT: # ymm14 = mem[3,3,3,3,7,7,7,7]
11483 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm13 = ymm14[0],ymm13[1],ymm14[2,3],ymm13[4],ymm14[5,6,7,8],ymm13[9],ymm14[10,11],ymm13[12],ymm14[13,14,15]
11484 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,1,3,3]
11485 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,2,2,3]
11486 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm12, %ymm13, %ymm12
11487 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm13 = [26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
11488 ; AVX2-FAST-PERLANE-NEXT: # ymm13 = mem[0,1,0,1]
11489 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm13, %ymm6, %ymm14
11490 ; AVX2-FAST-PERLANE-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
11491 ; AVX2-FAST-PERLANE-NEXT: # ymm15 = mem[3,3,3,3,7,7,7,7]
11492 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm14 = ymm14[0,1],ymm15[2],ymm14[3,4],ymm15[5],ymm14[6,7,8,9],ymm15[10],ymm14[11,12],ymm15[13],ymm14[14,15]
11493 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
11494 ; AVX2-FAST-PERLANE-NEXT: vpshufd $254, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
11495 ; AVX2-FAST-PERLANE-NEXT: # ymm15 = mem[2,3,3,3,6,7,7,7]
11496 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,3,2]
11497 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0,u,u,u,u,u,u,u,u,255,255,255,255,0,0>
11498 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm14, %ymm15, %ymm14
11499 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm13, %ymm4, %ymm15
11500 ; AVX2-FAST-PERLANE-NEXT: vpshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Folded Reload
11501 ; AVX2-FAST-PERLANE-NEXT: # ymm1 = mem[3,3,3,3,7,7,7,7]
11502 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm15[0,1],ymm1[2],ymm15[3,4],ymm1[5],ymm15[6,7,8,9],ymm1[10],ymm15[11,12],ymm1[13],ymm15[14,15]
11503 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
11504 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm15 = ymm5[2,3,3,3,6,7,7,7]
11505 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,3,2]
11506 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm1, %ymm15, %ymm1
11507 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
11508 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm13, %ymm4, %ymm13
11509 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm15 = ymm3[3,3,3,3,7,7,7,7]
11510 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm13 = ymm13[0,1],ymm15[2],ymm13[3,4],ymm15[5],ymm13[6,7,8,9],ymm15[10],ymm13[11,12],ymm15[13],ymm13[14,15]
11511 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,2,2,3]
11512 ; AVX2-FAST-PERLANE-NEXT: vpshufd {{.*#+}} ymm15 = ymm2[2,3,3,3,6,7,7,7]
11513 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,1,3,2]
11514 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm13, %ymm15, %ymm0
11515 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = [0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0,255,255,255,255,255,255,255,255,0,0,0,0,0,0]
11516 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm10, %ymm14, %ymm10
11517 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm11, %ymm1, %ymm1
11518 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm12, %ymm0, %ymm0
11519 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
11520 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
11521 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm2, 544(%rax)
11522 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
11523 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm2, 320(%rax)
11524 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
11525 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm2, 96(%rax)
11526 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 640(%rax)
11527 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm9, 608(%rax)
11528 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11529 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 576(%rax)
11530 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 416(%rax)
11531 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm8, 384(%rax)
11532 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11533 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 352(%rax)
11534 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm10, 192(%rax)
11535 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11536 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 160(%rax)
11537 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11538 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 128(%rax)
11539 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11540 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 768(%rax)
11541 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11542 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 736(%rax)
11543 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11544 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 704(%rax)
11545 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11546 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 672(%rax)
11547 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11548 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 512(%rax)
11549 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11550 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 480(%rax)
11551 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11552 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 448(%rax)
11553 ; AVX2-FAST-PERLANE-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
11554 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 288(%rax)
11555 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11556 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 256(%rax)
11557 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11558 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 224(%rax)
11559 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11560 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 64(%rax)
11561 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11562 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 32(%rax)
11563 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11564 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, (%rax)
11565 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11566 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 864(%rax)
11567 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11568 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 832(%rax)
11569 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11570 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 800(%rax)
11571 ; AVX2-FAST-PERLANE-NEXT: addq $1544, %rsp # imm = 0x608
11572 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
11573 ; AVX2-FAST-PERLANE-NEXT: retq
11575 ; AVX512F-ONLY-SLOW-LABEL: store_i16_stride7_vf64:
11576 ; AVX512F-ONLY-SLOW: # %bb.0:
11577 ; AVX512F-ONLY-SLOW-NEXT: subq $2168, %rsp # imm = 0x878
11578 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rcx), %ymm2
11579 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rdx), %ymm6
11580 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rdi), %ymm7
11581 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rsi), %ymm9
11582 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
11583 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm0, %ymm2, %ymm1
11584 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm16
11585 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
11586 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm11, %ymm6, %ymm2
11587 ; AVX512F-ONLY-SLOW-NEXT: vporq %ymm1, %ymm2, %ymm18
11588 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm14 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
11589 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm14, %ymm9, %ymm1
11590 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
11591 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm13, %ymm7, %ymm2
11592 ; AVX512F-ONLY-SLOW-NEXT: vporq %ymm1, %ymm2, %ymm17
11593 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
11594 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r9), %ymm2
11595 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm3, %ymm2, %ymm1
11596 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %ymm3, %ymm8
11597 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm21
11598 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r8), %ymm3
11599 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
11600 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm15, %ymm3, %ymm2
11601 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm3, %ymm23
11602 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
11603 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11604 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rcx), %ymm10
11605 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm0, %ymm10, %ymm1
11606 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %ymm4
11607 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm11, %ymm4, %ymm2
11608 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
11609 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11610 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rsi), %ymm5
11611 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm14, %ymm5, %ymm1
11612 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdi), %ymm3
11613 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm13, %ymm3, %ymm2
11614 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
11615 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11616 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm1
11617 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11618 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm8, %ymm1, %ymm1
11619 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %ymm8, %ymm12
11620 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm2
11621 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11622 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm15, %ymm2, %ymm2
11623 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
11624 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11625 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %ymm1
11626 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11627 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm0, %ymm1, %ymm1
11628 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %ymm2
11629 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11630 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm11, %ymm2, %ymm2
11631 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
11632 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11633 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %ymm2
11634 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm14, %ymm2, %ymm1
11635 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm24
11636 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm8
11637 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm13, %ymm8, %ymm2
11638 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm8, %ymm19
11639 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
11640 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11641 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rcx), %ymm8
11642 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm0, %ymm8, %ymm0
11643 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdx), %ymm2
11644 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm11, %ymm2, %ymm1
11645 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
11646 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11647 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rsi), %ymm1
11648 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm14, %ymm1, %ymm0
11649 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdi), %ymm11
11650 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm13, %ymm11, %ymm13
11651 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm0, %ymm13, %ymm0
11652 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11653 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r8), %ymm13
11654 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm15, %ymm13, %ymm14
11655 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r9), %ymm0
11656 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm12, %ymm0, %ymm15
11657 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm12, %ymm20
11658 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm15, %ymm14, %ymm12
11659 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11660 ; AVX512F-ONLY-SLOW-NEXT: vprold $16, %ymm0, %ymm14
11661 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm13[1,2,2,3,5,6,6,7]
11662 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm14 = ymm15[0,1],ymm14[2],ymm15[3,4],ymm14[5],ymm15[6,7,8,9],ymm14[10],ymm15[11,12],ymm14[13],ymm15[14,15]
11663 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm15 = ymm0[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
11664 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm15[2,2,2,3,6,6,6,7]
11665 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm13[3,3,3,3,7,7,7,7]
11666 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm15[0,1],ymm12[2],ymm15[3,4],ymm12[5],ymm15[6,7,8,9],ymm12[10],ymm15[11,12],ymm12[13],ymm15[14,15]
11667 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm15 = [2,1,3,2,10,10,10,11]
11668 ; AVX512F-ONLY-SLOW-NEXT: vpermi2q %zmm12, %zmm14, %zmm15
11669 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11670 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm14 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
11671 ; AVX512F-ONLY-SLOW-NEXT: # ymm14 = mem[0,1,0,1]
11672 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm16, %ymm15
11673 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm14, %ymm15, %ymm12
11674 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm14, %ymm16
11675 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm6[2,2,2,2,6,6,6,6]
11676 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0,1],ymm14[2],ymm12[3,4],ymm14[5],ymm12[6,7,8,9],ymm14[10],ymm12[11,12],ymm14[13],ymm12[14,15]
11677 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11678 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm12 = ymm15[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
11679 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
11680 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm6[3,3,3,3,7,7,7,7]
11681 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm14[0],ymm12[1],ymm14[2,3],ymm12[4],ymm14[5,6,7,8],ymm12[9],ymm14[10,11],ymm12[12],ymm14[13,14,15]
11682 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11683 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm12 = ymm9[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
11684 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
11685 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm7[2,2,2,2,6,6,6,6]
11686 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm14[0],ymm12[1],ymm14[2,3],ymm12[4],ymm14[5,6,7,8],ymm12[9],ymm14[10,11],ymm12[12],ymm14[13,14,15]
11687 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm12, %ymm25
11688 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm12 = ymm9[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
11689 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
11690 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm7[3,3,3,3,7,7,7,7]
11691 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm14[0,1,2],ymm12[3],ymm14[4,5],ymm12[6],ymm14[7,8,9,10],ymm12[11],ymm14[12,13],ymm12[14],ymm14[15]
11692 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11693 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[0,1,1,3,4,5,5,7]
11694 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm15[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
11695 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
11696 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1],ymm12[2],ymm6[3,4],ymm12[5],ymm6[6,7,8,9],ymm12[10],ymm6[11,12],ymm12[13],ymm6[14,15]
11697 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,2]
11698 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[1,1,1,1,5,5,5,5]
11699 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm9 = ymm9[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
11700 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[0,0,2,1,4,4,6,5]
11701 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm7 = ymm9[0,1],ymm7[2],ymm9[3,4],ymm7[5],ymm9[6,7,8,9],ymm7[10],ymm9[11,12],ymm7[13],ymm9[14,15]
11702 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
11703 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm18, %zmm6
11704 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm17, %zmm7
11705 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm7
11706 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
11707 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%r8), %ymm12
11708 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11709 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm12[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm12[14,15],zero,zero,ymm12[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm12[16,17],zero,zero,ymm12[u,u],zero,zero
11710 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $248, %ymm9, %ymm7, %ymm6
11711 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%r9), %ymm15
11712 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11713 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm20, %ymm14
11714 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm14, %ymm15, %ymm14
11715 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm14
11716 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm7, %ymm6
11717 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm12[0,0,2,1,4,4,6,5]
11718 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,3,3]
11719 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm7
11720 ; AVX512F-ONLY-SLOW-NEXT: vprold $16, %ymm15, %ymm6
11721 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,2]
11722 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm7, %ymm6
11723 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm0, %zmm6
11724 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm6 = zmm14[0,1,2,3],zmm6[4,5,6,7]
11725 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11726 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
11727 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm18 = [6,5,0,0,7,6,0,7,6,5,0,0,7,6,0,7]
11728 ; AVX512F-ONLY-SLOW-NEXT: # zmm18 = mem[0,1,2,3,0,1,2,3]
11729 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rax), %ymm6
11730 ; AVX512F-ONLY-SLOW-NEXT: vpermd %zmm6, %zmm18, %zmm12
11731 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11732 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm6[0,1,1,3,4,5,5,7]
11733 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
11734 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm15, %ymm6, %ymm6
11735 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,2,2,3]
11736 ; AVX512F-ONLY-SLOW-NEXT: vpandn %ymm14, %ymm9, %ymm14
11737 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm6, %zmm6
11738 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11739 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm17 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
11740 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 72(%rax), %ymm6
11741 ; AVX512F-ONLY-SLOW-NEXT: vpandnq %ymm6, %ymm17, %ymm14
11742 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rax), %ymm6
11743 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm15, %ymm6, %ymm12
11744 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm14, %zmm12
11745 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11746 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 8(%rax), %ymm12
11747 ; AVX512F-ONLY-SLOW-NEXT: vpandnq %ymm12, %ymm17, %ymm12
11748 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rax), %ymm7
11749 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm15, %ymm7, %ymm14
11750 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm7, %ymm22
11751 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm12, %zmm12
11752 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11753 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm8[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
11754 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
11755 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm2[0,1,1,3,4,5,5,7]
11756 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm14[0,1],ymm12[2],ymm14[3,4],ymm12[5],ymm14[6,7,8,9],ymm12[10],ymm14[11,12],ymm12[13],ymm14[14,15]
11757 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11758 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm1[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
11759 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[0,0,2,1,4,4,6,5]
11760 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm11[1,1,1,1,5,5,5,5]
11761 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0,1],ymm14[2],ymm12[3,4],ymm14[5],ymm12[6,7,8,9],ymm14[10],ymm12[11,12],ymm14[13],ymm12[14,15]
11762 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11763 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rax), %ymm12
11764 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm12[0,1,1,3,4,5,5,7]
11765 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,2,2,3]
11766 ; AVX512F-ONLY-SLOW-NEXT: vpandn %ymm14, %ymm9, %ymm9
11767 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm15, %ymm12, %ymm14
11768 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm14, %zmm9
11769 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11770 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm13[0,0,2,1,4,4,6,5]
11771 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
11772 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,0,0,4,4,4,4]
11773 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm9[3],ymm0[4,5],ymm9[6],ymm0[7,8,9,10],ymm9[11],ymm0[12,13],ymm9[14],ymm0[15]
11774 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11775 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm16, %ymm7
11776 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm7, %ymm8, %ymm0
11777 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm2[2,2,2,2,6,6,6,6]
11778 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm9[2],ymm0[3,4],ymm9[5],ymm0[6,7,8,9],ymm9[10],ymm0[11,12],ymm9[13],ymm0[14,15]
11779 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11780 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm2[3,3,3,3,7,7,7,7]
11781 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm8[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
11782 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
11783 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3],ymm2[4],ymm0[5,6,7,8],ymm2[9],ymm0[10,11],ymm2[12],ymm0[13,14,15]
11784 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11785 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm1[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
11786 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
11787 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm11[2,2,2,2,6,6,6,6]
11788 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3],ymm0[4],ymm2[5,6,7,8],ymm0[9],ymm2[10,11],ymm0[12],ymm2[13,14,15]
11789 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm0, %ymm31
11790 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm11[3,3,3,3,7,7,7,7]
11791 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm1[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
11792 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
11793 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7,8,9,10],ymm1[11],ymm0[12,13],ymm1[14],ymm0[15]
11794 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11795 ; AVX512F-ONLY-SLOW-NEXT: vpermd %zmm12, %zmm18, %zmm0
11796 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11797 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm5[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
11798 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
11799 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[1,1,1,1,5,5,5,5]
11800 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
11801 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11802 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm5[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
11803 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
11804 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[2,2,2,2,6,6,6,6]
11805 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
11806 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11807 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm10[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
11808 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,0,0,4,4,4,4]
11809 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm4[0,1,1,3,4,5,5,7]
11810 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
11811 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11812 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %ymm7, %ymm10, %ymm0
11813 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm4[2,2,2,2,6,6,6,6]
11814 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
11815 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11816 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm21, %ymm9
11817 ; AVX512F-ONLY-SLOW-NEXT: vprold $16, %ymm21, %ymm0
11818 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm23[1,2,2,3,5,6,6,7]
11819 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
11820 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm9[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
11821 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,0,0,4,4,4,4]
11822 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm23[0,0,2,1,4,4,6,5]
11823 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7,8,9,10],ymm2[11],ymm1[12,13],ymm2[14],ymm1[15]
11824 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = [2,2,3,3,10,9,11,10]
11825 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm0, %zmm7, %zmm1
11826 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm15 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
11827 ; AVX512F-ONLY-SLOW-NEXT: # zmm15 = mem[0,1,2,3,0,1,2,3]
11828 ; AVX512F-ONLY-SLOW-NEXT: vpermd 64(%rax), %zmm15, %zmm0
11829 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
11830 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm14, %zmm0
11831 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11832 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm4[3,3,3,3,7,7,7,7]
11833 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm10[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
11834 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
11835 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6,7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13,14,15]
11836 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11837 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm3[3,3,3,3,7,7,7,7]
11838 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm5[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
11839 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
11840 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7,8,9,10],ymm1[11],ymm0[12,13],ymm1[14],ymm0[15]
11841 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11842 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm23[3,3,3,3,7,7,7,7]
11843 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm9[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
11844 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,3,6,6,6,7]
11845 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
11846 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm11 = [4,5,4,5,4,5,6,7,16,17,16,17,16,17,17,19]
11847 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%r9), %xmm0
11848 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%r8), %xmm2
11849 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
11850 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm3[0,1,3,2,4,5,6,7]
11851 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm1, %zmm11, %zmm4
11852 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm6[2,3,3,3,6,7,7,7]
11853 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
11854 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 96(%rax), %ymm5
11855 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm1, %zmm5
11856 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
11857 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm4, %zmm1, %zmm5
11858 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11859 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rsi), %xmm4
11860 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rdi), %xmm5
11861 ; AVX512F-ONLY-SLOW-NEXT: vprold $16, %xmm4, %xmm6
11862 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm5[1,1,2,3]
11863 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm6 = xmm9[0,1],xmm6[2],xmm9[3,4],xmm6[5],xmm9[6,7]
11864 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11865 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
11866 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm6, %xmm23
11867 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
11868 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11869 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rcx), %xmm4
11870 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 96(%rdx), %xmm5
11871 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm13 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
11872 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm13, %xmm4, %xmm6
11873 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm5[1,1,2,2]
11874 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm6 = xmm9[0],xmm6[1],xmm9[2,3],xmm6[4],xmm9[5,6],xmm6[7]
11875 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11876 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
11877 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11878 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
11879 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11880 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
11881 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,7,6]
11882 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
11883 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm2
11884 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm30 = <16,18,19,19,19,19,u,u,0,1,0,1,2,3,2,3>
11885 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm3, %zmm30, %zmm2
11886 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 100(%rax), %ymm3
11887 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 104(%rax), %ymm4
11888 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
11889 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
11890 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm28, %zmm3
11891 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11892 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rcx), %xmm2
11893 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdx), %xmm3
11894 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
11895 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm4, %xmm29
11896 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
11897 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm13, %xmm2, %xmm2
11898 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[1,1,2,2]
11899 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2,3],xmm2[4],xmm3[5,6],xmm2[7]
11900 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm4[0,1,3,2,4,5,6,7]
11901 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <16,16,17,17,17,17,u,u,0,1,0,1,2,3,2,3>
11902 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm3, %zmm5, %zmm2
11903 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rdi), %xmm3
11904 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%rsi), %xmm4
11905 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
11906 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm6
11907 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,1,1]
11908 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
11909 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm9, %xmm27
11910 ; AVX512F-ONLY-SLOW-NEXT: vprold $16, %xmm4, %xmm4
11911 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[1,1,2,3]
11912 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm4[2],xmm3[3,4],xmm4[5],xmm3[6,7]
11913 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
11914 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm6, %zmm4
11915 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0]
11916 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm2, %zmm3, %zmm4
11917 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11918 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r9), %xmm2
11919 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 64(%r8), %xmm4
11920 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
11921 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
11922 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm2[0,1,2,3,4,5,7,6]
11923 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,2,4,5,6,7]
11924 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
11925 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm4, %zmm6, %zmm2
11926 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 64(%rax), %ymm4
11927 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 68(%rax), %ymm9
11928 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm4, %zmm4
11929 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
11930 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm9, %zmm4
11931 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11932 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %xmm2
11933 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm10
11934 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm10[4],xmm2[4],xmm10[5],xmm2[5],xmm10[6],xmm2[6],xmm10[7],xmm2[7]
11935 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm4, %xmm26
11936 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm10[0],xmm2[0],xmm10[1],xmm2[1],xmm10[2],xmm2[2],xmm10[3],xmm2[3]
11937 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm13, %xmm2, %xmm2
11938 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[1,1,2,2]
11939 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm2 = xmm10[0],xmm2[1],xmm10[2,3],xmm2[4],xmm10[5,6],xmm2[7]
11940 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm10 = xmm12[0,1,3,2,4,5,6,7]
11941 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm10, %zmm5, %zmm2
11942 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm5
11943 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm10
11944 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm5[0],xmm10[0],xmm5[1],xmm10[1],xmm5[2],xmm10[2],xmm5[3],xmm10[3]
11945 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm12
11946 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,1,1]
11947 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm10[4],xmm5[4],xmm10[5],xmm5[5],xmm10[6],xmm5[6],xmm10[7],xmm5[7]
11948 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm4, %xmm21
11949 ; AVX512F-ONLY-SLOW-NEXT: vprold $16, %xmm10, %xmm10
11950 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[1,1,2,3]
11951 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1],xmm10[2],xmm5[3,4],xmm10[5],xmm5[6,7]
11952 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,2,1]
11953 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm12, %zmm4
11954 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm2, %zmm3, %zmm4
11955 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11956 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %xmm2
11957 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %xmm3
11958 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
11959 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
11960 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm2[0,1,2,3,4,5,7,6]
11961 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,2,4,5,6,7]
11962 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm3, %zmm6, %zmm2
11963 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd (%rax), %ymm3
11964 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 4(%rax), %ymm5
11965 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm3, %zmm3
11966 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm9, %zmm3
11967 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11968 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm24, %ymm10
11969 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm10[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
11970 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,2,1,4,4,6,5]
11971 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm19[1,1,1,1,5,5,5,5]
11972 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm5[2],ymm2[3,4],ymm5[5],ymm2[6,7,8,9],ymm5[10],ymm2[11,12],ymm5[13],ymm2[14,15]
11973 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm20
11974 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
11975 ; AVX512F-ONLY-SLOW-NEXT: vprold $16, %ymm3, %ymm2
11976 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
11977 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm9[1,2,2,3,5,6,6,7]
11978 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm2[2],ymm5[3,4],ymm2[5],ymm5[6,7,8,9],ymm2[10],ymm5[11,12],ymm2[13],ymm5[14,15]
11979 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm3[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
11980 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,0,0,0,4,4,4,4]
11981 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm9[0,0,2,1,4,4,6,5]
11982 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0,1,2],ymm6[3],ymm5[4,5],ymm6[6],ymm5[7,8,9,10],ymm6[11],ymm5[12,13],ymm6[14],ymm5[15]
11983 ; AVX512F-ONLY-SLOW-NEXT: vpermt2q %zmm2, %zmm7, %zmm5
11984 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm10[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
11985 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
11986 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm19[2,2,2,2,6,6,6,6]
11987 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm6[0],ymm2[1],ymm6[2,3],ymm2[4],ymm6[5,6,7,8],ymm2[9],ymm6[10,11],ymm2[12],ymm6[13,14,15]
11988 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm16
11989 ; AVX512F-ONLY-SLOW-NEXT: vpermd (%rax), %zmm15, %zmm2
11990 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm5, %zmm14, %zmm2
11991 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11992 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
11993 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm6[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
11994 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,0,0,4,4,4,4]
11995 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
11996 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm7[0,1,1,3,4,5,5,7]
11997 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm2[2],ymm5[3,4],ymm2[5],ymm5[6,7,8,9],ymm2[10],ymm5[11,12],ymm2[13],ymm5[14,15]
11998 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm17
11999 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
12000 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm7[2,2,2,2,6,6,6,6]
12001 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm5[2],ymm2[3,4],ymm5[5],ymm2[6,7,8,9],ymm5[10],ymm2[11,12],ymm5[13],ymm2[14,15]
12002 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm18
12003 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm7[3,3,3,3,7,7,7,7]
12004 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm5 = ymm6[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
12005 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[2,2,2,2,6,6,6,6]
12006 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm5[1],ymm2[2,3],ymm5[4],ymm2[5,6,7,8],ymm5[9],ymm2[10,11],ymm5[12],ymm2[13,14,15]
12007 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12008 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm19[3,3,3,3,7,7,7,7]
12009 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm5 = ymm10[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
12010 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[2,2,2,2,6,6,6,6]
12011 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm5[3],ymm2[4,5],ymm5[6],ymm2[7,8,9,10],ymm5[11],ymm2[12,13],ymm5[14],ymm2[15]
12012 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12013 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm9[3,3,3,3,7,7,7,7]
12014 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm5 = ymm3[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
12015 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[2,2,2,3,6,6,6,7]
12016 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm2[2],ymm5[3,4],ymm2[5],ymm5[6,7,8,9],ymm2[10],ymm5[11,12],ymm2[13],ymm5[14,15]
12017 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r9), %xmm6
12018 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%r8), %xmm14
12019 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
12020 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm12 = xmm5[0,1,3,2,4,5,6,7]
12021 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm12, %zmm11, %zmm2
12022 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm22[2,3,3,3,6,7,7,7]
12023 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,1,3,2]
12024 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 32(%rax), %ymm12
12025 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm22
12026 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm1, %zmm22
12027 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdi), %xmm1
12028 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rsi), %xmm2
12029 ; AVX512F-ONLY-SLOW-NEXT: vprold $16, %xmm2, %xmm12
12030 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm1[1,1,2,3]
12031 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm3 = xmm15[0,1],xmm12[2],xmm15[3,4],xmm12[5],xmm15[6,7]
12032 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12033 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
12034 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
12035 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12036 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm14[4],xmm6[4],xmm14[5],xmm6[5],xmm14[6],xmm6[6],xmm14[7],xmm6[7]
12037 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm0, %xmm8, %xmm2
12038 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm3
12039 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm23, %xmm4
12040 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
12041 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12042 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm4
12043 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12044 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm0
12045 ; AVX512F-ONLY-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm24 # 32-byte Folded Reload
12046 ; AVX512F-ONLY-SLOW-NEXT: # ymm24 = mem[2,2,2,3]
12047 ; AVX512F-ONLY-SLOW-NEXT: vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm19 # 32-byte Folded Reload
12048 ; AVX512F-ONLY-SLOW-NEXT: # ymm19 = mem[0,2,2,3]
12049 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm25 = ymm25[0,2,2,3]
12050 ; AVX512F-ONLY-SLOW-NEXT: vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
12051 ; AVX512F-ONLY-SLOW-NEXT: # ymm8 = mem[2,1,3,3]
12052 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm5[0,1,2,3,4,5,7,6]
12053 ; AVX512F-ONLY-SLOW-NEXT: vpermt2d %zmm1, %zmm30, %zmm0
12054 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 36(%rax), %ymm1
12055 ; AVX512F-ONLY-SLOW-NEXT: vpbroadcastd 40(%rax), %ymm5
12056 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm1, %zmm30
12057 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm28, %zmm30
12058 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rcx), %xmm0
12059 ; AVX512F-ONLY-SLOW-NEXT: vpshufb %xmm13, %xmm0, %xmm5
12060 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 32(%rdx), %xmm14
12061 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm14[1,1,2,2]
12062 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} xmm1 = xmm15[0],xmm5[1],xmm15[2,3],xmm5[4],xmm15[5,6],xmm5[7]
12063 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12064 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3]
12065 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12066 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm14[4],xmm0[4],xmm14[5],xmm0[5],xmm14[6],xmm0[6],xmm14[7],xmm0[7]
12067 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12068 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
12069 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm4[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
12070 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm0[3,3,3,3]
12071 ; AVX512F-ONLY-SLOW-NEXT: vpshufd $233, {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Folded Reload
12072 ; AVX512F-ONLY-SLOW-NEXT: # ymm23 = mem[1,2,2,3,5,6,6,7]
12073 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm13 = ymm4[2,1,2,3,6,5,6,7]
12074 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm13 = ymm13[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
12075 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm28 = ymm13[2,2,2,2]
12076 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm2[0,0,1,1]
12077 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm29, %xmm0
12078 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm0[0,2,3,3,4,5,6,7]
12079 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm2[0,0,2,1]
12080 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm27, %xmm0
12081 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm0[2,1,2,3,4,5,6,7]
12082 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,5,4]
12083 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,1,3]
12084 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm3[0,0,1,1]
12085 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm26, %xmm0
12086 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm0[0,2,3,3,4,5,6,7]
12087 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm3[0,0,2,1]
12088 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm21, %xmm0
12089 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm0[2,1,2,3,4,5,6,7]
12090 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,5,4]
12091 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm5[0,0,1,3]
12092 ; AVX512F-ONLY-SLOW-NEXT: vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
12093 ; AVX512F-ONLY-SLOW-NEXT: # ymm5 = mem[2,1,3,2]
12094 ; AVX512F-ONLY-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Folded Reload
12095 ; AVX512F-ONLY-SLOW-NEXT: # ymm10 = mem[2,2,2,3]
12096 ; AVX512F-ONLY-SLOW-NEXT: vpermq $250, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
12097 ; AVX512F-ONLY-SLOW-NEXT: # ymm11 = mem[2,2,3,3]
12098 ; AVX512F-ONLY-SLOW-NEXT: vpermpd $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
12099 ; AVX512F-ONLY-SLOW-NEXT: # ymm0 = mem[2,2,2,3]
12100 ; AVX512F-ONLY-SLOW-NEXT: vmovups %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12101 ; AVX512F-ONLY-SLOW-NEXT: vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm29 # 32-byte Folded Reload
12102 ; AVX512F-ONLY-SLOW-NEXT: # ymm29 = mem[0,2,2,3]
12103 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm31 = ymm31[0,2,2,3]
12104 ; AVX512F-ONLY-SLOW-NEXT: vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm26 # 32-byte Folded Reload
12105 ; AVX512F-ONLY-SLOW-NEXT: # ymm26 = mem[2,1,3,3]
12106 ; AVX512F-ONLY-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm21 # 32-byte Folded Reload
12107 ; AVX512F-ONLY-SLOW-NEXT: # ymm21 = mem[2,2,2,3]
12108 ; AVX512F-ONLY-SLOW-NEXT: vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
12109 ; AVX512F-ONLY-SLOW-NEXT: # ymm15 = mem[0,2,2,3]
12110 ; AVX512F-ONLY-SLOW-NEXT: vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
12111 ; AVX512F-ONLY-SLOW-NEXT: # ymm4 = mem[2,1,3,2]
12112 ; AVX512F-ONLY-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
12113 ; AVX512F-ONLY-SLOW-NEXT: # ymm12 = mem[2,2,2,3]
12114 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
12115 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm16 = ymm16[0,2,2,3]
12116 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm17 = ymm17[2,1,3,2]
12117 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm18[2,2,2,3]
12118 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm19, %zmm24, %zmm24
12119 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm25, %zmm0
12120 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm27 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
12121 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm24, %zmm27, %zmm0
12122 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm23[2,1,3,2]
12123 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm8
12124 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm8, %ymm28
12125 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm8 # 32-byte Folded Reload
12126 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm8 = zmm13[0,1,2,3],zmm8[4,5,6,7]
12127 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm13 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
12128 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13, %zmm8 # 64-byte Folded Reload
12129 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm23 # 32-byte Folded Reload
12130 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm2[0,1,2,3],zmm23[4,5,6,7]
12131 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13, %zmm23 # 64-byte Folded Reload
12132 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm13 # 32-byte Folded Reload
12133 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm9, %zmm7 # 32-byte Folded Reload
12134 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm24 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
12135 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm13, %zmm24, %zmm7
12136 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
12137 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm8, %zmm9, %zmm7
12138 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 32-byte Folded Reload
12139 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm8 # 32-byte Folded Reload
12140 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm6, %zmm24, %zmm8
12141 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm23, %zmm9, %zmm8
12142 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12143 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm2, %zmm3
12144 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12145 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm2, %zmm18
12146 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm18
12147 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
12148 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
12149 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm19 # 64-byte Folded Reload
12150 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm0, %zmm5
12151 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12152 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm2[0,1,2,3],zmm5[4,5,6,7]
12153 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm23 # 64-byte Folded Reload
12154 ; AVX512F-ONLY-SLOW-NEXT: vextracti64x4 $1, %zmm0, %ymm0
12155 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
12156 ; AVX512F-ONLY-SLOW-NEXT: vpternlogd $226, 124(%r8){1to8}, %ymm2, %ymm0
12157 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1
12158 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
12159 ; AVX512F-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm28[0,1,2,3],zmm0[4,5,6,7]
12160 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
12161 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm25 # 64-byte Reload
12162 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm1, %zmm25
12163 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
12164 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm28 # 64-byte Folded Reload
12165 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
12166 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
12167 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
12168 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
12169 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm9 # 64-byte Folded Reload
12170 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm21, %zmm0
12171 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm4, %zmm1
12172 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
12173 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm3, %zmm1
12174 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm20, %zmm0
12175 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm17, %zmm5
12176 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm3, %zmm5
12177 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0]
12178 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
12179 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm0, %zmm20
12180 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
12181 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm5, %zmm0, %zmm21
12182 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
12183 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm29, %zmm0, %zmm0
12184 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm26, %zmm31, %zmm1
12185 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm27, %zmm1
12186 ; AVX512F-ONLY-SLOW-NEXT: vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
12187 ; AVX512F-ONLY-SLOW-NEXT: # ymm0 = mem[0,2,2,3]
12188 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
12189 ; AVX512F-ONLY-SLOW-NEXT: # xmm3 = mem[0,1,3,2,4,5,6,7]
12190 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,0,1,1]
12191 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
12192 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
12193 ; AVX512F-ONLY-SLOW-NEXT: vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
12194 ; AVX512F-ONLY-SLOW-NEXT: # ymm3 = mem[2,1,3,3]
12195 ; AVX512F-ONLY-SLOW-NEXT: vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
12196 ; AVX512F-ONLY-SLOW-NEXT: # ymm5 = mem[0,0,1,1]
12197 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm3, %zmm3
12198 ; AVX512F-ONLY-SLOW-NEXT: vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
12199 ; AVX512F-ONLY-SLOW-NEXT: # ymm5 = mem[0,0,2,1]
12200 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Folded Reload
12201 ; AVX512F-ONLY-SLOW-NEXT: # xmm6 = mem[2,1,2,3,4,5,6,7]
12202 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,5,5,4]
12203 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,1,3]
12204 ; AVX512F-ONLY-SLOW-NEXT: vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm26 # 32-byte Folded Reload
12205 ; AVX512F-ONLY-SLOW-NEXT: # ymm26 = mem[0,0,1,1]
12206 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Folded Reload
12207 ; AVX512F-ONLY-SLOW-NEXT: # xmm10 = mem[0,2,3,3,4,5,6,7]
12208 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
12209 ; AVX512F-ONLY-SLOW-NEXT: vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
12210 ; AVX512F-ONLY-SLOW-NEXT: # ymm11 = mem[0,2,2,3]
12211 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
12212 ; AVX512F-ONLY-SLOW-NEXT: # xmm12 = mem[0,1,3,2,4,5,6,7]
12213 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[0,0,1,1]
12214 ; AVX512F-ONLY-SLOW-NEXT: vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Folded Reload
12215 ; AVX512F-ONLY-SLOW-NEXT: # ymm14 = mem[2,1,3,3]
12216 ; AVX512F-ONLY-SLOW-NEXT: vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
12217 ; AVX512F-ONLY-SLOW-NEXT: # ymm15 = mem[0,0,1,1]
12218 ; AVX512F-ONLY-SLOW-NEXT: vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Folded Reload
12219 ; AVX512F-ONLY-SLOW-NEXT: # ymm16 = mem[0,0,2,1]
12220 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
12221 ; AVX512F-ONLY-SLOW-NEXT: # xmm2 = mem[2,1,2,3,4,5,6,7]
12222 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,4]
12223 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,3]
12224 ; AVX512F-ONLY-SLOW-NEXT: vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Folded Reload
12225 ; AVX512F-ONLY-SLOW-NEXT: # ymm17 = mem[0,0,1,1]
12226 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
12227 ; AVX512F-ONLY-SLOW-NEXT: # xmm4 = mem[0,2,3,3,4,5,6,7]
12228 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
12229 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm27, %zmm3
12230 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm12[0,1,1,3]
12231 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm11, %zmm0
12232 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm14, %zmm11
12233 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm27, %zmm11
12234 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
12235 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
12236 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm3, %zmm0, %zmm12
12237 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm11, %zmm0, %zmm22
12238 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm5, %zmm0
12239 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm26, %zmm3
12240 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm24, %zmm3
12241 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm16, %zmm0
12242 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm17, %zmm2
12243 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm24, %zmm2
12244 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0]
12245 ; AVX512F-ONLY-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
12246 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm3, %zmm0, %zmm4
12247 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm0, %zmm30
12248 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm23
12249 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm28, %zmm0
12250 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm0
12251 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
12252 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm23, 320(%rax)
12253 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm30, 256(%rax)
12254 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm22, 192(%rax)
12255 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm21, 128(%rax)
12256 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, 64(%rax)
12257 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, (%rax)
12258 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm13, 448(%rax)
12259 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm4, 704(%rax)
12260 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm12, 640(%rax)
12261 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, 576(%rax)
12262 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 512(%rax)
12263 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 384(%rax)
12264 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm19, 768(%rax)
12265 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm25, 832(%rax)
12266 ; AVX512F-ONLY-SLOW-NEXT: addq $2168, %rsp # imm = 0x878
12267 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
12268 ; AVX512F-ONLY-SLOW-NEXT: retq
12269 ;
12270 ; AVX512F-ONLY-FAST-LABEL: store_i16_stride7_vf64:
12271 ; AVX512F-ONLY-FAST: # %bb.0:
12272 ; AVX512F-ONLY-FAST-NEXT: subq $1432, %rsp # imm = 0x598
12273 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rcx), %ymm4
12274 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
12275 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm11, %ymm4, %ymm0
12276 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rdx), %ymm5
12277 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
12278 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm5, %ymm1
12279 ; AVX512F-ONLY-FAST-NEXT: vporq %ymm0, %ymm1, %ymm16
12280 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rsi), %ymm6
12281 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
12282 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm6, %ymm0
12283 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rdi), %ymm7
12284 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
12285 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm8, %ymm7, %ymm1
12286 ; AVX512F-ONLY-FAST-NEXT: vporq %ymm0, %ymm1, %ymm19
12287 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
12288 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%r9), %ymm0
12289 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12290 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm10, %ymm0, %ymm0
12291 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%r8), %ymm1
12292 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12293 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
12294 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm12, %ymm1, %ymm1
12295 ; AVX512F-ONLY-FAST-NEXT: vporq %ymm0, %ymm1, %ymm22
12296 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rcx), %ymm0
12297 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12298 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm11, %ymm0, %ymm0
12299 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %ymm1
12300 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12301 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm1, %ymm1
12302 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
12303 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12304 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rsi), %ymm0
12305 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12306 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm0, %ymm13
12307 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdi), %ymm0
12308 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm9
12309 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm0, %ymm30
12310 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm13, %ymm9, %ymm9
12311 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12312 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm0
12313 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12314 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm10, %ymm0, %ymm9
12315 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm0
12316 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12317 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm12, %ymm0, %ymm13
12318 ; AVX512F-ONLY-FAST-NEXT: vporq %ymm9, %ymm13, %ymm17
12319 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %ymm0
12320 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12321 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm11, %ymm0, %ymm9
12322 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %ymm0
12323 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12324 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm0, %ymm13
12325 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm9, %ymm13, %ymm9
12326 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12327 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %ymm0
12328 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12329 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm0, %ymm9
12330 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm0
12331 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12332 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm13
12333 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm9, %ymm13, %ymm9
12334 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12335 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rcx), %ymm3
12336 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm11, %ymm3, %ymm9
12337 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdx), %ymm2
12338 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm2, %ymm11
12339 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm9, %ymm11, %ymm9
12340 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12341 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rsi), %ymm0
12342 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm0, %ymm9
12343 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdi), %ymm1
12344 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm8, %ymm1, %ymm8
12345 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm9, %ymm8, %ymm8
12346 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12347 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r8), %ymm11
12348 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm12, %ymm11, %ymm8
12349 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r9), %ymm15
12350 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm10, %ymm15, %ymm9
12351 ; AVX512F-ONLY-FAST-NEXT: vporq %ymm9, %ymm8, %ymm18
12352 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm4[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
12353 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm9 = ymm5[0,1,1,3,4,5,5,7]
12354 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1],ymm8[2],ymm9[3,4],ymm8[5],ymm9[6,7,8,9],ymm8[10],ymm9[11,12],ymm8[13],ymm9[14,15]
12355 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,2]
12356 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm12 = [18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
12357 ; AVX512F-ONLY-FAST-NEXT: # ymm12 = mem[0,1,0,1]
12358 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm12, %ymm6, %ymm9
12359 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm12, %ymm31
12360 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm7[1,1,1,1,5,5,5,5]
12361 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm9 = ymm9[0,1],ymm12[2],ymm9[3,4],ymm12[5],ymm9[6,7,8,9],ymm12[10],ymm9[11,12],ymm12[13],ymm9[14,15]
12362 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
12363 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm16, %zmm8
12364 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm19, %zmm9
12365 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm9
12366 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
12367 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%r8), %ymm8
12368 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm8[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm8[14,15],zero,zero,ymm8[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm8[16,17],zero,zero,ymm8[u,u],zero,zero
12369 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, %ymm13, %ymm9, %ymm12
12370 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm13, %ymm14
12371 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%r9), %ymm13
12372 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12373 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm10, %ymm13, %ymm10
12374 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm12, %ymm10
12375 ; AVX512F-ONLY-FAST-NEXT: vextracti64x4 $1, %zmm9, %ymm9
12376 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <u,4,u,u,u,5,u,u>
12377 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm8, %ymm12, %ymm12
12378 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm9, %ymm12
12379 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %ymm13, %ymm9
12380 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,2]
12381 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm12, %ymm9
12382 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm9
12383 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm9 = zmm10[0,1,2,3],zmm9[4,5,6,7]
12384 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12385 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
12386 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [0,1,4,5,4,5,5,7]
12387 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rax), %ymm9
12388 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm9, %ymm10, %ymm10
12389 ; AVX512F-ONLY-FAST-NEXT: vpandn %ymm10, %ymm14, %ymm10
12390 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
12391 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm13, %ymm9, %ymm12
12392 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm10, %zmm12, %zmm10
12393 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12394 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
12395 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm12, %ymm6, %ymm10
12396 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm12, %ymm19
12397 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm7[3,3,3,3,7,7,7,7]
12398 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm10 = ymm12[0,1,2],ymm10[3],ymm12[4,5],ymm10[6],ymm12[7,8,9,10],ymm10[11],ymm12[12,13],ymm10[14],ymm12[15]
12399 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u>
12400 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm12, %ymm6, %ymm6
12401 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm12, %ymm23
12402 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[2,2,2,2,6,6,6,6]
12403 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm7[0],ymm6[1],ymm7[2,3],ymm6[4],ymm7[5,6,7,8],ymm6[9],ymm7[10,11],ymm6[12],ymm7[13,14,15]
12404 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm25 = [0,2,2,3,10,9,11,11]
12405 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm25, %zmm7
12406 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = <14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u>
12407 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm4, %ymm10
12408 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm6, %ymm26
12409 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm5[3,3,3,3,7,7,7,7]
12410 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm10 = ymm12[0],ymm10[1],ymm12[2,3],ymm10[4],ymm12[5,6,7,8],ymm10[9],ymm12[10,11],ymm10[12],ymm12[13,14,15]
12411 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
12412 ; AVX512F-ONLY-FAST-NEXT: # ymm6 = mem[0,1,0,1]
12413 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm4, %ymm4
12414 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm6, %ymm29
12415 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[2,2,2,2,6,6,6,6]
12416 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7,8,9],ymm5[10],ymm4[11,12],ymm5[13],ymm4[14,15]
12417 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [2,2,2,3,8,10,10,11]
12418 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm10, %zmm20, %zmm4
12419 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
12420 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm7, %zmm5, %zmm4
12421 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, %zmm10
12422 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12423 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <5,u,u,u,6,u,u,6>
12424 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm8, %ymm4, %ymm4
12425 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12426 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 64(%rax), %zmm28
12427 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm4 = [30,5,0,0,31,6,0,31,30,5,0,0,31,6,0,31]
12428 ; AVX512F-ONLY-FAST-NEXT: # zmm4 = mem[0,1,2,3,0,1,2,3]
12429 ; AVX512F-ONLY-FAST-NEXT: vpermi2d %zmm28, %zmm9, %zmm4
12430 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12431 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
12432 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 72(%rax), %ymm4
12433 ; AVX512F-ONLY-FAST-NEXT: vpandn %ymm4, %ymm7, %ymm4
12434 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm7, %ymm16
12435 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rax), %ymm5
12436 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm13, %ymm14
12437 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm13, %ymm5, %ymm7
12438 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm5, %ymm27
12439 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm4, %zmm9
12440 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm22, %zmm0, %zmm4
12441 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%r9), %xmm5
12442 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%r8), %xmm12
12443 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm12[4],xmm5[4],xmm12[5],xmm5[5],xmm12[6],xmm5[6],xmm12[7],xmm5[7]
12444 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm5, %xmm21
12445 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm13 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
12446 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm13, %xmm7, %xmm7
12447 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm13, %xmm8
12448 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm13 = [0,0,1,1,12,13,14,15]
12449 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm13, %zmm7
12450 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
12451 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, %zmm4, %zmm7, %zmm9
12452 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12453 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 8(%rax), %ymm7
12454 ; AVX512F-ONLY-FAST-NEXT: vpandnq %ymm7, %ymm16, %ymm7
12455 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rax), %ymm5
12456 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12457 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm5, %ymm9
12458 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm7, %zmm16
12459 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm17, %zmm0, %zmm7
12460 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %xmm5
12461 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12462 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %xmm6
12463 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm6, (%rsp) # 16-byte Spill
12464 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm14 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
12465 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm8, %xmm14, %xmm14
12466 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm13, %zmm14
12467 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, %zmm4, %zmm14, %zmm16
12468 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12469 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd {{.*#+}} ymm9 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
12470 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm9, %ymm15, %ymm4
12471 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm11[0,0,2,1,4,4,6,5]
12472 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1,2],ymm7[3],ymm4[4,5],ymm7[6],ymm4[7,8,9,10],ymm7[11],ymm4[12,13],ymm7[14],ymm4[15]
12473 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,u,3,10,10,11,11>
12474 ; AVX512F-ONLY-FAST-NEXT: vpermi2q %zmm4, %zmm18, %zmm7
12475 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12476 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12477 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm19, %ymm14
12478 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm0, %ymm4
12479 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12480 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm1[3,3,3,3,7,7,7,7]
12481 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm7[0,1,2],ymm4[3],ymm7[4,5],ymm4[6],ymm7[7,8,9,10],ymm4[11],ymm7[12,13],ymm4[14],ymm7[15]
12482 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm23, %ymm5
12483 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm5, %ymm0, %ymm7
12484 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm13 = ymm1[2,2,2,2,6,6,6,6]
12485 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm13[0],ymm7[1],ymm13[2,3],ymm7[4],ymm13[5,6,7,8],ymm7[9],ymm13[10,11],ymm7[12],ymm13[13,14,15]
12486 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm25, %zmm7
12487 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12488 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm26, %ymm8
12489 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm8, %ymm3, %ymm4
12490 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12491 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm2[3,3,3,3,7,7,7,7]
12492 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm6[0],ymm4[1],ymm6[2,3],ymm4[4],ymm6[5,6,7,8],ymm4[9],ymm6[10,11],ymm4[12],ymm6[13,14,15]
12493 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm29, %ymm1
12494 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm1, %ymm3, %ymm6
12495 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm13 = ymm2[2,2,2,2,6,6,6,6]
12496 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm6[0,1],ymm13[2],ymm6[3,4],ymm13[5],ymm6[6,7,8,9],ymm13[10],ymm6[11,12],ymm13[13],ymm6[14,15]
12497 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm20, %zmm0
12498 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm7, %zmm10, %zmm0
12499 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12500 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
12501 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm11[3,3,3,3,7,7,7,7]
12502 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm6[2],ymm4[3,4],ymm6[5],ymm4[6,7,8,9],ymm6[10],ymm4[11,12],ymm6[13],ymm4[14,15]
12503 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %ymm15, %ymm6
12504 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm11[1,2,2,3,5,6,6,7]
12505 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm7[0,1],ymm6[2],ymm7[3,4],ymm6[5],ymm7[6,7,8,9],ymm6[10],ymm7[11,12],ymm6[13],ymm7[14,15]
12506 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = [2,1,3,2,10,10,10,11]
12507 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm17, %zmm0
12508 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12509 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
12510 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm4
12511 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm30[2,2,2,2,6,6,6,6]
12512 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm6[0],ymm4[1],ymm6[2,3],ymm4[4],ymm6[5,6,7,8],ymm4[9],ymm6[10,11],ymm4[12],ymm6[13,14,15]
12513 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm31, %ymm0
12514 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm2, %ymm6
12515 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm2, %ymm5
12516 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm30[1,1,1,1,5,5,5,5]
12517 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1],ymm7[2],ymm6[3,4],ymm7[5],ymm6[6,7,8,9],ymm7[10],ymm6[11,12],ymm7[13],ymm6[14,15]
12518 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm20, %zmm6
12519 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12520 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm4
12521 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm29, %ymm31
12522 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
12523 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm1[2,2,2,2,6,6,6,6]
12524 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm7[2],ymm4[3,4],ymm7[5],ymm4[6,7,8,9],ymm7[10],ymm4[11,12],ymm7[13],ymm4[14,15]
12525 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm0[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
12526 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm11 = ymm1[0,1,1,3,4,5,5,7]
12527 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm11[0,1],ymm7[2],ymm11[3,4],ymm7[5],ymm11[6,7,8,9],ymm7[10],ymm11[11,12],ymm7[13],ymm11[14,15]
12528 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm17, %zmm7
12529 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
12530 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm6, %zmm23, %zmm7
12531 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
12532 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %ymm15, %ymm4
12533 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm19 # 32-byte Reload
12534 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm19[1,2,2,3,5,6,6,7]
12535 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm6[0,1],ymm4[2],ymm6[3,4],ymm4[5],ymm6[6,7,8,9],ymm4[10],ymm6[11,12],ymm4[13],ymm6[14,15]
12536 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm9, %ymm15, %ymm6
12537 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm11 = ymm19[0,0,2,1,4,4,6,5]
12538 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1,2],ymm11[3],ymm6[4,5],ymm11[6],ymm6[7,8,9,10],ymm11[11],ymm6[12,13],ymm11[14],ymm6[15]
12539 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm25 = [2,2,3,3,10,9,11,10]
12540 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm25, %zmm6
12541 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm27, %zmm28, %zmm4
12542 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm29 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
12543 ; AVX512F-ONLY-FAST-NEXT: # zmm29 = mem[0,1,2,3,0,1,2,3]
12544 ; AVX512F-ONLY-FAST-NEXT: vpermd %zmm4, %zmm29, %zmm2
12545 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm28 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
12546 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm6, %zmm28, %zmm2
12547 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm2
12548 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12549 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm3
12550 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm1[3,3,3,3,7,7,7,7]
12551 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm10 = ymm2[0],ymm3[1],ymm2[2,3],ymm3[4],ymm2[5,6,7,8],ymm3[9],ymm2[10,11],ymm3[12],ymm2[13,14,15]
12552 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rcx), %xmm0
12553 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12554 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rdx), %xmm11
12555 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm11[0],xmm0[0],xmm11[1],xmm0[1],xmm11[2],xmm0[2],xmm11[3],xmm0[3]
12556 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
12557 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm3, %xmm3
12558 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,2,2,3,8,9,9,11]
12559 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm10
12560 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm5, %ymm1
12561 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm0 = ymm30[3,3,3,3,7,7,7,7]
12562 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm5 = ymm0[0,1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7,8,9,10],ymm1[11],ymm0[12,13],ymm1[14],ymm0[15]
12563 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rcx), %xmm1
12564 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdx), %xmm7
12565 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq {{.*#+}} xmm3 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
12566 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm3, %xmm1, %xmm0
12567 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm3, %xmm8
12568 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm3 = xmm7[1,1,2,2]
12569 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0],xmm0[1],xmm3[2,3],xmm0[4],xmm3[5,6],xmm0[7]
12570 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm7[4],xmm1[4],xmm7[5],xmm1[5],xmm7[6],xmm1[6],xmm7[7],xmm1[7]
12571 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12572 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm7[0],xmm1[0],xmm7[1],xmm1[1],xmm7[2],xmm1[2],xmm7[3],xmm1[3]
12573 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm1
12574 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,3,8,8,9,9]
12575 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm3, %zmm1
12576 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, %zmm13
12577 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rdi), %xmm0
12578 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 64(%rsi), %xmm3
12579 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %xmm3, %xmm4
12580 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[1,1,2,3]
12581 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm4 = xmm6[0,1],xmm4[2],xmm6[3,4],xmm4[5],xmm6[6,7]
12582 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
12583 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12584 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
12585 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
12586 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm9, %xmm0, %xmm14
12587 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm26 = [0,0,1,1,8,8,10,9]
12588 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm26, %zmm14
12589 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0]
12590 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm1, %zmm4, %zmm14
12591 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm21, %xmm0
12592 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm12[0],xmm0[0],xmm12[1],xmm0[1],xmm12[2],xmm0[2],xmm12[3],xmm0[3]
12593 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
12594 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
12595 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm3, %xmm0, %xmm1
12596 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm3, %xmm12
12597 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm6, %xmm0, %xmm0
12598 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = [0,0,0,1,8,9,9,11]
12599 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm21, %zmm0
12600 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %xmm1
12601 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm3
12602 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm8, %xmm1, %xmm7
12603 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm8 = xmm3[1,1,2,2]
12604 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm7 = xmm8[0],xmm7[1],xmm8[2,3],xmm7[4],xmm8[5,6],xmm7[7]
12605 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
12606 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12607 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
12608 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm1
12609 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm2, %xmm24
12610 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm13, %zmm1
12611 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %xmm2
12612 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12613 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %xmm8
12614 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12615 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %xmm8, %xmm3
12616 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm7 = xmm2[1,1,2,3]
12617 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1],xmm3[2],xmm7[3,4],xmm3[5],xmm7[6,7]
12618 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm8[0],xmm2[1],xmm8[1],xmm2[2],xmm8[2],xmm2[3],xmm8[3]
12619 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm9, %xmm2
12620 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm9, %xmm3, %xmm3
12621 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm26, %zmm3
12622 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm1, %zmm4, %zmm3
12623 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 64(%rax), %ymm1
12624 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 68(%rax), %ymm4
12625 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm1, %zmm18
12626 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
12627 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm0, %zmm1, %zmm18
12628 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsp), %xmm0 # 16-byte Reload
12629 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12630 ; AVX512F-ONLY-FAST-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
12631 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm12, %xmm0, %xmm4
12632 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm6, %xmm0, %xmm0
12633 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm6, %xmm8
12634 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm21, %zmm0
12635 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd (%rax), %ymm4
12636 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 4(%rax), %ymm6
12637 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm4, %zmm13
12638 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm0, %zmm1, %zmm13
12639 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rsi), %xmm9
12640 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%rdi), %xmm0
12641 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm0[0],xmm9[0],xmm0[1],xmm9[1],xmm0[2],xmm9[2],xmm0[3],xmm9[3]
12642 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm1
12643 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = [2,1,3,3,8,8,9,9]
12644 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm2, %zmm5
12645 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm5
12646 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
12647 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm19[3,3,3,3,7,7,7,7]
12648 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
12649 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
12650 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm14, %zmm2, %zmm18
12651 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm3, %zmm2, %zmm13
12652 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
12653 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm4[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
12654 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm20 # 32-byte Reload
12655 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm20[2,2,2,2,6,6,6,6]
12656 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6,7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13,14,15]
12657 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
12658 ; AVX512F-ONLY-FAST-NEXT: # ymm3 = mem[0,1,0,1]
12659 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm3, %ymm4, %ymm3
12660 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm4, %ymm16
12661 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm20[1,1,1,1,5,5,5,5]
12662 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7,8,9],ymm4[10],ymm3[11,12],ymm4[13],ymm3[14,15]
12663 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [2,2,2,3,8,10,10,11]
12664 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm6
12665 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
12666 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm31, %ymm2
12667 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm2, %ymm15, %ymm2
12668 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
12669 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm7[2,2,2,2,6,6,6,6]
12670 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7,8,9],ymm3[10],ymm2[11,12],ymm3[13],ymm2[14,15]
12671 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm15[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
12672 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm7[0,1,1,3,4,5,5,7]
12673 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm7, %ymm21
12674 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm14 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7,8,9],ymm3[10],ymm4[11,12],ymm3[13],ymm4[14,15]
12675 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm17, %zmm14
12676 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [2,2,2,3,8,8,8,9]
12677 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%r9), %xmm3
12678 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 96(%r8), %xmm2
12679 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
12680 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm8, %xmm4, %xmm7
12681 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm7, %zmm10, %zmm1
12682 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm6, %zmm23, %zmm14
12683 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
12684 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %ymm8, %ymm6
12685 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Reload
12686 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm17[1,2,2,3,5,6,6,7]
12687 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm7[0,1],ymm6[2],ymm7[3,4],ymm6[5],ymm7[6,7,8,9],ymm6[10],ymm7[11,12],ymm6[13],ymm7[14,15]
12688 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
12689 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm8, %ymm19
12690 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm8 = ymm17[0,0,2,1,4,4,6,5]
12691 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm7[0,1,2],ymm8[3],ymm7[4,5],ymm8[6],ymm7[7,8,9,10],ymm8[11],ymm7[12,13],ymm8[14],ymm7[15]
12692 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm6, %zmm25, %zmm7
12693 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 (%rax), %zmm25
12694 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Reload
12695 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm23, %zmm25, %zmm6
12696 ; AVX512F-ONLY-FAST-NEXT: vpermd %zmm6, %zmm29, %zmm10
12697 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm7, %zmm28, %zmm10
12698 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm28 = [6,7,3,3,7,7,6,7]
12699 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm27, %ymm28, %ymm6
12700 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 96(%rax), %ymm7
12701 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm6, %zmm12
12702 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm31 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
12703 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm1, %zmm31, %zmm12
12704 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm30 = [0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
12705 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm5, %zmm30, %zmm12
12706 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm9[4],xmm0[4],xmm9[5],xmm0[5],xmm9[6],xmm0[6],xmm9[7],xmm0[7]
12707 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %xmm9, %xmm1
12708 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
12709 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm7 = xmm0[0,1],xmm1[2],xmm0[3,4],xmm1[5],xmm0[6,7]
12710 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm14, %zmm10
12711 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rcx), %xmm14
12712 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdx), %xmm8
12713 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
12714 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm24, %xmm1
12715 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
12716 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm15[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
12717 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm9 = ymm21[3,3,3,3,7,7,7,7]
12718 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm9[0],ymm1[1],ymm9[2,3],ymm1[4],ymm9[5,6,7,8],ymm1[9],ymm9[10,11],ymm1[12],ymm9[13,14,15]
12719 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,2,2,3,8,9,9,11]
12720 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
12721 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm5[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
12722 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,0,2,1,8,8,9,11]
12723 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm0, %zmm22, %zmm7
12724 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12725 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm11[4],xmm0[4],xmm11[5],xmm0[5],xmm11[6],xmm0[6],xmm11[7],xmm0[7]
12726 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq {{.*#+}} xmm24 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
12727 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm24, %xmm1
12728 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
12729 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[1,1,2,2]
12730 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm1 = xmm11[0],xmm0[1],xmm11[2,3],xmm0[4],xmm11[5,6],xmm0[7]
12731 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,2,3,3,4,5,6,7]
12732 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm26, %zmm1
12733 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm26 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
12734 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm7, %zmm26, %zmm1
12735 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
12736 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,u,u,u,u,u,u,u,u]
12737 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm5[2,2,2,2]
12738 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
12739 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
12740 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm0, %xmm2, %xmm2
12741 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} xmm29 = [0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
12742 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm29, %xmm3
12743 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm3, %xmm4, %xmm3
12744 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm27 = [0,1,1,3,8,8,9,9]
12745 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm27, %zmm3
12746 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm16, %ymm2
12747 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29]
12748 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm20[3,3,3,3,7,7,7,7]
12749 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm11 = ymm4[0,1,2],ymm2[3],ymm4[4,5],ymm2[6],ymm4[7,8,9,10],ymm2[11],ymm4[12,13],ymm2[14],ymm4[15]
12750 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdi), %xmm2
12751 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rsi), %xmm5
12752 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
12753 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm0, %xmm4, %xmm4
12754 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [2,1,3,3,8,8,9,9]
12755 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm4, %zmm0, %zmm11
12756 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm11
12757 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 100(%rax), %ymm0
12758 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 104(%rax), %ymm4
12759 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm16
12760 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
12761 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm3, %zmm21, %zmm16
12762 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm19, %ymm0
12763 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
12764 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm17[3,3,3,3,7,7,7,7]
12765 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm3[2],ymm0[3,4],ymm3[5],ymm0[6,7,8,9],ymm3[10],ymm0[11,12],ymm3[13],ymm0[14,15]
12766 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r9), %xmm4
12767 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r8), %xmm3
12768 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
12769 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm9 = xmm6[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
12770 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = [2,2,2,3,8,8,8,9]
12771 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm9, %zmm17, %zmm0
12772 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm23, %ymm28, %ymm9
12773 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 32(%rax), %ymm20
12774 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm20, %zmm9, %zmm20
12775 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm0, %zmm31, %zmm20
12776 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm11, %zmm30, %zmm20
12777 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0]
12778 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm1, %zmm0, %zmm16
12779 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm5[4],xmm2[4],xmm5[5],xmm2[5],xmm5[6],xmm2[6],xmm5[7],xmm2[7]
12780 ; AVX512F-ONLY-FAST-NEXT: vprold $16, %xmm5, %xmm5
12781 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,2,3]
12782 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm5[2],xmm2[3,4],xmm5[5],xmm2[6,7]
12783 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm9 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
12784 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm9, %xmm1, %xmm1
12785 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm1, %zmm22, %zmm2
12786 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm24, %xmm1
12787 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm14, %xmm1
12788 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
12789 ; AVX512F-ONLY-FAST-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[1,1,2,2]
12790 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} xmm1 = xmm8[0],xmm1[1],xmm8[2,3],xmm1[4],xmm8[5,6],xmm1[7]
12791 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,2,3,3,4,5,6,7]
12792 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,0,1,1,8,8,10,9]
12793 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
12794 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm2, %zmm26, %zmm1
12795 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
12796 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
12797 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm29, %xmm3
12798 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm3, %xmm6, %xmm3
12799 ; AVX512F-ONLY-FAST-NEXT: vpermt2q %zmm2, %zmm27, %zmm3
12800 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 36(%rax), %ymm2
12801 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastd 40(%rax), %ymm4
12802 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm2, %zmm2
12803 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm3, %zmm21, %zmm2
12804 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm1, %zmm0, %zmm2
12805 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
12806 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
12807 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm11, %ymm0
12808 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm7
12809 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
12810 ; AVX512F-ONLY-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
12811 ; AVX512F-ONLY-FAST-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
12812 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
12813 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm9, %xmm1, %xmm1
12814 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm9, %xmm0, %xmm0
12815 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
12816 ; AVX512F-ONLY-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm3[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
12817 ; AVX512F-ONLY-FAST-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
12818 ; AVX512F-ONLY-FAST-NEXT: # ymm4 = mem[0,1,1,3,4,5,5,7]
12819 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7,8,9],ymm3[10],ymm4[11,12],ymm3[13],ymm4[14,15]
12820 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
12821 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
12822 ; AVX512F-ONLY-FAST-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
12823 ; AVX512F-ONLY-FAST-NEXT: # ymm5 = mem[1,1,1,1,5,5,5,5]
12824 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7,8,9],ymm5[10],ymm4[11,12],ymm5[13],ymm4[14,15]
12825 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rax), %ymm5
12826 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [14,21,0,0,15,22,0,15,14,21,0,0,15,22,0,15]
12827 ; AVX512F-ONLY-FAST-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
12828 ; AVX512F-ONLY-FAST-NEXT: vpermi2d %zmm5, %zmm25, %zmm6
12829 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,1,4,5,4,5,5,7]
12830 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm5, %ymm8, %ymm8
12831 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm5[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm5[16,17],zero,zero
12832 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
12833 ; AVX512F-ONLY-FAST-NEXT: vpandn %ymm8, %ymm9, %ymm8
12834 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm5, %zmm5
12835 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
12836 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm6 # 64-byte Folded Reload
12837 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
12838 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm6
12839 ; AVX512F-ONLY-FAST-NEXT: vextracti64x4 $1, %zmm11, %ymm9
12840 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
12841 ; AVX512F-ONLY-FAST-NEXT: vpternlogd $226, 124(%r8){1to8}, %ymm11, %ymm9
12842 ; AVX512F-ONLY-FAST-NEXT: vpshufhw {{.*#+}} ymm11 = ymm15[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
12843 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[3,3,3,3]
12844 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm9, %ymm11
12845 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm0, %zmm9
12846 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm9[4,5,6,7]
12847 ; AVX512F-ONLY-FAST-NEXT: vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
12848 ; AVX512F-ONLY-FAST-NEXT: # xmm9 = mem[0,2,3,3,4,5,6,7]
12849 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
12850 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,3]
12851 ; AVX512F-ONLY-FAST-NEXT: vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
12852 ; AVX512F-ONLY-FAST-NEXT: # xmm11 = mem[0,2,3,3,4,5,6,7]
12853 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,2,1]
12854 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,3]
12855 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,2]
12856 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
12857 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
12858 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm7, %zmm8, %zmm14
12859 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm9, %zmm7 # 32-byte Folded Reload
12860 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 32-byte Folded Reload
12861 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm7, %zmm26, %zmm1
12862 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm11, %zmm7 # 32-byte Folded Reload
12863 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 32-byte Folded Reload
12864 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm7, %zmm26, %zmm0
12865 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = [0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
12866 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm1 # 64-byte Folded Reload
12867 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm0 # 64-byte Folded Reload
12868 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
12869 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm7, %zmm3
12870 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
12871 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm7, %zmm4
12872 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm4
12873 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
12874 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
12875 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm7 # 64-byte Folded Reload
12876 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm5 # 64-byte Folded Reload
12877 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm5
12878 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
12879 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm5, 320(%rax)
12880 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, 256(%rax)
12881 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm20, 192(%rax)
12882 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm10, 128(%rax)
12883 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm13, (%rax)
12884 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, 448(%rax)
12885 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 704(%rax)
12886 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm12, 640(%rax)
12887 ; AVX512F-ONLY-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12888 ; AVX512F-ONLY-FAST-NEXT: vmovaps %zmm2, 576(%rax)
12889 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm6, 384(%rax)
12890 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm0, 64(%rax)
12891 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 512(%rax)
12892 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm14, 832(%rax)
12893 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm7, 768(%rax)
12894 ; AVX512F-ONLY-FAST-NEXT: addq $1432, %rsp # imm = 0x598
12895 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
12896 ; AVX512F-ONLY-FAST-NEXT: retq
12898 ; AVX512DQ-SLOW-LABEL: store_i16_stride7_vf64:
12899 ; AVX512DQ-SLOW: # %bb.0:
12900 ; AVX512DQ-SLOW-NEXT: subq $2168, %rsp # imm = 0x878
12901 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rcx), %ymm2
12902 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rdx), %ymm6
12903 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rdi), %ymm7
12904 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rsi), %ymm9
12905 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
12906 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm0, %ymm2, %ymm1
12907 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm16
12908 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
12909 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm11, %ymm6, %ymm2
12910 ; AVX512DQ-SLOW-NEXT: vporq %ymm1, %ymm2, %ymm18
12911 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm14 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
12912 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm14, %ymm9, %ymm1
12913 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
12914 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm13, %ymm7, %ymm2
12915 ; AVX512DQ-SLOW-NEXT: vporq %ymm1, %ymm2, %ymm17
12916 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
12917 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r9), %ymm2
12918 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm3, %ymm2, %ymm1
12919 ; AVX512DQ-SLOW-NEXT: vmovdqa %ymm3, %ymm8
12920 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm21
12921 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r8), %ymm3
12922 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
12923 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm15, %ymm3, %ymm2
12924 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm3, %ymm23
12925 ; AVX512DQ-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
12926 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12927 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rcx), %ymm10
12928 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm0, %ymm10, %ymm1
12929 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdx), %ymm4
12930 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm11, %ymm4, %ymm2
12931 ; AVX512DQ-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
12932 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12933 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rsi), %ymm5
12934 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm14, %ymm5, %ymm1
12935 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdi), %ymm3
12936 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm13, %ymm3, %ymm2
12937 ; AVX512DQ-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
12938 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12939 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %ymm1
12940 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12941 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm8, %ymm1, %ymm1
12942 ; AVX512DQ-SLOW-NEXT: vmovdqa %ymm8, %ymm12
12943 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %ymm2
12944 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12945 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm15, %ymm2, %ymm2
12946 ; AVX512DQ-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
12947 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12948 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %ymm1
12949 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12950 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm0, %ymm1, %ymm1
12951 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %ymm2
12952 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12953 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm11, %ymm2, %ymm2
12954 ; AVX512DQ-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
12955 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12956 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %ymm2
12957 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm14, %ymm2, %ymm1
12958 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm24
12959 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %ymm8
12960 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm13, %ymm8, %ymm2
12961 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm8, %ymm19
12962 ; AVX512DQ-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
12963 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12964 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rcx), %ymm8
12965 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm0, %ymm8, %ymm0
12966 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdx), %ymm2
12967 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm11, %ymm2, %ymm1
12968 ; AVX512DQ-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
12969 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12970 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rsi), %ymm1
12971 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm14, %ymm1, %ymm0
12972 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdi), %ymm11
12973 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm13, %ymm11, %ymm13
12974 ; AVX512DQ-SLOW-NEXT: vpor %ymm0, %ymm13, %ymm0
12975 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12976 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r8), %ymm13
12977 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm15, %ymm13, %ymm14
12978 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r9), %ymm0
12979 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm12, %ymm0, %ymm15
12980 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm12, %ymm20
12981 ; AVX512DQ-SLOW-NEXT: vpor %ymm15, %ymm14, %ymm12
12982 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12983 ; AVX512DQ-SLOW-NEXT: vprold $16, %ymm0, %ymm14
12984 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm13[1,2,2,3,5,6,6,7]
12985 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm14 = ymm15[0,1],ymm14[2],ymm15[3,4],ymm14[5],ymm15[6,7,8,9],ymm14[10],ymm15[11,12],ymm14[13],ymm15[14,15]
12986 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm15 = ymm0[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
12987 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm15[2,2,2,3,6,6,6,7]
12988 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm13[3,3,3,3,7,7,7,7]
12989 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm15[0,1],ymm12[2],ymm15[3,4],ymm12[5],ymm15[6,7,8,9],ymm12[10],ymm15[11,12],ymm12[13],ymm15[14,15]
12990 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm15 = [2,1,3,2,10,10,10,11]
12991 ; AVX512DQ-SLOW-NEXT: vpermi2q %zmm12, %zmm14, %zmm15
12992 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12993 ; AVX512DQ-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm14 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
12994 ; AVX512DQ-SLOW-NEXT: # ymm14 = mem[0,1,0,1]
12995 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm16, %ymm15
12996 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm14, %ymm15, %ymm12
12997 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm14, %ymm16
12998 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm6[2,2,2,2,6,6,6,6]
12999 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0,1],ymm14[2],ymm12[3,4],ymm14[5],ymm12[6,7,8,9],ymm14[10],ymm12[11,12],ymm14[13],ymm12[14,15]
13000 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13001 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm12 = ymm15[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
13002 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
13003 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm6[3,3,3,3,7,7,7,7]
13004 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm14[0],ymm12[1],ymm14[2,3],ymm12[4],ymm14[5,6,7,8],ymm12[9],ymm14[10,11],ymm12[12],ymm14[13,14,15]
13005 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13006 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm12 = ymm9[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
13007 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
13008 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm7[2,2,2,2,6,6,6,6]
13009 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm14[0],ymm12[1],ymm14[2,3],ymm12[4],ymm14[5,6,7,8],ymm12[9],ymm14[10,11],ymm12[12],ymm14[13,14,15]
13010 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm12, %ymm25
13011 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm12 = ymm9[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
13012 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
13013 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm7[3,3,3,3,7,7,7,7]
13014 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm14[0,1,2],ymm12[3],ymm14[4,5],ymm12[6],ymm14[7,8,9,10],ymm12[11],ymm14[12,13],ymm12[14],ymm14[15]
13015 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13016 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[0,1,1,3,4,5,5,7]
13017 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm15[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
13018 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
13019 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1],ymm12[2],ymm6[3,4],ymm12[5],ymm6[6,7,8,9],ymm12[10],ymm6[11,12],ymm12[13],ymm6[14,15]
13020 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,1,3,2]
13021 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[1,1,1,1,5,5,5,5]
13022 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm9 = ymm9[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
13023 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[0,0,2,1,4,4,6,5]
13024 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm7 = ymm9[0,1],ymm7[2],ymm9[3,4],ymm7[5],ymm9[6,7,8,9],ymm7[10],ymm9[11,12],ymm7[13],ymm9[14,15]
13025 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
13026 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm18, %zmm6
13027 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm17, %zmm7
13028 ; AVX512DQ-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm7
13029 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
13030 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%r8), %ymm12
13031 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13032 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm12[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm12[14,15],zero,zero,ymm12[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm12[16,17],zero,zero,ymm12[u,u],zero,zero
13033 ; AVX512DQ-SLOW-NEXT: vpternlogq $248, %ymm9, %ymm7, %ymm6
13034 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%r9), %ymm15
13035 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13036 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm20, %ymm14
13037 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm14, %ymm15, %ymm14
13038 ; AVX512DQ-SLOW-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm14
13039 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm7, %ymm6
13040 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm12[0,0,2,1,4,4,6,5]
13041 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,1,3,3]
13042 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm7
13043 ; AVX512DQ-SLOW-NEXT: vprold $16, %ymm15, %ymm6
13044 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,2]
13045 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm7, %ymm6
13046 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm0, %zmm6
13047 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm6 = zmm14[0,1,2,3],zmm6[4,5,6,7]
13048 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13049 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
13050 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x8 {{.*#+}} zmm18 = [6,5,0,0,7,6,0,7,6,5,0,0,7,6,0,7]
13051 ; AVX512DQ-SLOW-NEXT: # zmm18 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
13052 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rax), %ymm6
13053 ; AVX512DQ-SLOW-NEXT: vpermd %zmm6, %zmm18, %zmm12
13054 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13055 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm6[0,1,1,3,4,5,5,7]
13056 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
13057 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm15, %ymm6, %ymm6
13058 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,2,2,3]
13059 ; AVX512DQ-SLOW-NEXT: vpandn %ymm14, %ymm9, %ymm14
13060 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm6, %zmm6
13061 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13062 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm17 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
13063 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 72(%rax), %ymm6
13064 ; AVX512DQ-SLOW-NEXT: vpandnq %ymm6, %ymm17, %ymm14
13065 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rax), %ymm6
13066 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm15, %ymm6, %ymm12
13067 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm14, %zmm12
13068 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13069 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 8(%rax), %ymm12
13070 ; AVX512DQ-SLOW-NEXT: vpandnq %ymm12, %ymm17, %ymm12
13071 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rax), %ymm7
13072 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm15, %ymm7, %ymm14
13073 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm7, %ymm22
13074 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm12, %zmm12
13075 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13076 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm8[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
13077 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
13078 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm2[0,1,1,3,4,5,5,7]
13079 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm14[0,1],ymm12[2],ymm14[3,4],ymm12[5],ymm14[6,7,8,9],ymm12[10],ymm14[11,12],ymm12[13],ymm14[14,15]
13080 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13081 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm1[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
13082 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm12 = ymm12[0,0,2,1,4,4,6,5]
13083 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm11[1,1,1,1,5,5,5,5]
13084 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0,1],ymm14[2],ymm12[3,4],ymm14[5],ymm12[6,7,8,9],ymm14[10],ymm12[11,12],ymm14[13],ymm12[14,15]
13085 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13086 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rax), %ymm12
13087 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm12[0,1,1,3,4,5,5,7]
13088 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,2,2,3]
13089 ; AVX512DQ-SLOW-NEXT: vpandn %ymm14, %ymm9, %ymm9
13090 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm15, %ymm12, %ymm14
13091 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm14, %zmm9
13092 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13093 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm13[0,0,2,1,4,4,6,5]
13094 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
13095 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,0,0,4,4,4,4]
13096 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm9[3],ymm0[4,5],ymm9[6],ymm0[7,8,9,10],ymm9[11],ymm0[12,13],ymm9[14],ymm0[15]
13097 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13098 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm16, %ymm7
13099 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm7, %ymm8, %ymm0
13100 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm2[2,2,2,2,6,6,6,6]
13101 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm9[2],ymm0[3,4],ymm9[5],ymm0[6,7,8,9],ymm9[10],ymm0[11,12],ymm9[13],ymm0[14,15]
13102 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13103 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm2[3,3,3,3,7,7,7,7]
13104 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm8[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
13105 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
13106 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3],ymm2[4],ymm0[5,6,7,8],ymm2[9],ymm0[10,11],ymm2[12],ymm0[13,14,15]
13107 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13108 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm1[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
13109 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
13110 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm11[2,2,2,2,6,6,6,6]
13111 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3],ymm0[4],ymm2[5,6,7,8],ymm0[9],ymm2[10,11],ymm0[12],ymm2[13,14,15]
13112 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm0, %ymm31
13113 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm11[3,3,3,3,7,7,7,7]
13114 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm1[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
13115 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
13116 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7,8,9,10],ymm1[11],ymm0[12,13],ymm1[14],ymm0[15]
13117 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13118 ; AVX512DQ-SLOW-NEXT: vpermd %zmm12, %zmm18, %zmm0
13119 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13120 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm5[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
13121 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
13122 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[1,1,1,1,5,5,5,5]
13123 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
13124 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13125 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm5[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
13126 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
13127 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm3[2,2,2,2,6,6,6,6]
13128 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
13129 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13130 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm0 = ymm10[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
13131 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,0,0,4,4,4,4]
13132 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm4[0,1,1,3,4,5,5,7]
13133 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
13134 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13135 ; AVX512DQ-SLOW-NEXT: vpshufb %ymm7, %ymm10, %ymm0
13136 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm4[2,2,2,2,6,6,6,6]
13137 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
13138 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13139 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm21, %ymm9
13140 ; AVX512DQ-SLOW-NEXT: vprold $16, %ymm21, %ymm0
13141 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm23[1,2,2,3,5,6,6,7]
13142 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
13143 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm9[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
13144 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,0,0,4,4,4,4]
13145 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm23[0,0,2,1,4,4,6,5]
13146 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7,8,9,10],ymm2[11],ymm1[12,13],ymm2[14],ymm1[15]
13147 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = [2,2,3,3,10,9,11,10]
13148 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm0, %zmm7, %zmm1
13149 ; AVX512DQ-SLOW-NEXT: vbroadcasti32x8 {{.*#+}} zmm15 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
13150 ; AVX512DQ-SLOW-NEXT: # zmm15 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
13151 ; AVX512DQ-SLOW-NEXT: vpermd 64(%rax), %zmm15, %zmm0
13152 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
13153 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm14, %zmm0
13154 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13155 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm4[3,3,3,3,7,7,7,7]
13156 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm10[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
13157 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
13158 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6,7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13,14,15]
13159 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13160 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm3[3,3,3,3,7,7,7,7]
13161 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm5[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
13162 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
13163 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7,8,9,10],ymm1[11],ymm0[12,13],ymm1[14],ymm0[15]
13164 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13165 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm23[3,3,3,3,7,7,7,7]
13166 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm9[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
13167 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,3,6,6,6,7]
13168 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
13169 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm11 = [4,5,4,5,4,5,6,7,16,17,16,17,16,17,17,19]
13170 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%r9), %xmm0
13171 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%r8), %xmm2
13172 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
13173 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm3[0,1,3,2,4,5,6,7]
13174 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm1, %zmm11, %zmm4
13175 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm6[2,3,3,3,6,7,7,7]
13176 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,1,3,2]
13177 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 96(%rax), %ymm5
13178 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm1, %zmm5
13179 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
13180 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm4, %zmm1, %zmm5
13181 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13182 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rsi), %xmm4
13183 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rdi), %xmm5
13184 ; AVX512DQ-SLOW-NEXT: vprold $16, %xmm4, %xmm6
13185 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm5[1,1,2,3]
13186 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm6 = xmm9[0,1],xmm6[2],xmm9[3,4],xmm6[5],xmm9[6,7]
13187 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13188 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
13189 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm6, %xmm23
13190 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
13191 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13192 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rcx), %xmm4
13193 ; AVX512DQ-SLOW-NEXT: vmovdqa 96(%rdx), %xmm5
13194 ; AVX512DQ-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm13 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
13195 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm13, %xmm4, %xmm6
13196 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm5[1,1,2,2]
13197 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm6 = xmm9[0],xmm6[1],xmm9[2,3],xmm6[4],xmm9[5,6],xmm6[7]
13198 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13199 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
13200 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13201 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
13202 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13203 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
13204 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,7,6]
13205 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
13206 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm2
13207 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm30 = <16,18,19,19,19,19,u,u,0,1,0,1,2,3,2,3>
13208 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm3, %zmm30, %zmm2
13209 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 100(%rax), %ymm3
13210 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 104(%rax), %ymm4
13211 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
13212 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
13213 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm28, %zmm3
13214 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13215 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rcx), %xmm2
13216 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdx), %xmm3
13217 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
13218 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm4, %xmm29
13219 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
13220 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm13, %xmm2, %xmm2
13221 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[1,1,2,2]
13222 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2,3],xmm2[4],xmm3[5,6],xmm2[7]
13223 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm4[0,1,3,2,4,5,6,7]
13224 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = <16,16,17,17,17,17,u,u,0,1,0,1,2,3,2,3>
13225 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm3, %zmm5, %zmm2
13226 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rdi), %xmm3
13227 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%rsi), %xmm4
13228 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
13229 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm6
13230 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,1,1]
13231 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm9 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
13232 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm9, %xmm27
13233 ; AVX512DQ-SLOW-NEXT: vprold $16, %xmm4, %xmm4
13234 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[1,1,2,3]
13235 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm4[2],xmm3[3,4],xmm4[5],xmm3[6,7]
13236 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
13237 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm6, %zmm4
13238 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0]
13239 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm2, %zmm3, %zmm4
13240 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13241 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r9), %xmm2
13242 ; AVX512DQ-SLOW-NEXT: vmovdqa 64(%r8), %xmm4
13243 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
13244 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
13245 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm2[0,1,2,3,4,5,7,6]
13246 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,2,4,5,6,7]
13247 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm6 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
13248 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm4, %zmm6, %zmm2
13249 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 64(%rax), %ymm4
13250 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 68(%rax), %ymm9
13251 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm4, %zmm4
13252 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
13253 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm9, %zmm4
13254 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13255 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %xmm2
13256 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm10
13257 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm10[4],xmm2[4],xmm10[5],xmm2[5],xmm10[6],xmm2[6],xmm10[7],xmm2[7]
13258 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm4, %xmm26
13259 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm10[0],xmm2[0],xmm10[1],xmm2[1],xmm10[2],xmm2[2],xmm10[3],xmm2[3]
13260 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm13, %xmm2, %xmm2
13261 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[1,1,2,2]
13262 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm2 = xmm10[0],xmm2[1],xmm10[2,3],xmm2[4],xmm10[5,6],xmm2[7]
13263 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm10 = xmm12[0,1,3,2,4,5,6,7]
13264 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm10, %zmm5, %zmm2
13265 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %xmm5
13266 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %xmm10
13267 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm5[0],xmm10[0],xmm5[1],xmm10[1],xmm5[2],xmm10[2],xmm5[3],xmm10[3]
13268 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm12
13269 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,1,1]
13270 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm10[4],xmm5[4],xmm10[5],xmm5[5],xmm10[6],xmm5[6],xmm10[7],xmm5[7]
13271 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm4, %xmm21
13272 ; AVX512DQ-SLOW-NEXT: vprold $16, %xmm10, %xmm10
13273 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[1,1,2,3]
13274 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1],xmm10[2],xmm5[3,4],xmm10[5],xmm5[6,7]
13275 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,2,1]
13276 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm12, %zmm4
13277 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm2, %zmm3, %zmm4
13278 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13279 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %xmm2
13280 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %xmm3
13281 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
13282 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
13283 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm2[0,1,2,3,4,5,7,6]
13284 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,2,4,5,6,7]
13285 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm3, %zmm6, %zmm2
13286 ; AVX512DQ-SLOW-NEXT: vpbroadcastd (%rax), %ymm3
13287 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 4(%rax), %ymm5
13288 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm3, %zmm3
13289 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm9, %zmm3
13290 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13291 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm24, %ymm10
13292 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm10[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
13293 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,2,1,4,4,6,5]
13294 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm19[1,1,1,1,5,5,5,5]
13295 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm5[2],ymm2[3,4],ymm5[5],ymm2[6,7,8,9],ymm5[10],ymm2[11,12],ymm5[13],ymm2[14,15]
13296 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm20
13297 ; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
13298 ; AVX512DQ-SLOW-NEXT: vprold $16, %ymm3, %ymm2
13299 ; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
13300 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm9[1,2,2,3,5,6,6,7]
13301 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm2[2],ymm5[3,4],ymm2[5],ymm5[6,7,8,9],ymm2[10],ymm5[11,12],ymm2[13],ymm5[14,15]
13302 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm5 = ymm3[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
13303 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,0,0,0,4,4,4,4]
13304 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm9[0,0,2,1,4,4,6,5]
13305 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0,1,2],ymm6[3],ymm5[4,5],ymm6[6],ymm5[7,8,9,10],ymm6[11],ymm5[12,13],ymm6[14],ymm5[15]
13306 ; AVX512DQ-SLOW-NEXT: vpermt2q %zmm2, %zmm7, %zmm5
13307 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm10[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
13308 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
13309 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm19[2,2,2,2,6,6,6,6]
13310 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm6[0],ymm2[1],ymm6[2,3],ymm2[4],ymm6[5,6,7,8],ymm2[9],ymm6[10,11],ymm2[12],ymm6[13,14,15]
13311 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm16
13312 ; AVX512DQ-SLOW-NEXT: vpermd (%rax), %zmm15, %zmm2
13313 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm5, %zmm14, %zmm2
13314 ; AVX512DQ-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13315 ; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
13316 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm6[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
13317 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,0,0,4,4,4,4]
13318 ; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
13319 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm7[0,1,1,3,4,5,5,7]
13320 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm2[2],ymm5[3,4],ymm2[5],ymm5[6,7,8,9],ymm2[10],ymm5[11,12],ymm2[13],ymm5[14,15]
13321 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm17
13322 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
13323 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm7[2,2,2,2,6,6,6,6]
13324 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm5[2],ymm2[3,4],ymm5[5],ymm2[6,7,8,9],ymm5[10],ymm2[11,12],ymm5[13],ymm2[14,15]
13325 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm18
13326 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm7[3,3,3,3,7,7,7,7]
13327 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm5 = ymm6[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
13328 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[2,2,2,2,6,6,6,6]
13329 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm5[1],ymm2[2,3],ymm5[4],ymm2[5,6,7,8],ymm5[9],ymm2[10,11],ymm5[12],ymm2[13,14,15]
13330 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13331 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm19[3,3,3,3,7,7,7,7]
13332 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm5 = ymm10[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
13333 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[2,2,2,2,6,6,6,6]
13334 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm5[3],ymm2[4,5],ymm5[6],ymm2[7,8,9,10],ymm5[11],ymm2[12,13],ymm5[14],ymm2[15]
13335 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13336 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm9[3,3,3,3,7,7,7,7]
13337 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm5 = ymm3[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
13338 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[2,2,2,3,6,6,6,7]
13339 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm2[2],ymm5[3,4],ymm2[5],ymm5[6,7,8,9],ymm2[10],ymm5[11,12],ymm2[13],ymm5[14,15]
13340 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r9), %xmm6
13341 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%r8), %xmm14
13342 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
13343 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm12 = xmm5[0,1,3,2,4,5,6,7]
13344 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm12, %zmm11, %zmm2
13345 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm11 = ymm22[2,3,3,3,6,7,7,7]
13346 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,1,3,2]
13347 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 32(%rax), %ymm12
13348 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm22
13349 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm1, %zmm22
13350 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdi), %xmm1
13351 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rsi), %xmm2
13352 ; AVX512DQ-SLOW-NEXT: vprold $16, %xmm2, %xmm12
13353 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm1[1,1,2,3]
13354 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm3 = xmm15[0,1],xmm12[2],xmm15[3,4],xmm12[5],xmm15[6,7]
13355 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13356 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
13357 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
13358 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13359 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm14[4],xmm6[4],xmm14[5],xmm6[5],xmm14[6],xmm6[6],xmm14[7],xmm6[7]
13360 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm8, %xmm2
13361 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm3
13362 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm23, %xmm4
13363 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
13364 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13365 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm4
13366 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13367 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm0
13368 ; AVX512DQ-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm24 # 32-byte Folded Reload
13369 ; AVX512DQ-SLOW-NEXT: # ymm24 = mem[2,2,2,3]
13370 ; AVX512DQ-SLOW-NEXT: vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm19 # 32-byte Folded Reload
13371 ; AVX512DQ-SLOW-NEXT: # ymm19 = mem[0,2,2,3]
13372 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm25 = ymm25[0,2,2,3]
13373 ; AVX512DQ-SLOW-NEXT: vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
13374 ; AVX512DQ-SLOW-NEXT: # ymm8 = mem[2,1,3,3]
13375 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm5[0,1,2,3,4,5,7,6]
13376 ; AVX512DQ-SLOW-NEXT: vpermt2d %zmm1, %zmm30, %zmm0
13377 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 36(%rax), %ymm1
13378 ; AVX512DQ-SLOW-NEXT: vpbroadcastd 40(%rax), %ymm5
13379 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm1, %zmm30
13380 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm28, %zmm30
13381 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rcx), %xmm0
13382 ; AVX512DQ-SLOW-NEXT: vpshufb %xmm13, %xmm0, %xmm5
13383 ; AVX512DQ-SLOW-NEXT: vmovdqa 32(%rdx), %xmm14
13384 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm14[1,1,2,2]
13385 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} xmm1 = xmm15[0],xmm5[1],xmm15[2,3],xmm5[4],xmm15[5,6],xmm5[7]
13386 ; AVX512DQ-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13387 ; AVX512DQ-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3]
13388 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13389 ; AVX512DQ-SLOW-NEXT: vpunpckhwd {{.*#+}} xmm0 = xmm14[4],xmm0[4],xmm14[5],xmm0[5],xmm14[6],xmm0[6],xmm14[7],xmm0[7]
13390 ; AVX512DQ-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13391 ; AVX512DQ-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
13392 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm0 = ymm4[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
13393 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm0[3,3,3,3]
13394 ; AVX512DQ-SLOW-NEXT: vpshufd $233, {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Folded Reload
13395 ; AVX512DQ-SLOW-NEXT: # ymm23 = mem[1,2,2,3,5,6,6,7]
13396 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm13 = ymm4[2,1,2,3,6,5,6,7]
13397 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm13 = ymm13[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
13398 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm28 = ymm13[2,2,2,2]
13399 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm2[0,0,1,1]
13400 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm29, %xmm0
13401 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm0[0,2,3,3,4,5,6,7]
13402 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm2[0,0,2,1]
13403 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm27, %xmm0
13404 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm0[2,1,2,3,4,5,6,7]
13405 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,5,4]
13406 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,1,3]
13407 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm3[0,0,1,1]
13408 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm26, %xmm0
13409 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm0[0,2,3,3,4,5,6,7]
13410 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm3[0,0,2,1]
13411 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm21, %xmm0
13412 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm0[2,1,2,3,4,5,6,7]
13413 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,5,4]
13414 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm5[0,0,1,3]
13415 ; AVX512DQ-SLOW-NEXT: vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
13416 ; AVX512DQ-SLOW-NEXT: # ymm5 = mem[2,1,3,2]
13417 ; AVX512DQ-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Folded Reload
13418 ; AVX512DQ-SLOW-NEXT: # ymm10 = mem[2,2,2,3]
13419 ; AVX512DQ-SLOW-NEXT: vpermq $250, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
13420 ; AVX512DQ-SLOW-NEXT: # ymm11 = mem[2,2,3,3]
13421 ; AVX512DQ-SLOW-NEXT: vpermpd $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
13422 ; AVX512DQ-SLOW-NEXT: # ymm0 = mem[2,2,2,3]
13423 ; AVX512DQ-SLOW-NEXT: vmovups %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13424 ; AVX512DQ-SLOW-NEXT: vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm29 # 32-byte Folded Reload
13425 ; AVX512DQ-SLOW-NEXT: # ymm29 = mem[0,2,2,3]
13426 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm31 = ymm31[0,2,2,3]
13427 ; AVX512DQ-SLOW-NEXT: vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm26 # 32-byte Folded Reload
13428 ; AVX512DQ-SLOW-NEXT: # ymm26 = mem[2,1,3,3]
13429 ; AVX512DQ-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm21 # 32-byte Folded Reload
13430 ; AVX512DQ-SLOW-NEXT: # ymm21 = mem[2,2,2,3]
13431 ; AVX512DQ-SLOW-NEXT: vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
13432 ; AVX512DQ-SLOW-NEXT: # ymm15 = mem[0,2,2,3]
13433 ; AVX512DQ-SLOW-NEXT: vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
13434 ; AVX512DQ-SLOW-NEXT: # ymm4 = mem[2,1,3,2]
13435 ; AVX512DQ-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
13436 ; AVX512DQ-SLOW-NEXT: # ymm12 = mem[2,2,2,3]
13437 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
13438 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm16 = ymm16[0,2,2,3]
13439 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm17 = ymm17[2,1,3,2]
13440 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm18[2,2,2,3]
13441 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm19, %zmm24, %zmm24
13442 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm25, %zmm0
13443 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm27 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
13444 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm24, %zmm27, %zmm0
13445 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm23[2,1,3,2]
13446 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm8
13447 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm8, %ymm28
13448 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm8 # 32-byte Folded Reload
13449 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm8 = zmm13[0,1,2,3],zmm8[4,5,6,7]
13450 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm13 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
13451 ; AVX512DQ-SLOW-NEXT: vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13, %zmm8 # 64-byte Folded Reload
13452 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm23 # 32-byte Folded Reload
13453 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm2[0,1,2,3],zmm23[4,5,6,7]
13454 ; AVX512DQ-SLOW-NEXT: vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13, %zmm23 # 64-byte Folded Reload
13455 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm13 # 32-byte Folded Reload
13456 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm9, %zmm7 # 32-byte Folded Reload
13457 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm24 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
13458 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm13, %zmm24, %zmm7
13459 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
13460 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm8, %zmm9, %zmm7
13461 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 32-byte Folded Reload
13462 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm8 # 32-byte Folded Reload
13463 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm6, %zmm24, %zmm8
13464 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm23, %zmm9, %zmm8
13465 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13466 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm2, %zmm3
13467 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13468 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm2, %zmm18
13469 ; AVX512DQ-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm18
13470 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
13471 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
13472 ; AVX512DQ-SLOW-NEXT: vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm19 # 64-byte Folded Reload
13473 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm0, %zmm5
13474 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
13475 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm2[0,1,2,3],zmm5[4,5,6,7]
13476 ; AVX512DQ-SLOW-NEXT: vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm23 # 64-byte Folded Reload
13477 ; AVX512DQ-SLOW-NEXT: vextracti64x4 $1, %zmm0, %ymm0
13478 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
13479 ; AVX512DQ-SLOW-NEXT: vpternlogd $226, 124(%r8){1to8}, %ymm2, %ymm0
13480 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1
13481 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
13482 ; AVX512DQ-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm28[0,1,2,3],zmm0[4,5,6,7]
13483 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
13484 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm25 # 64-byte Reload
13485 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm1, %zmm25
13486 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
13487 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm28 # 64-byte Folded Reload
13488 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
13489 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
13490 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm13 # 64-byte Folded Reload
13491 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
13492 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm9 # 64-byte Folded Reload
13493 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm21, %zmm0
13494 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm4, %zmm1
13495 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
13496 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm3, %zmm1
13497 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm20, %zmm0
13498 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm17, %zmm5
13499 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm3, %zmm5
13500 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0]
13501 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
13502 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm0, %zmm20
13503 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
13504 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm5, %zmm0, %zmm21
13505 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
13506 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm29, %zmm0, %zmm0
13507 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm26, %zmm31, %zmm1
13508 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm27, %zmm1
13509 ; AVX512DQ-SLOW-NEXT: vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
13510 ; AVX512DQ-SLOW-NEXT: # ymm0 = mem[0,2,2,3]
13511 ; AVX512DQ-SLOW-NEXT: vpshuflw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
13512 ; AVX512DQ-SLOW-NEXT: # xmm3 = mem[0,1,3,2,4,5,6,7]
13513 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,0,1,1]
13514 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
13515 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
13516 ; AVX512DQ-SLOW-NEXT: vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
13517 ; AVX512DQ-SLOW-NEXT: # ymm3 = mem[2,1,3,3]
13518 ; AVX512DQ-SLOW-NEXT: vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
13519 ; AVX512DQ-SLOW-NEXT: # ymm5 = mem[0,0,1,1]
13520 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm3, %zmm3
13521 ; AVX512DQ-SLOW-NEXT: vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
13522 ; AVX512DQ-SLOW-NEXT: # ymm5 = mem[0,0,2,1]
13523 ; AVX512DQ-SLOW-NEXT: vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Folded Reload
13524 ; AVX512DQ-SLOW-NEXT: # xmm6 = mem[2,1,2,3,4,5,6,7]
13525 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,5,5,4]
13526 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,1,3]
13527 ; AVX512DQ-SLOW-NEXT: vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm26 # 32-byte Folded Reload
13528 ; AVX512DQ-SLOW-NEXT: # ymm26 = mem[0,0,1,1]
13529 ; AVX512DQ-SLOW-NEXT: vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Folded Reload
13530 ; AVX512DQ-SLOW-NEXT: # xmm10 = mem[0,2,3,3,4,5,6,7]
13531 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
13532 ; AVX512DQ-SLOW-NEXT: vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
13533 ; AVX512DQ-SLOW-NEXT: # ymm11 = mem[0,2,2,3]
13534 ; AVX512DQ-SLOW-NEXT: vpshuflw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
13535 ; AVX512DQ-SLOW-NEXT: # xmm12 = mem[0,1,3,2,4,5,6,7]
13536 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[0,0,1,1]
13537 ; AVX512DQ-SLOW-NEXT: vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Folded Reload
13538 ; AVX512DQ-SLOW-NEXT: # ymm14 = mem[2,1,3,3]
13539 ; AVX512DQ-SLOW-NEXT: vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
13540 ; AVX512DQ-SLOW-NEXT: # ymm15 = mem[0,0,1,1]
13541 ; AVX512DQ-SLOW-NEXT: vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Folded Reload
13542 ; AVX512DQ-SLOW-NEXT: # ymm16 = mem[0,0,2,1]
13543 ; AVX512DQ-SLOW-NEXT: vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
13544 ; AVX512DQ-SLOW-NEXT: # xmm2 = mem[2,1,2,3,4,5,6,7]
13545 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,4]
13546 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,1,3]
13547 ; AVX512DQ-SLOW-NEXT: vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Folded Reload
13548 ; AVX512DQ-SLOW-NEXT: # ymm17 = mem[0,0,1,1]
13549 ; AVX512DQ-SLOW-NEXT: vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
13550 ; AVX512DQ-SLOW-NEXT: # xmm4 = mem[0,2,3,3,4,5,6,7]
13551 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
13552 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm27, %zmm3
13553 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm12[0,1,1,3]
13554 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm11, %zmm0
13555 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm14, %zmm11
13556 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm27, %zmm11
13557 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
13558 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
13559 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm3, %zmm0, %zmm12
13560 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm11, %zmm0, %zmm22
13561 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm5, %zmm0
13562 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm26, %zmm3
13563 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm24, %zmm3
13564 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm16, %zmm0
13565 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm17, %zmm2
13566 ; AVX512DQ-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm24, %zmm2
13567 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm0 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0]
13568 ; AVX512DQ-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
13569 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm3, %zmm0, %zmm4
13570 ; AVX512DQ-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm0, %zmm30
13571 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm23
13572 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm28, %zmm0
13573 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm0
13574 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
13575 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm23, 320(%rax)
13576 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm30, 256(%rax)
13577 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm22, 192(%rax)
13578 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm21, 128(%rax)
13579 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, 64(%rax)
13580 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm9, (%rax)
13581 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm13, 448(%rax)
13582 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm4, 704(%rax)
13583 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm12, 640(%rax)
13584 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm20, 576(%rax)
13585 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, 512(%rax)
13586 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, 384(%rax)
13587 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm19, 768(%rax)
13588 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm25, 832(%rax)
13589 ; AVX512DQ-SLOW-NEXT: addq $2168, %rsp # imm = 0x878
13590 ; AVX512DQ-SLOW-NEXT: vzeroupper
13591 ; AVX512DQ-SLOW-NEXT: retq
13592 ;
13593 ; AVX512DQ-FAST-LABEL: store_i16_stride7_vf64:
13594 ; AVX512DQ-FAST: # %bb.0:
13595 ; AVX512DQ-FAST-NEXT: subq $1432, %rsp # imm = 0x598
13596 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rcx), %ymm4
13597 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
13598 ; AVX512DQ-FAST-NEXT: vpshufb %ymm11, %ymm4, %ymm0
13599 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rdx), %ymm5
13600 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
13601 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm5, %ymm1
13602 ; AVX512DQ-FAST-NEXT: vporq %ymm0, %ymm1, %ymm16
13603 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rsi), %ymm6
13604 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
13605 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm6, %ymm0
13606 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rdi), %ymm7
13607 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
13608 ; AVX512DQ-FAST-NEXT: vpshufb %ymm8, %ymm7, %ymm1
13609 ; AVX512DQ-FAST-NEXT: vporq %ymm0, %ymm1, %ymm19
13610 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
13611 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%r9), %ymm0
13612 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13613 ; AVX512DQ-FAST-NEXT: vpshufb %ymm10, %ymm0, %ymm0
13614 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%r8), %ymm1
13615 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13616 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
13617 ; AVX512DQ-FAST-NEXT: vpshufb %ymm12, %ymm1, %ymm1
13618 ; AVX512DQ-FAST-NEXT: vporq %ymm0, %ymm1, %ymm22
13619 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rcx), %ymm0
13620 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13621 ; AVX512DQ-FAST-NEXT: vpshufb %ymm11, %ymm0, %ymm0
13622 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdx), %ymm1
13623 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13624 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm1, %ymm1
13625 ; AVX512DQ-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
13626 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13627 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rsi), %ymm0
13628 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13629 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm0, %ymm13
13630 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdi), %ymm0
13631 ; AVX512DQ-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm9
13632 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm0, %ymm30
13633 ; AVX512DQ-FAST-NEXT: vpor %ymm13, %ymm9, %ymm9
13634 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13635 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %ymm0
13636 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13637 ; AVX512DQ-FAST-NEXT: vpshufb %ymm10, %ymm0, %ymm9
13638 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %ymm0
13639 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13640 ; AVX512DQ-FAST-NEXT: vpshufb %ymm12, %ymm0, %ymm13
13641 ; AVX512DQ-FAST-NEXT: vporq %ymm9, %ymm13, %ymm17
13642 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %ymm0
13643 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13644 ; AVX512DQ-FAST-NEXT: vpshufb %ymm11, %ymm0, %ymm9
13645 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %ymm0
13646 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13647 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm0, %ymm13
13648 ; AVX512DQ-FAST-NEXT: vpor %ymm9, %ymm13, %ymm9
13649 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13650 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %ymm0
13651 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13652 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm0, %ymm9
13653 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %ymm0
13654 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13655 ; AVX512DQ-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm13
13656 ; AVX512DQ-FAST-NEXT: vpor %ymm9, %ymm13, %ymm9
13657 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13658 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rcx), %ymm3
13659 ; AVX512DQ-FAST-NEXT: vpshufb %ymm11, %ymm3, %ymm9
13660 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdx), %ymm2
13661 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm2, %ymm11
13662 ; AVX512DQ-FAST-NEXT: vpor %ymm9, %ymm11, %ymm9
13663 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13664 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rsi), %ymm0
13665 ; AVX512DQ-FAST-NEXT: vpshufb %ymm15, %ymm0, %ymm9
13666 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdi), %ymm1
13667 ; AVX512DQ-FAST-NEXT: vpshufb %ymm8, %ymm1, %ymm8
13668 ; AVX512DQ-FAST-NEXT: vpor %ymm9, %ymm8, %ymm8
13669 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13670 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r8), %ymm11
13671 ; AVX512DQ-FAST-NEXT: vpshufb %ymm12, %ymm11, %ymm8
13672 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r9), %ymm15
13673 ; AVX512DQ-FAST-NEXT: vpshufb %ymm10, %ymm15, %ymm9
13674 ; AVX512DQ-FAST-NEXT: vporq %ymm9, %ymm8, %ymm18
13675 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm8 = ymm4[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
13676 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm9 = ymm5[0,1,1,3,4,5,5,7]
13677 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1],ymm8[2],ymm9[3,4],ymm8[5],ymm9[6,7,8,9],ymm8[10],ymm9[11,12],ymm8[13],ymm9[14,15]
13678 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,1,3,2]
13679 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm12 = [18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
13680 ; AVX512DQ-FAST-NEXT: # ymm12 = mem[0,1,0,1]
13681 ; AVX512DQ-FAST-NEXT: vpshufb %ymm12, %ymm6, %ymm9
13682 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm12, %ymm31
13683 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm7[1,1,1,1,5,5,5,5]
13684 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm9 = ymm9[0,1],ymm12[2],ymm9[3,4],ymm12[5],ymm9[6,7,8,9],ymm12[10],ymm9[11,12],ymm12[13],ymm9[14,15]
13685 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
13686 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm16, %zmm8
13687 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm19, %zmm9
13688 ; AVX512DQ-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm9
13689 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
13690 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%r8), %ymm8
13691 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm8[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm8[14,15],zero,zero,ymm8[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm8[16,17],zero,zero,ymm8[u,u],zero,zero
13692 ; AVX512DQ-FAST-NEXT: vpternlogq $248, %ymm13, %ymm9, %ymm12
13693 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm13, %ymm14
13694 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%r9), %ymm13
13695 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13696 ; AVX512DQ-FAST-NEXT: vpshufb %ymm10, %ymm13, %ymm10
13697 ; AVX512DQ-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm12, %ymm10
13698 ; AVX512DQ-FAST-NEXT: vextracti64x4 $1, %zmm9, %ymm9
13699 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <u,4,u,u,u,5,u,u>
13700 ; AVX512DQ-FAST-NEXT: vpermd %ymm8, %ymm12, %ymm12
13701 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm9, %ymm12
13702 ; AVX512DQ-FAST-NEXT: vprold $16, %ymm13, %ymm9
13703 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,2]
13704 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm12, %ymm9
13705 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm9
13706 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm9 = zmm10[0,1,2,3],zmm9[4,5,6,7]
13707 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13708 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
13709 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [0,1,4,5,4,5,5,7]
13710 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rax), %ymm9
13711 ; AVX512DQ-FAST-NEXT: vpermd %ymm9, %ymm10, %ymm10
13712 ; AVX512DQ-FAST-NEXT: vpandn %ymm10, %ymm14, %ymm10
13713 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
13714 ; AVX512DQ-FAST-NEXT: vpshufb %ymm13, %ymm9, %ymm12
13715 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm10, %zmm12, %zmm10
13716 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13717 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
13718 ; AVX512DQ-FAST-NEXT: vpshufb %ymm12, %ymm6, %ymm10
13719 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm12, %ymm19
13720 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm7[3,3,3,3,7,7,7,7]
13721 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm10 = ymm12[0,1,2],ymm10[3],ymm12[4,5],ymm10[6],ymm12[7,8,9,10],ymm10[11],ymm12[12,13],ymm10[14],ymm12[15]
13722 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u>
13723 ; AVX512DQ-FAST-NEXT: vpshufb %ymm12, %ymm6, %ymm6
13724 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm12, %ymm23
13725 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[2,2,2,2,6,6,6,6]
13726 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm7[0],ymm6[1],ymm7[2,3],ymm6[4],ymm7[5,6,7,8],ymm6[9],ymm7[10,11],ymm6[12],ymm7[13,14,15]
13727 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm25 = [0,2,2,3,10,9,11,11]
13728 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm25, %zmm7
13729 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = <14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u>
13730 ; AVX512DQ-FAST-NEXT: vpshufb %ymm6, %ymm4, %ymm10
13731 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm6, %ymm26
13732 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm12 = ymm5[3,3,3,3,7,7,7,7]
13733 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm10 = ymm12[0],ymm10[1],ymm12[2,3],ymm10[4],ymm12[5,6,7,8],ymm10[9],ymm12[10,11],ymm10[12],ymm12[13,14,15]
13734 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
13735 ; AVX512DQ-FAST-NEXT: # ymm6 = mem[0,1,0,1]
13736 ; AVX512DQ-FAST-NEXT: vpshufb %ymm6, %ymm4, %ymm4
13737 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm6, %ymm29
13738 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[2,2,2,2,6,6,6,6]
13739 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7,8,9],ymm5[10],ymm4[11,12],ymm5[13],ymm4[14,15]
13740 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = [2,2,2,3,8,10,10,11]
13741 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm10, %zmm20, %zmm4
13742 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
13743 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm7, %zmm5, %zmm4
13744 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, %zmm10
13745 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13746 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <5,u,u,u,6,u,u,6>
13747 ; AVX512DQ-FAST-NEXT: vpermd %ymm8, %ymm4, %ymm4
13748 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13749 ; AVX512DQ-FAST-NEXT: vmovdqa64 64(%rax), %zmm28
13750 ; AVX512DQ-FAST-NEXT: vbroadcasti32x8 {{.*#+}} zmm4 = [30,5,0,0,31,6,0,31,30,5,0,0,31,6,0,31]
13751 ; AVX512DQ-FAST-NEXT: # zmm4 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
13752 ; AVX512DQ-FAST-NEXT: vpermi2d %zmm28, %zmm9, %zmm4
13753 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13754 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
13755 ; AVX512DQ-FAST-NEXT: vpbroadcastd 72(%rax), %ymm4
13756 ; AVX512DQ-FAST-NEXT: vpandn %ymm4, %ymm7, %ymm4
13757 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm7, %ymm16
13758 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rax), %ymm5
13759 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm13, %ymm14
13760 ; AVX512DQ-FAST-NEXT: vpshufb %ymm13, %ymm5, %ymm7
13761 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm5, %ymm27
13762 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm4, %zmm9
13763 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm22, %zmm0, %zmm4
13764 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%r9), %xmm5
13765 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%r8), %xmm12
13766 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm7 = xmm12[4],xmm5[4],xmm12[5],xmm5[5],xmm12[6],xmm5[6],xmm12[7],xmm5[7]
13767 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm5, %xmm21
13768 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm13 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
13769 ; AVX512DQ-FAST-NEXT: vpshufb %xmm13, %xmm7, %xmm7
13770 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm13, %xmm8
13771 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm13 = [0,0,1,1,12,13,14,15]
13772 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm13, %zmm7
13773 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
13774 ; AVX512DQ-FAST-NEXT: vpternlogq $248, %zmm4, %zmm7, %zmm9
13775 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13776 ; AVX512DQ-FAST-NEXT: vpbroadcastd 8(%rax), %ymm7
13777 ; AVX512DQ-FAST-NEXT: vpandnq %ymm7, %ymm16, %ymm7
13778 ; AVX512DQ-FAST-NEXT: vmovdqa (%rax), %ymm5
13779 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13780 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm5, %ymm9
13781 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm7, %zmm16
13782 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm17, %zmm0, %zmm7
13783 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %xmm5
13784 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13785 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %xmm6
13786 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm6, (%rsp) # 16-byte Spill
13787 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm14 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
13788 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm14, %xmm14
13789 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm13, %zmm14
13790 ; AVX512DQ-FAST-NEXT: vpternlogq $248, %zmm4, %zmm14, %zmm16
13791 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13792 ; AVX512DQ-FAST-NEXT: vpbroadcastd {{.*#+}} ymm9 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
13793 ; AVX512DQ-FAST-NEXT: vpshufb %ymm9, %ymm15, %ymm4
13794 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm11[0,0,2,1,4,4,6,5]
13795 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1,2],ymm7[3],ymm4[4,5],ymm7[6],ymm4[7,8,9,10],ymm7[11],ymm4[12,13],ymm7[14],ymm4[15]
13796 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = <0,1,u,3,10,10,11,11>
13797 ; AVX512DQ-FAST-NEXT: vpermi2q %zmm4, %zmm18, %zmm7
13798 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13799 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13800 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm19, %ymm14
13801 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm0, %ymm4
13802 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13803 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm1[3,3,3,3,7,7,7,7]
13804 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm7[0,1,2],ymm4[3],ymm7[4,5],ymm4[6],ymm7[7,8,9,10],ymm4[11],ymm7[12,13],ymm4[14],ymm7[15]
13805 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm23, %ymm5
13806 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm0, %ymm7
13807 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm13 = ymm1[2,2,2,2,6,6,6,6]
13808 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm13[0],ymm7[1],ymm13[2,3],ymm7[4],ymm13[5,6,7,8],ymm7[9],ymm13[10,11],ymm7[12],ymm13[13,14,15]
13809 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm25, %zmm7
13810 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13811 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm26, %ymm8
13812 ; AVX512DQ-FAST-NEXT: vpshufb %ymm8, %ymm3, %ymm4
13813 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
13814 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm2[3,3,3,3,7,7,7,7]
13815 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm6[0],ymm4[1],ymm6[2,3],ymm4[4],ymm6[5,6,7,8],ymm4[9],ymm6[10,11],ymm4[12],ymm6[13,14,15]
13816 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm29, %ymm1
13817 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm3, %ymm6
13818 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm13 = ymm2[2,2,2,2,6,6,6,6]
13819 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm6[0,1],ymm13[2],ymm6[3,4],ymm13[5],ymm6[6,7,8,9],ymm13[10],ymm6[11,12],ymm13[13],ymm6[14,15]
13820 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm20, %zmm0
13821 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm7, %zmm10, %zmm0
13822 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13823 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
13824 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm11[3,3,3,3,7,7,7,7]
13825 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm6[2],ymm4[3,4],ymm6[5],ymm4[6,7,8,9],ymm6[10],ymm4[11,12],ymm6[13],ymm4[14,15]
13826 ; AVX512DQ-FAST-NEXT: vprold $16, %ymm15, %ymm6
13827 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm11[1,2,2,3,5,6,6,7]
13828 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm7[0,1],ymm6[2],ymm7[3,4],ymm6[5],ymm7[6,7,8,9],ymm6[10],ymm7[11,12],ymm6[13],ymm7[14,15]
13829 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = [2,1,3,2,10,10,10,11]
13830 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm17, %zmm0
13831 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13832 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
13833 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm4
13834 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm30[2,2,2,2,6,6,6,6]
13835 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm6[0],ymm4[1],ymm6[2,3],ymm4[4],ymm6[5,6,7,8],ymm4[9],ymm6[10,11],ymm4[12],ymm6[13,14,15]
13836 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm31, %ymm0
13837 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm2, %ymm6
13838 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm2, %ymm5
13839 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm30[1,1,1,1,5,5,5,5]
13840 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1],ymm7[2],ymm6[3,4],ymm7[5],ymm6[6,7,8,9],ymm7[10],ymm6[11,12],ymm7[13],ymm6[14,15]
13841 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm20, %zmm6
13842 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
13843 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm4
13844 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm29, %ymm31
13845 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
13846 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm1[2,2,2,2,6,6,6,6]
13847 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm7[2],ymm4[3,4],ymm7[5],ymm4[6,7,8,9],ymm7[10],ymm4[11,12],ymm7[13],ymm4[14,15]
13848 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm7 = ymm0[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
13849 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm11 = ymm1[0,1,1,3,4,5,5,7]
13850 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm11[0,1],ymm7[2],ymm11[3,4],ymm7[5],ymm11[6,7,8,9],ymm7[10],ymm11[11,12],ymm7[13],ymm11[14,15]
13851 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm17, %zmm7
13852 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
13853 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm6, %zmm23, %zmm7
13854 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
13855 ; AVX512DQ-FAST-NEXT: vprold $16, %ymm15, %ymm4
13856 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm19 # 32-byte Reload
13857 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm6 = ymm19[1,2,2,3,5,6,6,7]
13858 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm6[0,1],ymm4[2],ymm6[3,4],ymm4[5],ymm6[6,7,8,9],ymm4[10],ymm6[11,12],ymm4[13],ymm6[14,15]
13859 ; AVX512DQ-FAST-NEXT: vpshufb %ymm9, %ymm15, %ymm6
13860 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm11 = ymm19[0,0,2,1,4,4,6,5]
13861 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0,1,2],ymm11[3],ymm6[4,5],ymm11[6],ymm6[7,8,9,10],ymm11[11],ymm6[12,13],ymm11[14],ymm6[15]
13862 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm25 = [2,2,3,3,10,9,11,10]
13863 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm25, %zmm6
13864 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm27, %zmm28, %zmm4
13865 ; AVX512DQ-FAST-NEXT: vbroadcasti32x8 {{.*#+}} zmm29 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
13866 ; AVX512DQ-FAST-NEXT: # zmm29 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
13867 ; AVX512DQ-FAST-NEXT: vpermd %zmm4, %zmm29, %zmm2
13868 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm28 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
13869 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm6, %zmm28, %zmm2
13870 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm2
13871 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13872 ; AVX512DQ-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm3
13873 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm1[3,3,3,3,7,7,7,7]
13874 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm10 = ymm2[0],ymm3[1],ymm2[2,3],ymm3[4],ymm2[5,6,7,8],ymm3[9],ymm2[10,11],ymm3[12],ymm2[13,14,15]
13875 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rcx), %xmm0
13876 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13877 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rdx), %xmm11
13878 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm11[0],xmm0[0],xmm11[1],xmm0[1],xmm11[2],xmm0[2],xmm11[3],xmm0[3]
13879 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
13880 ; AVX512DQ-FAST-NEXT: vpshufb %xmm2, %xmm3, %xmm3
13881 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [0,2,2,3,8,9,9,11]
13882 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm3, %zmm0, %zmm10
13883 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm5, %ymm1
13884 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm0 = ymm30[3,3,3,3,7,7,7,7]
13885 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm5 = ymm0[0,1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7,8,9,10],ymm1[11],ymm0[12,13],ymm1[14],ymm0[15]
13886 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rcx), %xmm1
13887 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdx), %xmm7
13888 ; AVX512DQ-FAST-NEXT: vpbroadcastq {{.*#+}} xmm3 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
13889 ; AVX512DQ-FAST-NEXT: vpshufb %xmm3, %xmm1, %xmm0
13890 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm3, %xmm8
13891 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm3 = xmm7[1,1,2,2]
13892 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0],xmm0[1],xmm3[2,3],xmm0[4],xmm3[5,6],xmm0[7]
13893 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm7[4],xmm1[4],xmm7[5],xmm1[5],xmm7[6],xmm1[6],xmm7[7],xmm1[7]
13894 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13895 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm7[0],xmm1[0],xmm7[1],xmm1[1],xmm7[2],xmm1[2],xmm7[3],xmm1[3]
13896 ; AVX512DQ-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm1
13897 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,3,8,8,9,9]
13898 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm3, %zmm1
13899 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, %zmm13
13900 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rdi), %xmm0
13901 ; AVX512DQ-FAST-NEXT: vmovdqa 64(%rsi), %xmm3
13902 ; AVX512DQ-FAST-NEXT: vprold $16, %xmm3, %xmm4
13903 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[1,1,2,3]
13904 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm4 = xmm6[0,1],xmm4[2],xmm6[3,4],xmm4[5],xmm6[6,7]
13905 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm6 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
13906 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13907 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
13908 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
13909 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm0, %xmm14
13910 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm26 = [0,0,1,1,8,8,10,9]
13911 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm26, %zmm14
13912 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0]
13913 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm1, %zmm4, %zmm14
13914 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm21, %xmm0
13915 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm12[0],xmm0[0],xmm12[1],xmm0[1],xmm12[2],xmm0[2],xmm12[3],xmm0[3]
13916 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
13917 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
13918 ; AVX512DQ-FAST-NEXT: vpshufb %xmm3, %xmm0, %xmm1
13919 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm3, %xmm12
13920 ; AVX512DQ-FAST-NEXT: vpshufb %xmm6, %xmm0, %xmm0
13921 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = [0,0,0,1,8,9,9,11]
13922 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm21, %zmm0
13923 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %xmm1
13924 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm3
13925 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm1, %xmm7
13926 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm8 = xmm3[1,1,2,2]
13927 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm7 = xmm8[0],xmm7[1],xmm8[2,3],xmm7[4],xmm8[5,6],xmm7[7]
13928 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm8 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
13929 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13930 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
13931 ; AVX512DQ-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm1
13932 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm2, %xmm24
13933 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm13, %zmm1
13934 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %xmm2
13935 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13936 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %xmm8
13937 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
13938 ; AVX512DQ-FAST-NEXT: vprold $16, %xmm8, %xmm3
13939 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm7 = xmm2[1,1,2,3]
13940 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1],xmm3[2],xmm7[3,4],xmm3[5],xmm7[6,7]
13941 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm8[0],xmm2[1],xmm8[1],xmm2[2],xmm8[2],xmm2[3],xmm8[3]
13942 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm9, %xmm2
13943 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm3, %xmm3
13944 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm26, %zmm3
13945 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm1, %zmm4, %zmm3
13946 ; AVX512DQ-FAST-NEXT: vpbroadcastd 64(%rax), %ymm1
13947 ; AVX512DQ-FAST-NEXT: vpbroadcastd 68(%rax), %ymm4
13948 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm1, %zmm18
13949 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
13950 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm0, %zmm1, %zmm18
13951 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsp), %xmm0 # 16-byte Reload
13952 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
13953 ; AVX512DQ-FAST-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
13954 ; AVX512DQ-FAST-NEXT: vpshufb %xmm12, %xmm0, %xmm4
13955 ; AVX512DQ-FAST-NEXT: vpshufb %xmm6, %xmm0, %xmm0
13956 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm6, %xmm8
13957 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm21, %zmm0
13958 ; AVX512DQ-FAST-NEXT: vpbroadcastd (%rax), %ymm4
13959 ; AVX512DQ-FAST-NEXT: vpbroadcastd 4(%rax), %ymm6
13960 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm4, %zmm13
13961 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm0, %zmm1, %zmm13
13962 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rsi), %xmm9
13963 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%rdi), %xmm0
13964 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm0[0],xmm9[0],xmm0[1],xmm9[1],xmm0[2],xmm9[2],xmm0[3],xmm9[3]
13965 ; AVX512DQ-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm1
13966 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = [2,1,3,3,8,8,9,9]
13967 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm2, %zmm5
13968 ; AVX512DQ-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm5
13969 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
13970 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm2 = ymm19[3,3,3,3,7,7,7,7]
13971 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
13972 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
13973 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm14, %zmm2, %zmm18
13974 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm3, %zmm2, %zmm13
13975 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
13976 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm4[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
13977 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm20 # 32-byte Reload
13978 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm20[2,2,2,2,6,6,6,6]
13979 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6,7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13,14,15]
13980 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
13981 ; AVX512DQ-FAST-NEXT: # ymm3 = mem[0,1,0,1]
13982 ; AVX512DQ-FAST-NEXT: vpshufb %ymm3, %ymm4, %ymm3
13983 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm4, %ymm16
13984 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm20[1,1,1,1,5,5,5,5]
13985 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7,8,9],ymm4[10],ymm3[11,12],ymm4[13],ymm3[14,15]
13986 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [2,2,2,3,8,10,10,11]
13987 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm3, %zmm6
13988 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
13989 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm31, %ymm2
13990 ; AVX512DQ-FAST-NEXT: vpshufb %ymm2, %ymm15, %ymm2
13991 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
13992 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm7[2,2,2,2,6,6,6,6]
13993 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm3[2],ymm2[3,4],ymm3[5],ymm2[6,7,8,9],ymm3[10],ymm2[11,12],ymm3[13],ymm2[14,15]
13994 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm15[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
13995 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm7[0,1,1,3,4,5,5,7]
13996 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm7, %ymm21
13997 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm14 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7,8,9],ymm3[10],ymm4[11,12],ymm3[13],ymm4[14,15]
13998 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm17, %zmm14
13999 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm10 = [2,2,2,3,8,8,8,9]
14000 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%r9), %xmm3
14001 ; AVX512DQ-FAST-NEXT: vmovdqa 96(%r8), %xmm2
14002 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
14003 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm4, %xmm7
14004 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm7, %zmm10, %zmm1
14005 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm6, %zmm23, %zmm14
14006 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
14007 ; AVX512DQ-FAST-NEXT: vprold $16, %ymm8, %ymm6
14008 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Reload
14009 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm7 = ymm17[1,2,2,3,5,6,6,7]
14010 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm7[0,1],ymm6[2],ymm7[3,4],ymm6[5],ymm7[6,7,8,9],ymm6[10],ymm7[11,12],ymm6[13],ymm7[14,15]
14011 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
14012 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm8, %ymm19
14013 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm8 = ymm17[0,0,2,1,4,4,6,5]
14014 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm7[0,1,2],ymm8[3],ymm7[4,5],ymm8[6],ymm7[7,8,9,10],ymm8[11],ymm7[12,13],ymm8[14],ymm7[15]
14015 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm6, %zmm25, %zmm7
14016 ; AVX512DQ-FAST-NEXT: vmovdqa64 (%rax), %zmm25
14017 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Reload
14018 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm23, %zmm25, %zmm6
14019 ; AVX512DQ-FAST-NEXT: vpermd %zmm6, %zmm29, %zmm10
14020 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm7, %zmm28, %zmm10
14021 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm28 = [6,7,3,3,7,7,6,7]
14022 ; AVX512DQ-FAST-NEXT: vpermd %ymm27, %ymm28, %ymm6
14023 ; AVX512DQ-FAST-NEXT: vpbroadcastd 96(%rax), %ymm7
14024 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm6, %zmm12
14025 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm31 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
14026 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm1, %zmm31, %zmm12
14027 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm30 = [0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
14028 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm5, %zmm30, %zmm12
14029 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm9[4],xmm0[4],xmm9[5],xmm0[5],xmm9[6],xmm0[6],xmm9[7],xmm0[7]
14030 ; AVX512DQ-FAST-NEXT: vprold $16, %xmm9, %xmm1
14031 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
14032 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm7 = xmm0[0,1],xmm1[2],xmm0[3,4],xmm1[5],xmm0[6,7]
14033 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm14, %zmm10
14034 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rcx), %xmm14
14035 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdx), %xmm8
14036 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
14037 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm24, %xmm1
14038 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
14039 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm15[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
14040 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm9 = ymm21[3,3,3,3,7,7,7,7]
14041 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm9[0],ymm1[1],ymm9[2,3],ymm1[4],ymm9[5,6,7,8],ymm1[9],ymm9[10,11],ymm1[12],ymm9[13,14,15]
14042 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,2,2,3,8,9,9,11]
14043 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm1, %zmm6
14044 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm5[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
14045 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm22 = [0,0,2,1,8,8,9,11]
14046 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm0, %zmm22, %zmm7
14047 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14048 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm11[4],xmm0[4],xmm11[5],xmm0[5],xmm11[6],xmm0[6],xmm11[7],xmm0[7]
14049 ; AVX512DQ-FAST-NEXT: vpbroadcastq {{.*#+}} xmm24 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
14050 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm24, %xmm1
14051 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
14052 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[1,1,2,2]
14053 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm1 = xmm11[0],xmm0[1],xmm11[2,3],xmm0[4],xmm11[5,6],xmm0[7]
14054 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,2,3,3,4,5,6,7]
14055 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm26, %zmm1
14056 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm26 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
14057 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm7, %zmm26, %zmm1
14058 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
14059 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,u,u,u,u,u,u,u,u]
14060 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm5[2,2,2,2]
14061 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
14062 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
14063 ; AVX512DQ-FAST-NEXT: vpshufb %xmm0, %xmm2, %xmm2
14064 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} xmm29 = [0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
14065 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm29, %xmm3
14066 ; AVX512DQ-FAST-NEXT: vpshufb %xmm3, %xmm4, %xmm3
14067 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm27 = [0,1,1,3,8,8,9,9]
14068 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm27, %zmm3
14069 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm16, %ymm2
14070 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29]
14071 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm4 = ymm20[3,3,3,3,7,7,7,7]
14072 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm11 = ymm4[0,1,2],ymm2[3],ymm4[4,5],ymm2[6],ymm4[7,8,9,10],ymm2[11],ymm4[12,13],ymm2[14],ymm4[15]
14073 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdi), %xmm2
14074 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rsi), %xmm5
14075 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
14076 ; AVX512DQ-FAST-NEXT: vpshufb %xmm0, %xmm4, %xmm4
14077 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [2,1,3,3,8,8,9,9]
14078 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm4, %zmm0, %zmm11
14079 ; AVX512DQ-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm11
14080 ; AVX512DQ-FAST-NEXT: vpbroadcastd 100(%rax), %ymm0
14081 ; AVX512DQ-FAST-NEXT: vpbroadcastd 104(%rax), %ymm4
14082 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm16
14083 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm21 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
14084 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm3, %zmm21, %zmm16
14085 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm19, %ymm0
14086 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
14087 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} ymm3 = ymm17[3,3,3,3,7,7,7,7]
14088 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm3[2],ymm0[3,4],ymm3[5],ymm0[6,7,8,9],ymm3[10],ymm0[11,12],ymm3[13],ymm0[14,15]
14089 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r9), %xmm4
14090 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r8), %xmm3
14091 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
14092 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm9 = xmm6[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
14093 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm17 = [2,2,2,3,8,8,8,9]
14094 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm9, %zmm17, %zmm0
14095 ; AVX512DQ-FAST-NEXT: vpermd %ymm23, %ymm28, %ymm9
14096 ; AVX512DQ-FAST-NEXT: vpbroadcastd 32(%rax), %ymm20
14097 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm20, %zmm9, %zmm20
14098 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm0, %zmm31, %zmm20
14099 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm11, %zmm30, %zmm20
14100 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0]
14101 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm1, %zmm0, %zmm16
14102 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm5[4],xmm2[4],xmm5[5],xmm2[5],xmm5[6],xmm2[6],xmm5[7],xmm2[7]
14103 ; AVX512DQ-FAST-NEXT: vprold $16, %xmm5, %xmm5
14104 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,2,3]
14105 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm5[2],xmm2[3,4],xmm5[5],xmm2[6,7]
14106 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm9 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
14107 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm1, %xmm1
14108 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm1, %zmm22, %zmm2
14109 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm24, %xmm1
14110 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm14, %xmm1
14111 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm5 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
14112 ; AVX512DQ-FAST-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[1,1,2,2]
14113 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} xmm1 = xmm8[0],xmm1[1],xmm8[2,3],xmm1[4],xmm8[5,6],xmm1[7]
14114 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,2,3,3,4,5,6,7]
14115 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = [0,0,1,1,8,8,10,9]
14116 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm5, %zmm8, %zmm1
14117 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm2, %zmm26, %zmm1
14118 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
14119 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
14120 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm29, %xmm3
14121 ; AVX512DQ-FAST-NEXT: vpshufb %xmm3, %xmm6, %xmm3
14122 ; AVX512DQ-FAST-NEXT: vpermt2q %zmm2, %zmm27, %zmm3
14123 ; AVX512DQ-FAST-NEXT: vpbroadcastd 36(%rax), %ymm2
14124 ; AVX512DQ-FAST-NEXT: vpbroadcastd 40(%rax), %ymm4
14125 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm2, %zmm2
14126 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm3, %zmm21, %zmm2
14127 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm1, %zmm0, %zmm2
14128 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
14129 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
14130 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm11, %ymm0
14131 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm7
14132 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
14133 ; AVX512DQ-FAST-NEXT: vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
14134 ; AVX512DQ-FAST-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
14135 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
14136 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm1, %xmm1
14137 ; AVX512DQ-FAST-NEXT: vpshufb %xmm9, %xmm0, %xmm0
14138 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
14139 ; AVX512DQ-FAST-NEXT: vpunpcklwd {{.*#+}} ymm3 = ymm3[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
14140 ; AVX512DQ-FAST-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
14141 ; AVX512DQ-FAST-NEXT: # ymm4 = mem[0,1,1,3,4,5,5,7]
14142 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0,1],ymm3[2],ymm4[3,4],ymm3[5],ymm4[6,7,8,9],ymm3[10],ymm4[11,12],ymm3[13],ymm4[14,15]
14143 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
14144 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
14145 ; AVX512DQ-FAST-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
14146 ; AVX512DQ-FAST-NEXT: # ymm5 = mem[1,1,1,1,5,5,5,5]
14147 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7,8,9],ymm5[10],ymm4[11,12],ymm5[13],ymm4[14,15]
14148 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rax), %ymm5
14149 ; AVX512DQ-FAST-NEXT: vbroadcasti32x8 {{.*#+}} zmm6 = [14,21,0,0,15,22,0,15,14,21,0,0,15,22,0,15]
14150 ; AVX512DQ-FAST-NEXT: # zmm6 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
14151 ; AVX512DQ-FAST-NEXT: vpermi2d %zmm5, %zmm25, %zmm6
14152 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,1,4,5,4,5,5,7]
14153 ; AVX512DQ-FAST-NEXT: vpermd %ymm5, %ymm8, %ymm8
14154 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm5[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm5[16,17],zero,zero
14155 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
14156 ; AVX512DQ-FAST-NEXT: vpandn %ymm8, %ymm9, %ymm8
14157 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm5, %zmm5
14158 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
14159 ; AVX512DQ-FAST-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm6 # 64-byte Folded Reload
14160 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
14161 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm6
14162 ; AVX512DQ-FAST-NEXT: vextracti64x4 $1, %zmm11, %ymm9
14163 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
14164 ; AVX512DQ-FAST-NEXT: vpternlogd $226, 124(%r8){1to8}, %ymm11, %ymm9
14165 ; AVX512DQ-FAST-NEXT: vpshufhw {{.*#+}} ymm11 = ymm15[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
14166 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[3,3,3,3]
14167 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm9, %ymm11
14168 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm0, %zmm9
14169 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm9[4,5,6,7]
14170 ; AVX512DQ-FAST-NEXT: vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
14171 ; AVX512DQ-FAST-NEXT: # xmm9 = mem[0,2,3,3,4,5,6,7]
14172 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
14173 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,3]
14174 ; AVX512DQ-FAST-NEXT: vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
14175 ; AVX512DQ-FAST-NEXT: # xmm11 = mem[0,2,3,3,4,5,6,7]
14176 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,2,1]
14177 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,3]
14178 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,1,3,2]
14179 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
14180 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
14181 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm7, %zmm8, %zmm14
14182 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm9, %zmm7 # 32-byte Folded Reload
14183 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 32-byte Folded Reload
14184 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm7, %zmm26, %zmm1
14185 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm11, %zmm7 # 32-byte Folded Reload
14186 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 32-byte Folded Reload
14187 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm7, %zmm26, %zmm0
14188 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm7 = [0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
14189 ; AVX512DQ-FAST-NEXT: vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm1 # 64-byte Folded Reload
14190 ; AVX512DQ-FAST-NEXT: vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm0 # 64-byte Folded Reload
14191 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
14192 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm7, %zmm3
14193 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
14194 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm7, %zmm4
14195 ; AVX512DQ-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm4
14196 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
14197 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
14198 ; AVX512DQ-FAST-NEXT: vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm7 # 64-byte Folded Reload
14199 ; AVX512DQ-FAST-NEXT: vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm5 # 64-byte Folded Reload
14200 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm5
14201 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
14202 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm5, 320(%rax)
14203 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, 256(%rax)
14204 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm20, 192(%rax)
14205 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm10, 128(%rax)
14206 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm13, (%rax)
14207 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, 448(%rax)
14208 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, 704(%rax)
14209 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm12, 640(%rax)
14210 ; AVX512DQ-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14211 ; AVX512DQ-FAST-NEXT: vmovaps %zmm2, 576(%rax)
14212 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm6, 384(%rax)
14213 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, 64(%rax)
14214 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 512(%rax)
14215 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm14, 832(%rax)
14216 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm7, 768(%rax)
14217 ; AVX512DQ-FAST-NEXT: addq $1432, %rsp # imm = 0x598
14218 ; AVX512DQ-FAST-NEXT: vzeroupper
14219 ; AVX512DQ-FAST-NEXT: retq
14221 ; AVX512BW-LABEL: store_i16_stride7_vf64:
14222 ; AVX512BW: # %bb.0:
14223 ; AVX512BW-NEXT: subq $136, %rsp
14224 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
14225 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm14
14226 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm29
14227 ; AVX512BW-NEXT: vmovdqa64 (%rsi), %zmm15
14228 ; AVX512BW-NEXT: vmovdqa64 64(%rsi), %zmm9
14229 ; AVX512BW-NEXT: vmovdqa64 (%rdx), %zmm5
14230 ; AVX512BW-NEXT: vmovdqa64 64(%rdx), %zmm25
14231 ; AVX512BW-NEXT: vmovdqa64 (%rcx), %zmm12
14232 ; AVX512BW-NEXT: vmovdqa64 64(%rcx), %zmm13
14233 ; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm4
14234 ; AVX512BW-NEXT: vmovdqa64 64(%r8), %zmm30
14235 ; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm8
14236 ; AVX512BW-NEXT: vmovdqa64 64(%r9), %zmm26
14237 ; AVX512BW-NEXT: vmovdqa64 (%rax), %zmm20
14238 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [0,0,30,62,28,60,0,0,0,31,63,29,61,0,0,0,0,0,30,62,28,60,0,0,0,31,63,29,61,0,0,0]
14239 ; AVX512BW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
14240 ; AVX512BW-NEXT: vmovdqa64 %zmm29, %zmm0
14241 ; AVX512BW-NEXT: vpermt2w %zmm9, %zmm21, %zmm0
14242 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [27,0,0,0,62,30,60,28,0,0,0,63,31,61,29,0,27,0,0,0,62,30,60,28,0,0,0,63,31,61,29,0]
14243 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
14244 ; AVX512BW-NEXT: vmovdqa64 %zmm13, %zmm3
14245 ; AVX512BW-NEXT: vpermt2w %zmm25, %zmm6, %zmm3
14246 ; AVX512BW-NEXT: movl $101455920, %ecx # imm = 0x60C1830
14247 ; AVX512BW-NEXT: kmovd %ecx, %k1
14248 ; AVX512BW-NEXT: vmovdqu16 %zmm0, %zmm3 {%k1}
14249 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <0,59,u,u,4,5,6,7,60,u,u,11,12,13,14,61,u,u,18,19,20,21,62,u,u,25,26,27,28,63,u,u>
14250 ; AVX512BW-NEXT: vpermi2w %zmm30, %zmm3, %zmm0
14251 ; AVX512BW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14252 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm24 = [0,25,57,23,55,0,0,0,26,58,24,56,0,0,0,27,0,25,57,23,55,0,0,0,26,58,24,56,0,0,0,27]
14253 ; AVX512BW-NEXT: # zmm24 = mem[0,1,2,3,0,1,2,3]
14254 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm0
14255 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm24, %zmm0
14256 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [57,23,55,0,0,0,26,58,24,56,0,0,0,27,59,25,57,23,55,0,0,0,26,58,24,56,0,0,0,27,59,25]
14257 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
14258 ; AVX512BW-NEXT: vmovdqa64 %zmm29, %zmm3
14259 ; AVX512BW-NEXT: vpermt2w %zmm9, %zmm11, %zmm3
14260 ; AVX512BW-NEXT: movl $-2096755688, %ecx # imm = 0x83060C18
14261 ; AVX512BW-NEXT: kmovd %ecx, %k2
14262 ; AVX512BW-NEXT: vmovdqu16 %zmm0, %zmm3 {%k2}
14263 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm0 = <u,1,2,3,4,55,u,u,8,9,10,11,56,u,u,15,16,17,18,57,u,u,22,23,24,25,58,u,u,29,30,31>
14264 ; AVX512BW-NEXT: vpermi2w %zmm30, %zmm3, %zmm0
14265 ; AVX512BW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
14266 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm19 = [2,34,0,32,0,0,0,3,35,1,33,0,0,0,4,36,2,34,0,32,0,0,0,3,35,1,33,0,0,0,4,36]
14267 ; AVX512BW-NEXT: # zmm19 = mem[0,1,2,3,0,1,2,3]
14268 ; AVX512BW-NEXT: vmovdqa64 %zmm5, %zmm0
14269 ; AVX512BW-NEXT: vpermt2w %zmm12, %zmm19, %zmm0
14270 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm28 = [0,32,0,0,0,3,35,1,33,0,0,0,4,36,2,34,0,32,0,0,0,3,35,1,33,0,0,0,4,36,2,34]
14271 ; AVX512BW-NEXT: # zmm28 = mem[0,1,2,3,0,1,2,3]
14272 ; AVX512BW-NEXT: vmovdqa64 %zmm14, %zmm7
14273 ; AVX512BW-NEXT: vpermt2w %zmm15, %zmm28, %zmm7
14274 ; AVX512BW-NEXT: movl $-1048377844, %ecx # imm = 0xC183060C
14275 ; AVX512BW-NEXT: kmovd %ecx, %k3
14276 ; AVX512BW-NEXT: vmovdqu16 %zmm0, %zmm7 {%k3}
14277 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,2,34,0,32,0,0,0,3,35,1,33,0,0,0,0,0,2,34,0,32,0,0,0,3,35,1,33,0,0,0]
14278 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
14279 ; AVX512BW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14280 ; AVX512BW-NEXT: vmovdqa64 %zmm4, %zmm0
14281 ; AVX512BW-NEXT: vpermt2w %zmm8, %zmm1, %zmm0
14282 ; AVX512BW-NEXT: vmovdqa64 %zmm29, %zmm3
14283 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm16
14284 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm27
14285 ; AVX512BW-NEXT: vmovdqa64 %zmm29, %zmm17
14286 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm23 = [9,0,0,0,44,12,42,10,0,0,0,45,13,43,11,0,9,0,0,0,44,12,42,10,0,0,0,45,13,43,11,0]
14287 ; AVX512BW-NEXT: # zmm23 = mem[0,1,2,3,0,1,2,3]
14288 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm31
14289 ; AVX512BW-NEXT: vpermt2w %zmm29, %zmm23, %zmm31
14290 ; AVX512BW-NEXT: vmovdqa64 %zmm25, %zmm18
14291 ; AVX512BW-NEXT: vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
14292 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm19, %zmm25
14293 ; AVX512BW-NEXT: vmovdqa64 %zmm29, %zmm19
14294 ; AVX512BW-NEXT: vpermt2w %zmm9, %zmm28, %zmm29
14295 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,u,u,u,4,5,32,u,u,u,u,11,12,33,u,u,u,u,18,19,34,u,u,u,u,25,26,35,u,u,u,u>
14296 ; AVX512BW-NEXT: vpermt2w %zmm20, %zmm2, %zmm0
14297 ; AVX512BW-NEXT: movl $236730480, %ecx # imm = 0xE1C3870
14298 ; AVX512BW-NEXT: vmovdqu16 %zmm25, %zmm29 {%k3}
14299 ; AVX512BW-NEXT: kmovd %ecx, %k3
14300 ; AVX512BW-NEXT: vmovdqu16 %zmm0, %zmm7 {%k3}
14301 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [13,0,0,0,48,16,46,14,0,0,0,49,17,47,15,0,13,0,0,0,48,16,46,14,0,0,0,49,17,47,15,0]
14302 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
14303 ; AVX512BW-NEXT: vmovdqa64 %zmm26, %zmm10
14304 ; AVX512BW-NEXT: vpermt2w %zmm30, %zmm0, %zmm10
14305 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm22
14306 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm1
14307 ; AVX512BW-NEXT: vmovdqa64 %zmm30, %zmm28
14308 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm25 # 64-byte Reload
14309 ; AVX512BW-NEXT: vpermt2w %zmm26, %zmm25, %zmm30
14310 ; AVX512BW-NEXT: vmovdqa64 64(%rax), %zmm25
14311 ; AVX512BW-NEXT: vpermt2w %zmm25, %zmm2, %zmm30
14312 ; AVX512BW-NEXT: vmovdqu16 %zmm30, %zmm29 {%k3}
14313 ; AVX512BW-NEXT: vpermi2w %zmm15, %zmm14, %zmm21
14314 ; AVX512BW-NEXT: vpermi2w %zmm5, %zmm12, %zmm6
14315 ; AVX512BW-NEXT: vmovdqu16 %zmm21, %zmm6 {%k1}
14316 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [61,27,59,0,0,0,30,62,28,60,0,0,0,31,63,29,61,27,59,0,0,0,30,62,28,60,0,0,0,31,63,29]
14317 ; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
14318 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm4, %zmm2
14319 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm21 = <u,1,2,59,u,u,u,u,8,9,60,u,u,u,u,15,16,61,u,u,u,u,22,23,62,u,u,u,u,29,30,63>
14320 ; AVX512BW-NEXT: vpermi2w %zmm20, %zmm2, %zmm21
14321 ; AVX512BW-NEXT: movl $-507279602, %eax # imm = 0xE1C3870E
14322 ; AVX512BW-NEXT: kmovd %eax, %k3
14323 ; AVX512BW-NEXT: vmovdqu16 %zmm21, %zmm6 {%k3}
14324 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,0,0,21,53,19,51,0,0,0,22,54,20,52,0,0,0,0,0,21,53,19,51,0,0,0,22,54,20,52,0,0]
14325 ; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
14326 ; AVX512BW-NEXT: vpermt2w %zmm9, %zmm2, %zmm3
14327 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm21 = [18,50,0,0,0,21,53,19,51,0,0,0,22,54,20,52,18,50,0,0,0,21,53,19,51,0,0,0,22,54,20,52]
14328 ; AVX512BW-NEXT: # zmm21 = mem[0,1,2,3,0,1,2,3]
14329 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm21, %zmm16
14330 ; AVX512BW-NEXT: movl $202911840, %eax # imm = 0xC183060
14331 ; AVX512BW-NEXT: kmovd %eax, %k3
14332 ; AVX512BW-NEXT: vmovdqu16 %zmm3, %zmm16 {%k3}
14333 ; AVX512BW-NEXT: vpermi2w %zmm12, %zmm5, %zmm24
14334 ; AVX512BW-NEXT: vpermi2w %zmm15, %zmm14, %zmm11
14335 ; AVX512BW-NEXT: vmovdqu16 %zmm24, %zmm11 {%k2}
14336 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [0,0,0,25,57,23,55,0,0,0,26,58,24,56,0,0,0,0,0,25,57,23,55,0,0,0,26,58,24,56,0,0]
14337 ; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
14338 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm4, %zmm3
14339 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm24 = <54,u,u,u,u,5,6,55,u,u,u,u,12,13,56,u,u,u,u,19,20,57,u,u,u,u,26,27,58,u,u,u>
14340 ; AVX512BW-NEXT: vpermi2w %zmm20, %zmm3, %zmm24
14341 ; AVX512BW-NEXT: movl $473460961, %eax # imm = 0x1C3870E1
14342 ; AVX512BW-NEXT: kmovd %eax, %k2
14343 ; AVX512BW-NEXT: vmovdqu16 %zmm24, %zmm11 {%k2}
14344 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [20,52,18,50,0,0,0,21,53,19,51,0,0,0,22,54,20,52,18,50,0,0,0,21,53,19,51,0,0,0,22,54]
14345 ; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
14346 ; AVX512BW-NEXT: vpermt2w %zmm26, %zmm3, %zmm22
14347 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm24 = <u,u,2,3,50,u,u,u,u,9,10,51,u,u,u,u,16,17,52,u,u,u,u,23,24,53,u,u,u,u,30,31>
14348 ; AVX512BW-NEXT: vpermt2w %zmm25, %zmm24, %zmm22
14349 ; AVX512BW-NEXT: movl $-1014559204, %eax # imm = 0xC3870E1C
14350 ; AVX512BW-NEXT: kmovd %eax, %k2
14351 ; AVX512BW-NEXT: vmovdqu16 %zmm22, %zmm16 {%k2}
14352 ; AVX512BW-NEXT: vpermi2w %zmm15, %zmm14, %zmm2
14353 ; AVX512BW-NEXT: vpermi2w %zmm12, %zmm5, %zmm21
14354 ; AVX512BW-NEXT: vmovdqu16 %zmm2, %zmm21 {%k3}
14355 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm4, %zmm3
14356 ; AVX512BW-NEXT: vpermt2w %zmm20, %zmm24, %zmm3
14357 ; AVX512BW-NEXT: vmovdqu16 %zmm3, %zmm21 {%k2}
14358 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,0,16,48,14,46,0,0,0,17,49,15,47,0,0,0,0,0,16,48,14,46,0,0,0,17,49,15,47,0,0,0]
14359 ; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
14360 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm2, %zmm27
14361 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [16,48,14,46,0,0,0,17,49,15,47,0,0,0,18,50,16,48,14,46,0,0,0,17,49,15,47,0,0,0,18,50]
14362 ; AVX512BW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
14363 ; AVX512BW-NEXT: vpermt2w %zmm9, %zmm3, %zmm17
14364 ; AVX512BW-NEXT: vmovdqu16 %zmm27, %zmm17 {%k1}
14365 ; AVX512BW-NEXT: vpermi2w %zmm12, %zmm5, %zmm2
14366 ; AVX512BW-NEXT: vpermi2w %zmm15, %zmm14, %zmm3
14367 ; AVX512BW-NEXT: vmovdqu16 %zmm2, %zmm3 {%k1}
14368 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <0,45,u,u,u,u,6,7,46,u,u,u,u,13,14,47,u,u,u,u,20,21,48,u,u,u,u,27,28,49,u,u>
14369 ; AVX512BW-NEXT: vpermt2w %zmm25, %zmm2, %zmm10
14370 ; AVX512BW-NEXT: movl $946921923, %eax # imm = 0x3870E1C3
14371 ; AVX512BW-NEXT: kmovd %eax, %k1
14372 ; AVX512BW-NEXT: vmovdqu16 %zmm10, %zmm17 {%k1}
14373 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm8, %zmm0
14374 ; AVX512BW-NEXT: vpermt2w %zmm20, %zmm2, %zmm0
14375 ; AVX512BW-NEXT: vmovdqu16 %zmm0, %zmm3 {%k1}
14376 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm0 = [43,9,41,0,0,0,12,44,10,42,0,0,0,13,45,11,43,9,41,0,0,0,12,44,10,42,0,0,0,13,45,11]
14377 ; AVX512BW-NEXT: # zmm0 = mem[0,1,2,3,0,1,2,3]
14378 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm0, %zmm18
14379 ; AVX512BW-NEXT: movl $405823681, %eax # imm = 0x183060C1
14380 ; AVX512BW-NEXT: kmovd %eax, %k1
14381 ; AVX512BW-NEXT: vmovdqu16 %zmm31, %zmm18 {%k1}
14382 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,11,43,9,41,0,0,0,12,44,10,42,0,0,0,13,0,11,43,9,41,0,0,0,12,44,10,42,0,0,0,13]
14383 ; AVX512BW-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
14384 ; AVX512BW-NEXT: vpermt2w %zmm26, %zmm2, %zmm1
14385 ; AVX512BW-NEXT: vpermi2w %zmm14, %zmm15, %zmm23
14386 ; AVX512BW-NEXT: vpermi2w %zmm12, %zmm5, %zmm0
14387 ; AVX512BW-NEXT: vmovdqu16 %zmm23, %zmm0 {%k1}
14388 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm10 = <u,u,u,3,4,41,u,u,u,u,10,11,42,u,u,u,u,17,18,43,u,u,u,u,24,25,44,u,u,u,u,31>
14389 ; AVX512BW-NEXT: vpermt2w %zmm25, %zmm10, %zmm1
14390 ; AVX512BW-NEXT: movl $-2029118408, %eax # imm = 0x870E1C38
14391 ; AVX512BW-NEXT: kmovd %eax, %k1
14392 ; AVX512BW-NEXT: vmovdqu16 %zmm1, %zmm18 {%k1}
14393 ; AVX512BW-NEXT: vpermi2w %zmm8, %zmm4, %zmm2
14394 ; AVX512BW-NEXT: vpermt2w %zmm20, %zmm10, %zmm2
14395 ; AVX512BW-NEXT: vmovdqu16 %zmm2, %zmm0 {%k1}
14396 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,0,0,7,39,5,37,0,0,0,8,40,6,38,0,0,0,0,0,7,39,5,37,0,0,0,8,40,6,38,0,0]
14397 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
14398 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14399 ; AVX512BW-NEXT: vpermt2w %zmm13, %zmm1, %zmm2
14400 ; AVX512BW-NEXT: vpermt2w %zmm12, %zmm1, %zmm5
14401 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [0,7,39,5,37,0,0,0,8,40,6,38,0,0,0,9,0,7,39,5,37,0,0,0,8,40,6,38,0,0,0,9]
14402 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
14403 ; AVX512BW-NEXT: vpermt2w %zmm9, %zmm1, %zmm19
14404 ; AVX512BW-NEXT: vpermt2w %zmm15, %zmm1, %zmm14
14405 ; AVX512BW-NEXT: vmovdqu16 %zmm2, %zmm19 {%k3}
14406 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [4,36,0,0,0,7,39,5,37,0,0,0,8,40,6,38,4,36,0,0,0,7,39,5,37,0,0,0,8,40,6,38]
14407 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
14408 ; AVX512BW-NEXT: vpermt2w %zmm8, %zmm1, %zmm4
14409 ; AVX512BW-NEXT: vpermt2w %zmm26, %zmm1, %zmm28
14410 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,1,36,u,u,u,u,7,8,37,u,u,u,u,14,15,38,u,u,u,u,21,22,39,u,u,u,u,28,29,40,u>
14411 ; AVX512BW-NEXT: vpermt2w %zmm20, %zmm1, %zmm4
14412 ; AVX512BW-NEXT: vpermt2w %zmm25, %zmm1, %zmm28
14413 ; AVX512BW-NEXT: vmovdqu16 %zmm5, %zmm14 {%k3}
14414 ; AVX512BW-NEXT: movl $1893843847, %eax # imm = 0x70E1C387
14415 ; AVX512BW-NEXT: kmovd %eax, %k1
14416 ; AVX512BW-NEXT: vmovdqu16 %zmm28, %zmm19 {%k1}
14417 ; AVX512BW-NEXT: vmovdqu16 %zmm4, %zmm14 {%k1}
14418 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,1,59,u,4,5,6,7,8,60,u,11,12,13,14,15,61,u,18,19,20,21,22,62,u,25,26,27,28,29,63,u>
14419 ; AVX512BW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
14420 ; AVX512BW-NEXT: vpermi2w %zmm26, %zmm2, %zmm1
14421 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = <u,1,2,3,4,5,55,u,8,9,10,11,12,56,u,15,16,17,18,19,57,u,22,23,24,25,26,58,u,29,30,31>
14422 ; AVX512BW-NEXT: vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
14423 ; AVX512BW-NEXT: vpermi2w %zmm26, %zmm4, %zmm2
14424 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,2,59,4,5,6,7,8,9,60,11,12,13,14,15,16,61,18,19,20,21,22,23,62,25,26,27,28,29,30,63]
14425 ; AVX512BW-NEXT: vpermi2w %zmm25, %zmm1, %zmm4
14426 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [54,1,2,3,4,5,6,55,8,9,10,11,12,13,56,15,16,17,18,19,20,57,22,23,24,25,26,27,58,29,30,31]
14427 ; AVX512BW-NEXT: vpermi2w %zmm25, %zmm2, %zmm1
14428 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
14429 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 64(%rax)
14430 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 128(%rax)
14431 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 192(%rax)
14432 ; AVX512BW-NEXT: vmovdqa64 %zmm21, 256(%rax)
14433 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 320(%rax)
14434 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 384(%rax)
14435 ; AVX512BW-NEXT: vmovdqa64 %zmm29, 448(%rax)
14436 ; AVX512BW-NEXT: vmovdqa64 %zmm19, 512(%rax)
14437 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 576(%rax)
14438 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 640(%rax)
14439 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 704(%rax)
14440 ; AVX512BW-NEXT: vmovdqa64 %zmm7, (%rax)
14441 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 768(%rax)
14442 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 832(%rax)
14443 ; AVX512BW-NEXT: addq $136, %rsp
14444 ; AVX512BW-NEXT: vzeroupper
14445 ; AVX512BW-NEXT: retq
14446 %in.vec0 = load <64 x i16>, ptr %in.vecptr0, align 64
14447 %in.vec1 = load <64 x i16>, ptr %in.vecptr1, align 64
14448 %in.vec2 = load <64 x i16>, ptr %in.vecptr2, align 64
14449 %in.vec3 = load <64 x i16>, ptr %in.vecptr3, align 64
14450 %in.vec4 = load <64 x i16>, ptr %in.vecptr4, align 64
14451 %in.vec5 = load <64 x i16>, ptr %in.vecptr5, align 64
14452 %in.vec6 = load <64 x i16>, ptr %in.vecptr6, align 64
14453 %1 = shufflevector <64 x i16> %in.vec0, <64 x i16> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
14454 %2 = shufflevector <64 x i16> %in.vec2, <64 x i16> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
14455 %3 = shufflevector <64 x i16> %in.vec4, <64 x i16> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
14456 %4 = shufflevector <128 x i16> %1, <128 x i16> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
14457 %5 = shufflevector <64 x i16> %in.vec6, <64 x i16> poison, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
14458 %6 = shufflevector <128 x i16> %3, <128 x i16> %5, <192 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191>
  %7 = shufflevector <192 x i16> %6, <192 x i16> poison, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
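  ; %8 below concatenates the 256 elements of %4 with the 192 defined elements of %7, giving one 448 x i16 vector (7 groups of 64 elements) for the final interleave.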
  %8 = shufflevector <256 x i16> %4, <256 x i16> %7, <448 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383, i32 384, i32 385, i32 386, i32 387, i32 388, i32 389, i32 390, i32 391, i32 392, i32 393, i32 394, i32 395, i32 396, i32 397, i32 398, i32 399, i32 400, i32 401, i32 402, i32 403, i32 404, i32 405, i32 406, i32 407, i32 408, i32 409, i32 410, i32 411, i32 412, i32 413, i32 414, i32 415, i32 416, i32 417, i32 418, i32 419, i32 420, i32 421, i32 422, i32 423, i32 424, i32 425, i32 426, i32 427, i32 428, i32 429, i32 430, i32 431, i32 432, i32 433, i32 434, i32 435, i32 436, i32 437, i32 438, i32 439, i32 440, i32 441, i32 442, i32 443, i32 444, i32 445, i32 446, i32 447>
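  ; The mask below performs the stride-7 interleave: output element 7*i+j reads lane 64*j+i of %8, i.e. lanes 0, 64, 128, 192, 256, 320, 384, then 1, 65, 129, ... through lane 447.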
  %interleaved.vec = shufflevector <448 x i16> %8, <448 x i16> poison, <448 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 384, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 385, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 386, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 387, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 388, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 389, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 390, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 391, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 392, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 393, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 394, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 395, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 396, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 397, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 398, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 399, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 400, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 401, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 402, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 403, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 404, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 405, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 406, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 407, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 408, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 409, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 410, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 411, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 412, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 413, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 414, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 415, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 416, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 417, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 418, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 419, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 420, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 421, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 422, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 423, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 424, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 425, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 426, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 427, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 428, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 429, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 430, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 431, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 432, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 433, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 434, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 435, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 436, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 437, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 438, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 439, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 440, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 441, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 442, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 443, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 444, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 445, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 446, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383, i32 447>
  store <448 x i16> %interleaved.vec, ptr %out.vec, align 64
  ret void
}
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
; AVX512-FAST: {{.*}}
; AVX512-SLOW: {{.*}}
; AVX512BW-ONLY-FAST: {{.*}}
; AVX512BW-ONLY-SLOW: {{.*}}
; AVX512DQBW-FAST: {{.*}}
; AVX512DQBW-SLOW: {{.*}}
; FALLBACK0: {{.*}}
; FALLBACK1: {{.*}}
; FALLBACK10: {{.*}}
; FALLBACK11: {{.*}}
; FALLBACK12: {{.*}}
; FALLBACK2: {{.*}}
; FALLBACK3: {{.*}}
; FALLBACK4: {{.*}}
; FALLBACK5: {{.*}}
; FALLBACK6: {{.*}}
; FALLBACK7: {{.*}}
; FALLBACK8: {{.*}}
; FALLBACK9: {{.*}}