1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
3 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
4 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
5 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
6 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
7 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
8 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
9 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
10 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
11 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
12 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
13 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
14 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12
16 ; These patterns are produced by the LoopVectorizer for interleaved loads.
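;
; As an illustrative sketch only (the struct, function, and parameter names below
; are invented for this comment and are not part of the test), a scalar loop of
; roughly this shape is what the LoopVectorizer turns into the "wide load plus
; eight stride-8 shufflevectors" IR exercised by each function in this file:
;
;   #include <stdint.h>
;   struct elt { uint8_t f0, f1, f2, f3, f4, f5, f6, f7; }; /* 8 interleaved byte fields */
;   void split8(const struct elt *in, uint8_t *o0, uint8_t *o1, uint8_t *o2, uint8_t *o3,
;               uint8_t *o4, uint8_t *o5, uint8_t *o6, uint8_t *o7, int n) {
;     for (int i = 0; i < n; i++) { /* each field is scattered to its own output array */
;       o0[i] = in[i].f0; o1[i] = in[i].f1; o2[i] = in[i].f2; o3[i] = in[i].f3;
;       o4[i] = in[i].f4; o5[i] = in[i].f5; o6[i] = in[i].f6; o7[i] = in[i].f7;
;     }
;   }
;
; In the IR below, %in.vec plays the role of 'in' and %out.vec0..%out.vec7 the
; roles of o0..o7, with the vector width (vf2, vf4, vf8, ...) as the VF.
;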
18 define void @load_i8_stride8_vf2(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5, ptr %out.vec6, ptr %out.vec7) nounwind {
19 ; SSE-LABEL: load_i8_stride8_vf2:
; SSE: # %bb.0:
21 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
22 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
23 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r11
24 ; SSE-NEXT: movdqa (%rdi), %xmm1
25 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,0,0,0,255,0,0,0]
26 ; SSE-NEXT: pand %xmm1, %xmm0
27 ; SSE-NEXT: packuswb %xmm0, %xmm0
28 ; SSE-NEXT: packuswb %xmm0, %xmm0
29 ; SSE-NEXT: packuswb %xmm0, %xmm0
30 ; SSE-NEXT: pxor %xmm2, %xmm2
31 ; SSE-NEXT: movdqa %xmm1, %xmm3
32 ; SSE-NEXT: punpckhbw {{.*#+}} xmm3 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
33 ; SSE-NEXT: movdqa %xmm1, %xmm4
34 ; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3],xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
35 ; SSE-NEXT: movdqa %xmm4, %xmm2
36 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
37 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[1,1,1,1]
38 ; SSE-NEXT: packuswb %xmm5, %xmm5
39 ; SSE-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1
40 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm1[0,2,2,3]
41 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm6[1,3,2,3,4,5,6,7]
42 ; SSE-NEXT: packuswb %xmm6, %xmm6
43 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[3,3,3,3]
44 ; SSE-NEXT: packuswb %xmm2, %xmm2
45 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
46 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm1[2,0,2,3,4,5,6,7]
47 ; SSE-NEXT: packuswb %xmm7, %xmm7
48 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
49 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,1,1]
50 ; SSE-NEXT: packuswb %xmm3, %xmm3
51 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[3,1,2,3,4,5,6,7]
52 ; SSE-NEXT: packuswb %xmm1, %xmm1
53 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[3,3,3,3]
54 ; SSE-NEXT: packuswb %xmm4, %xmm4
55 ; SSE-NEXT: movd %xmm0, %edi
56 ; SSE-NEXT: movw %di, (%rsi)
57 ; SSE-NEXT: movd %xmm5, %esi
58 ; SSE-NEXT: movw %si, (%rdx)
59 ; SSE-NEXT: movd %xmm6, %edx
60 ; SSE-NEXT: movw %dx, (%rcx)
61 ; SSE-NEXT: movd %xmm2, %ecx
62 ; SSE-NEXT: movw %cx, (%r8)
63 ; SSE-NEXT: movd %xmm7, %ecx
64 ; SSE-NEXT: movw %cx, (%r9)
65 ; SSE-NEXT: movd %xmm3, %ecx
66 ; SSE-NEXT: movw %cx, (%r11)
67 ; SSE-NEXT: movd %xmm1, %ecx
68 ; SSE-NEXT: movw %cx, (%r10)
69 ; SSE-NEXT: movd %xmm4, %ecx
70 ; SSE-NEXT: movw %cx, (%rax)
; SSE-NEXT: retq
73 ; AVX1-LABEL: load_i8_stride8_vf2:
; AVX1: # %bb.0:
75 ; AVX1-NEXT: movq {{[0-9]+}}(%rsp), %rax
76 ; AVX1-NEXT: movq {{[0-9]+}}(%rsp), %r10
77 ; AVX1-NEXT: movq {{[0-9]+}}(%rsp), %r11
78 ; AVX1-NEXT: vmovdqa (%rdi), %xmm0
79 ; AVX1-NEXT: vpshufb {{.*#+}} xmm1 = xmm0[0,8,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
80 ; AVX1-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[1,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
81 ; AVX1-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[2,10,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
82 ; AVX1-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[3,11,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
83 ; AVX1-NEXT: vpshufb {{.*#+}} xmm5 = xmm0[4,12,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
84 ; AVX1-NEXT: vpshufb {{.*#+}} xmm6 = xmm0[5,13,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
85 ; AVX1-NEXT: vpshufb {{.*#+}} xmm7 = xmm0[6,14,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
86 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[7,15,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
87 ; AVX1-NEXT: vpextrw $0, %xmm1, (%rsi)
88 ; AVX1-NEXT: vpextrw $0, %xmm2, (%rdx)
89 ; AVX1-NEXT: vpextrw $0, %xmm3, (%rcx)
90 ; AVX1-NEXT: vpextrw $0, %xmm4, (%r8)
91 ; AVX1-NEXT: vpextrw $0, %xmm5, (%r9)
92 ; AVX1-NEXT: vpextrw $0, %xmm6, (%r11)
93 ; AVX1-NEXT: vpextrw $0, %xmm7, (%r10)
94 ; AVX1-NEXT: vpextrw $0, %xmm0, (%rax)
; AVX1-NEXT: retq
97 ; AVX512-LABEL: load_i8_stride8_vf2:
; AVX512: # %bb.0:
99 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
100 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r10
101 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r11
102 ; AVX512-NEXT: vmovdqa (%rdi), %xmm0
103 ; AVX512-NEXT: vpshufb {{.*#+}} xmm1 = xmm0[1,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
104 ; AVX512-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[2,10,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
105 ; AVX512-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[3,11,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
106 ; AVX512-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[4,12,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
107 ; AVX512-NEXT: vpshufb {{.*#+}} xmm5 = xmm0[5,13,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
108 ; AVX512-NEXT: vpshufb {{.*#+}} xmm6 = xmm0[6,14,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
109 ; AVX512-NEXT: vpshufb {{.*#+}} xmm7 = xmm0[7,15,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
110 ; AVX512-NEXT: vpmovqb %xmm0, (%rsi)
111 ; AVX512-NEXT: vpextrw $0, %xmm1, (%rdx)
112 ; AVX512-NEXT: vpextrw $0, %xmm2, (%rcx)
113 ; AVX512-NEXT: vpextrw $0, %xmm3, (%r8)
114 ; AVX512-NEXT: vpextrw $0, %xmm4, (%r9)
115 ; AVX512-NEXT: vpextrw $0, %xmm5, (%r11)
116 ; AVX512-NEXT: vpextrw $0, %xmm6, (%r10)
117 ; AVX512-NEXT: vpextrw $0, %xmm7, (%rax)
; AVX512-NEXT: retq
119 %wide.vec = load <16 x i8>, ptr %in.vec, align 64
120 %strided.vec0 = shufflevector <16 x i8> %wide.vec, <16 x i8> poison, <2 x i32> <i32 0, i32 8>
121 %strided.vec1 = shufflevector <16 x i8> %wide.vec, <16 x i8> poison, <2 x i32> <i32 1, i32 9>
122 %strided.vec2 = shufflevector <16 x i8> %wide.vec, <16 x i8> poison, <2 x i32> <i32 2, i32 10>
123 %strided.vec3 = shufflevector <16 x i8> %wide.vec, <16 x i8> poison, <2 x i32> <i32 3, i32 11>
124 %strided.vec4 = shufflevector <16 x i8> %wide.vec, <16 x i8> poison, <2 x i32> <i32 4, i32 12>
125 %strided.vec5 = shufflevector <16 x i8> %wide.vec, <16 x i8> poison, <2 x i32> <i32 5, i32 13>
126 %strided.vec6 = shufflevector <16 x i8> %wide.vec, <16 x i8> poison, <2 x i32> <i32 6, i32 14>
127 %strided.vec7 = shufflevector <16 x i8> %wide.vec, <16 x i8> poison, <2 x i32> <i32 7, i32 15>
128 store <2 x i8> %strided.vec0, ptr %out.vec0, align 64
129 store <2 x i8> %strided.vec1, ptr %out.vec1, align 64
130 store <2 x i8> %strided.vec2, ptr %out.vec2, align 64
131 store <2 x i8> %strided.vec3, ptr %out.vec3, align 64
132 store <2 x i8> %strided.vec4, ptr %out.vec4, align 64
133 store <2 x i8> %strided.vec5, ptr %out.vec5, align 64
134 store <2 x i8> %strided.vec6, ptr %out.vec6, align 64
135 store <2 x i8> %strided.vec7, ptr %out.vec7, align 64
  ret void
}
139 define void @load_i8_stride8_vf4(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5, ptr %out.vec6, ptr %out.vec7) nounwind {
140 ; SSE-LABEL: load_i8_stride8_vf4:
; SSE: # %bb.0:
142 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
143 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
144 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r11
145 ; SSE-NEXT: movdqa (%rdi), %xmm5
146 ; SSE-NEXT: movdqa 16(%rdi), %xmm8
147 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,0,0,0,255,0,0,0]
148 ; SSE-NEXT: movdqa %xmm8, %xmm1
149 ; SSE-NEXT: pand %xmm0, %xmm1
150 ; SSE-NEXT: pand %xmm5, %xmm0
151 ; SSE-NEXT: packuswb %xmm1, %xmm0
152 ; SSE-NEXT: packuswb %xmm0, %xmm0
153 ; SSE-NEXT: packuswb %xmm0, %xmm0
154 ; SSE-NEXT: pxor %xmm3, %xmm3
155 ; SSE-NEXT: movdqa %xmm8, %xmm7
156 ; SSE-NEXT: punpckhbw {{.*#+}} xmm7 = xmm7[8],xmm3[8],xmm7[9],xmm3[9],xmm7[10],xmm3[10],xmm7[11],xmm3[11],xmm7[12],xmm3[12],xmm7[13],xmm3[13],xmm7[14],xmm3[14],xmm7[15],xmm3[15]
157 ; SSE-NEXT: movdqa %xmm8, %xmm2
158 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3],xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
159 ; SSE-NEXT: movdqa %xmm2, %xmm10
160 ; SSE-NEXT: punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm7[0],xmm10[1],xmm7[1],xmm10[2],xmm7[2],xmm10[3],xmm7[3]
161 ; SSE-NEXT: packuswb %xmm10, %xmm10
162 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,0,65535,65535,65535,65535,65535,65535]
163 ; SSE-NEXT: movdqa %xmm1, %xmm6
164 ; SSE-NEXT: pandn %xmm10, %xmm6
165 ; SSE-NEXT: movdqa %xmm5, %xmm9
166 ; SSE-NEXT: punpckhbw {{.*#+}} xmm9 = xmm9[8],xmm3[8],xmm9[9],xmm3[9],xmm9[10],xmm3[10],xmm9[11],xmm3[11],xmm9[12],xmm3[12],xmm9[13],xmm3[13],xmm9[14],xmm3[14],xmm9[15],xmm3[15]
167 ; SSE-NEXT: movdqa %xmm5, %xmm4
168 ; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3],xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
169 ; SSE-NEXT: movdqa %xmm4, %xmm11
170 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
171 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm11[1,1,1,1]
172 ; SSE-NEXT: packuswb %xmm3, %xmm3
173 ; SSE-NEXT: pand %xmm1, %xmm3
174 ; SSE-NEXT: por %xmm6, %xmm3
175 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [255,255,255,255,255,255,255,255]
176 ; SSE-NEXT: pand %xmm6, %xmm8
177 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm8[0,2,2,3]
178 ; SSE-NEXT: pshuflw {{.*#+}} xmm12 = xmm12[0,1,1,3,4,5,6,7]
179 ; SSE-NEXT: packuswb %xmm12, %xmm12
180 ; SSE-NEXT: movdqa %xmm1, %xmm13
181 ; SSE-NEXT: pandn %xmm12, %xmm13
182 ; SSE-NEXT: pand %xmm6, %xmm5
183 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,2,2,3]
184 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm6[1,3,2,3,4,5,6,7]
185 ; SSE-NEXT: packuswb %xmm6, %xmm6
186 ; SSE-NEXT: pand %xmm1, %xmm6
187 ; SSE-NEXT: por %xmm13, %xmm6
188 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[1,1,2,3]
189 ; SSE-NEXT: movdqa %xmm1, %xmm12
190 ; SSE-NEXT: pandn %xmm10, %xmm12
191 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm11[3,3,3,3]
192 ; SSE-NEXT: packuswb %xmm10, %xmm10
193 ; SSE-NEXT: pand %xmm1, %xmm10
194 ; SSE-NEXT: por %xmm12, %xmm10
195 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[3,1,2,3]
196 ; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm8[0,1,2,0,4,5,6,7]
197 ; SSE-NEXT: packuswb %xmm11, %xmm11
198 ; SSE-NEXT: movdqa %xmm1, %xmm12
199 ; SSE-NEXT: pandn %xmm11, %xmm12
200 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm5[3,1,2,3]
201 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm11[2,0,2,3,4,5,6,7]
202 ; SSE-NEXT: packuswb %xmm5, %xmm5
203 ; SSE-NEXT: pand %xmm1, %xmm5
204 ; SSE-NEXT: por %xmm12, %xmm5
205 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm7[4],xmm2[5],xmm7[5],xmm2[6],xmm7[6],xmm2[7],xmm7[7]
206 ; SSE-NEXT: packuswb %xmm2, %xmm2
207 ; SSE-NEXT: movdqa %xmm1, %xmm12
208 ; SSE-NEXT: pandn %xmm2, %xmm12
209 ; SSE-NEXT: punpckhwd {{.*#+}} xmm4 = xmm4[4],xmm9[4],xmm4[5],xmm9[5],xmm4[6],xmm9[6],xmm4[7],xmm9[7]
210 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm4[1,1,1,1]
211 ; SSE-NEXT: packuswb %xmm7, %xmm7
212 ; SSE-NEXT: pand %xmm1, %xmm7
213 ; SSE-NEXT: por %xmm12, %xmm7
214 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm8[0,1,3,1,4,5,6,7]
215 ; SSE-NEXT: packuswb %xmm8, %xmm8
216 ; SSE-NEXT: movdqa %xmm1, %xmm9
217 ; SSE-NEXT: pandn %xmm8, %xmm9
218 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm11[3,1,2,3,4,5,6,7]
219 ; SSE-NEXT: packuswb %xmm8, %xmm8
220 ; SSE-NEXT: pand %xmm1, %xmm8
221 ; SSE-NEXT: por %xmm9, %xmm8
222 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,2,3]
223 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[3,3,3,3]
224 ; SSE-NEXT: packuswb %xmm4, %xmm4
225 ; SSE-NEXT: pand %xmm1, %xmm4
226 ; SSE-NEXT: pandn %xmm2, %xmm1
227 ; SSE-NEXT: por %xmm4, %xmm1
228 ; SSE-NEXT: movd %xmm0, (%rsi)
229 ; SSE-NEXT: movd %xmm3, (%rdx)
230 ; SSE-NEXT: movd %xmm6, (%rcx)
231 ; SSE-NEXT: movd %xmm10, (%r8)
232 ; SSE-NEXT: movd %xmm5, (%r9)
233 ; SSE-NEXT: movd %xmm7, (%r11)
234 ; SSE-NEXT: movd %xmm8, (%r10)
235 ; SSE-NEXT: movd %xmm1, (%rax)
; SSE-NEXT: retq
238 ; AVX1-ONLY-LABEL: load_i8_stride8_vf4:
239 ; AVX1-ONLY: # %bb.0:
240 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
241 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
242 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
243 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
244 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm1
245 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm2
246 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm2, %xmm3
247 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm1, %xmm0
248 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
249 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
250 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm2, %xmm4
251 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm1, %xmm3
252 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
253 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm4 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
254 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm2, %xmm5
255 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm1, %xmm4
256 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
257 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm5 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
258 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm2, %xmm6
259 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm1, %xmm5
260 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
261 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm6 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
262 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm2, %xmm7
263 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm1, %xmm6
264 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
265 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm7 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
266 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm2, %xmm8
267 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm1, %xmm7
268 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
269 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm8 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
270 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm2, %xmm9
271 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm1, %xmm8
272 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
273 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm9 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
274 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm2
275 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm1, %xmm1
276 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
277 ; AVX1-ONLY-NEXT: vmovd %xmm0, (%rsi)
278 ; AVX1-ONLY-NEXT: vmovd %xmm3, (%rdx)
279 ; AVX1-ONLY-NEXT: vmovd %xmm4, (%rcx)
280 ; AVX1-ONLY-NEXT: vmovd %xmm5, (%r8)
281 ; AVX1-ONLY-NEXT: vmovd %xmm6, (%r9)
282 ; AVX1-ONLY-NEXT: vmovd %xmm7, (%r11)
283 ; AVX1-ONLY-NEXT: vmovd %xmm8, (%r10)
284 ; AVX1-ONLY-NEXT: vmovd %xmm1, (%rax)
285 ; AVX1-ONLY-NEXT: retq
287 ; AVX2-ONLY-LABEL: load_i8_stride8_vf4:
288 ; AVX2-ONLY: # %bb.0:
289 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
290 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
291 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
292 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm0 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
293 ; AVX2-ONLY-NEXT: vmovdqa (%rdi), %xmm1
294 ; AVX2-ONLY-NEXT: vmovdqa 16(%rdi), %xmm2
295 ; AVX2-ONLY-NEXT: vpshufb %xmm0, %xmm2, %xmm3
296 ; AVX2-ONLY-NEXT: vpshufb %xmm0, %xmm1, %xmm0
297 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
298 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm3 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
299 ; AVX2-ONLY-NEXT: vpshufb %xmm3, %xmm2, %xmm4
300 ; AVX2-ONLY-NEXT: vpshufb %xmm3, %xmm1, %xmm3
301 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
302 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm4 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
303 ; AVX2-ONLY-NEXT: vpshufb %xmm4, %xmm2, %xmm5
304 ; AVX2-ONLY-NEXT: vpshufb %xmm4, %xmm1, %xmm4
305 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
306 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm5 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
307 ; AVX2-ONLY-NEXT: vpshufb %xmm5, %xmm2, %xmm6
308 ; AVX2-ONLY-NEXT: vpshufb %xmm5, %xmm1, %xmm5
309 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
310 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm6 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
311 ; AVX2-ONLY-NEXT: vpshufb %xmm6, %xmm2, %xmm7
312 ; AVX2-ONLY-NEXT: vpshufb %xmm6, %xmm1, %xmm6
313 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
314 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm7 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
315 ; AVX2-ONLY-NEXT: vpshufb %xmm7, %xmm2, %xmm8
316 ; AVX2-ONLY-NEXT: vpshufb %xmm7, %xmm1, %xmm7
317 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
318 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm8 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
319 ; AVX2-ONLY-NEXT: vpshufb %xmm8, %xmm2, %xmm9
320 ; AVX2-ONLY-NEXT: vpshufb %xmm8, %xmm1, %xmm8
321 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
322 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm9 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
323 ; AVX2-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm2
324 ; AVX2-ONLY-NEXT: vpshufb %xmm9, %xmm1, %xmm1
325 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
326 ; AVX2-ONLY-NEXT: vmovd %xmm0, (%rsi)
327 ; AVX2-ONLY-NEXT: vmovd %xmm3, (%rdx)
328 ; AVX2-ONLY-NEXT: vmovd %xmm4, (%rcx)
329 ; AVX2-ONLY-NEXT: vmovd %xmm5, (%r8)
330 ; AVX2-ONLY-NEXT: vmovd %xmm6, (%r9)
331 ; AVX2-ONLY-NEXT: vmovd %xmm7, (%r11)
332 ; AVX2-ONLY-NEXT: vmovd %xmm8, (%r10)
333 ; AVX2-ONLY-NEXT: vmovd %xmm1, (%rax)
334 ; AVX2-ONLY-NEXT: retq
336 ; AVX512-LABEL: load_i8_stride8_vf4:
; AVX512: # %bb.0:
338 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
339 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r10
340 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r11
341 ; AVX512-NEXT: vmovdqa (%rdi), %ymm0
342 ; AVX512-NEXT: vpsrlq $8, %ymm0, %ymm1
343 ; AVX512-NEXT: vpsrlq $16, %ymm0, %ymm2
344 ; AVX512-NEXT: vpsrlq $24, %ymm0, %ymm3
345 ; AVX512-NEXT: vpsrlq $32, %ymm0, %ymm4
346 ; AVX512-NEXT: vpsrlq $40, %ymm0, %ymm5
347 ; AVX512-NEXT: vpsrlq $48, %ymm0, %ymm6
348 ; AVX512-NEXT: vpsrlq $56, %ymm0, %ymm7
349 ; AVX512-NEXT: vpmovqb %ymm0, (%rsi)
350 ; AVX512-NEXT: vpmovqb %ymm1, (%rdx)
351 ; AVX512-NEXT: vpmovqb %ymm2, (%rcx)
352 ; AVX512-NEXT: vpmovqb %ymm3, (%r8)
353 ; AVX512-NEXT: vpmovqb %ymm4, (%r9)
354 ; AVX512-NEXT: vpmovqb %ymm5, (%r11)
355 ; AVX512-NEXT: vpmovqb %ymm6, (%r10)
356 ; AVX512-NEXT: vpmovqb %ymm7, (%rax)
357 ; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
359 %wide.vec = load <32 x i8>, ptr %in.vec, align 64
360 %strided.vec0 = shufflevector <32 x i8> %wide.vec, <32 x i8> poison, <4 x i32> <i32 0, i32 8, i32 16, i32 24>
361 %strided.vec1 = shufflevector <32 x i8> %wide.vec, <32 x i8> poison, <4 x i32> <i32 1, i32 9, i32 17, i32 25>
362 %strided.vec2 = shufflevector <32 x i8> %wide.vec, <32 x i8> poison, <4 x i32> <i32 2, i32 10, i32 18, i32 26>
363 %strided.vec3 = shufflevector <32 x i8> %wide.vec, <32 x i8> poison, <4 x i32> <i32 3, i32 11, i32 19, i32 27>
364 %strided.vec4 = shufflevector <32 x i8> %wide.vec, <32 x i8> poison, <4 x i32> <i32 4, i32 12, i32 20, i32 28>
365 %strided.vec5 = shufflevector <32 x i8> %wide.vec, <32 x i8> poison, <4 x i32> <i32 5, i32 13, i32 21, i32 29>
366 %strided.vec6 = shufflevector <32 x i8> %wide.vec, <32 x i8> poison, <4 x i32> <i32 6, i32 14, i32 22, i32 30>
367 %strided.vec7 = shufflevector <32 x i8> %wide.vec, <32 x i8> poison, <4 x i32> <i32 7, i32 15, i32 23, i32 31>
368 store <4 x i8> %strided.vec0, ptr %out.vec0, align 64
369 store <4 x i8> %strided.vec1, ptr %out.vec1, align 64
370 store <4 x i8> %strided.vec2, ptr %out.vec2, align 64
371 store <4 x i8> %strided.vec3, ptr %out.vec3, align 64
372 store <4 x i8> %strided.vec4, ptr %out.vec4, align 64
373 store <4 x i8> %strided.vec5, ptr %out.vec5, align 64
374 store <4 x i8> %strided.vec6, ptr %out.vec6, align 64
375 store <4 x i8> %strided.vec7, ptr %out.vec7, align 64
  ret void
}
379 define void @load_i8_stride8_vf8(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5, ptr %out.vec6, ptr %out.vec7) nounwind {
380 ; SSE-LABEL: load_i8_stride8_vf8:
; SSE: # %bb.0:
382 ; SSE-NEXT: pushq %rax
383 ; SSE-NEXT: movdqa (%rdi), %xmm12
384 ; SSE-NEXT: movdqa 16(%rdi), %xmm11
385 ; SSE-NEXT: movdqa 32(%rdi), %xmm9
386 ; SSE-NEXT: movdqa 48(%rdi), %xmm10
387 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,0,0,0,255,0,0,0]
388 ; SSE-NEXT: movdqa %xmm11, %xmm1
389 ; SSE-NEXT: pand %xmm0, %xmm1
390 ; SSE-NEXT: movdqa %xmm12, %xmm2
391 ; SSE-NEXT: pand %xmm0, %xmm2
392 ; SSE-NEXT: packuswb %xmm1, %xmm2
393 ; SSE-NEXT: packuswb %xmm2, %xmm2
394 ; SSE-NEXT: movdqa %xmm10, %xmm1
395 ; SSE-NEXT: pand %xmm0, %xmm1
396 ; SSE-NEXT: pand %xmm9, %xmm0
397 ; SSE-NEXT: packuswb %xmm1, %xmm0
398 ; SSE-NEXT: packuswb %xmm0, %xmm0
399 ; SSE-NEXT: packuswb %xmm0, %xmm2
400 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
401 ; SSE-NEXT: pxor %xmm7, %xmm7
402 ; SSE-NEXT: movdqa %xmm11, %xmm0
403 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm7[8],xmm0[9],xmm7[9],xmm0[10],xmm7[10],xmm0[11],xmm7[11],xmm0[12],xmm7[12],xmm0[13],xmm7[13],xmm0[14],xmm7[14],xmm0[15],xmm7[15]
404 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
405 ; SSE-NEXT: movdqa %xmm11, %xmm14
406 ; SSE-NEXT: punpcklbw {{.*#+}} xmm14 = xmm14[0],xmm7[0],xmm14[1],xmm7[1],xmm14[2],xmm7[2],xmm14[3],xmm7[3],xmm14[4],xmm7[4],xmm14[5],xmm7[5],xmm14[6],xmm7[6],xmm14[7],xmm7[7]
407 ; SSE-NEXT: movdqa %xmm14, %xmm15
408 ; SSE-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
409 ; SSE-NEXT: packuswb %xmm15, %xmm15
410 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,0,65535,65535,65535,65535,65535,65535]
411 ; SSE-NEXT: movdqa %xmm3, %xmm0
412 ; SSE-NEXT: pandn %xmm15, %xmm0
413 ; SSE-NEXT: movdqa %xmm12, %xmm1
414 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm7[8],xmm1[9],xmm7[9],xmm1[10],xmm7[10],xmm1[11],xmm7[11],xmm1[12],xmm7[12],xmm1[13],xmm7[13],xmm1[14],xmm7[14],xmm1[15],xmm7[15]
415 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
416 ; SSE-NEXT: movdqa %xmm12, %xmm5
417 ; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1],xmm5[2],xmm7[2],xmm5[3],xmm7[3],xmm5[4],xmm7[4],xmm5[5],xmm7[5],xmm5[6],xmm7[6],xmm5[7],xmm7[7]
418 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
419 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
420 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[1,1,1,1]
421 ; SSE-NEXT: packuswb %xmm1, %xmm1
422 ; SSE-NEXT: pand %xmm3, %xmm1
423 ; SSE-NEXT: por %xmm0, %xmm1
424 ; SSE-NEXT: movdqa %xmm10, %xmm0
425 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm7[8],xmm0[9],xmm7[9],xmm0[10],xmm7[10],xmm0[11],xmm7[11],xmm0[12],xmm7[12],xmm0[13],xmm7[13],xmm0[14],xmm7[14],xmm0[15],xmm7[15]
426 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
427 ; SSE-NEXT: movdqa %xmm10, %xmm13
428 ; SSE-NEXT: punpcklbw {{.*#+}} xmm13 = xmm13[0],xmm7[0],xmm13[1],xmm7[1],xmm13[2],xmm7[2],xmm13[3],xmm7[3],xmm13[4],xmm7[4],xmm13[5],xmm7[5],xmm13[6],xmm7[6],xmm13[7],xmm7[7]
429 ; SSE-NEXT: movdqa %xmm13, %xmm4
430 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
431 ; SSE-NEXT: packuswb %xmm4, %xmm4
432 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm4[0,0,2,3]
433 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,65535,65535,0,65535,65535,65535,65535]
434 ; SSE-NEXT: movdqa %xmm2, %xmm6
435 ; SSE-NEXT: pandn %xmm8, %xmm6
436 ; SSE-NEXT: movdqa %xmm9, %xmm8
437 ; SSE-NEXT: punpckhbw {{.*#+}} xmm8 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
438 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
439 ; SSE-NEXT: movdqa %xmm9, %xmm0
440 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm7[0],xmm0[1],xmm7[1],xmm0[2],xmm7[2],xmm0[3],xmm7[3],xmm0[4],xmm7[4],xmm0[5],xmm7[5],xmm0[6],xmm7[6],xmm0[7],xmm7[7]
441 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
442 ; SSE-NEXT: movdqa %xmm0, %xmm7
443 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
444 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm7[0,1,1,3]
445 ; SSE-NEXT: packuswb %xmm8, %xmm8
446 ; SSE-NEXT: pand %xmm2, %xmm8
447 ; SSE-NEXT: por %xmm6, %xmm8
448 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm8[1,1,1,1]
449 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1]
450 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
451 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [255,255,255,255,255,255,255,255]
452 ; SSE-NEXT: pand %xmm6, %xmm11
453 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm11[0,2,2,3]
454 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm8[0,1,1,3,4,5,6,7]
455 ; SSE-NEXT: packuswb %xmm8, %xmm8
456 ; SSE-NEXT: movdqa %xmm3, %xmm1
457 ; SSE-NEXT: pandn %xmm8, %xmm1
458 ; SSE-NEXT: pand %xmm6, %xmm12
459 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm12[0,2,2,3]
460 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm8[1,3,2,3,4,5,6,7]
461 ; SSE-NEXT: packuswb %xmm8, %xmm8
462 ; SSE-NEXT: pand %xmm3, %xmm8
463 ; SSE-NEXT: por %xmm1, %xmm8
464 ; SSE-NEXT: pand %xmm6, %xmm10
465 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm10[0,1,2,0]
466 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,7,5]
467 ; SSE-NEXT: packuswb %xmm1, %xmm1
468 ; SSE-NEXT: movdqa %xmm2, %xmm0
469 ; SSE-NEXT: pandn %xmm1, %xmm0
470 ; SSE-NEXT: pand %xmm6, %xmm9
471 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm9[0,1,2,0]
472 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,7,5,6,7]
473 ; SSE-NEXT: packuswb %xmm1, %xmm1
474 ; SSE-NEXT: pand %xmm2, %xmm1
475 ; SSE-NEXT: por %xmm0, %xmm1
476 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,1,1]
477 ; SSE-NEXT: punpckldq {{.*#+}} xmm8 = xmm8[0],xmm0[0],xmm8[1],xmm0[1]
478 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm15[1,1,2,3]
479 ; SSE-NEXT: movdqa %xmm3, %xmm1
480 ; SSE-NEXT: pandn %xmm0, %xmm1
481 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm5[3,3,3,3]
482 ; SSE-NEXT: packuswb %xmm15, %xmm15
483 ; SSE-NEXT: pand %xmm3, %xmm15
484 ; SSE-NEXT: por %xmm1, %xmm15
485 ; SSE-NEXT: movdqa %xmm2, %xmm0
486 ; SSE-NEXT: pandn %xmm4, %xmm0
487 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[2,2,3,3]
488 ; SSE-NEXT: packuswb %xmm1, %xmm1
489 ; SSE-NEXT: pand %xmm2, %xmm1
490 ; SSE-NEXT: por %xmm0, %xmm1
491 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,1,1]
492 ; SSE-NEXT: punpckldq {{.*#+}} xmm15 = xmm15[0],xmm0[0],xmm15[1],xmm0[1]
493 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm11[3,1,2,3]
494 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm4[0,1,2,0,4,5,6,7]
495 ; SSE-NEXT: packuswb %xmm0, %xmm0
496 ; SSE-NEXT: movdqa %xmm3, %xmm1
497 ; SSE-NEXT: pandn %xmm0, %xmm1
498 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm12[3,1,2,3]
499 ; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm5[2,0,2,3,4,5,6,7]
500 ; SSE-NEXT: packuswb %xmm11, %xmm11
501 ; SSE-NEXT: pand %xmm3, %xmm11
502 ; SSE-NEXT: por %xmm1, %xmm11
503 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,1,1,3]
504 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm10[0,1,2,3,4,5,4,6]
505 ; SSE-NEXT: packuswb %xmm0, %xmm0
506 ; SSE-NEXT: movdqa %xmm2, %xmm1
507 ; SSE-NEXT: pandn %xmm0, %xmm1
508 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm9[0,1,1,3]
509 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm12[0,1,2,3,4,6,6,7]
510 ; SSE-NEXT: packuswb %xmm0, %xmm0
511 ; SSE-NEXT: pand %xmm2, %xmm0
512 ; SSE-NEXT: por %xmm1, %xmm0
513 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
514 ; SSE-NEXT: punpckldq {{.*#+}} xmm11 = xmm11[0],xmm0[0],xmm11[1],xmm0[1]
515 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Folded Reload
516 ; SSE-NEXT: # xmm14 = xmm14[4],mem[4],xmm14[5],mem[5],xmm14[6],mem[6],xmm14[7],mem[7]
517 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
518 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Folded Reload
519 ; SSE-NEXT: # xmm6 = xmm6[4],mem[4],xmm6[5],mem[5],xmm6[6],mem[6],xmm6[7],mem[7]
520 ; SSE-NEXT: packuswb %xmm14, %xmm14
521 ; SSE-NEXT: movdqa %xmm3, %xmm0
522 ; SSE-NEXT: pandn %xmm14, %xmm0
523 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm6[1,1,1,1]
524 ; SSE-NEXT: packuswb %xmm9, %xmm9
525 ; SSE-NEXT: pand %xmm3, %xmm9
526 ; SSE-NEXT: por %xmm0, %xmm9
527 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
528 ; SSE-NEXT: # xmm13 = xmm13[4],mem[4],xmm13[5],mem[5],xmm13[6],mem[6],xmm13[7],mem[7]
529 ; SSE-NEXT: packuswb %xmm13, %xmm13
530 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm13[0,0,2,3]
531 ; SSE-NEXT: movdqa %xmm2, %xmm1
532 ; SSE-NEXT: pandn %xmm0, %xmm1
533 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
534 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
535 ; SSE-NEXT: # xmm7 = xmm7[4],mem[4],xmm7[5],mem[5],xmm7[6],mem[6],xmm7[7],mem[7]
536 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[0,1,1,3]
537 ; SSE-NEXT: packuswb %xmm0, %xmm0
538 ; SSE-NEXT: pand %xmm2, %xmm0
539 ; SSE-NEXT: por %xmm1, %xmm0
540 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
541 ; SSE-NEXT: punpckldq {{.*#+}} xmm9 = xmm9[0],xmm0[0],xmm9[1],xmm0[1]
542 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm4[0,1,3,1,4,5,6,7]
543 ; SSE-NEXT: packuswb %xmm0, %xmm0
544 ; SSE-NEXT: movdqa %xmm3, %xmm1
545 ; SSE-NEXT: pandn %xmm0, %xmm1
546 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm5[3,1,2,3,4,5,6,7]
547 ; SSE-NEXT: packuswb %xmm4, %xmm4
548 ; SSE-NEXT: pand %xmm3, %xmm4
549 ; SSE-NEXT: por %xmm1, %xmm4
550 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm10[0,1,2,3,4,5,5,7]
551 ; SSE-NEXT: packuswb %xmm0, %xmm0
552 ; SSE-NEXT: movdqa %xmm2, %xmm1
553 ; SSE-NEXT: pandn %xmm0, %xmm1
554 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm12[0,1,2,3,5,7,6,7]
555 ; SSE-NEXT: packuswb %xmm0, %xmm0
556 ; SSE-NEXT: pand %xmm2, %xmm0
557 ; SSE-NEXT: por %xmm1, %xmm0
558 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
559 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
560 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[1,1,2,3]
561 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm6[3,3,3,3]
562 ; SSE-NEXT: packuswb %xmm1, %xmm1
563 ; SSE-NEXT: pand %xmm3, %xmm1
564 ; SSE-NEXT: pandn %xmm0, %xmm3
565 ; SSE-NEXT: por %xmm1, %xmm3
566 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[2,2,3,3]
567 ; SSE-NEXT: packuswb %xmm0, %xmm0
568 ; SSE-NEXT: pand %xmm2, %xmm0
569 ; SSE-NEXT: pandn %xmm13, %xmm2
570 ; SSE-NEXT: por %xmm0, %xmm2
571 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,1,1]
572 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
573 ; SSE-NEXT: pshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
574 ; SSE-NEXT: # xmm0 = mem[0,3,2,3]
575 ; SSE-NEXT: movq %xmm0, (%rsi)
576 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
577 ; SSE-NEXT: movlps %xmm0, (%rdx)
578 ; SSE-NEXT: movq %xmm8, (%rcx)
579 ; SSE-NEXT: movq %xmm15, (%r8)
580 ; SSE-NEXT: movq %xmm11, (%r9)
581 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
582 ; SSE-NEXT: movq %xmm9, (%rax)
583 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
584 ; SSE-NEXT: movq %xmm4, (%rax)
585 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
586 ; SSE-NEXT: movq %xmm3, (%rax)
587 ; SSE-NEXT: popq %rax
; SSE-NEXT: retq
590 ; AVX1-ONLY-LABEL: load_i8_stride8_vf8:
591 ; AVX1-ONLY: # %bb.0:
592 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
593 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
594 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
595 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm4 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
596 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm0
597 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm1
598 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm2
599 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm3
600 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm3, %xmm5
601 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm2, %xmm4
602 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
603 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm5 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
604 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm1, %xmm6
605 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm0, %xmm5
606 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
607 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3],xmm5[4,5,6,7]
608 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm5 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
609 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm3, %xmm6
610 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm2, %xmm5
611 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
612 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm6 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
613 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm1, %xmm7
614 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm0, %xmm6
615 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
616 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
617 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm6 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
618 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm3, %xmm7
619 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm2, %xmm6
620 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
621 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm7 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
622 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm1, %xmm8
623 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm0, %xmm7
624 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
625 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm7[0,1],xmm6[2,3],xmm7[4,5,6,7]
626 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm7 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
627 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm3, %xmm8
628 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm2, %xmm7
629 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
630 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm8 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
631 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm1, %xmm9
632 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm0, %xmm8
633 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
634 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm8[0,1],xmm7[2,3],xmm8[4,5,6,7]
635 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm8 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
636 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm3, %xmm9
637 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm2, %xmm8
638 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
639 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm9 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
640 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm1, %xmm10
641 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm0, %xmm9
642 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
643 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3],xmm9[4,5,6,7]
644 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm9 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
645 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm3, %xmm10
646 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm9
647 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
648 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm10 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
649 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm1, %xmm11
650 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm0, %xmm10
651 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
652 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm10[0,1],xmm9[2,3],xmm10[4,5,6,7]
653 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm10 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
654 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm3, %xmm11
655 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm2, %xmm10
656 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
657 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm11 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
658 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm1, %xmm12
659 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm0, %xmm11
660 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
661 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm11[0,1],xmm10[2,3],xmm11[4,5,6,7]
662 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm11 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
663 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm3, %xmm3
664 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm2, %xmm2
665 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
666 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
667 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm1, %xmm1
668 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm0
669 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
670 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm2[2,3],xmm0[4,5,6,7]
671 ; AVX1-ONLY-NEXT: vmovq %xmm4, (%rsi)
672 ; AVX1-ONLY-NEXT: vmovq %xmm5, (%rdx)
673 ; AVX1-ONLY-NEXT: vmovq %xmm6, (%rcx)
674 ; AVX1-ONLY-NEXT: vmovq %xmm7, (%r8)
675 ; AVX1-ONLY-NEXT: vmovq %xmm8, (%r9)
676 ; AVX1-ONLY-NEXT: vmovq %xmm9, (%r11)
677 ; AVX1-ONLY-NEXT: vmovq %xmm10, (%r10)
678 ; AVX1-ONLY-NEXT: vmovq %xmm0, (%rax)
679 ; AVX1-ONLY-NEXT: retq
681 ; AVX2-ONLY-LABEL: load_i8_stride8_vf8:
682 ; AVX2-ONLY: # %bb.0:
683 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
684 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
685 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
686 ; AVX2-ONLY-NEXT: vmovdqa (%rdi), %xmm0
687 ; AVX2-ONLY-NEXT: vmovdqa 16(%rdi), %xmm1
688 ; AVX2-ONLY-NEXT: vmovdqa 32(%rdi), %xmm2
689 ; AVX2-ONLY-NEXT: vmovdqa 48(%rdi), %xmm3
690 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm4 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
691 ; AVX2-ONLY-NEXT: vpshufb %xmm4, %xmm3, %xmm5
692 ; AVX2-ONLY-NEXT: vpshufb %xmm4, %xmm2, %xmm4
693 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
694 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm5 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
695 ; AVX2-ONLY-NEXT: vpshufb %xmm5, %xmm1, %xmm6
696 ; AVX2-ONLY-NEXT: vpshufb %xmm5, %xmm0, %xmm5
697 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
698 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0],xmm4[1],xmm5[2,3]
699 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm5 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
700 ; AVX2-ONLY-NEXT: vpshufb %xmm5, %xmm3, %xmm6
701 ; AVX2-ONLY-NEXT: vpshufb %xmm5, %xmm2, %xmm5
702 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
703 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm6 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
704 ; AVX2-ONLY-NEXT: vpshufb %xmm6, %xmm1, %xmm7
705 ; AVX2-ONLY-NEXT: vpshufb %xmm6, %xmm0, %xmm6
706 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
707 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
708 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm6 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
709 ; AVX2-ONLY-NEXT: vpshufb %xmm6, %xmm3, %xmm7
710 ; AVX2-ONLY-NEXT: vpshufb %xmm6, %xmm2, %xmm6
711 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
712 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm7 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
713 ; AVX2-ONLY-NEXT: vpshufb %xmm7, %xmm1, %xmm8
714 ; AVX2-ONLY-NEXT: vpshufb %xmm7, %xmm0, %xmm7
715 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
716 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0],xmm6[1],xmm7[2,3]
717 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm7 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
718 ; AVX2-ONLY-NEXT: vpshufb %xmm7, %xmm3, %xmm8
719 ; AVX2-ONLY-NEXT: vpshufb %xmm7, %xmm2, %xmm7
720 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
721 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm8 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
722 ; AVX2-ONLY-NEXT: vpshufb %xmm8, %xmm1, %xmm9
723 ; AVX2-ONLY-NEXT: vpshufb %xmm8, %xmm0, %xmm8
724 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
725 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0],xmm7[1],xmm8[2,3]
726 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm8 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
727 ; AVX2-ONLY-NEXT: vpshufb %xmm8, %xmm3, %xmm9
728 ; AVX2-ONLY-NEXT: vpshufb %xmm8, %xmm2, %xmm8
729 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
730 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm9 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
731 ; AVX2-ONLY-NEXT: vpshufb %xmm9, %xmm1, %xmm10
732 ; AVX2-ONLY-NEXT: vpshufb %xmm9, %xmm0, %xmm9
733 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
734 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0],xmm8[1],xmm9[2,3]
735 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm9 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
736 ; AVX2-ONLY-NEXT: vpshufb %xmm9, %xmm3, %xmm10
737 ; AVX2-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm9
738 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
739 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm10 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
740 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm1, %xmm11
741 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm0, %xmm10
742 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
743 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0],xmm9[1],xmm10[2,3]
744 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm10 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
745 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm3, %xmm11
746 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm2, %xmm10
747 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
748 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm11 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
749 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm1, %xmm12
750 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm0, %xmm11
751 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
752 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0],xmm10[1],xmm11[2,3]
753 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm11 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
754 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm3, %xmm3
755 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm2, %xmm2
756 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
757 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm3 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
758 ; AVX2-ONLY-NEXT: vpshufb %xmm3, %xmm1, %xmm1
759 ; AVX2-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm0
760 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
761 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm2[1],xmm0[2,3]
762 ; AVX2-ONLY-NEXT: vmovq %xmm4, (%rsi)
763 ; AVX2-ONLY-NEXT: vmovq %xmm5, (%rdx)
764 ; AVX2-ONLY-NEXT: vmovq %xmm6, (%rcx)
765 ; AVX2-ONLY-NEXT: vmovq %xmm7, (%r8)
766 ; AVX2-ONLY-NEXT: vmovq %xmm8, (%r9)
767 ; AVX2-ONLY-NEXT: vmovq %xmm9, (%r11)
768 ; AVX2-ONLY-NEXT: vmovq %xmm10, (%r10)
769 ; AVX2-ONLY-NEXT: vmovq %xmm0, (%rax)
770 ; AVX2-ONLY-NEXT: retq
772 ; AVX512-LABEL: load_i8_stride8_vf8:
; AVX512: # %bb.0:
774 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
775 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r10
776 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r11
777 ; AVX512-NEXT: vmovdqa64 (%rdi), %zmm0
778 ; AVX512-NEXT: vpsrlq $8, %zmm0, %zmm1
779 ; AVX512-NEXT: vpsrlq $16, %zmm0, %zmm2
780 ; AVX512-NEXT: vpsrlq $24, %zmm0, %zmm3
781 ; AVX512-NEXT: vpsrlq $32, %zmm0, %zmm4
782 ; AVX512-NEXT: vpsrlq $40, %zmm0, %zmm5
783 ; AVX512-NEXT: vpsrlq $48, %zmm0, %zmm6
784 ; AVX512-NEXT: vpsrlq $56, %zmm0, %zmm7
785 ; AVX512-NEXT: vpmovqb %zmm0, (%rsi)
786 ; AVX512-NEXT: vpmovqb %zmm1, (%rdx)
787 ; AVX512-NEXT: vpmovqb %zmm2, (%rcx)
788 ; AVX512-NEXT: vpmovqb %zmm3, (%r8)
789 ; AVX512-NEXT: vpmovqb %zmm4, (%r9)
790 ; AVX512-NEXT: vpmovqb %zmm5, (%r11)
791 ; AVX512-NEXT: vpmovqb %zmm6, (%r10)
792 ; AVX512-NEXT: vpmovqb %zmm7, (%rax)
793 ; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
795 %wide.vec = load <64 x i8>, ptr %in.vec, align 64
796 %strided.vec0 = shufflevector <64 x i8> %wide.vec, <64 x i8> poison, <8 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 56>
797 %strided.vec1 = shufflevector <64 x i8> %wide.vec, <64 x i8> poison, <8 x i32> <i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 57>
798 %strided.vec2 = shufflevector <64 x i8> %wide.vec, <64 x i8> poison, <8 x i32> <i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 50, i32 58>
799 %strided.vec3 = shufflevector <64 x i8> %wide.vec, <64 x i8> poison, <8 x i32> <i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 51, i32 59>
800 %strided.vec4 = shufflevector <64 x i8> %wide.vec, <64 x i8> poison, <8 x i32> <i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 52, i32 60>
801 %strided.vec5 = shufflevector <64 x i8> %wide.vec, <64 x i8> poison, <8 x i32> <i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 53, i32 61>
802 %strided.vec6 = shufflevector <64 x i8> %wide.vec, <64 x i8> poison, <8 x i32> <i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 54, i32 62>
803 %strided.vec7 = shufflevector <64 x i8> %wide.vec, <64 x i8> poison, <8 x i32> <i32 7, i32 15, i32 23, i32 31, i32 39, i32 47, i32 55, i32 63>
804 store <8 x i8> %strided.vec0, ptr %out.vec0, align 64
805 store <8 x i8> %strided.vec1, ptr %out.vec1, align 64
806 store <8 x i8> %strided.vec2, ptr %out.vec2, align 64
807 store <8 x i8> %strided.vec3, ptr %out.vec3, align 64
808 store <8 x i8> %strided.vec4, ptr %out.vec4, align 64
809 store <8 x i8> %strided.vec5, ptr %out.vec5, align 64
810 store <8 x i8> %strided.vec6, ptr %out.vec6, align 64
811 store <8 x i8> %strided.vec7, ptr %out.vec7, align 64
  ret void
}
815 define void @load_i8_stride8_vf16(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5, ptr %out.vec6, ptr %out.vec7) nounwind {
816 ; SSE-LABEL: load_i8_stride8_vf16:
; SSE: # %bb.0:
818 ; SSE-NEXT: subq $328, %rsp # imm = 0x148
819 ; SSE-NEXT: movdqa (%rdi), %xmm6
820 ; SSE-NEXT: movdqa 16(%rdi), %xmm14
821 ; SSE-NEXT: movdqa 32(%rdi), %xmm13
822 ; SSE-NEXT: movdqa 48(%rdi), %xmm8
823 ; SSE-NEXT: movdqa 64(%rdi), %xmm15
824 ; SSE-NEXT: movdqa 80(%rdi), %xmm4
825 ; SSE-NEXT: movdqa 96(%rdi), %xmm12
826 ; SSE-NEXT: movdqa 112(%rdi), %xmm9
827 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,0,0,0,255,0,0,0]
828 ; SSE-NEXT: movdqa %xmm9, %xmm0
829 ; SSE-NEXT: pand %xmm3, %xmm0
830 ; SSE-NEXT: movdqa %xmm12, %xmm1
831 ; SSE-NEXT: pand %xmm3, %xmm1
832 ; SSE-NEXT: packuswb %xmm0, %xmm1
833 ; SSE-NEXT: packuswb %xmm1, %xmm0
834 ; SSE-NEXT: movdqa %xmm4, %xmm1
835 ; SSE-NEXT: pand %xmm3, %xmm1
836 ; SSE-NEXT: movdqa %xmm15, %xmm2
837 ; SSE-NEXT: pand %xmm3, %xmm2
838 ; SSE-NEXT: packuswb %xmm1, %xmm2
839 ; SSE-NEXT: packuswb %xmm2, %xmm2
840 ; SSE-NEXT: packuswb %xmm0, %xmm2
841 ; SSE-NEXT: movdqa %xmm8, %xmm0
842 ; SSE-NEXT: pand %xmm3, %xmm0
843 ; SSE-NEXT: movdqa %xmm13, %xmm1
844 ; SSE-NEXT: pand %xmm3, %xmm1
845 ; SSE-NEXT: packuswb %xmm0, %xmm1
846 ; SSE-NEXT: packuswb %xmm1, %xmm0
847 ; SSE-NEXT: movdqa %xmm14, %xmm1
848 ; SSE-NEXT: pand %xmm3, %xmm1
849 ; SSE-NEXT: pand %xmm6, %xmm3
850 ; SSE-NEXT: packuswb %xmm1, %xmm3
851 ; SSE-NEXT: packuswb %xmm3, %xmm3
852 ; SSE-NEXT: packuswb %xmm0, %xmm3
853 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,3],xmm2[0,3]
854 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
855 ; SSE-NEXT: pxor %xmm7, %xmm7
856 ; SSE-NEXT: movdqa %xmm14, %xmm0
857 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm7[8],xmm0[9],xmm7[9],xmm0[10],xmm7[10],xmm0[11],xmm7[11],xmm0[12],xmm7[12],xmm0[13],xmm7[13],xmm0[14],xmm7[14],xmm0[15],xmm7[15]
858 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
859 ; SSE-NEXT: movdqa %xmm14, %xmm11
860 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm7[0],xmm11[1],xmm7[1],xmm11[2],xmm7[2],xmm11[3],xmm7[3],xmm11[4],xmm7[4],xmm11[5],xmm7[5],xmm11[6],xmm7[6],xmm11[7],xmm7[7]
861 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
862 ; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm0[0],xmm11[1],xmm0[1],xmm11[2],xmm0[2],xmm11[3],xmm0[3]
863 ; SSE-NEXT: packuswb %xmm11, %xmm11
864 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [65535,0,65535,65535,65535,65535,65535,65535]
865 ; SSE-NEXT: movdqa %xmm10, %xmm1
866 ; SSE-NEXT: pandn %xmm11, %xmm1
867 ; SSE-NEXT: movdqa %xmm6, %xmm0
868 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
869 ; SSE-NEXT: movdqa %xmm6, %xmm2
870 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm7[8],xmm2[9],xmm7[9],xmm2[10],xmm7[10],xmm2[11],xmm7[11],xmm2[12],xmm7[12],xmm2[13],xmm7[13],xmm2[14],xmm7[14],xmm2[15],xmm7[15]
871 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
872 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm7[0],xmm0[1],xmm7[1],xmm0[2],xmm7[2],xmm0[3],xmm7[3],xmm0[4],xmm7[4],xmm0[5],xmm7[5],xmm0[6],xmm7[6],xmm0[7],xmm7[7]
873 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
874 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
875 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
876 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
877 ; SSE-NEXT: packuswb %xmm0, %xmm0
878 ; SSE-NEXT: pand %xmm10, %xmm0
879 ; SSE-NEXT: por %xmm1, %xmm0
880 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
881 ; SSE-NEXT: movdqa %xmm8, %xmm1
882 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm7[8],xmm1[9],xmm7[9],xmm1[10],xmm7[10],xmm1[11],xmm7[11],xmm1[12],xmm7[12],xmm1[13],xmm7[13],xmm1[14],xmm7[14],xmm1[15],xmm7[15]
883 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
884 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
885 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
886 ; SSE-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm1[0],xmm8[1],xmm1[1],xmm8[2],xmm1[2],xmm8[3],xmm1[3]
887 ; SSE-NEXT: packuswb %xmm8, %xmm8
888 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm8[0,0,2,3]
889 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,65535,65535,0,65535,65535,65535,65535]
890 ; SSE-NEXT: movdqa %xmm5, %xmm6
891 ; SSE-NEXT: pandn %xmm1, %xmm6
892 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
893 ; SSE-NEXT: movdqa %xmm13, %xmm1
894 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm7[8],xmm1[9],xmm7[9],xmm1[10],xmm7[10],xmm1[11],xmm7[11],xmm1[12],xmm7[12],xmm1[13],xmm7[13],xmm1[14],xmm7[14],xmm1[15],xmm7[15]
895 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
896 ; SSE-NEXT: punpcklbw {{.*#+}} xmm13 = xmm13[0],xmm7[0],xmm13[1],xmm7[1],xmm13[2],xmm7[2],xmm13[3],xmm7[3],xmm13[4],xmm7[4],xmm13[5],xmm7[5],xmm13[6],xmm7[6],xmm13[7],xmm7[7]
897 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
898 ; SSE-NEXT: punpcklwd {{.*#+}} xmm13 = xmm13[0],xmm1[0],xmm13[1],xmm1[1],xmm13[2],xmm1[2],xmm13[3],xmm1[3]
899 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm13[0,1,1,3]
900 ; SSE-NEXT: packuswb %xmm1, %xmm1
901 ; SSE-NEXT: pand %xmm5, %xmm1
902 ; SSE-NEXT: por %xmm6, %xmm1
903 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
904 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
905 ; SSE-NEXT: movdqa %xmm9, %xmm1
906 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
907 ; SSE-NEXT: movdqa %xmm9, %xmm2
908 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm7[8],xmm2[9],xmm7[9],xmm2[10],xmm7[10],xmm2[11],xmm7[11],xmm2[12],xmm7[12],xmm2[13],xmm7[13],xmm2[14],xmm7[14],xmm2[15],xmm7[15]
909 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
910 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3],xmm1[4],xmm7[4],xmm1[5],xmm7[5],xmm1[6],xmm7[6],xmm1[7],xmm7[7]
911 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
912 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
913 ; SSE-NEXT: packuswb %xmm1, %xmm1
914 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
915 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,2]
916 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [65535,65535,65535,65535,65535,65535,65535,0]
917 ; SSE-NEXT: movdqa %xmm6, %xmm9
918 ; SSE-NEXT: pandn %xmm1, %xmm9
919 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
920 ; SSE-NEXT: movdqa %xmm12, %xmm1
921 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm7[8],xmm1[9],xmm7[9],xmm1[10],xmm7[10],xmm1[11],xmm7[11],xmm1[12],xmm7[12],xmm1[13],xmm7[13],xmm1[14],xmm7[14],xmm1[15],xmm7[15]
922 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
923 ; SSE-NEXT: punpcklbw {{.*#+}} xmm12 = xmm12[0],xmm7[0],xmm12[1],xmm7[1],xmm12[2],xmm7[2],xmm12[3],xmm7[3],xmm12[4],xmm7[4],xmm12[5],xmm7[5],xmm12[6],xmm7[6],xmm12[7],xmm7[7]
924 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
925 ; SSE-NEXT: punpcklwd {{.*#+}} xmm12 = xmm12[0],xmm1[0],xmm12[1],xmm1[1],xmm12[2],xmm1[2],xmm12[3],xmm1[3]
926 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm12[0,1,1,3]
927 ; SSE-NEXT: packuswb %xmm1, %xmm1
928 ; SSE-NEXT: pand %xmm6, %xmm1
929 ; SSE-NEXT: por %xmm9, %xmm1
930 ; SSE-NEXT: movdqa %xmm4, %xmm3
931 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
932 ; SSE-NEXT: movdqa %xmm4, %xmm2
933 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm7[8],xmm2[9],xmm7[9],xmm2[10],xmm7[10],xmm2[11],xmm7[11],xmm2[12],xmm7[12],xmm2[13],xmm7[13],xmm2[14],xmm7[14],xmm2[15],xmm7[15]
934 ; SSE-NEXT: movdqa %xmm2, %xmm4
935 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
936 ; SSE-NEXT: movdqa %xmm3, %xmm2
937 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm7[0],xmm2[1],xmm7[1],xmm2[2],xmm7[2],xmm2[3],xmm7[3],xmm2[4],xmm7[4],xmm2[5],xmm7[5],xmm2[6],xmm7[6],xmm2[7],xmm7[7]
938 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
939 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
940 ; SSE-NEXT: movdqa %xmm15, %xmm3
941 ; SSE-NEXT: punpckhbw {{.*#+}} xmm3 = xmm3[8],xmm7[8],xmm3[9],xmm7[9],xmm3[10],xmm7[10],xmm3[11],xmm7[11],xmm3[12],xmm7[12],xmm3[13],xmm7[13],xmm3[14],xmm7[14],xmm3[15],xmm7[15]
942 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
943 ; SSE-NEXT: punpcklbw {{.*#+}} xmm15 = xmm15[0],xmm7[0],xmm15[1],xmm7[1],xmm15[2],xmm7[2],xmm15[3],xmm7[3],xmm15[4],xmm7[4],xmm15[5],xmm7[5],xmm15[6],xmm7[6],xmm15[7],xmm7[7]
944 ; SSE-NEXT: movdqa %xmm15, (%rsp) # 16-byte Spill
945 ; SSE-NEXT: movdqa %xmm2, %xmm7
946 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3]
947 ; SSE-NEXT: packuswb %xmm7, %xmm7
948 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
949 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,65535,65535,65535,65535,0,65535,65535]
950 ; SSE-NEXT: movdqa %xmm4, %xmm9
951 ; SSE-NEXT: pandn %xmm7, %xmm9
952 ; SSE-NEXT: movdqa %xmm15, %xmm7
953 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3]
954 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm7[1,1,1,1]
955 ; SSE-NEXT: packuswb %xmm15, %xmm15
956 ; SSE-NEXT: pand %xmm4, %xmm15
957 ; SSE-NEXT: por %xmm9, %xmm15
958 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm15[2,2,2,2]
959 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
960 ; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
961 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
962 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,255,255,255,255,255]
963 ; SSE-NEXT: pand %xmm1, %xmm14
964 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[0,2,2,3]
965 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,1,3,4,5,6,7]
966 ; SSE-NEXT: packuswb %xmm0, %xmm0
967 ; SSE-NEXT: movdqa %xmm10, %xmm9
968 ; SSE-NEXT: pandn %xmm0, %xmm9
969 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
970 ; SSE-NEXT: pand %xmm1, %xmm0
971 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
972 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
973 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[1,3,2,3,4,5,6,7]
974 ; SSE-NEXT: packuswb %xmm0, %xmm0
975 ; SSE-NEXT: pand %xmm10, %xmm0
976 ; SSE-NEXT: por %xmm9, %xmm0
977 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
978 ; SSE-NEXT: pand %xmm1, %xmm2
979 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
980 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm2[0,1,2,0]
981 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,7,5]
982 ; SSE-NEXT: packuswb %xmm9, %xmm9
983 ; SSE-NEXT: movdqa %xmm5, %xmm15
984 ; SSE-NEXT: pandn %xmm9, %xmm15
985 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
986 ; SSE-NEXT: pand %xmm1, %xmm2
987 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
988 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm2[0,1,2,0]
989 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,7,5,6,7]
990 ; SSE-NEXT: packuswb %xmm9, %xmm9
991 ; SSE-NEXT: pand %xmm5, %xmm9
992 ; SSE-NEXT: por %xmm15, %xmm9
993 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[1,1,1,1]
994 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm9[0],xmm0[1],xmm9[1]
995 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
996 ; SSE-NEXT: pand %xmm1, %xmm2
997 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
998 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm2[0,1,2,0]
999 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,7,5]
1000 ; SSE-NEXT: packuswb %xmm9, %xmm9
1001 ; SSE-NEXT: movdqa %xmm6, %xmm15
1002 ; SSE-NEXT: pandn %xmm9, %xmm15
1003 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1004 ; SSE-NEXT: pand %xmm1, %xmm2
1005 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1006 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm2[0,1,2,0]
1007 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,7,5,6,7]
1008 ; SSE-NEXT: packuswb %xmm9, %xmm9
1009 ; SSE-NEXT: pand %xmm6, %xmm9
1010 ; SSE-NEXT: por %xmm15, %xmm9
1011 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1012 ; SSE-NEXT: pand %xmm1, %xmm2
1013 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1014 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm2[0,2,2,3]
1015 ; SSE-NEXT: pshuflw {{.*#+}} xmm15 = xmm15[0,1,1,3,4,5,6,7]
1016 ; SSE-NEXT: packuswb %xmm15, %xmm15
1017 ; SSE-NEXT: movdqa %xmm4, %xmm2
1018 ; SSE-NEXT: pandn %xmm15, %xmm2
1019 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
1020 ; SSE-NEXT: pand %xmm1, %xmm15
1021 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm15[0,2,2,3]
1022 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,3,2,3,4,5,6,7]
1023 ; SSE-NEXT: packuswb %xmm1, %xmm1
1024 ; SSE-NEXT: pand %xmm4, %xmm1
1025 ; SSE-NEXT: por %xmm2, %xmm1
1026 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
1027 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm9[2],xmm1[3],xmm9[3]
1028 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
1029 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1030 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[1,1,2,3]
1031 ; SSE-NEXT: movdqa %xmm10, %xmm1
1032 ; SSE-NEXT: pandn %xmm0, %xmm1
1033 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1034 ; SSE-NEXT: # xmm0 = mem[3,3,3,3]
1035 ; SSE-NEXT: packuswb %xmm0, %xmm0
1036 ; SSE-NEXT: pand %xmm10, %xmm0
1037 ; SSE-NEXT: por %xmm1, %xmm0
1038 ; SSE-NEXT: movdqa %xmm5, %xmm1
1039 ; SSE-NEXT: pandn %xmm8, %xmm1
1040 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm13[2,2,3,3]
1041 ; SSE-NEXT: packuswb %xmm2, %xmm2
1042 ; SSE-NEXT: pand %xmm5, %xmm2
1043 ; SSE-NEXT: por %xmm1, %xmm2
1044 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,1,1]
1045 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
1046 ; SSE-NEXT: movdqa %xmm6, %xmm1
1047 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1048 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm12[2,2,3,3]
1049 ; SSE-NEXT: packuswb %xmm2, %xmm2
1050 ; SSE-NEXT: pand %xmm6, %xmm2
1051 ; SSE-NEXT: por %xmm1, %xmm2
1052 ; SSE-NEXT: pshufd $244, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1053 ; SSE-NEXT: # xmm1 = mem[0,1,3,3]
1054 ; SSE-NEXT: movdqa %xmm4, %xmm8
1055 ; SSE-NEXT: pandn %xmm1, %xmm8
1056 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[3,3,3,3]
1057 ; SSE-NEXT: packuswb %xmm1, %xmm1
1058 ; SSE-NEXT: pand %xmm4, %xmm1
1059 ; SSE-NEXT: por %xmm8, %xmm1
1060 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
1061 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm2[2],xmm1[3],xmm2[3]
1062 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
1063 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1064 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm14[3,1,2,3]
1065 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm3[0,1,2,0,4,5,6,7]
1066 ; SSE-NEXT: packuswb %xmm0, %xmm0
1067 ; SSE-NEXT: movdqa %xmm10, %xmm1
1068 ; SSE-NEXT: pandn %xmm0, %xmm1
1069 ; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1070 ; SSE-NEXT: # xmm0 = mem[3,1,2,3]
1071 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1072 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[2,0,2,3,4,5,6,7]
1073 ; SSE-NEXT: packuswb %xmm0, %xmm0
1074 ; SSE-NEXT: pand %xmm10, %xmm0
1075 ; SSE-NEXT: por %xmm1, %xmm0
1076 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1077 ; SSE-NEXT: # xmm1 = mem[0,1,1,3]
1078 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1079 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
1080 ; SSE-NEXT: packuswb %xmm1, %xmm1
1081 ; SSE-NEXT: movdqa %xmm5, %xmm8
1082 ; SSE-NEXT: pandn %xmm1, %xmm8
1083 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1084 ; SSE-NEXT: # xmm1 = mem[0,1,1,3]
1085 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1086 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
1087 ; SSE-NEXT: packuswb %xmm1, %xmm1
1088 ; SSE-NEXT: pand %xmm5, %xmm1
1089 ; SSE-NEXT: por %xmm8, %xmm1
1090 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
1091 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
1092 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1093 ; SSE-NEXT: # xmm1 = mem[0,1,1,3]
1094 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1095 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
1096 ; SSE-NEXT: packuswb %xmm1, %xmm1
1097 ; SSE-NEXT: movdqa %xmm6, %xmm9
1098 ; SSE-NEXT: pandn %xmm1, %xmm9
1099 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
1100 ; SSE-NEXT: # xmm8 = mem[0,1,1,3]
1101 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm8[0,1,2,3,4,6,6,7]
1102 ; SSE-NEXT: packuswb %xmm1, %xmm14
1103 ; SSE-NEXT: pand %xmm6, %xmm14
1104 ; SSE-NEXT: por %xmm9, %xmm14
1105 ; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
1106 ; SSE-NEXT: # xmm12 = mem[3,1,2,3]
1107 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm12[0,1,2,0,4,5,6,7]
1108 ; SSE-NEXT: packuswb %xmm1, %xmm1
1109 ; SSE-NEXT: movdqa %xmm4, %xmm9
1110 ; SSE-NEXT: pandn %xmm1, %xmm9
1111 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm15[3,1,2,3]
1112 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm13[2,0,2,3,4,5,6,7]
1113 ; SSE-NEXT: packuswb %xmm1, %xmm1
1114 ; SSE-NEXT: pand %xmm4, %xmm1
1115 ; SSE-NEXT: por %xmm9, %xmm1
1116 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm1[2,2,2,2]
1117 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm14[2],xmm11[3],xmm14[3]
1118 ; SSE-NEXT: movsd {{.*#+}} xmm11 = xmm0[0],xmm11[1]
1119 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1120 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1121 ; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
1122 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1123 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1124 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
1125 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1126 ; SSE-NEXT: packuswb %xmm0, %xmm0
1127 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1128 ; SSE-NEXT: movdqa %xmm10, %xmm9
1129 ; SSE-NEXT: pandn %xmm0, %xmm9
1130 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[1,1,1,1]
1131 ; SSE-NEXT: packuswb %xmm2, %xmm2
1132 ; SSE-NEXT: pand %xmm10, %xmm2
1133 ; SSE-NEXT: por %xmm9, %xmm2
1134 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1135 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1136 ; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
1137 ; SSE-NEXT: packuswb %xmm0, %xmm0
1138 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1139 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm0[0,0,2,3]
1140 ; SSE-NEXT: movdqa %xmm5, %xmm14
1141 ; SSE-NEXT: pandn %xmm9, %xmm14
1142 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1143 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1144 ; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
1145 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1146 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm0[0,1,1,3]
1147 ; SSE-NEXT: packuswb %xmm9, %xmm9
1148 ; SSE-NEXT: pand %xmm5, %xmm9
1149 ; SSE-NEXT: por %xmm14, %xmm9
1150 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[1,1,1,1]
1151 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm9[0],xmm2[1],xmm9[1]
1152 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1153 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1154 ; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
1155 ; SSE-NEXT: packuswb %xmm0, %xmm14
1156 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm14[0,1,2,2]
1157 ; SSE-NEXT: movdqa %xmm6, %xmm15
1158 ; SSE-NEXT: pandn %xmm9, %xmm15
1159 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1160 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1161 ; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
1162 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1163 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm0[0,1,1,3]
1164 ; SSE-NEXT: packuswb %xmm9, %xmm1
1165 ; SSE-NEXT: pand %xmm6, %xmm1
1166 ; SSE-NEXT: por %xmm15, %xmm1
1167 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
1168 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
1169 ; SSE-NEXT: # xmm7 = xmm7[4],mem[4],xmm7[5],mem[5],xmm7[6],mem[6],xmm7[7],mem[7]
1170 ; SSE-NEXT: movdqa (%rsp), %xmm0 # 16-byte Reload
1171 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1172 ; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
1173 ; SSE-NEXT: movdqa %xmm0, (%rsp) # 16-byte Spill
1174 ; SSE-NEXT: packuswb %xmm7, %xmm9
1175 ; SSE-NEXT: movdqa %xmm4, %xmm15
1176 ; SSE-NEXT: pandn %xmm9, %xmm15
1177 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[1,1,1,1]
1178 ; SSE-NEXT: packuswb %xmm7, %xmm7
1179 ; SSE-NEXT: pand %xmm4, %xmm7
1180 ; SSE-NEXT: por %xmm15, %xmm7
1181 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm7[2,2,2,2]
1182 ; SSE-NEXT: punpckhdq {{.*#+}} xmm15 = xmm15[2],xmm1[2],xmm15[3],xmm1[3]
1183 ; SSE-NEXT: movsd {{.*#+}} xmm15 = xmm2[0],xmm15[1]
1184 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm3[0,1,3,1,4,5,6,7]
1185 ; SSE-NEXT: packuswb %xmm0, %xmm0
1186 ; SSE-NEXT: movdqa %xmm10, %xmm1
1187 ; SSE-NEXT: pandn %xmm0, %xmm1
1188 ; SSE-NEXT: pshuflw $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1189 ; SSE-NEXT: # xmm0 = mem[3,1,2,3,4,5,6,7]
1190 ; SSE-NEXT: packuswb %xmm0, %xmm0
1191 ; SSE-NEXT: pand %xmm10, %xmm0
1192 ; SSE-NEXT: por %xmm1, %xmm0
1193 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1194 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
1195 ; SSE-NEXT: packuswb %xmm1, %xmm1
1196 ; SSE-NEXT: movdqa %xmm5, %xmm7
1197 ; SSE-NEXT: pandn %xmm1, %xmm7
1198 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1199 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
1200 ; SSE-NEXT: packuswb %xmm1, %xmm1
1201 ; SSE-NEXT: pand %xmm5, %xmm1
1202 ; SSE-NEXT: por %xmm7, %xmm1
1203 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
1204 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
1205 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1206 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
1207 ; SSE-NEXT: packuswb %xmm1, %xmm1
1208 ; SSE-NEXT: movdqa %xmm6, %xmm7
1209 ; SSE-NEXT: pandn %xmm1, %xmm7
1210 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm8[0,1,2,3,5,7,6,7]
1211 ; SSE-NEXT: packuswb %xmm1, %xmm1
1212 ; SSE-NEXT: pand %xmm6, %xmm1
1213 ; SSE-NEXT: por %xmm7, %xmm1
1214 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm12[0,1,3,1,4,5,6,7]
1215 ; SSE-NEXT: packuswb %xmm7, %xmm7
1216 ; SSE-NEXT: movdqa %xmm4, %xmm8
1217 ; SSE-NEXT: pandn %xmm7, %xmm8
1218 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm13[3,1,2,3,4,5,6,7]
1219 ; SSE-NEXT: packuswb %xmm7, %xmm7
1220 ; SSE-NEXT: pand %xmm4, %xmm7
1221 ; SSE-NEXT: por %xmm8, %xmm7
1222 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,2,2,2]
1223 ; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm1[2],xmm7[3],xmm1[3]
1224 ; SSE-NEXT: movsd {{.*#+}} xmm7 = xmm0[0],xmm7[1]
1225 ; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1226 ; SSE-NEXT: # xmm0 = mem[1,1,2,3]
1227 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1228 ; SSE-NEXT: # xmm1 = mem[3,3,3,3]
1229 ; SSE-NEXT: packuswb %xmm1, %xmm1
1230 ; SSE-NEXT: pand %xmm10, %xmm1
1231 ; SSE-NEXT: pandn %xmm0, %xmm10
1232 ; SSE-NEXT: por %xmm1, %xmm10
1233 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1234 ; SSE-NEXT: # xmm0 = mem[2,2,3,3]
1235 ; SSE-NEXT: packuswb %xmm0, %xmm0
1236 ; SSE-NEXT: pand %xmm5, %xmm0
1237 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
1238 ; SSE-NEXT: por %xmm0, %xmm5
1239 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[1,1,1,1]
1240 ; SSE-NEXT: punpckldq {{.*#+}} xmm10 = xmm10[0],xmm0[0],xmm10[1],xmm0[1]
1241 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1242 ; SSE-NEXT: # xmm0 = mem[2,2,3,3]
1243 ; SSE-NEXT: packuswb %xmm0, %xmm0
1244 ; SSE-NEXT: pand %xmm6, %xmm0
1245 ; SSE-NEXT: pandn %xmm14, %xmm6
1246 ; SSE-NEXT: por %xmm0, %xmm6
1247 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[0,1,3,3]
1248 ; SSE-NEXT: pshufd $255, (%rsp), %xmm1 # 16-byte Folded Reload
1249 ; SSE-NEXT: # xmm1 = mem[3,3,3,3]
1250 ; SSE-NEXT: packuswb %xmm1, %xmm1
1251 ; SSE-NEXT: pand %xmm4, %xmm1
1252 ; SSE-NEXT: pandn %xmm0, %xmm4
1253 ; SSE-NEXT: por %xmm1, %xmm4
1254 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm4[2,2,2,2]
1255 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm6[2],xmm0[3],xmm6[3]
1256 ; SSE-NEXT: movsd {{.*#+}} xmm0 = xmm10[0],xmm0[1]
1257 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1258 ; SSE-NEXT: movaps %xmm1, (%rsi)
1259 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1260 ; SSE-NEXT: movaps %xmm1, (%rdx)
1261 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1262 ; SSE-NEXT: movaps %xmm1, (%rcx)
1263 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1264 ; SSE-NEXT: movaps %xmm1, (%r8)
1265 ; SSE-NEXT: movapd %xmm11, (%r9)
1266 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1267 ; SSE-NEXT: movapd %xmm15, (%rax)
1268 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1269 ; SSE-NEXT: movapd %xmm7, (%rax)
1270 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1271 ; SSE-NEXT: movapd %xmm0, (%rax)
1272 ; SSE-NEXT: addq $328, %rsp # imm = 0x148
1273 ; SSE-NEXT: retq
1274 ;
1275 ; AVX1-ONLY-LABEL: load_i8_stride8_vf16:
1276 ; AVX1-ONLY: # %bb.0:
1277 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
1278 ; AVX1-ONLY-NEXT: vmovdqa 112(%rdi), %xmm8
1279 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm8, %xmm3
1280 ; AVX1-ONLY-NEXT: vmovdqa 96(%rdi), %xmm2
1281 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm2, %xmm0
1282 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
1283 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
1284 ; AVX1-ONLY-NEXT: vmovdqa 80(%rdi), %xmm4
1285 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm4, %xmm6
1286 ; AVX1-ONLY-NEXT: vmovdqa 64(%rdi), %xmm5
1287 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm3
1288 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3]
1289 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0,1,2,3,4,5],xmm0[6,7]
1290 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm9 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
1291 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm1
1292 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm3
1293 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm6
1294 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm7
1295 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm7, %xmm10
1296 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm6, %xmm9
1297 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1298 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm10 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
1299 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm3, %xmm11
1300 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm1, %xmm10
1301 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1302 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm10[0,1],xmm9[2,3],xmm10[4,5,6,7]
1303 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm9[0,1,2,3],xmm0[4,5,6,7]
1304 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1305 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm9 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
1306 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm8, %xmm10
1307 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm9
1308 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1309 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm10 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
1310 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm4, %xmm11
1311 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm5, %xmm10
1312 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1313 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm10[0,1,2,3,4,5],xmm9[6,7]
1314 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm10 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
1315 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm7, %xmm11
1316 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm6, %xmm10
1317 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1318 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm11 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
1319 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm3, %xmm12
1320 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm1, %xmm11
1321 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1322 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm11[0,1],xmm10[2,3],xmm11[4,5,6,7]
1323 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm10[0,1,2,3],xmm9[4,5,6,7]
1324 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1325 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm10 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
1326 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm8, %xmm11
1327 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm2, %xmm10
1328 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1329 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm11 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
1330 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm4, %xmm12
1331 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm5, %xmm11
1332 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1333 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm11[0,1,2,3,4,5],xmm10[6,7]
1334 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm11 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
1335 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm7, %xmm12
1336 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm6, %xmm11
1337 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1338 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm12 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
1339 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm3, %xmm13
1340 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm1, %xmm12
1341 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, %xmm9
1342 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
1343 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm12[0,1],xmm11[2,3],xmm12[4,5,6,7]
1344 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm11[0,1,2,3],xmm10[4,5,6,7]
1345 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm11 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
1346 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm8, %xmm12
1347 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm2, %xmm11
1348 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1349 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm12 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
1350 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm4, %xmm13
1351 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm5, %xmm12
1352 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
1353 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm12[0,1,2,3,4,5],xmm11[6,7]
1354 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm12 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
1355 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm7, %xmm13
1356 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm6, %xmm12
1357 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
1358 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm13 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
1359 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm3, %xmm14
1360 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm1, %xmm13
1361 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
1362 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm13[0,1],xmm12[2,3],xmm13[4,5,6,7]
1363 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm12[0,1,2,3],xmm11[4,5,6,7]
1364 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm12 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
1365 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm8, %xmm13
1366 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm2, %xmm12
1367 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
1368 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm13 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
1369 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm4, %xmm14
1370 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm5, %xmm13
1371 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
1372 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm13[0,1,2,3,4,5],xmm12[6,7]
1373 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm13 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
1374 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm7, %xmm14
1375 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm6, %xmm13
1376 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
1377 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm14 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
1378 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm3, %xmm15
1379 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm1, %xmm14
1380 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
1381 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm14[0,1],xmm13[2,3],xmm14[4,5,6,7]
1382 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm13[0,1,2,3],xmm12[4,5,6,7]
1383 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm13 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
1384 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm8, %xmm14
1385 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm2, %xmm13
1386 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
1387 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm14 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
1388 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm4, %xmm15
1389 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm5, %xmm14
1390 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
1391 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm14[0,1,2,3,4,5],xmm13[6,7]
1392 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm14 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
1393 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm7, %xmm15
1394 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm6, %xmm14
1395 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
1396 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm15 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
1397 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm3, %xmm0
1398 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm1, %xmm15
1399 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
1400 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm14[2,3],xmm0[4,5,6,7]
1401 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm0[0,1,2,3],xmm13[4,5,6,7]
1402 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
1403 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm8, %xmm14
1404 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm2, %xmm0
1405 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm14[0],xmm0[1],xmm14[1],xmm0[2],xmm14[2],xmm0[3],xmm14[3]
1406 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm14 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
1407 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm4, %xmm15
1408 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm5, %xmm14
1409 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
1410 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm14[0,1,2,3,4,5],xmm0[6,7]
1411 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm14 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
1412 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm7, %xmm15
1413 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm6, %xmm14
1414 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
1415 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm15 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
1416 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm3, %xmm0
1417 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm9, %xmm15
1418 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
1419 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm14[2,3],xmm0[4,5,6,7]
1420 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm1[4,5,6,7]
1421 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm1 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
1422 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm8, %xmm8
1423 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm2, %xmm1
1424 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm8[0],xmm1[1],xmm8[1],xmm1[2],xmm8[2],xmm1[3],xmm8[3]
1425 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
1426 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm4, %xmm4
1427 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm2
1428 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
1429 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,5],xmm1[6,7]
1430 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
1431 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm7, %xmm4
1432 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm6, %xmm2
1433 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
1434 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm4 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
1435 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm3, %xmm3
1436 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm9, %xmm4
1437 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
1438 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3],xmm3[4,5,6,7]
1439 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3],xmm1[4,5,6,7]
1440 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1441 ; AVX1-ONLY-NEXT: vmovaps %xmm2, (%rsi)
1442 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1443 ; AVX1-ONLY-NEXT: vmovaps %xmm2, (%rdx)
1444 ; AVX1-ONLY-NEXT: vmovdqa %xmm10, (%rcx)
1445 ; AVX1-ONLY-NEXT: vmovdqa %xmm11, (%r8)
1446 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, (%r9)
1447 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1448 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, (%rax)
1449 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1450 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, (%rax)
1451 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1452 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, (%rax)
1453 ; AVX1-ONLY-NEXT: retq
1454 ;
1455 ; AVX2-ONLY-LABEL: load_i8_stride8_vf16:
1456 ; AVX2-ONLY: # %bb.0:
1457 ; AVX2-ONLY-NEXT: vmovdqa 112(%rdi), %xmm8
1458 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm0 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
1459 ; AVX2-ONLY-NEXT: vpshufb %xmm0, %xmm8, %xmm2
1460 ; AVX2-ONLY-NEXT: vmovdqa 96(%rdi), %xmm3
1461 ; AVX2-ONLY-NEXT: vpshufb %xmm0, %xmm3, %xmm0
1462 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
1463 ; AVX2-ONLY-NEXT: vmovdqa 80(%rdi), %xmm4
1464 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm2 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
1465 ; AVX2-ONLY-NEXT: vpshufb %xmm2, %xmm4, %xmm6
1466 ; AVX2-ONLY-NEXT: vmovdqa 64(%rdi), %xmm5
1467 ; AVX2-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm2
1468 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1],xmm2[2],xmm6[2],xmm2[3],xmm6[3]
1469 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3]
1470 ; AVX2-ONLY-NEXT: vmovdqa (%rdi), %xmm1
1471 ; AVX2-ONLY-NEXT: vmovdqa 16(%rdi), %xmm2
1472 ; AVX2-ONLY-NEXT: vmovdqa 32(%rdi), %xmm6
1473 ; AVX2-ONLY-NEXT: vmovdqa 48(%rdi), %xmm7
1474 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm9 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
1475 ; AVX2-ONLY-NEXT: vpshufb %xmm9, %xmm7, %xmm10
1476 ; AVX2-ONLY-NEXT: vpshufb %xmm9, %xmm6, %xmm9
1477 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1478 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm10 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
1479 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm2, %xmm11
1480 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm1, %xmm10
1481 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1482 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0],xmm9[1],xmm10[2,3]
1483 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm0 = xmm9[0,1],xmm0[2,3]
1484 ; AVX2-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1485 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm9 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
1486 ; AVX2-ONLY-NEXT: vpshufb %xmm9, %xmm8, %xmm10
1487 ; AVX2-ONLY-NEXT: vpshufb %xmm9, %xmm3, %xmm9
1488 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1489 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm10 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
1490 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm4, %xmm11
1491 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm5, %xmm10
1492 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1493 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0,1,2],xmm9[3]
1494 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm10 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
1495 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm7, %xmm11
1496 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm6, %xmm10
1497 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1498 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm11 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
1499 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm2, %xmm12
1500 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm1, %xmm11
1501 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1502 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0],xmm10[1],xmm11[2,3]
1503 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1],xmm9[2,3]
1504 ; AVX2-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1505 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm10 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
1506 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm8, %xmm11
1507 ; AVX2-ONLY-NEXT: vpshufb %xmm10, %xmm3, %xmm10
1508 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1509 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm11 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
1510 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm4, %xmm12
1511 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm5, %xmm11
1512 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1513 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0,1,2],xmm10[3]
1514 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm11 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
1515 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm7, %xmm12
1516 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm6, %xmm11
1517 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1518 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm12 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
1519 ; AVX2-ONLY-NEXT: vpshufb %xmm12, %xmm2, %xmm13
1520 ; AVX2-ONLY-NEXT: vpshufb %xmm12, %xmm1, %xmm12
1521 ; AVX2-ONLY-NEXT: vmovdqa %xmm1, %xmm9
1522 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
1523 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm11 = xmm12[0],xmm11[1],xmm12[2,3]
1524 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0,1],xmm10[2,3]
1525 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm11 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
1526 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm8, %xmm12
1527 ; AVX2-ONLY-NEXT: vpshufb %xmm11, %xmm3, %xmm11
1528 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1529 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm12 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
1530 ; AVX2-ONLY-NEXT: vpshufb %xmm12, %xmm4, %xmm13
1531 ; AVX2-ONLY-NEXT: vpshufb %xmm12, %xmm5, %xmm12
1532 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
1533 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm11 = xmm12[0,1,2],xmm11[3]
1534 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm12 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
1535 ; AVX2-ONLY-NEXT: vpshufb %xmm12, %xmm7, %xmm13
1536 ; AVX2-ONLY-NEXT: vpshufb %xmm12, %xmm6, %xmm12
1537 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
1538 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm13 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
1539 ; AVX2-ONLY-NEXT: vpshufb %xmm13, %xmm2, %xmm14
1540 ; AVX2-ONLY-NEXT: vpshufb %xmm13, %xmm1, %xmm13
1541 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
1542 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm12 = xmm13[0],xmm12[1],xmm13[2,3]
1543 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm11 = xmm12[0,1],xmm11[2,3]
1544 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm12 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
1545 ; AVX2-ONLY-NEXT: vpshufb %xmm12, %xmm8, %xmm13
1546 ; AVX2-ONLY-NEXT: vpshufb %xmm12, %xmm3, %xmm12
1547 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
1548 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm13 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
1549 ; AVX2-ONLY-NEXT: vpshufb %xmm13, %xmm4, %xmm14
1550 ; AVX2-ONLY-NEXT: vpshufb %xmm13, %xmm5, %xmm13
1551 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
1552 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm12 = xmm13[0,1,2],xmm12[3]
1553 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm13 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
1554 ; AVX2-ONLY-NEXT: vpshufb %xmm13, %xmm7, %xmm14
1555 ; AVX2-ONLY-NEXT: vpshufb %xmm13, %xmm6, %xmm13
1556 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
1557 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm14 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
1558 ; AVX2-ONLY-NEXT: vpshufb %xmm14, %xmm2, %xmm15
1559 ; AVX2-ONLY-NEXT: vpshufb %xmm14, %xmm1, %xmm14
1560 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
1561 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm13 = xmm14[0],xmm13[1],xmm14[2,3]
1562 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm12 = xmm13[0,1],xmm12[2,3]
1563 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm13 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
1564 ; AVX2-ONLY-NEXT: vpshufb %xmm13, %xmm8, %xmm14
1565 ; AVX2-ONLY-NEXT: vpshufb %xmm13, %xmm3, %xmm13
1566 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
1567 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm14 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
1568 ; AVX2-ONLY-NEXT: vpshufb %xmm14, %xmm4, %xmm15
1569 ; AVX2-ONLY-NEXT: vpshufb %xmm14, %xmm5, %xmm14
1570 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
1571 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm13 = xmm14[0,1,2],xmm13[3]
1572 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm14 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
1573 ; AVX2-ONLY-NEXT: vpshufb %xmm14, %xmm7, %xmm15
1574 ; AVX2-ONLY-NEXT: vpshufb %xmm14, %xmm6, %xmm14
1575 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
1576 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm15 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
1577 ; AVX2-ONLY-NEXT: vpshufb %xmm15, %xmm2, %xmm0
1578 ; AVX2-ONLY-NEXT: vpshufb %xmm15, %xmm1, %xmm15
1579 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
1580 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm14[1],xmm0[2,3]
1581 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm13 = xmm0[0,1],xmm13[2,3]
1582 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm0 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
1583 ; AVX2-ONLY-NEXT: vpshufb %xmm0, %xmm8, %xmm14
1584 ; AVX2-ONLY-NEXT: vpshufb %xmm0, %xmm3, %xmm0
1585 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm14[0],xmm0[1],xmm14[1],xmm0[2],xmm14[2],xmm0[3],xmm14[3]
1586 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm14 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
1587 ; AVX2-ONLY-NEXT: vpshufb %xmm14, %xmm4, %xmm15
1588 ; AVX2-ONLY-NEXT: vpshufb %xmm14, %xmm5, %xmm14
1589 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
1590 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm1 = xmm14[0,1,2],xmm0[3]
1591 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm14 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
1592 ; AVX2-ONLY-NEXT: vpshufb %xmm14, %xmm7, %xmm15
1593 ; AVX2-ONLY-NEXT: vpshufb %xmm14, %xmm6, %xmm14
1594 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
1595 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm15 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
1596 ; AVX2-ONLY-NEXT: vpshufb %xmm15, %xmm2, %xmm0
1597 ; AVX2-ONLY-NEXT: vpshufb %xmm15, %xmm9, %xmm15
1598 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
1599 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm14[1],xmm0[2,3]
1600 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
1601 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm1 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
1602 ; AVX2-ONLY-NEXT: vpshufb %xmm1, %xmm8, %xmm8
1603 ; AVX2-ONLY-NEXT: vpshufb %xmm1, %xmm3, %xmm1
1604 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm8[0],xmm1[1],xmm8[1],xmm1[2],xmm8[2],xmm1[3],xmm8[3]
1605 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm3 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
1606 ; AVX2-ONLY-NEXT: vpshufb %xmm3, %xmm4, %xmm4
1607 ; AVX2-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm3
1608 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
1609 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
1610 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm3 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
1611 ; AVX2-ONLY-NEXT: vpshufb %xmm3, %xmm7, %xmm4
1612 ; AVX2-ONLY-NEXT: vpshufb %xmm3, %xmm6, %xmm3
1613 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
1614 ; AVX2-ONLY-NEXT: vpbroadcastw {{.*#+}} xmm4 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
1615 ; AVX2-ONLY-NEXT: vpshufb %xmm4, %xmm2, %xmm2
1616 ; AVX2-ONLY-NEXT: vpshufb %xmm4, %xmm9, %xmm4
1617 ; AVX2-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
1618 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm2 = xmm2[0],xmm3[1],xmm2[2,3]
1619 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
1620 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1621 ; AVX2-ONLY-NEXT: vmovaps %xmm2, (%rsi)
1622 ; AVX2-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1623 ; AVX2-ONLY-NEXT: vmovaps %xmm2, (%rdx)
1624 ; AVX2-ONLY-NEXT: vmovdqa %xmm10, (%rcx)
1625 ; AVX2-ONLY-NEXT: vmovdqa %xmm11, (%r8)
1626 ; AVX2-ONLY-NEXT: vmovdqa %xmm12, (%r9)
1627 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1628 ; AVX2-ONLY-NEXT: vmovdqa %xmm13, (%rax)
1629 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1630 ; AVX2-ONLY-NEXT: vmovdqa %xmm0, (%rax)
1631 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1632 ; AVX2-ONLY-NEXT: vmovdqa %xmm1, (%rax)
1633 ; AVX2-ONLY-NEXT: retq
1634 ;
1635 ; AVX512F-LABEL: load_i8_stride8_vf16:
1636 ; AVX512F: # %bb.0:
1637 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
1638 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
1639 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r11
1640 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm2 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
1641 ; AVX512F-NEXT: vmovdqa 112(%rdi), %xmm0
1642 ; AVX512F-NEXT: vpshufb %xmm2, %xmm0, %xmm3
1643 ; AVX512F-NEXT: vmovdqa 96(%rdi), %xmm1
1644 ; AVX512F-NEXT: vpshufb %xmm2, %xmm1, %xmm2
1645 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
1646 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm5 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
1647 ; AVX512F-NEXT: vmovdqa 80(%rdi), %xmm2
1648 ; AVX512F-NEXT: vpshufb %xmm5, %xmm2, %xmm6
1649 ; AVX512F-NEXT: vmovdqa 64(%rdi), %xmm3
1650 ; AVX512F-NEXT: vpshufb %xmm5, %xmm3, %xmm5
1651 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
1652 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
1653 ; AVX512F-NEXT: vmovdqa64 (%rdi), %zmm5
1654 ; AVX512F-NEXT: vpmovqb %zmm5, %xmm6
1655 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0,1],xmm4[2,3]
1656 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm6 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
1657 ; AVX512F-NEXT: vpshufb %xmm6, %xmm0, %xmm7
1658 ; AVX512F-NEXT: vpshufb %xmm6, %xmm1, %xmm6
1659 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
1660 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm7 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
1661 ; AVX512F-NEXT: vpshufb %xmm7, %xmm2, %xmm8
1662 ; AVX512F-NEXT: vpshufb %xmm7, %xmm3, %xmm7
1663 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
1664 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0,1,2],xmm6[3]
1665 ; AVX512F-NEXT: vpsrlq $8, %zmm5, %zmm7
1666 ; AVX512F-NEXT: vpmovqb %zmm7, %xmm7
1667 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0,1],xmm6[2,3]
1668 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm7 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
1669 ; AVX512F-NEXT: vpshufb %xmm7, %xmm0, %xmm8
1670 ; AVX512F-NEXT: vpshufb %xmm7, %xmm1, %xmm7
1671 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
1672 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm8 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
1673 ; AVX512F-NEXT: vpshufb %xmm8, %xmm2, %xmm9
1674 ; AVX512F-NEXT: vpshufb %xmm8, %xmm3, %xmm8
1675 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
1676 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1,2],xmm7[3]
1677 ; AVX512F-NEXT: vpsrlq $16, %zmm5, %zmm8
1678 ; AVX512F-NEXT: vpmovqb %zmm8, %xmm8
1679 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1],xmm7[2,3]
1680 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm8 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
1681 ; AVX512F-NEXT: vpshufb %xmm8, %xmm0, %xmm9
1682 ; AVX512F-NEXT: vpshufb %xmm8, %xmm1, %xmm8
1683 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
1684 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm9 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
1685 ; AVX512F-NEXT: vpshufb %xmm9, %xmm2, %xmm10
1686 ; AVX512F-NEXT: vpshufb %xmm9, %xmm3, %xmm9
1687 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1688 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1,2],xmm8[3]
1689 ; AVX512F-NEXT: vpsrlq $24, %zmm5, %zmm9
1690 ; AVX512F-NEXT: vpmovqb %zmm9, %xmm9
1691 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
1692 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm9 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
1693 ; AVX512F-NEXT: vpshufb %xmm9, %xmm0, %xmm10
1694 ; AVX512F-NEXT: vpshufb %xmm9, %xmm1, %xmm9
1695 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1696 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm10 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
1697 ; AVX512F-NEXT: vpshufb %xmm10, %xmm2, %xmm11
1698 ; AVX512F-NEXT: vpshufb %xmm10, %xmm3, %xmm10
1699 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1700 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0,1,2],xmm9[3]
1701 ; AVX512F-NEXT: vpsrlq $32, %zmm5, %zmm10
1702 ; AVX512F-NEXT: vpmovqb %zmm10, %xmm10
1703 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0,1],xmm9[2,3]
1704 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm10 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
1705 ; AVX512F-NEXT: vpshufb %xmm10, %xmm0, %xmm11
1706 ; AVX512F-NEXT: vpshufb %xmm10, %xmm1, %xmm10
1707 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1708 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm11 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
1709 ; AVX512F-NEXT: vpshufb %xmm11, %xmm2, %xmm12
1710 ; AVX512F-NEXT: vpshufb %xmm11, %xmm3, %xmm11
1711 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1712 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0,1,2],xmm10[3]
1713 ; AVX512F-NEXT: vpsrlq $40, %zmm5, %zmm11
1714 ; AVX512F-NEXT: vpmovqb %zmm11, %xmm11
1715 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0,1],xmm10[2,3]
1716 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm11 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
1717 ; AVX512F-NEXT: vpshufb %xmm11, %xmm0, %xmm12
1718 ; AVX512F-NEXT: vpshufb %xmm11, %xmm1, %xmm11
1719 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1720 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm12 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
1721 ; AVX512F-NEXT: vpshufb %xmm12, %xmm2, %xmm13
1722 ; AVX512F-NEXT: vpshufb %xmm12, %xmm3, %xmm12
1723 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
1724 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm11 = xmm12[0,1,2],xmm11[3]
1725 ; AVX512F-NEXT: vpsrlq $48, %zmm5, %zmm12
1726 ; AVX512F-NEXT: vpmovqb %zmm12, %xmm12
1727 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm11 = xmm12[0,1],xmm11[2,3]
1728 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm12 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
1729 ; AVX512F-NEXT: vpshufb %xmm12, %xmm0, %xmm0
1730 ; AVX512F-NEXT: vpshufb %xmm12, %xmm1, %xmm1
1731 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1732 ; AVX512F-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
1733 ; AVX512F-NEXT: vpshufb %xmm1, %xmm2, %xmm2
1734 ; AVX512F-NEXT: vpshufb %xmm1, %xmm3, %xmm1
1735 ; AVX512F-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
1736 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
1737 ; AVX512F-NEXT: vpsrlq $56, %zmm5, %zmm1
1738 ; AVX512F-NEXT: vpmovqb %zmm1, %xmm1
1739 ; AVX512F-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
1740 ; AVX512F-NEXT: vmovdqa %xmm4, (%rsi)
1741 ; AVX512F-NEXT: vmovdqa %xmm6, (%rdx)
1742 ; AVX512F-NEXT: vmovdqa %xmm7, (%rcx)
1743 ; AVX512F-NEXT: vmovdqa %xmm8, (%r8)
1744 ; AVX512F-NEXT: vmovdqa %xmm9, (%r9)
1745 ; AVX512F-NEXT: vmovdqa %xmm10, (%r11)
1746 ; AVX512F-NEXT: vmovdqa %xmm11, (%r10)
1747 ; AVX512F-NEXT: vmovdqa %xmm0, (%rax)
1748 ; AVX512F-NEXT: vzeroupper
1749 ; AVX512F-NEXT: vzeroupper
1750 ; AVX512F-NEXT: retq
1751 ;
1751 ; AVX512BW-LABEL: load_i8_stride8_vf16:
1752 ; AVX512BW: # %bb.0:
1753 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1754 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1755 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r11
1756 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
1757 ; AVX512BW-NEXT: vmovdqa 112(%rdi), %xmm0
1758 ; AVX512BW-NEXT: vpshufb %xmm2, %xmm0, %xmm3
1759 ; AVX512BW-NEXT: vmovdqa 96(%rdi), %xmm1
1760 ; AVX512BW-NEXT: vpshufb %xmm2, %xmm1, %xmm2
1761 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
1762 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm5 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
1763 ; AVX512BW-NEXT: vmovdqa 80(%rdi), %xmm2
1764 ; AVX512BW-NEXT: vpshufb %xmm5, %xmm2, %xmm6
1765 ; AVX512BW-NEXT: vmovdqa 64(%rdi), %xmm3
1766 ; AVX512BW-NEXT: vpshufb %xmm5, %xmm3, %xmm5
1767 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
1768 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
1769 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm5
1770 ; AVX512BW-NEXT: vpmovqb %zmm5, %xmm6
1771 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0,1],xmm4[2,3]
1772 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm6 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
1773 ; AVX512BW-NEXT: vpshufb %xmm6, %xmm0, %xmm7
1774 ; AVX512BW-NEXT: vpshufb %xmm6, %xmm1, %xmm6
1775 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
1776 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm7 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
1777 ; AVX512BW-NEXT: vpshufb %xmm7, %xmm2, %xmm8
1778 ; AVX512BW-NEXT: vpshufb %xmm7, %xmm3, %xmm7
1779 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
1780 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0,1,2],xmm6[3]
1781 ; AVX512BW-NEXT: vpsrlq $8, %zmm5, %zmm7
1782 ; AVX512BW-NEXT: vpmovqb %zmm7, %xmm7
1783 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0,1],xmm6[2,3]
1784 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm7 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
1785 ; AVX512BW-NEXT: vpshufb %xmm7, %xmm0, %xmm8
1786 ; AVX512BW-NEXT: vpshufb %xmm7, %xmm1, %xmm7
1787 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
1788 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm8 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
1789 ; AVX512BW-NEXT: vpshufb %xmm8, %xmm2, %xmm9
1790 ; AVX512BW-NEXT: vpshufb %xmm8, %xmm3, %xmm8
1791 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
1792 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1,2],xmm7[3]
1793 ; AVX512BW-NEXT: vpsrlq $16, %zmm5, %zmm8
1794 ; AVX512BW-NEXT: vpmovqb %zmm8, %xmm8
1795 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1],xmm7[2,3]
1796 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm8 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
1797 ; AVX512BW-NEXT: vpshufb %xmm8, %xmm0, %xmm9
1798 ; AVX512BW-NEXT: vpshufb %xmm8, %xmm1, %xmm8
1799 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
1800 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm9 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
1801 ; AVX512BW-NEXT: vpshufb %xmm9, %xmm2, %xmm10
1802 ; AVX512BW-NEXT: vpshufb %xmm9, %xmm3, %xmm9
1803 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1804 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1,2],xmm8[3]
1805 ; AVX512BW-NEXT: vpsrlq $24, %zmm5, %zmm9
1806 ; AVX512BW-NEXT: vpmovqb %zmm9, %xmm9
1807 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
1808 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm9 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
1809 ; AVX512BW-NEXT: vpshufb %xmm9, %xmm0, %xmm10
1810 ; AVX512BW-NEXT: vpshufb %xmm9, %xmm1, %xmm9
1811 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
1812 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm10 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
1813 ; AVX512BW-NEXT: vpshufb %xmm10, %xmm2, %xmm11
1814 ; AVX512BW-NEXT: vpshufb %xmm10, %xmm3, %xmm10
1815 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1816 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0,1,2],xmm9[3]
1817 ; AVX512BW-NEXT: vpsrlq $32, %zmm5, %zmm10
1818 ; AVX512BW-NEXT: vpmovqb %zmm10, %xmm10
1819 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0,1],xmm9[2,3]
1820 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm10 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
1821 ; AVX512BW-NEXT: vpshufb %xmm10, %xmm0, %xmm11
1822 ; AVX512BW-NEXT: vpshufb %xmm10, %xmm1, %xmm10
1823 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
1824 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm11 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
1825 ; AVX512BW-NEXT: vpshufb %xmm11, %xmm2, %xmm12
1826 ; AVX512BW-NEXT: vpshufb %xmm11, %xmm3, %xmm11
1827 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1828 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0,1,2],xmm10[3]
1829 ; AVX512BW-NEXT: vpsrlq $40, %zmm5, %zmm11
1830 ; AVX512BW-NEXT: vpmovqb %zmm11, %xmm11
1831 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0,1],xmm10[2,3]
1832 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm11 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
1833 ; AVX512BW-NEXT: vpshufb %xmm11, %xmm0, %xmm12
1834 ; AVX512BW-NEXT: vpshufb %xmm11, %xmm1, %xmm11
1835 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
1836 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm12 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
1837 ; AVX512BW-NEXT: vpshufb %xmm12, %xmm2, %xmm13
1838 ; AVX512BW-NEXT: vpshufb %xmm12, %xmm3, %xmm12
1839 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
1840 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm11 = xmm12[0,1,2],xmm11[3]
1841 ; AVX512BW-NEXT: vpsrlq $48, %zmm5, %zmm12
1842 ; AVX512BW-NEXT: vpmovqb %zmm12, %xmm12
1843 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm11 = xmm12[0,1],xmm11[2,3]
1844 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm12 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
1845 ; AVX512BW-NEXT: vpshufb %xmm12, %xmm0, %xmm0
1846 ; AVX512BW-NEXT: vpshufb %xmm12, %xmm1, %xmm1
1847 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
1848 ; AVX512BW-NEXT: vpbroadcastw {{.*#+}} xmm1 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
1849 ; AVX512BW-NEXT: vpshufb %xmm1, %xmm2, %xmm2
1850 ; AVX512BW-NEXT: vpshufb %xmm1, %xmm3, %xmm1
1851 ; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
1852 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
1853 ; AVX512BW-NEXT: vpsrlq $56, %zmm5, %zmm1
1854 ; AVX512BW-NEXT: vpmovqb %zmm1, %xmm1
1855 ; AVX512BW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
1856 ; AVX512BW-NEXT: vmovdqa %xmm4, (%rsi)
1857 ; AVX512BW-NEXT: vmovdqa %xmm6, (%rdx)
1858 ; AVX512BW-NEXT: vmovdqa %xmm7, (%rcx)
1859 ; AVX512BW-NEXT: vmovdqa %xmm8, (%r8)
1860 ; AVX512BW-NEXT: vmovdqa %xmm9, (%r9)
1861 ; AVX512BW-NEXT: vmovdqa %xmm10, (%r11)
1862 ; AVX512BW-NEXT: vmovdqa %xmm11, (%r10)
1863 ; AVX512BW-NEXT: vmovdqa %xmm0, (%rax)
1864 ; AVX512BW-NEXT: vzeroupper
1865 ; AVX512BW-NEXT: retq
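; The IR below loads a single <128 x i8> vector (sixteen interleaved 8-byte groups) and extracts eight <16 x i8> results; %strided.vecN gathers byte N of every group.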
1866 %wide.vec = load <128 x i8>, ptr %in.vec, align 64
1867 %strided.vec0 = shufflevector <128 x i8> %wide.vec, <128 x i8> poison, <16 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 56, i32 64, i32 72, i32 80, i32 88, i32 96, i32 104, i32 112, i32 120>
1868 %strided.vec1 = shufflevector <128 x i8> %wide.vec, <128 x i8> poison, <16 x i32> <i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 57, i32 65, i32 73, i32 81, i32 89, i32 97, i32 105, i32 113, i32 121>
1869 %strided.vec2 = shufflevector <128 x i8> %wide.vec, <128 x i8> poison, <16 x i32> <i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 50, i32 58, i32 66, i32 74, i32 82, i32 90, i32 98, i32 106, i32 114, i32 122>
1870 %strided.vec3 = shufflevector <128 x i8> %wide.vec, <128 x i8> poison, <16 x i32> <i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 51, i32 59, i32 67, i32 75, i32 83, i32 91, i32 99, i32 107, i32 115, i32 123>
1871 %strided.vec4 = shufflevector <128 x i8> %wide.vec, <128 x i8> poison, <16 x i32> <i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 52, i32 60, i32 68, i32 76, i32 84, i32 92, i32 100, i32 108, i32 116, i32 124>
1872 %strided.vec5 = shufflevector <128 x i8> %wide.vec, <128 x i8> poison, <16 x i32> <i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 53, i32 61, i32 69, i32 77, i32 85, i32 93, i32 101, i32 109, i32 117, i32 125>
1873 %strided.vec6 = shufflevector <128 x i8> %wide.vec, <128 x i8> poison, <16 x i32> <i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 54, i32 62, i32 70, i32 78, i32 86, i32 94, i32 102, i32 110, i32 118, i32 126>
1874 %strided.vec7 = shufflevector <128 x i8> %wide.vec, <128 x i8> poison, <16 x i32> <i32 7, i32 15, i32 23, i32 31, i32 39, i32 47, i32 55, i32 63, i32 71, i32 79, i32 87, i32 95, i32 103, i32 111, i32 119, i32 127>
1875 store <16 x i8> %strided.vec0, ptr %out.vec0, align 64
1876 store <16 x i8> %strided.vec1, ptr %out.vec1, align 64
1877 store <16 x i8> %strided.vec2, ptr %out.vec2, align 64
1878 store <16 x i8> %strided.vec3, ptr %out.vec3, align 64
1879 store <16 x i8> %strided.vec4, ptr %out.vec4, align 64
1880 store <16 x i8> %strided.vec5, ptr %out.vec5, align 64
1881 store <16 x i8> %strided.vec6, ptr %out.vec6, align 64
1882 store <16 x i8> %strided.vec7, ptr %out.vec7, align 64
1883 ret void
1884 }
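; The vf32 variant below applies the same stride-8 deinterleave to a <256 x i8> input, producing eight <32 x i8> outputs (one per byte position within each 8-byte group).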
1886 define void @load_i8_stride8_vf32(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5, ptr %out.vec6, ptr %out.vec7) nounwind {
1887 ; SSE-LABEL: load_i8_stride8_vf32:
1888 ; SSE: # %bb.0:
1889 ; SSE-NEXT: subq $904, %rsp # imm = 0x388
1890 ; SSE-NEXT: movdqa 64(%rdi), %xmm5
1891 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1892 ; SSE-NEXT: movdqa 80(%rdi), %xmm10
1893 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1894 ; SSE-NEXT: movdqa 96(%rdi), %xmm12
1895 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1896 ; SSE-NEXT: movdqa 128(%rdi), %xmm6
1897 ; SSE-NEXT: movdqa 144(%rdi), %xmm13
1898 ; SSE-NEXT: movdqa 160(%rdi), %xmm11
1899 ; SSE-NEXT: movdqa 176(%rdi), %xmm14
1900 ; SSE-NEXT: movdqa 192(%rdi), %xmm2
1901 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1902 ; SSE-NEXT: movdqa 208(%rdi), %xmm7
1903 ; SSE-NEXT: movdqa 224(%rdi), %xmm8
1904 ; SSE-NEXT: movdqa 240(%rdi), %xmm9
1905 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,0,0,0,255,0,0,0]
1906 ; SSE-NEXT: movdqa %xmm9, %xmm0
1907 ; SSE-NEXT: pand %xmm4, %xmm0
1908 ; SSE-NEXT: movdqa %xmm8, %xmm1
1909 ; SSE-NEXT: pand %xmm4, %xmm1
1910 ; SSE-NEXT: packuswb %xmm0, %xmm1
1911 ; SSE-NEXT: packuswb %xmm1, %xmm0
1912 ; SSE-NEXT: movdqa %xmm7, %xmm1
1913 ; SSE-NEXT: pand %xmm4, %xmm1
1914 ; SSE-NEXT: pand %xmm4, %xmm2
1915 ; SSE-NEXT: packuswb %xmm1, %xmm2
1916 ; SSE-NEXT: packuswb %xmm2, %xmm2
1917 ; SSE-NEXT: packuswb %xmm0, %xmm2
1918 ; SSE-NEXT: movdqa %xmm14, %xmm0
1919 ; SSE-NEXT: pand %xmm4, %xmm0
1920 ; SSE-NEXT: movdqa %xmm11, %xmm1
1921 ; SSE-NEXT: pand %xmm4, %xmm1
1922 ; SSE-NEXT: packuswb %xmm0, %xmm1
1923 ; SSE-NEXT: movdqa %xmm13, %xmm0
1924 ; SSE-NEXT: pand %xmm4, %xmm0
1925 ; SSE-NEXT: movdqa %xmm6, %xmm3
1926 ; SSE-NEXT: pand %xmm4, %xmm3
1927 ; SSE-NEXT: packuswb %xmm0, %xmm3
1928 ; SSE-NEXT: movdqa 112(%rdi), %xmm15
1929 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1930 ; SSE-NEXT: packuswb %xmm1, %xmm0
1931 ; SSE-NEXT: packuswb %xmm3, %xmm3
1932 ; SSE-NEXT: packuswb %xmm0, %xmm3
1933 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,3],xmm2[0,3]
1934 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1935 ; SSE-NEXT: movdqa %xmm15, %xmm0
1936 ; SSE-NEXT: pand %xmm4, %xmm0
1937 ; SSE-NEXT: movdqa %xmm12, %xmm1
1938 ; SSE-NEXT: pand %xmm4, %xmm1
1939 ; SSE-NEXT: packuswb %xmm0, %xmm1
1940 ; SSE-NEXT: movdqa %xmm10, %xmm0
1941 ; SSE-NEXT: pand %xmm4, %xmm0
1942 ; SSE-NEXT: movdqa %xmm5, %xmm2
1943 ; SSE-NEXT: pand %xmm4, %xmm2
1944 ; SSE-NEXT: packuswb %xmm0, %xmm2
1945 ; SSE-NEXT: packuswb %xmm1, %xmm0
1946 ; SSE-NEXT: packuswb %xmm2, %xmm2
1947 ; SSE-NEXT: packuswb %xmm0, %xmm2
1948 ; SSE-NEXT: movdqa 48(%rdi), %xmm15
1949 ; SSE-NEXT: movdqa %xmm15, %xmm0
1950 ; SSE-NEXT: pand %xmm4, %xmm0
1951 ; SSE-NEXT: movdqa 32(%rdi), %xmm1
1952 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1953 ; SSE-NEXT: pand %xmm4, %xmm1
1954 ; SSE-NEXT: packuswb %xmm0, %xmm1
1955 ; SSE-NEXT: movdqa 16(%rdi), %xmm10
1956 ; SSE-NEXT: movdqa %xmm10, %xmm0
1957 ; SSE-NEXT: pand %xmm4, %xmm0
1958 ; SSE-NEXT: movdqa (%rdi), %xmm12
1959 ; SSE-NEXT: pand %xmm12, %xmm4
1960 ; SSE-NEXT: packuswb %xmm0, %xmm4
1961 ; SSE-NEXT: packuswb %xmm1, %xmm0
1962 ; SSE-NEXT: packuswb %xmm4, %xmm4
1963 ; SSE-NEXT: packuswb %xmm0, %xmm4
1964 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,3],xmm2[0,3]
1965 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1966 ; SSE-NEXT: pxor %xmm5, %xmm5
1967 ; SSE-NEXT: movdqa %xmm13, %xmm0
1968 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1969 ; SSE-NEXT: movdqa %xmm13, %xmm1
1970 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
1971 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1972 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1],xmm0[2],xmm5[2],xmm0[3],xmm5[3],xmm0[4],xmm5[4],xmm0[5],xmm5[5],xmm0[6],xmm5[6],xmm0[7],xmm5[7]
1973 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1974 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
1975 ; SSE-NEXT: packuswb %xmm0, %xmm0
1976 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1977 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [65535,0,65535,65535,65535,65535,65535,65535]
1978 ; SSE-NEXT: movdqa %xmm13, %xmm1
1979 ; SSE-NEXT: pandn %xmm0, %xmm1
1980 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1981 ; SSE-NEXT: movdqa %xmm6, %xmm2
1982 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm5[8],xmm2[9],xmm5[9],xmm2[10],xmm5[10],xmm2[11],xmm5[11],xmm2[12],xmm5[12],xmm2[13],xmm5[13],xmm2[14],xmm5[14],xmm2[15],xmm5[15]
1983 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1984 ; SSE-NEXT: punpcklbw {{.*#+}} xmm6 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3],xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
1985 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1986 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
1987 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1988 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[1,1,1,1]
1989 ; SSE-NEXT: packuswb %xmm0, %xmm0
1990 ; SSE-NEXT: pand %xmm13, %xmm0
1991 ; SSE-NEXT: por %xmm1, %xmm0
1992 ; SSE-NEXT: movdqa %xmm14, %xmm3
1993 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1994 ; SSE-NEXT: movdqa %xmm14, %xmm1
1995 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
1996 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1997 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3],xmm3[4],xmm5[4],xmm3[5],xmm5[5],xmm3[6],xmm5[6],xmm3[7],xmm5[7]
1998 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1999 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
2000 ; SSE-NEXT: packuswb %xmm3, %xmm3
2001 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2002 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,0,2,3]
2003 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [65535,65535,65535,0,65535,65535,65535,65535]
2004 ; SSE-NEXT: movdqa %xmm14, %xmm4
2005 ; SSE-NEXT: pandn %xmm1, %xmm4
2006 ; SSE-NEXT: movdqa %xmm11, %xmm2
2007 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2008 ; SSE-NEXT: movdqa %xmm11, %xmm1
2009 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
2010 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2011 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3],xmm2[4],xmm5[4],xmm2[5],xmm5[5],xmm2[6],xmm5[6],xmm2[7],xmm5[7]
2012 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2013 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
2014 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2015 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,1,1,3]
2016 ; SSE-NEXT: packuswb %xmm1, %xmm1
2017 ; SSE-NEXT: pand %xmm14, %xmm1
2018 ; SSE-NEXT: por %xmm4, %xmm1
2019 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
2020 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
2021 ; SSE-NEXT: movdqa %xmm9, %xmm1
2022 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2023 ; SSE-NEXT: movdqa %xmm9, %xmm2
2024 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm5[8],xmm2[9],xmm5[9],xmm2[10],xmm5[10],xmm2[11],xmm5[11],xmm2[12],xmm5[12],xmm2[13],xmm5[13],xmm2[14],xmm5[14],xmm2[15],xmm5[15]
2025 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2026 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3],xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
2027 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2028 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
2029 ; SSE-NEXT: packuswb %xmm1, %xmm1
2030 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2031 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,2]
2032 ; SSE-NEXT: movdqa {{.*#+}} xmm11 = [65535,65535,65535,65535,65535,65535,65535,0]
2033 ; SSE-NEXT: movdqa %xmm11, %xmm4
2034 ; SSE-NEXT: pandn %xmm1, %xmm4
2035 ; SSE-NEXT: movdqa %xmm8, %xmm2
2036 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2037 ; SSE-NEXT: movdqa %xmm8, %xmm1
2038 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
2039 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2040 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3],xmm2[4],xmm5[4],xmm2[5],xmm5[5],xmm2[6],xmm5[6],xmm2[7],xmm5[7]
2041 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2042 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
2043 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2044 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,1,1,3]
2045 ; SSE-NEXT: packuswb %xmm1, %xmm1
2046 ; SSE-NEXT: pand %xmm11, %xmm1
2047 ; SSE-NEXT: por %xmm4, %xmm1
2048 ; SSE-NEXT: movdqa %xmm7, %xmm4
2049 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2050 ; SSE-NEXT: movdqa %xmm7, %xmm2
2051 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm5[8],xmm2[9],xmm5[9],xmm2[10],xmm5[10],xmm2[11],xmm5[11],xmm2[12],xmm5[12],xmm2[13],xmm5[13],xmm2[14],xmm5[14],xmm2[15],xmm5[15]
2052 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2053 ; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3],xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
2054 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2055 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
2056 ; SSE-NEXT: packuswb %xmm4, %xmm2
2057 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2058 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [65535,65535,65535,65535,65535,0,65535,65535]
2059 ; SSE-NEXT: movdqa %xmm9, %xmm4
2060 ; SSE-NEXT: pandn %xmm2, %xmm4
2061 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2062 ; SSE-NEXT: movdqa %xmm7, %xmm2
2063 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm5[8],xmm2[9],xmm5[9],xmm2[10],xmm5[10],xmm2[11],xmm5[11],xmm2[12],xmm5[12],xmm2[13],xmm5[13],xmm2[14],xmm5[14],xmm2[15],xmm5[15]
2064 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2065 ; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3],xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
2066 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2067 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm2[0],xmm7[1],xmm2[1],xmm7[2],xmm2[2],xmm7[3],xmm2[3]
2068 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2069 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm7[1,1,1,1]
2070 ; SSE-NEXT: packuswb %xmm8, %xmm8
2071 ; SSE-NEXT: pand %xmm9, %xmm8
2072 ; SSE-NEXT: por %xmm4, %xmm8
2073 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm8[2,2,2,2]
2074 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm1[2],xmm4[3],xmm1[3]
2075 ; SSE-NEXT: movsd {{.*#+}} xmm4 = xmm0[0],xmm4[1]
2076 ; SSE-NEXT: movapd %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2077 ; SSE-NEXT: movdqa %xmm10, (%rsp) # 16-byte Spill
2078 ; SSE-NEXT: movdqa %xmm10, %xmm0
2079 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm5[8],xmm0[9],xmm5[9],xmm0[10],xmm5[10],xmm0[11],xmm5[11],xmm0[12],xmm5[12],xmm0[13],xmm5[13],xmm0[14],xmm5[14],xmm0[15],xmm5[15]
2080 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2081 ; SSE-NEXT: punpcklbw {{.*#+}} xmm10 = xmm10[0],xmm5[0],xmm10[1],xmm5[1],xmm10[2],xmm5[2],xmm10[3],xmm5[3],xmm10[4],xmm5[4],xmm10[5],xmm5[5],xmm10[6],xmm5[6],xmm10[7],xmm5[7]
2082 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2083 ; SSE-NEXT: movdqa %xmm10, %xmm8
2084 ; SSE-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm0[0],xmm8[1],xmm0[1],xmm8[2],xmm0[2],xmm8[3],xmm0[3]
2085 ; SSE-NEXT: packuswb %xmm8, %xmm8
2086 ; SSE-NEXT: movdqa %xmm13, %xmm1
2087 ; SSE-NEXT: pandn %xmm8, %xmm1
2088 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2089 ; SSE-NEXT: movdqa %xmm12, %xmm2
2090 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm5[8],xmm2[9],xmm5[9],xmm2[10],xmm5[10],xmm2[11],xmm5[11],xmm2[12],xmm5[12],xmm2[13],xmm5[13],xmm2[14],xmm5[14],xmm2[15],xmm5[15]
2091 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2092 ; SSE-NEXT: punpcklbw {{.*#+}} xmm12 = xmm12[0],xmm5[0],xmm12[1],xmm5[1],xmm12[2],xmm5[2],xmm12[3],xmm5[3],xmm12[4],xmm5[4],xmm12[5],xmm5[5],xmm12[6],xmm5[6],xmm12[7],xmm5[7]
2093 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2094 ; SSE-NEXT: punpcklwd {{.*#+}} xmm12 = xmm12[0],xmm2[0],xmm12[1],xmm2[1],xmm12[2],xmm2[2],xmm12[3],xmm2[3]
2095 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2096 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm12[1,1,1,1]
2097 ; SSE-NEXT: packuswb %xmm6, %xmm6
2098 ; SSE-NEXT: pand %xmm13, %xmm6
2099 ; SSE-NEXT: por %xmm1, %xmm6
2100 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2101 ; SSE-NEXT: movdqa %xmm15, %xmm1
2102 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
2103 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2104 ; SSE-NEXT: punpcklbw {{.*#+}} xmm15 = xmm15[0],xmm5[0],xmm15[1],xmm5[1],xmm15[2],xmm5[2],xmm15[3],xmm5[3],xmm15[4],xmm5[4],xmm15[5],xmm5[5],xmm15[6],xmm5[6],xmm15[7],xmm5[7]
2105 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2106 ; SSE-NEXT: movdqa %xmm15, %xmm4
2107 ; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
2108 ; SSE-NEXT: packuswb %xmm4, %xmm4
2109 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[0,0,2,3]
2110 ; SSE-NEXT: movdqa %xmm14, %xmm12
2111 ; SSE-NEXT: pandn %xmm1, %xmm12
2112 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2113 ; SSE-NEXT: movdqa %xmm2, %xmm1
2114 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
2115 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2116 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3],xmm2[4],xmm5[4],xmm2[5],xmm5[5],xmm2[6],xmm5[6],xmm2[7],xmm5[7]
2117 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2118 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
2119 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2120 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,1,1,3]
2121 ; SSE-NEXT: packuswb %xmm1, %xmm1
2122 ; SSE-NEXT: pand %xmm14, %xmm1
2123 ; SSE-NEXT: por %xmm12, %xmm1
2124 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
2125 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm1[0],xmm6[1],xmm1[1]
2126 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2127 ; SSE-NEXT: movdqa %xmm1, %xmm2
2128 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm5[8],xmm2[9],xmm5[9],xmm2[10],xmm5[10],xmm2[11],xmm5[11],xmm2[12],xmm5[12],xmm2[13],xmm5[13],xmm2[14],xmm5[14],xmm2[15],xmm5[15]
2129 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2130 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3],xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
2131 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2132 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
2133 ; SSE-NEXT: packuswb %xmm1, %xmm0
2134 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2135 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm0[0,1,2,2]
2136 ; SSE-NEXT: movdqa %xmm11, %xmm7
2137 ; SSE-NEXT: pandn %xmm1, %xmm7
2138 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
2139 ; SSE-NEXT: movdqa %xmm12, %xmm1
2140 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
2141 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2142 ; SSE-NEXT: punpcklbw {{.*#+}} xmm12 = xmm12[0],xmm5[0],xmm12[1],xmm5[1],xmm12[2],xmm5[2],xmm12[3],xmm5[3],xmm12[4],xmm5[4],xmm12[5],xmm5[5],xmm12[6],xmm5[6],xmm12[7],xmm5[7]
2143 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2144 ; SSE-NEXT: punpcklwd {{.*#+}} xmm12 = xmm12[0],xmm1[0],xmm12[1],xmm1[1],xmm12[2],xmm1[2],xmm12[3],xmm1[3]
2145 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm12[0,1,1,3]
2146 ; SSE-NEXT: packuswb %xmm1, %xmm3
2147 ; SSE-NEXT: pand %xmm11, %xmm3
2148 ; SSE-NEXT: por %xmm7, %xmm3
2149 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2150 ; SSE-NEXT: movdqa %xmm0, %xmm2
2151 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm5[8],xmm2[9],xmm5[9],xmm2[10],xmm5[10],xmm2[11],xmm5[11],xmm2[12],xmm5[12],xmm2[13],xmm5[13],xmm2[14],xmm5[14],xmm2[15],xmm5[15]
2152 ; SSE-NEXT: movdqa %xmm2, %xmm7
2153 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2154 ; SSE-NEXT: movdqa %xmm0, %xmm2
2155 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3],xmm2[4],xmm5[4],xmm2[5],xmm5[5],xmm2[6],xmm5[6],xmm2[7],xmm5[7]
2156 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2157 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2158 ; SSE-NEXT: movdqa %xmm0, %xmm1
2159 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
2160 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2161 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1],xmm0[2],xmm5[2],xmm0[3],xmm5[3],xmm0[4],xmm5[4],xmm0[5],xmm5[5],xmm0[6],xmm5[6],xmm0[7],xmm5[7]
2162 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2163 ; SSE-NEXT: movdqa %xmm2, %xmm5
2164 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1],xmm5[2],xmm7[2],xmm5[3],xmm7[3]
2165 ; SSE-NEXT: packuswb %xmm5, %xmm2
2166 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2167 ; SSE-NEXT: movdqa %xmm9, %xmm7
2168 ; SSE-NEXT: pandn %xmm2, %xmm7
2169 ; SSE-NEXT: movdqa %xmm0, %xmm5
2170 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
2171 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm5[1,1,1,1]
2172 ; SSE-NEXT: packuswb %xmm15, %xmm15
2173 ; SSE-NEXT: pand %xmm9, %xmm15
2174 ; SSE-NEXT: por %xmm7, %xmm15
2175 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm15[2,2,2,2]
2176 ; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm3[2],xmm7[3],xmm3[3]
2177 ; SSE-NEXT: movsd {{.*#+}} xmm7 = xmm6[0],xmm7[1]
2178 ; SSE-NEXT: movapd %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2179 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,255,255,255,255]
2180 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2181 ; SSE-NEXT: pand %xmm2, %xmm10
2182 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm10[0,2,2,3]
2183 ; SSE-NEXT: movdqa %xmm10, %xmm3
2184 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,1,3,4,5,6,7]
2185 ; SSE-NEXT: packuswb %xmm0, %xmm0
2186 ; SSE-NEXT: movdqa %xmm13, %xmm7
2187 ; SSE-NEXT: pandn %xmm0, %xmm7
2188 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2189 ; SSE-NEXT: pand %xmm2, %xmm10
2190 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm10[0,2,2,3]
2191 ; SSE-NEXT: movdqa %xmm10, %xmm6
2192 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm0[1,3,2,3,4,5,6,7]
2193 ; SSE-NEXT: packuswb %xmm1, %xmm1
2194 ; SSE-NEXT: pand %xmm13, %xmm1
2195 ; SSE-NEXT: por %xmm7, %xmm1
2196 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2197 ; SSE-NEXT: pand %xmm2, %xmm0
2198 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2199 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[0,1,2,0]
2200 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5,7,5]
2201 ; SSE-NEXT: packuswb %xmm7, %xmm7
2202 ; SSE-NEXT: movdqa %xmm14, %xmm15
2203 ; SSE-NEXT: pandn %xmm7, %xmm15
2204 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2205 ; SSE-NEXT: pand %xmm2, %xmm0
2206 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2207 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[0,1,2,0]
2208 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,7,5,6,7]
2209 ; SSE-NEXT: packuswb %xmm7, %xmm7
2210 ; SSE-NEXT: pand %xmm14, %xmm7
2211 ; SSE-NEXT: por %xmm15, %xmm7
2212 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,1,1,1]
2213 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1]
2214 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2215 ; SSE-NEXT: pand %xmm2, %xmm0
2216 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2217 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[0,1,2,0]
2218 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5,7,5]
2219 ; SSE-NEXT: packuswb %xmm7, %xmm7
2220 ; SSE-NEXT: movdqa %xmm11, %xmm15
2221 ; SSE-NEXT: pandn %xmm7, %xmm15
2222 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2223 ; SSE-NEXT: pand %xmm2, %xmm0
2224 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2225 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[0,1,2,0]
2226 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,7,5,6,7]
2227 ; SSE-NEXT: packuswb %xmm7, %xmm7
2228 ; SSE-NEXT: pand %xmm11, %xmm7
2229 ; SSE-NEXT: por %xmm15, %xmm7
2230 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2231 ; SSE-NEXT: pand %xmm2, %xmm0
2232 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2233 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm0[0,2,2,3]
2234 ; SSE-NEXT: pshuflw {{.*#+}} xmm15 = xmm15[0,1,1,3,4,5,6,7]
2235 ; SSE-NEXT: packuswb %xmm15, %xmm15
2236 ; SSE-NEXT: movdqa %xmm9, %xmm10
2237 ; SSE-NEXT: pandn %xmm15, %xmm10
2238 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2239 ; SSE-NEXT: pand %xmm2, %xmm0
2240 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2241 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm0[0,2,2,3]
2242 ; SSE-NEXT: pshuflw {{.*#+}} xmm15 = xmm15[1,3,2,3,4,5,6,7]
2243 ; SSE-NEXT: packuswb %xmm15, %xmm15
2244 ; SSE-NEXT: pand %xmm9, %xmm15
2245 ; SSE-NEXT: por %xmm10, %xmm15
2246 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm15[2,2,2,2]
2247 ; SSE-NEXT: punpckhdq {{.*#+}} xmm10 = xmm10[2],xmm7[2],xmm10[3],xmm7[3]
2248 ; SSE-NEXT: movsd {{.*#+}} xmm10 = xmm1[0],xmm10[1]
2249 ; SSE-NEXT: movapd %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2250 ; SSE-NEXT: movdqa (%rsp), %xmm0 # 16-byte Reload
2251 ; SSE-NEXT: pand %xmm2, %xmm0
2252 ; SSE-NEXT: movdqa %xmm0, (%rsp) # 16-byte Spill
2253 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
2254 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,1,3,4,5,6,7]
2255 ; SSE-NEXT: packuswb %xmm0, %xmm0
2256 ; SSE-NEXT: movdqa %xmm13, %xmm7
2257 ; SSE-NEXT: pandn %xmm0, %xmm7
2258 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2259 ; SSE-NEXT: pand %xmm2, %xmm0
2260 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2261 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
2262 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[1,3,2,3,4,5,6,7]
2263 ; SSE-NEXT: packuswb %xmm0, %xmm0
2264 ; SSE-NEXT: pand %xmm13, %xmm0
2265 ; SSE-NEXT: por %xmm7, %xmm0
2266 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2267 ; SSE-NEXT: pand %xmm2, %xmm1
2268 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2269 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm1[0,1,2,0]
2270 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5,7,5]
2271 ; SSE-NEXT: packuswb %xmm7, %xmm7
2272 ; SSE-NEXT: movdqa %xmm14, %xmm10
2273 ; SSE-NEXT: pandn %xmm7, %xmm10
2274 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2275 ; SSE-NEXT: pand %xmm2, %xmm1
2276 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2277 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm1[0,1,2,0]
2278 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,7,5,6,7]
2279 ; SSE-NEXT: packuswb %xmm7, %xmm7
2280 ; SSE-NEXT: pand %xmm14, %xmm7
2281 ; SSE-NEXT: por %xmm10, %xmm7
2282 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,1,1,1]
2283 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm7[0],xmm0[1],xmm7[1]
2284 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2285 ; SSE-NEXT: pand %xmm2, %xmm1
2286 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2287 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm1[0,1,2,0]
2288 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5,7,5]
2289 ; SSE-NEXT: packuswb %xmm7, %xmm7
2290 ; SSE-NEXT: movdqa %xmm11, %xmm10
2291 ; SSE-NEXT: pandn %xmm7, %xmm10
2292 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2293 ; SSE-NEXT: pand %xmm2, %xmm1
2294 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2295 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm1[0,1,2,0]
2296 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,7,5,6,7]
2297 ; SSE-NEXT: packuswb %xmm7, %xmm7
2298 ; SSE-NEXT: pand %xmm11, %xmm7
2299 ; SSE-NEXT: por %xmm10, %xmm7
2300 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2301 ; SSE-NEXT: pand %xmm2, %xmm1
2302 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2303 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm1[0,2,2,3]
2304 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm10[0,1,1,3,4,5,6,7]
2305 ; SSE-NEXT: packuswb %xmm10, %xmm10
2306 ; SSE-NEXT: movdqa %xmm9, %xmm15
2307 ; SSE-NEXT: pandn %xmm10, %xmm15
2308 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2309 ; SSE-NEXT: pand %xmm2, %xmm1
2310 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2311 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
2312 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,3,2,3,4,5,6,7]
2313 ; SSE-NEXT: packuswb %xmm1, %xmm1
2314 ; SSE-NEXT: pand %xmm9, %xmm1
2315 ; SSE-NEXT: por %xmm15, %xmm1
2316 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
2317 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm7[2],xmm1[3],xmm7[3]
2318 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
2319 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2320 ; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2321 ; SSE-NEXT: # xmm0 = mem[1,1,2,3]
2322 ; SSE-NEXT: movdqa %xmm13, %xmm1
2323 ; SSE-NEXT: pandn %xmm0, %xmm1
2324 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2325 ; SSE-NEXT: # xmm0 = mem[3,3,3,3]
2326 ; SSE-NEXT: packuswb %xmm0, %xmm0
2327 ; SSE-NEXT: pand %xmm13, %xmm0
2328 ; SSE-NEXT: por %xmm1, %xmm0
2329 ; SSE-NEXT: movdqa %xmm14, %xmm1
2330 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2331 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
2332 ; SSE-NEXT: # xmm7 = mem[2,2,3,3]
2333 ; SSE-NEXT: packuswb %xmm7, %xmm7
2334 ; SSE-NEXT: pand %xmm14, %xmm7
2335 ; SSE-NEXT: por %xmm1, %xmm7
2336 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[1,1,1,1]
2337 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
2338 ; SSE-NEXT: movdqa %xmm11, %xmm1
2339 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2340 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
2341 ; SSE-NEXT: # xmm7 = mem[2,2,3,3]
2342 ; SSE-NEXT: packuswb %xmm7, %xmm7
2343 ; SSE-NEXT: pand %xmm11, %xmm7
2344 ; SSE-NEXT: por %xmm1, %xmm7
2345 ; SSE-NEXT: pshufd $244, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2346 ; SSE-NEXT: # xmm1 = mem[0,1,3,3]
2347 ; SSE-NEXT: movdqa %xmm9, %xmm10
2348 ; SSE-NEXT: pandn %xmm1, %xmm10
2349 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2350 ; SSE-NEXT: # xmm1 = mem[3,3,3,3]
2351 ; SSE-NEXT: packuswb %xmm1, %xmm1
2352 ; SSE-NEXT: pand %xmm9, %xmm1
2353 ; SSE-NEXT: por %xmm10, %xmm1
2354 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
2355 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm7[2],xmm1[3],xmm7[3]
2356 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
2357 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2358 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm8[1,1,2,3]
2359 ; SSE-NEXT: movdqa %xmm13, %xmm1
2360 ; SSE-NEXT: pandn %xmm0, %xmm1
2361 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2362 ; SSE-NEXT: # xmm0 = mem[3,3,3,3]
2363 ; SSE-NEXT: packuswb %xmm0, %xmm0
2364 ; SSE-NEXT: pand %xmm13, %xmm0
2365 ; SSE-NEXT: por %xmm1, %xmm0
2366 ; SSE-NEXT: movdqa %xmm14, %xmm1
2367 ; SSE-NEXT: pandn %xmm4, %xmm1
2368 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
2369 ; SSE-NEXT: # xmm4 = mem[2,2,3,3]
2370 ; SSE-NEXT: packuswb %xmm4, %xmm4
2371 ; SSE-NEXT: pand %xmm14, %xmm4
2372 ; SSE-NEXT: por %xmm1, %xmm4
2373 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[1,1,1,1]
2374 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
2375 ; SSE-NEXT: movdqa %xmm11, %xmm1
2376 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2377 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm12[2,2,3,3]
2378 ; SSE-NEXT: packuswb %xmm4, %xmm4
2379 ; SSE-NEXT: pand %xmm11, %xmm4
2380 ; SSE-NEXT: por %xmm1, %xmm4
2381 ; SSE-NEXT: pshufd $244, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2382 ; SSE-NEXT: # xmm1 = mem[0,1,3,3]
2383 ; SSE-NEXT: movdqa %xmm9, %xmm7
2384 ; SSE-NEXT: pandn %xmm1, %xmm7
2385 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[3,3,3,3]
2386 ; SSE-NEXT: packuswb %xmm1, %xmm1
2387 ; SSE-NEXT: pand %xmm9, %xmm1
2388 ; SSE-NEXT: por %xmm7, %xmm1
2389 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
2390 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm4[2],xmm1[3],xmm4[3]
2391 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
2392 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2393 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm3[3,1,2,3]
2394 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2395 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,2,0,4,5,6,7]
2396 ; SSE-NEXT: packuswb %xmm0, %xmm0
2397 ; SSE-NEXT: movdqa %xmm13, %xmm1
2398 ; SSE-NEXT: pandn %xmm0, %xmm1
2399 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[3,1,2,3]
2400 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2401 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[2,0,2,3,4,5,6,7]
2402 ; SSE-NEXT: packuswb %xmm0, %xmm0
2403 ; SSE-NEXT: pand %xmm13, %xmm0
2404 ; SSE-NEXT: por %xmm1, %xmm0
2405 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2406 ; SSE-NEXT: # xmm1 = mem[0,1,1,3]
2407 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2408 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
2409 ; SSE-NEXT: packuswb %xmm1, %xmm1
2410 ; SSE-NEXT: movdqa %xmm14, %xmm4
2411 ; SSE-NEXT: pandn %xmm1, %xmm4
2412 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2413 ; SSE-NEXT: # xmm1 = mem[0,1,1,3]
2414 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2415 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
2416 ; SSE-NEXT: packuswb %xmm1, %xmm1
2417 ; SSE-NEXT: pand %xmm14, %xmm1
2418 ; SSE-NEXT: por %xmm4, %xmm1
2419 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
2420 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
2421 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2422 ; SSE-NEXT: # xmm1 = mem[0,1,1,3]
2423 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2424 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
2425 ; SSE-NEXT: packuswb %xmm1, %xmm1
2426 ; SSE-NEXT: movdqa %xmm11, %xmm4
2427 ; SSE-NEXT: pandn %xmm1, %xmm4
2428 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2429 ; SSE-NEXT: # xmm1 = mem[0,1,1,3]
2430 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2431 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
2432 ; SSE-NEXT: packuswb %xmm1, %xmm1
2433 ; SSE-NEXT: pand %xmm11, %xmm1
2434 ; SSE-NEXT: por %xmm4, %xmm1
2435 ; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2436 ; SSE-NEXT: # xmm2 = mem[3,1,2,3]
2437 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2438 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm2[0,1,2,0,4,5,6,7]
2439 ; SSE-NEXT: packuswb %xmm4, %xmm4
2440 ; SSE-NEXT: movdqa %xmm9, %xmm5
2441 ; SSE-NEXT: pandn %xmm4, %xmm5
2442 ; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2443 ; SSE-NEXT: # xmm2 = mem[3,1,2,3]
2444 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2445 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm2[2,0,2,3,4,5,6,7]
2446 ; SSE-NEXT: packuswb %xmm4, %xmm4
2447 ; SSE-NEXT: pand %xmm9, %xmm4
2448 ; SSE-NEXT: por %xmm5, %xmm4
2449 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm4[2,2,2,2]
2450 ; SSE-NEXT: punpckhdq {{.*#+}} xmm6 = xmm6[2],xmm1[2],xmm6[3],xmm1[3]
2451 ; SSE-NEXT: movsd {{.*#+}} xmm6 = xmm0[0],xmm6[1]
2452 ; SSE-NEXT: pshufd $231, (%rsp), %xmm0 # 16-byte Folded Reload
2453 ; SSE-NEXT: # xmm0 = mem[3,1,2,3]
2454 ; SSE-NEXT: movdqa %xmm0, (%rsp) # 16-byte Spill
2455 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,2,0,4,5,6,7]
2456 ; SSE-NEXT: packuswb %xmm0, %xmm0
2457 ; SSE-NEXT: movdqa %xmm13, %xmm1
2458 ; SSE-NEXT: pandn %xmm0, %xmm1
2459 ; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2460 ; SSE-NEXT: # xmm0 = mem[3,1,2,3]
2461 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2462 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[2,0,2,3,4,5,6,7]
2463 ; SSE-NEXT: packuswb %xmm0, %xmm0
2464 ; SSE-NEXT: pand %xmm13, %xmm0
2465 ; SSE-NEXT: por %xmm1, %xmm0
2466 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2467 ; SSE-NEXT: # xmm1 = mem[0,1,1,3]
2468 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2469 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
2470 ; SSE-NEXT: packuswb %xmm1, %xmm1
2471 ; SSE-NEXT: movdqa %xmm14, %xmm4
2472 ; SSE-NEXT: pandn %xmm1, %xmm4
2473 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2474 ; SSE-NEXT: # xmm1 = mem[0,1,1,3]
2475 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2476 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
2477 ; SSE-NEXT: packuswb %xmm1, %xmm1
2478 ; SSE-NEXT: pand %xmm14, %xmm1
2479 ; SSE-NEXT: por %xmm4, %xmm1
2480 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
2481 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
2482 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2483 ; SSE-NEXT: # xmm1 = mem[0,1,1,3]
2484 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2485 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
2486 ; SSE-NEXT: packuswb %xmm1, %xmm1
2487 ; SSE-NEXT: movdqa %xmm11, %xmm4
2488 ; SSE-NEXT: pandn %xmm1, %xmm4
2489 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2490 ; SSE-NEXT: # xmm1 = mem[0,1,1,3]
2491 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2492 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
2493 ; SSE-NEXT: packuswb %xmm1, %xmm1
2494 ; SSE-NEXT: pand %xmm11, %xmm1
2495 ; SSE-NEXT: por %xmm4, %xmm1
2496 ; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2497 ; SSE-NEXT: # xmm2 = mem[3,1,2,3]
2498 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2499 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm2[0,1,2,0,4,5,6,7]
2500 ; SSE-NEXT: packuswb %xmm4, %xmm4
2501 ; SSE-NEXT: movdqa %xmm9, %xmm5
2502 ; SSE-NEXT: pandn %xmm4, %xmm5
2503 ; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
2504 ; SSE-NEXT: # xmm12 = mem[3,1,2,3]
2505 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm12[2,0,2,3,4,5,6,7]
2506 ; SSE-NEXT: packuswb %xmm4, %xmm4
2507 ; SSE-NEXT: pand %xmm9, %xmm4
2508 ; SSE-NEXT: por %xmm5, %xmm4
2509 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm4[2,2,2,2]
2510 ; SSE-NEXT: punpckhdq {{.*#+}} xmm10 = xmm10[2],xmm1[2],xmm10[3],xmm1[3]
2511 ; SSE-NEXT: movsd {{.*#+}} xmm10 = xmm0[0],xmm10[1]
2512 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2513 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2514 ; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
2515 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2516 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2517 ; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
2518 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2519 ; SSE-NEXT: packuswb %xmm0, %xmm0
2520 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2521 ; SSE-NEXT: movdqa %xmm13, %xmm1
2522 ; SSE-NEXT: pandn %xmm0, %xmm1
2523 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,1,1]
2524 ; SSE-NEXT: packuswb %xmm0, %xmm0
2525 ; SSE-NEXT: pand %xmm13, %xmm0
2526 ; SSE-NEXT: por %xmm1, %xmm0
2527 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2528 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2529 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
2530 ; SSE-NEXT: packuswb %xmm1, %xmm1
2531 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2532 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,3]
2533 ; SSE-NEXT: movdqa %xmm14, %xmm4
2534 ; SSE-NEXT: pandn %xmm1, %xmm4
2535 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2536 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2537 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
2538 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2539 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
2540 ; SSE-NEXT: packuswb %xmm1, %xmm1
2541 ; SSE-NEXT: pand %xmm14, %xmm1
2542 ; SSE-NEXT: por %xmm4, %xmm1
2543 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
2544 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
2545 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2546 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2547 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
2548 ; SSE-NEXT: packuswb %xmm1, %xmm1
2549 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2550 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,2]
2551 ; SSE-NEXT: movdqa %xmm11, %xmm4
2552 ; SSE-NEXT: pandn %xmm1, %xmm4
2553 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2554 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2555 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
2556 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2557 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
2558 ; SSE-NEXT: packuswb %xmm1, %xmm1
2559 ; SSE-NEXT: pand %xmm11, %xmm1
2560 ; SSE-NEXT: por %xmm4, %xmm1
2561 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
2562 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
2563 ; SSE-NEXT: # xmm3 = xmm3[4],mem[4],xmm3[5],mem[5],xmm3[6],mem[6],xmm3[7],mem[7]
2564 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2565 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2566 ; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
2567 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2568 ; SSE-NEXT: packuswb %xmm3, %xmm3
2569 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2570 ; SSE-NEXT: movdqa %xmm9, %xmm4
2571 ; SSE-NEXT: pandn %xmm3, %xmm4
2572 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[1,1,1,1]
2573 ; SSE-NEXT: packuswb %xmm5, %xmm5
2574 ; SSE-NEXT: pand %xmm9, %xmm5
2575 ; SSE-NEXT: por %xmm4, %xmm5
2576 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm5[2,2,2,2]
2577 ; SSE-NEXT: punpckhdq {{.*#+}} xmm8 = xmm8[2],xmm1[2],xmm8[3],xmm1[3]
2578 ; SSE-NEXT: movsd {{.*#+}} xmm8 = xmm0[0],xmm8[1]
2579 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2580 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2581 ; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
2582 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2583 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2584 ; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
2585 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2586 ; SSE-NEXT: packuswb %xmm0, %xmm0
2587 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2588 ; SSE-NEXT: movdqa %xmm13, %xmm1
2589 ; SSE-NEXT: pandn %xmm0, %xmm1
2590 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,1,1]
2591 ; SSE-NEXT: packuswb %xmm0, %xmm0
2592 ; SSE-NEXT: pand %xmm13, %xmm0
2593 ; SSE-NEXT: por %xmm1, %xmm0
2594 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2595 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2596 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
2597 ; SSE-NEXT: packuswb %xmm1, %xmm1
2598 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2599 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,3]
2600 ; SSE-NEXT: movdqa %xmm14, %xmm4
2601 ; SSE-NEXT: pandn %xmm1, %xmm4
2602 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2603 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2604 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
2605 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2606 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
2607 ; SSE-NEXT: packuswb %xmm1, %xmm1
2608 ; SSE-NEXT: pand %xmm14, %xmm1
2609 ; SSE-NEXT: por %xmm4, %xmm1
2610 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
2611 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
2612 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2613 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2614 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
2615 ; SSE-NEXT: packuswb %xmm1, %xmm7
2616 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[0,1,2,2]
2617 ; SSE-NEXT: movdqa %xmm11, %xmm4
2618 ; SSE-NEXT: pandn %xmm1, %xmm4
2619 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2620 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2621 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
2622 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2623 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
2624 ; SSE-NEXT: packuswb %xmm1, %xmm1
2625 ; SSE-NEXT: pand %xmm11, %xmm1
2626 ; SSE-NEXT: por %xmm4, %xmm1
2627 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
2628 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
2629 ; SSE-NEXT: # xmm3 = xmm3[4],mem[4],xmm3[5],mem[5],xmm3[6],mem[6],xmm3[7],mem[7]
2630 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2631 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2632 ; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
2633 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2634 ; SSE-NEXT: packuswb %xmm3, %xmm5
2635 ; SSE-NEXT: movdqa %xmm9, %xmm4
2636 ; SSE-NEXT: pandn %xmm5, %xmm4
2637 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm2[1,1,1,1]
2638 ; SSE-NEXT: packuswb %xmm15, %xmm15
2639 ; SSE-NEXT: pand %xmm9, %xmm15
2640 ; SSE-NEXT: por %xmm4, %xmm15
2641 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm15[2,2,2,2]
2642 ; SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm1[2],xmm4[3],xmm1[3]
2643 ; SSE-NEXT: movsd {{.*#+}} xmm4 = xmm0[0],xmm4[1]
2644 ; SSE-NEXT: pshuflw $116, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2645 ; SSE-NEXT: # xmm0 = mem[0,1,3,1,4,5,6,7]
2646 ; SSE-NEXT: packuswb %xmm0, %xmm0
2647 ; SSE-NEXT: movdqa %xmm13, %xmm1
2648 ; SSE-NEXT: pandn %xmm0, %xmm1
2649 ; SSE-NEXT: pshuflw $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2650 ; SSE-NEXT: # xmm0 = mem[3,1,2,3,4,5,6,7]
2651 ; SSE-NEXT: packuswb %xmm0, %xmm0
2652 ; SSE-NEXT: pand %xmm13, %xmm0
2653 ; SSE-NEXT: por %xmm1, %xmm0
2654 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2655 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
2656 ; SSE-NEXT: packuswb %xmm1, %xmm1
2657 ; SSE-NEXT: movdqa %xmm14, %xmm15
2658 ; SSE-NEXT: pandn %xmm1, %xmm15
2659 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2660 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
2661 ; SSE-NEXT: packuswb %xmm1, %xmm1
2662 ; SSE-NEXT: pand %xmm14, %xmm1
2663 ; SSE-NEXT: por %xmm15, %xmm1
2664 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
2665 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
2666 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2667 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
2668 ; SSE-NEXT: packuswb %xmm1, %xmm1
2669 ; SSE-NEXT: movdqa %xmm11, %xmm15
2670 ; SSE-NEXT: pandn %xmm1, %xmm15
2671 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2672 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
2673 ; SSE-NEXT: packuswb %xmm1, %xmm1
2674 ; SSE-NEXT: pand %xmm11, %xmm1
2675 ; SSE-NEXT: por %xmm15, %xmm1
2676 ; SSE-NEXT: pshuflw $116, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
2677 ; SSE-NEXT: # xmm15 = mem[0,1,3,1,4,5,6,7]
2678 ; SSE-NEXT: packuswb %xmm15, %xmm15
2679 ; SSE-NEXT: movdqa %xmm9, %xmm2
2680 ; SSE-NEXT: pandn %xmm15, %xmm2
2681 ; SSE-NEXT: pshuflw $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
2682 ; SSE-NEXT: # xmm15 = mem[3,1,2,3,4,5,6,7]
2683 ; SSE-NEXT: packuswb %xmm15, %xmm15
2684 ; SSE-NEXT: pand %xmm9, %xmm15
2685 ; SSE-NEXT: por %xmm2, %xmm15
2686 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm15[2,2,2,2]
2687 ; SSE-NEXT: punpckhdq {{.*#+}} xmm15 = xmm15[2],xmm1[2],xmm15[3],xmm1[3]
2688 ; SSE-NEXT: movsd {{.*#+}} xmm15 = xmm0[0],xmm15[1]
2689 ; SSE-NEXT: pshuflw $116, (%rsp), %xmm0 # 16-byte Folded Reload
2690 ; SSE-NEXT: # xmm0 = mem[0,1,3,1,4,5,6,7]
2691 ; SSE-NEXT: packuswb %xmm0, %xmm0
2692 ; SSE-NEXT: movdqa %xmm13, %xmm1
2693 ; SSE-NEXT: pandn %xmm0, %xmm1
2694 ; SSE-NEXT: pshuflw $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2695 ; SSE-NEXT: # xmm0 = mem[3,1,2,3,4,5,6,7]
2696 ; SSE-NEXT: packuswb %xmm0, %xmm0
2697 ; SSE-NEXT: pand %xmm13, %xmm0
2698 ; SSE-NEXT: por %xmm1, %xmm0
2699 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2700 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
2701 ; SSE-NEXT: packuswb %xmm1, %xmm1
2702 ; SSE-NEXT: movdqa %xmm14, %xmm2
2703 ; SSE-NEXT: pandn %xmm1, %xmm2
2704 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2705 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
2706 ; SSE-NEXT: packuswb %xmm1, %xmm1
2707 ; SSE-NEXT: pand %xmm14, %xmm1
2708 ; SSE-NEXT: por %xmm2, %xmm1
2709 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
2710 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
2711 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2712 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
2713 ; SSE-NEXT: packuswb %xmm1, %xmm1
2714 ; SSE-NEXT: movdqa %xmm11, %xmm2
2715 ; SSE-NEXT: pandn %xmm1, %xmm2
2716 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2717 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
2718 ; SSE-NEXT: packuswb %xmm1, %xmm1
2719 ; SSE-NEXT: pand %xmm11, %xmm1
2720 ; SSE-NEXT: por %xmm2, %xmm1
2721 ; SSE-NEXT: pshuflw $116, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2722 ; SSE-NEXT: # xmm2 = mem[0,1,3,1,4,5,6,7]
2723 ; SSE-NEXT: packuswb %xmm2, %xmm2
2724 ; SSE-NEXT: movdqa %xmm9, %xmm3
2725 ; SSE-NEXT: pandn %xmm2, %xmm3
2726 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm12[3,1,2,3,4,5,6,7]
2727 ; SSE-NEXT: packuswb %xmm2, %xmm2
2728 ; SSE-NEXT: pand %xmm9, %xmm2
2729 ; SSE-NEXT: por %xmm3, %xmm2
2730 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm2[2,2,2,2]
2731 ; SSE-NEXT: punpckhdq {{.*#+}} xmm12 = xmm12[2],xmm1[2],xmm12[3],xmm1[3]
2732 ; SSE-NEXT: movsd {{.*#+}} xmm12 = xmm0[0],xmm12[1]
2733 ; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2734 ; SSE-NEXT: # xmm0 = mem[1,1,2,3]
2735 ; SSE-NEXT: movdqa %xmm13, %xmm1
2736 ; SSE-NEXT: pandn %xmm0, %xmm1
2737 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2738 ; SSE-NEXT: # xmm0 = mem[3,3,3,3]
2739 ; SSE-NEXT: packuswb %xmm0, %xmm0
2740 ; SSE-NEXT: pand %xmm13, %xmm0
2741 ; SSE-NEXT: por %xmm1, %xmm0
2742 ; SSE-NEXT: movdqa %xmm14, %xmm1
2743 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2744 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2745 ; SSE-NEXT: # xmm2 = mem[2,2,3,3]
2746 ; SSE-NEXT: packuswb %xmm2, %xmm2
2747 ; SSE-NEXT: pand %xmm14, %xmm2
2748 ; SSE-NEXT: por %xmm1, %xmm2
2749 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,1,1]
2750 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
2751 ; SSE-NEXT: movdqa %xmm11, %xmm1
2752 ; SSE-NEXT: pandn %xmm7, %xmm1
2753 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2754 ; SSE-NEXT: # xmm2 = mem[2,2,3,3]
2755 ; SSE-NEXT: packuswb %xmm2, %xmm2
2756 ; SSE-NEXT: pand %xmm11, %xmm2
2757 ; SSE-NEXT: por %xmm1, %xmm2
2758 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[0,1,3,3]
2759 ; SSE-NEXT: movdqa %xmm9, %xmm3
2760 ; SSE-NEXT: pandn %xmm1, %xmm3
2761 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2762 ; SSE-NEXT: # xmm1 = mem[3,3,3,3]
2763 ; SSE-NEXT: packuswb %xmm1, %xmm1
2764 ; SSE-NEXT: pand %xmm9, %xmm1
2765 ; SSE-NEXT: por %xmm3, %xmm1
2766 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[2,2,2,2]
2767 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm2[2],xmm5[3],xmm2[3]
2768 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm0[0],xmm5[1]
2769 ; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2770 ; SSE-NEXT: # xmm0 = mem[1,1,2,3]
2771 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2772 ; SSE-NEXT: # xmm1 = mem[3,3,3,3]
2773 ; SSE-NEXT: packuswb %xmm1, %xmm1
2774 ; SSE-NEXT: pand %xmm13, %xmm1
2775 ; SSE-NEXT: pandn %xmm0, %xmm13
2776 ; SSE-NEXT: por %xmm1, %xmm13
2777 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2778 ; SSE-NEXT: # xmm0 = mem[2,2,3,3]
2779 ; SSE-NEXT: packuswb %xmm0, %xmm0
2780 ; SSE-NEXT: pand %xmm14, %xmm0
2781 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Folded Reload
2782 ; SSE-NEXT: por %xmm0, %xmm14
2783 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[1,1,1,1]
2784 ; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm0[0],xmm13[1],xmm0[1]
2785 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2786 ; SSE-NEXT: # xmm0 = mem[2,2,3,3]
2787 ; SSE-NEXT: packuswb %xmm0, %xmm0
2788 ; SSE-NEXT: pand %xmm11, %xmm0
2789 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
2790 ; SSE-NEXT: por %xmm0, %xmm11
2791 ; SSE-NEXT: pshufd $244, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2792 ; SSE-NEXT: # xmm0 = mem[0,1,3,3]
2793 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2794 ; SSE-NEXT: # xmm1 = mem[3,3,3,3]
2795 ; SSE-NEXT: packuswb %xmm1, %xmm1
2796 ; SSE-NEXT: pand %xmm9, %xmm1
2797 ; SSE-NEXT: pandn %xmm0, %xmm9
2798 ; SSE-NEXT: por %xmm1, %xmm9
2799 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[2,2,2,2]
2800 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm11[2],xmm0[3],xmm11[3]
2801 ; SSE-NEXT: movsd {{.*#+}} xmm0 = xmm13[0],xmm0[1]
2802 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2803 ; SSE-NEXT: movaps %xmm1, (%rsi)
2804 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2805 ; SSE-NEXT: movaps %xmm1, 16(%rsi)
2806 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2807 ; SSE-NEXT: movaps %xmm1, (%rdx)
2808 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2809 ; SSE-NEXT: movaps %xmm1, 16(%rdx)
2810 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2811 ; SSE-NEXT: movaps %xmm1, (%rcx)
2812 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2813 ; SSE-NEXT: movaps %xmm1, 16(%rcx)
2814 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2815 ; SSE-NEXT: movaps %xmm1, (%r8)
2816 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2817 ; SSE-NEXT: movaps %xmm1, 16(%r8)
2818 ; SSE-NEXT: movapd %xmm10, (%r9)
2819 ; SSE-NEXT: movapd %xmm6, 16(%r9)
2820 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2821 ; SSE-NEXT: movapd %xmm4, (%rax)
2822 ; SSE-NEXT: movapd %xmm8, 16(%rax)
2823 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2824 ; SSE-NEXT: movapd %xmm12, (%rax)
2825 ; SSE-NEXT: movapd %xmm15, 16(%rax)
2826 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2827 ; SSE-NEXT: movapd %xmm0, 16(%rax)
2828 ; SSE-NEXT: movapd %xmm5, (%rax)
2829 ; SSE-NEXT: addq $904, %rsp # imm = 0x388
2830 ; SSE-NEXT: retq
2831 ;
2832 ; AVX1-ONLY-LABEL: load_i8_stride8_vf32:
2833 ; AVX1-ONLY: # %bb.0:
2834 ; AVX1-ONLY-NEXT: subq $360, %rsp # imm = 0x168
2835 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm10 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
2836 ; AVX1-ONLY-NEXT: vmovdqa 112(%rdi), %xmm4
2837 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm4, %xmm0
2838 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2839 ; AVX1-ONLY-NEXT: vmovdqa 96(%rdi), %xmm1
2840 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm1, %xmm3
2841 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, %xmm7
2842 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2843 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
2844 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm12 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
2845 ; AVX1-ONLY-NEXT: vmovdqa 80(%rdi), %xmm1
2846 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2847 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm1, %xmm3
2848 ; AVX1-ONLY-NEXT: vmovdqa 64(%rdi), %xmm5
2849 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm5, %xmm6
2850 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2851 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm6[0],xmm3[0],xmm6[1],xmm3[1],xmm6[2],xmm3[2],xmm6[3],xmm3[3]
2852 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm3[0,1,2,3,4,5],xmm0[6,7]
2853 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm14 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
2854 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm0
2855 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2856 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm1
2857 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2858 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm2
2859 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2860 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm3
2861 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2862 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm3, %xmm9
2863 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm2, %xmm11
2864 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
2865 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
2866 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm1, %xmm11
2867 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm13
2868 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3]
2869 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm11[0,1],xmm9[2,3],xmm11[4,5,6,7]
2870 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm9[0,1,2,3],xmm8[4,5,6,7]
2871 ; AVX1-ONLY-NEXT: vmovdqa 240(%rdi), %xmm0
2872 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
2873 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm0, %xmm11
2874 ; AVX1-ONLY-NEXT: vmovdqa 224(%rdi), %xmm0
2875 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2876 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm0, %xmm10
2877 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
2878 ; AVX1-ONLY-NEXT: vmovdqa 208(%rdi), %xmm0
2879 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2880 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm0, %xmm15
2881 ; AVX1-ONLY-NEXT: vmovdqa 192(%rdi), %xmm0
2882 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2883 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm0, %xmm12
2884 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm15[0],xmm12[1],xmm15[1],xmm12[2],xmm15[2],xmm12[3],xmm15[3]
2885 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm12[0,1,2,3,4,5],xmm13[6,7]
2886 ; AVX1-ONLY-NEXT: vmovdqa 176(%rdi), %xmm0
2887 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2888 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm0, %xmm15
2889 ; AVX1-ONLY-NEXT: vmovdqa 160(%rdi), %xmm0
2890 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2891 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm0, %xmm14
2892 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
2893 ; AVX1-ONLY-NEXT: vmovdqa 144(%rdi), %xmm15
2894 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm15, %xmm0
2895 ; AVX1-ONLY-NEXT: vmovdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2896 ; AVX1-ONLY-NEXT: vmovdqa 128(%rdi), %xmm6
2897 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm6, %xmm3
2898 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, %xmm14
2899 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2900 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
2901 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6,7]
2902 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
2903 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
2904 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
2905 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
2906 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2907 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm8 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
2908 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm4, %xmm1
2909 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm7, %xmm2
2910 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
2911 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
2912 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
2913 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm6, %xmm3
2914 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm4
2915 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
2916 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2,3,4,5],xmm1[6,7]
2917 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
2918 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
2919 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm13, %xmm4
2920 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
2921 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm9, %xmm5
2922 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
2923 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm5 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
2924 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
2925 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm11, %xmm0
2926 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2927 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm7, %xmm7
2928 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm7[0],xmm0[0],xmm7[1],xmm0[1],xmm7[2],xmm0[2],xmm7[3],xmm0[3]
2929 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm4[2,3],xmm0[4,5,6,7]
2930 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm1[4,5,6,7]
2931 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm12 # 16-byte Reload
2932 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm12, %xmm1
2933 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2934 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm10, %xmm4
2935 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
2936 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2937 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm4, %xmm4
2938 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2939 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm7, %xmm2
2940 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
2941 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,5],xmm1[6,7]
2942 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2943 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm2, %xmm2
2944 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2945 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm4, %xmm3
2946 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2947 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm15, %xmm3
2948 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm14, %xmm4
2949 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
2950 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3],xmm3[4,5,6,7]
2951 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
2952 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
2953 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
2954 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
2955 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2956 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
2957 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
2958 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm14, %xmm1
2959 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
2960 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm15, %xmm2
2961 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
2962 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
2963 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm6, %xmm3
2964 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
2965 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm8, %xmm4
2966 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
2967 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2,3,4,5],xmm1[6,7]
2968 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
2969 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm13, %xmm4
2970 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm9, %xmm5
2971 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
2972 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm5 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
2973 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm11, %xmm6
2974 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
2975 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm13, %xmm7
2976 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
2977 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm6[0,1],xmm4[2,3],xmm6[4,5,6,7]
2978 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm4[0,1,2,3],xmm1[4,5,6,7]
2979 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm12, %xmm4
2980 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm10, %xmm0
2981 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
2982 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2983 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm10, %xmm4
2984 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
2985 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm2
2986 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
2987 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2,3,4,5],xmm0[6,7]
2988 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
2989 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm12, %xmm2
2990 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2991 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm4, %xmm3
2992 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2993 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
2994 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm3, %xmm3
2995 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2996 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm4, %xmm4
2997 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
2998 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3],xmm3[4,5,6,7]
2999 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
3000 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
3001 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3002 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3003 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3004 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
3005 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm14, %xmm1
3006 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm15, %xmm2
3007 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3008 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
3009 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3010 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm3, %xmm3
3011 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm8, %xmm4
3012 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3013 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2,3,4,5],xmm1[6,7]
3014 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
3015 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
3016 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm9, %xmm4
3017 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3018 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm5
3019 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3020 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm5 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
3021 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3022 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm6, %xmm6
3023 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, %xmm8
3024 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm13, %xmm7
3025 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3026 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm6[0,1],xmm4[2,3],xmm6[4,5,6,7]
3027 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm4[0,1,2,3],xmm1[4,5,6,7]
3028 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm4 # 16-byte Reload
3029 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm4, %xmm4
3030 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3031 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm6, %xmm0
3032 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3033 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm10, %xmm4
3034 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm2
3035 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3036 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2,3,4,5],xmm0[6,7]
3037 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm12, %xmm2
3038 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3039 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm14, %xmm3
3040 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3041 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
3042 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm12, %xmm3
3043 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3044 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm15, %xmm4
3045 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3046 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3],xmm3[4,5,6,7]
3047 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
3048 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
3049 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3050 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3051 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3052 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
3053 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3054 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm10, %xmm1
3055 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3056 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm2, %xmm2
3057 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3058 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
3059 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3060 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm3
3061 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3062 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm13, %xmm4
3063 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3064 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2,3,4,5],xmm1[6,7]
3065 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
3066 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm9, %xmm4
3067 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3068 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm5
3069 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3070 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm5 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
3071 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3072 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm6, %xmm6
3073 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm8, %xmm7
3074 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3075 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm6[0,1],xmm4[2,3],xmm6[4,5,6,7]
3076 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm4[0,1,2,3],xmm1[4,5,6,7]
3077 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm9 # 16-byte Reload
3078 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm9, %xmm4
3079 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3080 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm6, %xmm0
3081 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3082 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3083 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm4, %xmm4
3084 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3085 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm6, %xmm2
3086 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3087 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2,3,4,5],xmm0[6,7]
3088 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3089 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm2, %xmm2
3090 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm14, %xmm3
3091 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3092 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm12, %xmm3
3093 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm15, %xmm4
3094 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3095 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3],xmm3[4,5,6,7]
3096 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
3097 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
3098 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3099 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3100 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3101 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
3102 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm10, %xmm1
3103 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3104 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm8, %xmm2
3105 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3106 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
3107 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm3
3108 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm13, %xmm4
3109 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3110 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2,3,4,5],xmm1[6,7]
3111 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
3112 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3113 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm4, %xmm4
3114 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3115 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm11, %xmm5
3116 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3117 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm5 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
3118 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3119 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm10, %xmm6
3120 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3121 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm14, %xmm7
3122 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3123 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm6[0,1],xmm4[2,3],xmm6[4,5,6,7]
3124 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm4[0,1,2,3],xmm1[4,5,6,7]
3125 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm9, %xmm4
3126 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3127 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm15, %xmm0
3128 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3129 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
3130 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm9, %xmm4
3131 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
3132 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm12, %xmm2
3133 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3134 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2,3,4,5],xmm0[6,7]
3135 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3136 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm13, %xmm2
3137 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3138 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm4, %xmm3
3139 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3140 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3141 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm3, %xmm3
3142 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3143 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm4, %xmm4
3144 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3145 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3],xmm3[4,5,6,7]
3146 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
3147 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
3148 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3149 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3150 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3151 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
3152 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3153 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm1, %xmm1
3154 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm8, %xmm2
3155 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3156 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
3157 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3158 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm3, %xmm3
3159 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3160 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm4, %xmm4
3161 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3162 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2,3,4,5],xmm1[6,7]
3163 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
3164 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3165 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm8, %xmm4
3166 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm11, %xmm5
3167 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3168 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm5 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
3169 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm10, %xmm6
3170 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm14, %xmm7
3171 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3172 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm6[0,1],xmm4[2,3],xmm6[4,5,6,7]
3173 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm4[0,1,2,3],xmm1[4,5,6,7]
3174 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm14 # 16-byte Reload
3175 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm14, %xmm4
3176 ; AVX1-ONLY-NEXT: vmovdqa %xmm15, %xmm10
3177 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm15, %xmm0
3178 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3179 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm9, %xmm4
3180 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm12, %xmm2
3181 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3182 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2,3,4,5],xmm0[6,7]
3183 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm13, %xmm2
3184 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3185 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm15, %xmm3
3186 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3187 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
3188 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm9, %xmm3
3189 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3190 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm11, %xmm4
3191 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3192 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3],xmm3[4,5,6,7]
3193 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
3194 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
3195 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3196 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3197 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3198 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
3199 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3200 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm1, %xmm1
3201 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3202 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm2, %xmm2
3203 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3204 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
3205 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3206 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm3, %xmm3
3207 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3208 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm4, %xmm4
3209 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3210 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2,3,4,5],xmm1[6,7]
3211 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
3212 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm8, %xmm4
3213 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3214 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm5
3215 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3216 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm5 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
3217 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3218 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm6, %xmm6
3219 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
3220 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm7, %xmm7
3221 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3222 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm6[0,1],xmm4[2,3],xmm6[4,5,6,7]
3223 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm4[0,1,2,3],xmm1[4,5,6,7]
3224 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm14, %xmm4
3225 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm10, %xmm0
3226 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3227 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3228 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm4, %xmm4
3229 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm12, %xmm2
3230 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3231 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2,3,4,5],xmm0[6,7]
3232 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm13, %xmm2
3233 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm15, %xmm3
3234 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3235 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm9, %xmm3
3236 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm11, %xmm4
3237 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3238 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3],xmm3[4,5,6,7]
3239 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
3240 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
3241 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3242 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3243 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3244 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rsi)
3245 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3246 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rdx)
3247 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3248 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rcx)
3249 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3250 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%r8)
3251 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3252 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%r9)
3253 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
3254 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3255 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rax)
3256 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
3257 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3258 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rax)
3259 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
3260 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
3261 ; AVX1-ONLY-NEXT: addq $360, %rsp # imm = 0x168
3262 ; AVX1-ONLY-NEXT: vzeroupper
3263 ; AVX1-ONLY-NEXT: retq
3264 ;
3265 ; AVX2-SLOW-LABEL: load_i8_stride8_vf32:
3266 ; AVX2-SLOW: # %bb.0:
3267 ; AVX2-SLOW-NEXT: subq $360, %rsp # imm = 0x168
3268 ; AVX2-SLOW-NEXT: vmovdqa 112(%rdi), %xmm0
3269 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3270 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm10 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
3271 ; AVX2-SLOW-NEXT: vpshufb %xmm10, %xmm0, %xmm0
3272 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdi), %xmm4
3273 ; AVX2-SLOW-NEXT: vpshufb %xmm10, %xmm4, %xmm2
3274 ; AVX2-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3275 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
3276 ; AVX2-SLOW-NEXT: vmovdqa 80(%rdi), %xmm1
3277 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3278 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm12 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
3279 ; AVX2-SLOW-NEXT: vpshufb %xmm12, %xmm1, %xmm2
3280 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdi), %xmm5
3281 ; AVX2-SLOW-NEXT: vpshufb %xmm12, %xmm5, %xmm6
3282 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3283 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
3284 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm2[0,1,2],xmm0[3]
3285 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm0
3286 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3287 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdi), %xmm7
3288 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm2
3289 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3290 ; AVX2-SLOW-NEXT: vmovdqa 48(%rdi), %xmm3
3291 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3292 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm14 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
3293 ; AVX2-SLOW-NEXT: vpshufb %xmm14, %xmm3, %xmm9
3294 ; AVX2-SLOW-NEXT: vpshufb %xmm14, %xmm2, %xmm11
3295 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
3296 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
3297 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm7, %xmm11
3298 ; AVX2-SLOW-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3299 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm13
3300 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3]
3301 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm9 = xmm11[0],xmm9[1],xmm11[2,3]
3302 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
3303 ; AVX2-SLOW-NEXT: vmovdqa 240(%rdi), %xmm0
3304 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
3305 ; AVX2-SLOW-NEXT: vpshufb %xmm10, %xmm0, %xmm11
3306 ; AVX2-SLOW-NEXT: vmovdqa 224(%rdi), %xmm0
3307 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3308 ; AVX2-SLOW-NEXT: vpshufb %xmm10, %xmm0, %xmm10
3309 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
3310 ; AVX2-SLOW-NEXT: vmovdqa 208(%rdi), %xmm11
3311 ; AVX2-SLOW-NEXT: vpshufb %xmm12, %xmm11, %xmm15
3312 ; AVX2-SLOW-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3313 ; AVX2-SLOW-NEXT: vmovdqa 192(%rdi), %xmm0
3314 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3315 ; AVX2-SLOW-NEXT: vpshufb %xmm12, %xmm0, %xmm12
3316 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm15[0],xmm12[1],xmm15[1],xmm12[2],xmm15[2],xmm12[3],xmm15[3]
3317 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
3318 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm12, %ymm0, %ymm12
3319 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm12[0,1,2,3,4,5,6],ymm13[7]
3320 ; AVX2-SLOW-NEXT: vmovdqa 176(%rdi), %xmm0
3321 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3322 ; AVX2-SLOW-NEXT: vpshufb %xmm14, %xmm0, %xmm15
3323 ; AVX2-SLOW-NEXT: vmovdqa 160(%rdi), %xmm0
3324 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3325 ; AVX2-SLOW-NEXT: vpshufb %xmm14, %xmm0, %xmm14
3326 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
3327 ; AVX2-SLOW-NEXT: vmovdqa 144(%rdi), %xmm0
3328 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3329 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm0
3330 ; AVX2-SLOW-NEXT: vmovdqa 128(%rdi), %xmm13
3331 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm13, %xmm3
3332 ; AVX2-SLOW-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3333 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
3334 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
3335 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3336 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7]
3337 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
3338 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
3339 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3340 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm8 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
3341 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3342 ; AVX2-SLOW-NEXT: vpshufb %xmm8, %xmm0, %xmm1
3343 ; AVX2-SLOW-NEXT: vpshufb %xmm8, %xmm4, %xmm2
3344 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3345 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
3346 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3347 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm3
3348 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm4
3349 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3350 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
3351 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
3352 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3353 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm15, %xmm4
3354 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
3355 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm5
3356 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3357 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm5 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
3358 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm7, %xmm0
3359 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3360 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm14, %xmm7
3361 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm7[0],xmm0[0],xmm7[1],xmm0[1],xmm7[2],xmm0[2],xmm7[3],xmm0[3]
3362 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm4[1],xmm0[2,3]
3363 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
3364 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm12 # 16-byte Reload
3365 ; AVX2-SLOW-NEXT: vpshufb %xmm8, %xmm12, %xmm1
3366 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3367 ; AVX2-SLOW-NEXT: vpshufb %xmm8, %xmm10, %xmm4
3368 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
3369 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm4
3370 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3371 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm2
3372 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3373 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
3374 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3375 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5,6],ymm1[7]
3376 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3377 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm2, %xmm2
3378 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3379 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm3
3380 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3381 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3382 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm3, %xmm3
3383 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm13, %xmm4
3384 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3385 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3386 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
3387 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
3388 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
3389 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3390 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3391 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
3392 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3393 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
3394 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3395 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm2
3396 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3397 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
3398 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm3
3399 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3400 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm4
3401 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3402 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
3403 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
3404 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm15, %xmm4
3405 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm5
3406 ; AVX2-SLOW-NEXT: vmovdqa %xmm9, %xmm15
3407 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3408 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm5 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
3409 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3410 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm6, %xmm6
3411 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm14, %xmm7
3412 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3413 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
3414 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
3415 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm4
3416 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm0
3417 ; AVX2-SLOW-NEXT: vmovdqa %xmm10, %xmm14
3418 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3419 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3420 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm4
3421 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm2
3422 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3423 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3424 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3425 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
3426 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
3427 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm12, %xmm2
3428 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3429 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm3
3430 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3431 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3432 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm3, %xmm3
3433 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3434 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm4, %xmm4
3435 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3436 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3437 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
3438 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
3439 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3440 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3441 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3442 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
3443 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3444 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
3445 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm2
3446 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3447 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
3448 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3449 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm3, %xmm3
3450 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm4
3451 ; AVX2-SLOW-NEXT: vmovdqa %xmm8, %xmm13
3452 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3453 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
3454 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
3455 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
3456 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm4
3457 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm15, %xmm5
3458 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3459 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm5 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
3460 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3461 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm11, %xmm6
3462 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3463 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm15, %xmm7
3464 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3465 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
3466 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
3467 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm4 # 16-byte Reload
3468 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
3469 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm14, %xmm0
3470 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3471 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm4
3472 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3473 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm2
3474 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3475 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3476 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3477 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
3478 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm12, %xmm2
3479 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3480 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm14, %xmm3
3481 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3482 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3483 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm3, %xmm3
3484 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3485 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm4, %xmm4
3486 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3487 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3488 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
3489 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
3490 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3491 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3492 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3493 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
3494 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3495 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm1
3496 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3497 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm2
3498 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3499 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
3500 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
3501 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm3
3502 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm13, %xmm4
3503 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3504 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
3505 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
3506 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm4
3507 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3508 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm13, %xmm5
3509 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3510 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm5 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
3511 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm11, %xmm6
3512 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm15, %xmm7
3513 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3514 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
3515 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
3516 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm9 # 16-byte Reload
3517 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm9, %xmm4
3518 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3519 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm0
3520 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3521 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3522 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm4, %xmm4
3523 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm2
3524 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3525 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3526 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3527 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
3528 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3529 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm15, %xmm2
3530 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm14, %xmm3
3531 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3532 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3533 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm14, %xmm3
3534 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3535 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm4, %xmm4
3536 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3537 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3538 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
3539 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
3540 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3541 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3542 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3543 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
3544 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm1
3545 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3546 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm8, %xmm2
3547 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3548 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
3549 ; AVX2-SLOW-NEXT: vmovdqa %xmm12, %xmm11
3550 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm3
3551 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3552 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm4, %xmm4
3553 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3554 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
3555 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
3556 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3557 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm4
3558 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm13, %xmm5
3559 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3560 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm5 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
3561 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3562 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm6, %xmm6
3563 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
3564 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm7, %xmm7
3565 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3566 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
3567 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
3568 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm9, %xmm4
3569 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
3570 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm9, %xmm0
3571 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3572 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3573 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm13, %xmm4
3574 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
3575 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm2
3576 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3577 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3578 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3579 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
3580 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm15, %xmm2
3581 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3582 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm3
3583 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3584 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm14, %xmm3
3585 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3586 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm10, %xmm4
3587 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3588 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3589 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
3590 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
3591 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3592 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3593 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3594 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
3595 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3596 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
3597 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm8, %xmm2
3598 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3599 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
3600 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm3
3601 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3602 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm14, %xmm4
3603 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3604 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
3605 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
3606 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3607 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm4
3608 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3609 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
3610 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3611 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm5 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
3612 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3613 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm6, %xmm6
3614 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3615 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm8, %xmm7
3616 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3617 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
3618 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
3619 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm15 # 16-byte Reload
3620 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm15, %xmm4
3621 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm9, %xmm0
3622 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3623 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm13, %xmm4
3624 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm2
3625 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3626 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3627 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3628 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
3629 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3630 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm13, %xmm2
3631 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
3632 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm3
3633 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3634 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
3635 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm12, %xmm3
3636 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm10, %xmm4
3637 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3638 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3639 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
3640 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
3641 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3642 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3643 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3644 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
3645 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3646 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
3647 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3648 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm2
3649 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
3650 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
3651 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3652 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm3, %xmm3
3653 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm14, %xmm4
3654 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3655 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
3656 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
3657 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm4
3658 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3659 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
3660 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
3661 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm5 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
3662 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3663 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm6, %xmm6
3664 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm8, %xmm7
3665 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
3666 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
3667 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
3668 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm15, %xmm4
3669 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3670 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm0
3671 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
3672 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3673 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm4, %xmm4
3674 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3675 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm2
3676 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
3677 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3678 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3679 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
3680 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm13, %xmm2
3681 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm3
3682 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
3683 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm12, %xmm3
3684 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm10, %xmm4
3685 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
3686 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3687 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
3688 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
3689 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
3690 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
3691 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3692 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%rsi)
3693 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3694 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%rdx)
3695 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3696 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%rcx)
3697 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3698 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%r8)
3699 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3700 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%r9)
3701 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3702 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3703 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%rax)
3704 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3705 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3706 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%rax)
3707 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3708 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, (%rax)
3709 ; AVX2-SLOW-NEXT: addq $360, %rsp # imm = 0x168
3710 ; AVX2-SLOW-NEXT: vzeroupper
3711 ; AVX2-SLOW-NEXT: retq
3712 ;
3713 ; AVX2-FAST-LABEL: load_i8_stride8_vf32:
3714 ; AVX2-FAST: # %bb.0:
3715 ; AVX2-FAST-NEXT: subq $248, %rsp
3716 ; AVX2-FAST-NEXT: vmovdqa 192(%rdi), %ymm4
3717 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3718 ; AVX2-FAST-NEXT: vmovdqa 224(%rdi), %ymm7
3719 ; AVX2-FAST-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3720 ; AVX2-FAST-NEXT: vmovdqa 112(%rdi), %xmm8
3721 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm0 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
3722 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm8, %xmm1
3723 ; AVX2-FAST-NEXT: vmovdqa 96(%rdi), %xmm13
3724 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm13, %xmm0
3725 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3726 ; AVX2-FAST-NEXT: vmovdqa 80(%rdi), %xmm3
3727 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm1 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
3728 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm3, %xmm2
3729 ; AVX2-FAST-NEXT: vmovdqa %xmm3, %xmm10
3730 ; AVX2-FAST-NEXT: vmovdqa 64(%rdi), %xmm3
3731 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3732 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm3, %xmm1
3733 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
3734 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
3735 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm2
3736 ; AVX2-FAST-NEXT: vmovdqa 16(%rdi), %xmm3
3737 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3738 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm5
3739 ; AVX2-FAST-NEXT: vmovdqa 48(%rdi), %xmm15
3740 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm1 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
3741 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm15, %xmm9
3742 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm5, %xmm1
3743 ; AVX2-FAST-NEXT: vmovdqa %xmm5, %xmm6
3744 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm9[0],xmm1[1],xmm9[1],xmm1[2],xmm9[2],xmm1[3],xmm9[3]
3745 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm9 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
3746 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm3, %xmm11
3747 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm2, %xmm9
3748 ; AVX2-FAST-NEXT: vmovdqa %xmm2, %xmm5
3749 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm11[0],xmm9[1],xmm11[1],xmm9[2],xmm11[2],xmm9[3],xmm11[3]
3750 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm9[0],xmm1[1],xmm9[2,3]
3751 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm3 = xmm1[0,1],xmm0[2,3]
3752 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [0,2,2,3,0,2,4,6]
3753 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm1, %ymm9
3754 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28]
3755 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm1, %ymm0
3756 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3757 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u]
3758 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm11[0,1,2,3,4,5,6],ymm2[7]
3759 ; AVX2-FAST-NEXT: vmovdqa 128(%rdi), %ymm0
3760 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3761 ; AVX2-FAST-NEXT: vmovdqa 160(%rdi), %ymm4
3762 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3763 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm1, %ymm11
3764 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm14
3765 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u]
3766 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u,u,u,u,u]
3767 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm12[0,1,2,3,4],ymm0[5],ymm12[6,7]
3768 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
3769 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
3770 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3771 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm0 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
3772 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm8, %xmm2
3773 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm13, %xmm0
3774 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
3775 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm2 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
3776 ; AVX2-FAST-NEXT: vmovdqa %xmm10, %xmm4
3777 ; AVX2-FAST-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3778 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm10, %xmm3
3779 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3780 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm2
3781 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3782 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3]
3783 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm2 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
3784 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm15, %xmm3
3785 ; AVX2-FAST-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3786 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm6, %xmm2
3787 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3788 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm3 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
3789 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
3790 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm7, %xmm12
3791 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm5, %xmm3
3792 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm12[0],xmm3[1],xmm12[1],xmm3[2],xmm12[2],xmm3[3],xmm12[3]
3793 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2,3]
3794 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3]
3795 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29]
3796 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
3797 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u]
3798 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
3799 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u]
3800 ; AVX2-FAST-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3801 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u,u,u,u,u]
3802 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm12[0,1,2,3,4],ymm3[5],ymm12[6,7]
3803 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5],ymm2[6,7]
3804 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
3805 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3806 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm0 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
3807 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm8, %xmm2
3808 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm13, %xmm0
3809 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
3810 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm2 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
3811 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm4, %xmm3
3812 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm2
3813 ; AVX2-FAST-NEXT: vmovdqa %xmm1, %xmm4
3814 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3815 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3]
3816 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm2 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
3817 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm15, %xmm3
3818 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm6, %xmm2
3819 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3820 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm3 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
3821 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm7, %xmm12
3822 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm5, %xmm3
3823 ; AVX2-FAST-NEXT: vmovdqa %xmm5, %xmm1
3824 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm12[0],xmm3[1],xmm12[1],xmm3[2],xmm12[2],xmm3[3],xmm12[3]
3825 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2,3]
3826 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3]
3827 ; AVX2-FAST-NEXT: vmovdqa %ymm9, %ymm7
3828 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30]
3829 ; AVX2-FAST-NEXT: vmovdqa %ymm10, %ymm9
3830 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u]
3831 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
3832 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u,u,u,u,u]
3833 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u,u,u,u,u,u,u,u,u]
3834 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm12[0,1,2,3,4],ymm3[5],ymm12[6,7]
3835 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5],ymm2[6,7]
3836 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
3837 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3838 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm0 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
3839 ; AVX2-FAST-NEXT: vmovdqa %xmm8, %xmm5
3840 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm8, %xmm2
3841 ; AVX2-FAST-NEXT: vmovdqa %xmm13, %xmm8
3842 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm13, %xmm0
3843 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
3844 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm2 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
3845 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3846 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm6, %xmm3
3847 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm4, %xmm2
3848 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3849 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3]
3850 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm2 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
3851 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm15, %xmm3
3852 ; AVX2-FAST-NEXT: vmovdqa %xmm15, %xmm10
3853 ; AVX2-FAST-NEXT: vmovdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3854 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3855 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm11, %xmm2
3856 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3857 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm3 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
3858 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3859 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm13, %xmm12
3860 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm1, %xmm3
3861 ; AVX2-FAST-NEXT: vmovdqa %xmm1, %xmm4
3862 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm12[0],xmm3[1],xmm12[1],xmm3[2],xmm12[2],xmm3[3],xmm12[3]
3863 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2,3]
3864 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3]
3865 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31]
3866 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31,u,u,u,u]
3867 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
3868 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3869 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31,u,u,u,u,u,u,u,u]
3870 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31,u,u,u,u,u,u,u,u,u,u,u,u]
3871 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm3[5],ymm1[6,7]
3872 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
3873 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3874 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3875 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm0 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
3876 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm5, %xmm1
3877 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm8, %xmm0
3878 ; AVX2-FAST-NEXT: vmovdqa %xmm8, %xmm9
3879 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
3880 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm1 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
3881 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm6, %xmm2
3882 ; AVX2-FAST-NEXT: vmovdqa %xmm6, %xmm8
3883 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3884 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm15, %xmm1
3885 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
3886 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
3887 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm1 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
3888 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm10, %xmm2
3889 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm11, %xmm1
3890 ; AVX2-FAST-NEXT: vmovdqa %xmm11, %xmm14
3891 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
3892 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm2 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
3893 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm13, %xmm3
3894 ; AVX2-FAST-NEXT: vmovdqa %xmm13, %xmm6
3895 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm4, %xmm2
3896 ; AVX2-FAST-NEXT: vmovdqa %xmm4, %xmm10
3897 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3898 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2,3]
3899 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm1[0,1],xmm0[2,3]
3900 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [1,3,2,3,1,3,5,7]
3901 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm7 # 32-byte Folded Reload
3902 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28]
3903 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm12 # 32-byte Folded Reload
3904 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u]
3905 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5,6],ymm3[7]
3906 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm11 # 32-byte Folded Reload
3907 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm13 # 32-byte Folded Reload
3908 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u]
3909 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u,u,u,u,u]
3910 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4],ymm2[5],ymm4[6,7]
3911 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
3912 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3913 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3914 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm1 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
3915 ; AVX2-FAST-NEXT: vmovdqa %xmm5, %xmm0
3916 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm5, %xmm2
3917 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm9, %xmm1
3918 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
3919 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm2 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
3920 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm8, %xmm3
3921 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm15, %xmm2
3922 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3923 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3]
3924 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm2 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
3925 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3926 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm5, %xmm3
3927 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm14, %xmm2
3928 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
3929 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm3 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
3930 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm6, %xmm4
3931 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm10, %xmm3
3932 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
3933 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2,3]
3934 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
3935 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29]
3936 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u]
3937 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
3938 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u]
3939 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u,u,u,u,u]
3940 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4],ymm3[5],ymm4[6,7]
3941 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5],ymm2[6,7]
3942 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3943 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3944 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm1 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
3945 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm3
3946 ; AVX2-FAST-NEXT: vmovdqa %xmm0, %xmm2
3947 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm9, %xmm1
3948 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
3949 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm3 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
3950 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm8, %xmm4
3951 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm15, %xmm3
3952 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
3953 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
3954 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm3 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
3955 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm5, %xmm4
3956 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm14, %xmm3
3957 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
3958 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm4 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
3959 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm6, %xmm0
3960 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm10, %xmm4
3961 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
3962 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm3[1],xmm0[2,3]
3963 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
3964 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30]
3965 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u]
3966 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5,6],ymm1[7]
3967 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u,u,u,u,u]
3968 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u,u,u,u,u,u,u,u,u]
3969 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4],ymm3[5],ymm4[6,7]
3970 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
3971 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3972 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm0 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
3973 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm2, %xmm3
3974 ; AVX2-FAST-NEXT: vpshufb %xmm0, %xmm9, %xmm0
3975 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
3976 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm3 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
3977 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm8, %xmm4
3978 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm15, %xmm3
3979 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
3980 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm3[0,1,2],xmm0[3]
3981 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm3 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
3982 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm5, %xmm4
3983 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm14, %xmm3
3984 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
3985 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm4 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
3986 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm6, %xmm6
3987 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm10, %xmm4
3988 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xmm6[3]
3989 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm3 = xmm4[0],xmm3[1],xmm4[2,3]
3990 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm3[0,1],xmm0[2,3]
3991 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31]
3992 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31,u,u,u,u]
3993 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5,6],ymm3[7]
3994 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31,u,u,u,u,u,u,u,u]
3995 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31,u,u,u,u,u,u,u,u,u,u,u,u]
3996 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2,3,4],ymm4[5],ymm5[6,7]
3997 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5],ymm3[6,7]
3998 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
3999 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
4000 ; AVX2-FAST-NEXT: vmovaps %ymm2, (%rsi)
4001 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
4002 ; AVX2-FAST-NEXT: vmovaps %ymm2, (%rdx)
4003 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
4004 ; AVX2-FAST-NEXT: vmovaps %ymm2, (%rcx)
4005 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
4006 ; AVX2-FAST-NEXT: vmovaps %ymm2, (%r8)
4007 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
4008 ; AVX2-FAST-NEXT: vmovaps %ymm2, (%r9)
4009 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
4010 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
4011 ; AVX2-FAST-NEXT: vmovaps %ymm2, (%rax)
4012 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
4013 ; AVX2-FAST-NEXT: vmovdqa %ymm1, (%rax)
4014 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
4015 ; AVX2-FAST-NEXT: vmovdqa %ymm0, (%rax)
4016 ; AVX2-FAST-NEXT: addq $248, %rsp
4017 ; AVX2-FAST-NEXT: vzeroupper
4018 ; AVX2-FAST-NEXT: retq
4019 ;
4020 ; AVX2-FAST-PERLANE-LABEL: load_i8_stride8_vf32:
4021 ; AVX2-FAST-PERLANE: # %bb.0:
4022 ; AVX2-FAST-PERLANE-NEXT: subq $360, %rsp # imm = 0x168
4023 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 112(%rdi), %xmm0
4024 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4025 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm10 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
4026 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm10, %xmm0, %xmm0
4027 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdi), %xmm4
4028 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm10, %xmm4, %xmm2
4029 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4030 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
4031 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 80(%rdi), %xmm1
4032 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4033 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm12 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
4034 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm12, %xmm1, %xmm2
4035 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdi), %xmm5
4036 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm12, %xmm5, %xmm6
4037 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4038 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
4039 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm8 = xmm2[0,1,2],xmm0[3]
4040 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm0
4041 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4042 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdi), %xmm7
4043 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm2
4044 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4045 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rdi), %xmm3
4046 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4047 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm14 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
4048 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm14, %xmm3, %xmm9
4049 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm14, %xmm2, %xmm11
4050 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
4051 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
4052 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm7, %xmm11
4053 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4054 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm13
4055 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3]
4056 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm9 = xmm11[0],xmm9[1],xmm11[2,3]
4057 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
4058 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 240(%rdi), %xmm0
4059 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
4060 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm10, %xmm0, %xmm11
4061 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 224(%rdi), %xmm0
4062 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4063 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm10, %xmm0, %xmm10
4064 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
4065 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 208(%rdi), %xmm11
4066 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm12, %xmm11, %xmm15
4067 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4068 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 192(%rdi), %xmm0
4069 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4070 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm12, %xmm0, %xmm12
4071 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm15[0],xmm12[1],xmm15[1],xmm12[2],xmm15[2],xmm12[3],xmm15[3]
4072 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
4073 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm12, %ymm0, %ymm12
4074 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm12[0,1,2,3,4,5,6],ymm13[7]
4075 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 176(%rdi), %xmm0
4076 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4077 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm14, %xmm0, %xmm15
4078 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 160(%rdi), %xmm0
4079 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4080 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm14, %xmm0, %xmm14
4081 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
4082 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 144(%rdi), %xmm0
4083 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4084 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm0
4085 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 128(%rdi), %xmm13
4086 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm13, %xmm3
4087 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4088 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
4089 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
4090 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4091 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7]
4092 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
4093 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
4094 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4095 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm8 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
4096 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4097 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm8, %xmm0, %xmm1
4098 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm8, %xmm4, %xmm2
4099 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4100 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
4101 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
4102 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm6, %xmm3
4103 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm4
4104 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4105 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
4106 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
4107 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
4108 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm15, %xmm4
4109 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
4110 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm9, %xmm5
4111 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4112 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm5 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
4113 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm7, %xmm0
4114 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
4115 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm14, %xmm7
4116 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm7[0],xmm0[0],xmm7[1],xmm0[1],xmm7[2],xmm0[2],xmm7[3],xmm0[3]
4117 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm4[1],xmm0[2,3]
4118 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
4119 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm12 # 16-byte Reload
4120 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm8, %xmm12, %xmm1
4121 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
4122 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm8, %xmm10, %xmm4
4123 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
4124 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm11, %xmm4
4125 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
4126 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm11, %xmm2
4127 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
4128 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
4129 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4130 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5,6],ymm1[7]
4131 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4132 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm2, %xmm2
4133 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4134 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm3
4135 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4136 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4137 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm3, %xmm3
4138 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm13, %xmm4
4139 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4140 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4141 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
4142 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
4143 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
4144 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
4145 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4146 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
4147 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4148 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm1, %xmm1
4149 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
4150 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm13, %xmm2
4151 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4152 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
4153 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm6, %xmm3
4154 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
4155 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm8, %xmm4
4156 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4157 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
4158 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
4159 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm15, %xmm4
4160 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm9, %xmm5
4161 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm9, %xmm15
4162 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4163 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm5 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
4164 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
4165 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm6, %xmm6
4166 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm14, %xmm7
4167 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4168 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
4169 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
4170 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm12, %xmm4
4171 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm0
4172 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm10, %xmm14
4173 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
4174 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
4175 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm4
4176 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm11, %xmm2
4177 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
4178 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4179 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4180 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
4181 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
4182 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm12, %xmm2
4183 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4184 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm3
4185 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4186 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4187 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm3, %xmm3
4188 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4189 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm4, %xmm4
4190 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4191 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4192 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
4193 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
4194 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
4195 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4196 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4197 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
4198 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4199 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm1, %xmm1
4200 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm13, %xmm2
4201 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4202 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
4203 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4204 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm3, %xmm3
4205 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm8, %xmm4
4206 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm8, %xmm13
4207 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4208 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
4209 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
4210 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
4211 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm9, %xmm4
4212 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm15, %xmm5
4213 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4214 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm5 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
4215 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
4216 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm11, %xmm6
4217 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
4218 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm15, %xmm7
4219 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4220 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
4221 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
4222 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm4 # 16-byte Reload
4223 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm4, %xmm4
4224 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm14, %xmm0
4225 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
4226 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm4
4227 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
4228 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm8, %xmm2
4229 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
4230 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4231 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4232 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
4233 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm12, %xmm2
4234 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
4235 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm14, %xmm3
4236 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4237 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4238 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm3, %xmm3
4239 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4240 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm4, %xmm4
4241 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4242 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4243 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
4244 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
4245 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
4246 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4247 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4248 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
4249 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
4250 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm1
4251 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4252 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm2, %xmm2
4253 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4254 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
4255 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
4256 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm12, %xmm3
4257 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm13, %xmm4
4258 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4259 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
4260 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
4261 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm9, %xmm4
4262 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
4263 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm13, %xmm5
4264 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4265 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm5 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
4266 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm11, %xmm6
4267 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm15, %xmm7
4268 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4269 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
4270 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
4271 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm9 # 16-byte Reload
4272 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm9, %xmm4
4273 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
4274 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm6, %xmm0
4275 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
4276 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4277 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm4, %xmm4
4278 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm8, %xmm2
4279 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
4280 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4281 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4282 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
4283 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
4284 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm15, %xmm2
4285 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm14, %xmm3
4286 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4287 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
4288 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm14, %xmm3
4289 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4290 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm4, %xmm4
4291 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4292 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4293 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
4294 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
4295 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
4296 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4297 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4298 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
4299 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm1
4300 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
4301 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm8, %xmm2
4302 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4303 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
4304 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm12, %xmm11
4305 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm12, %xmm3
4306 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4307 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm4, %xmm4
4308 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4309 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
4310 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
4311 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4312 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm4
4313 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm13, %xmm5
4314 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4315 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm5 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
4316 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
4317 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm6, %xmm6
4318 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
4319 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm7, %xmm7
4320 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4321 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
4322 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
4323 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm9, %xmm4
4324 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
4325 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm9, %xmm0
4326 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
4327 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
4328 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm13, %xmm4
4329 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
4330 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm12, %xmm2
4331 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
4332 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4333 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4334 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
4335 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm15, %xmm2
4336 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4337 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm3
4338 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4339 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm14, %xmm3
4340 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
4341 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm10, %xmm4
4342 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4343 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4344 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
4345 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
4346 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
4347 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4348 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4349 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
4350 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4351 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm1, %xmm1
4352 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm8, %xmm2
4353 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4354 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
4355 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm11, %xmm3
4356 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
4357 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm14, %xmm4
4358 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4359 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
4360 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
4361 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
4362 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm11, %xmm4
4363 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
4364 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm5
4365 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4366 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm5 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
4367 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
4368 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm6, %xmm6
4369 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
4370 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm8, %xmm7
4371 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4372 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
4373 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
4374 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm15 # 16-byte Reload
4375 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm15, %xmm4
4376 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm9, %xmm0
4377 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
4378 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm13, %xmm4
4379 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm12, %xmm2
4380 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
4381 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4382 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4383 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
4384 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
4385 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm13, %xmm2
4386 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
4387 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm9, %xmm3
4388 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4389 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
4390 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm12, %xmm3
4391 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm10, %xmm4
4392 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4393 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4394 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
4395 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
4396 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
4397 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4398 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4399 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
4400 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4401 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm1, %xmm1
4402 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4403 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm2, %xmm2
4404 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4405 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
4406 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4407 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm3, %xmm3
4408 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm14, %xmm4
4409 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4410 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
4411 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
4412 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm11, %xmm4
4413 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
4414 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm5
4415 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4416 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm5 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
4417 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
4418 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm6, %xmm6
4419 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm8, %xmm7
4420 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
4421 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0],xmm4[1],xmm6[2,3]
4422 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
4423 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm15, %xmm4
4424 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
4425 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm6, %xmm0
4426 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
4427 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4428 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm4, %xmm4
4429 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
4430 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm6, %xmm2
4431 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
4432 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4433 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4434 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
4435 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm13, %xmm2
4436 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm9, %xmm3
4437 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4438 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm12, %xmm3
4439 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm5, %xmm10, %xmm4
4440 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4441 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4442 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
4443 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
4444 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
4445 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4446 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4447 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%rsi)
4448 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4449 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%rdx)
4450 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4451 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%rcx)
4452 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4453 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%r8)
4454 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4455 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%r9)
4456 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4457 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4458 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%rax)
4459 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4460 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4461 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%rax)
4462 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4463 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, (%rax)
4464 ; AVX2-FAST-PERLANE-NEXT: addq $360, %rsp # imm = 0x168
4465 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
4466 ; AVX2-FAST-PERLANE-NEXT: retq
4467 ;
4468 ; AVX512F-SLOW-LABEL: load_i8_stride8_vf32:
4469 ; AVX512F-SLOW: # %bb.0:
4470 ; AVX512F-SLOW-NEXT: vmovdqa64 (%rdi), %zmm16
4471 ; AVX512F-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm0
4472 ; AVX512F-SLOW-NEXT: vpmovqb %zmm0, %xmm0
4473 ; AVX512F-SLOW-NEXT: vmovdqa 240(%rdi), %xmm2
4474 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
4475 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm2, %xmm1
4476 ; AVX512F-SLOW-NEXT: vmovdqa %xmm2, %xmm12
4477 ; AVX512F-SLOW-NEXT: vmovdqa 224(%rdi), %xmm4
4478 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm2
4479 ; AVX512F-SLOW-NEXT: vmovdqa %xmm4, %xmm7
4480 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4481 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
4482 ; AVX512F-SLOW-NEXT: vmovdqa 208(%rdi), %xmm5
4483 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm4 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
4484 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm5, %xmm2
4485 ; AVX512F-SLOW-NEXT: vmovdqa %xmm5, %xmm9
4486 ; AVX512F-SLOW-NEXT: vmovdqa 192(%rdi), %xmm5
4487 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm5, %xmm6
4488 ; AVX512F-SLOW-NEXT: vmovdqa %xmm5, %xmm8
4489 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
4490 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4491 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5,6],ymm1[7]
4492 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4493 ; AVX512F-SLOW-NEXT: vmovdqa 128(%rdi), %ymm2
4494 ; AVX512F-SLOW-NEXT: vpmovqb %ymm2, %xmm2
4495 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4496 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4],ymm0[5],ymm2[6,7]
4497 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
4498 ; AVX512F-SLOW-NEXT: vmovdqa 112(%rdi), %xmm1
4499 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm1, %xmm6
4500 ; AVX512F-SLOW-NEXT: vmovdqa %xmm1, %xmm5
4501 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rdi), %xmm1
4502 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm1, %xmm3
4503 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm1, %xmm18
4504 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3]
4505 ; AVX512F-SLOW-NEXT: vmovdqa 80(%rdi), %xmm1
4506 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm1, %xmm11
4507 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm1, %xmm21
4508 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rdi), %xmm3
4509 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm3, %xmm4
4510 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm11[0],xmm4[1],xmm11[1],xmm4[2],xmm11[2],xmm4[3],xmm11[3]
4511 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm4[0,1,2],xmm10[3]
4512 ; AVX512F-SLOW-NEXT: vpmovqb %zmm16, %xmm10
4513 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm10[0,1],xmm4[2,3]
4514 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
4515 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm17
4516 ; AVX512F-SLOW-NEXT: vmovdqa 160(%rdi), %xmm0
4517 ; AVX512F-SLOW-NEXT: vmovdqa 176(%rdi), %xmm6
4518 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
4519 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm4
4520 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm12, %xmm19
4521 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm7, %xmm12
4522 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm12[0],xmm4[0],xmm12[1],xmm4[1],xmm12[2],xmm4[2],xmm12[3],xmm4[3]
4523 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
4524 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm14 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
4525 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm9, %xmm12
4526 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm9, %xmm22
4527 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm8, %xmm13
4528 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm8, %xmm20
4529 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
4530 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm12, %ymm0, %ymm12
4531 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm12[0,1,2,3,4,5,6],ymm4[7]
4532 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm12 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
4533 ; AVX512F-SLOW-NEXT: vpshufb %xmm12, %xmm6, %xmm13
4534 ; AVX512F-SLOW-NEXT: vpshufb %xmm12, %xmm0, %xmm12
4535 ; AVX512F-SLOW-NEXT: vmovdqa %xmm0, %xmm10
4536 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
4537 ; AVX512F-SLOW-NEXT: vmovdqa 128(%rdi), %xmm8
4538 ; AVX512F-SLOW-NEXT: vmovdqa 144(%rdi), %xmm9
4539 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm1 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
4540 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm9, %xmm0
4541 ; AVX512F-SLOW-NEXT: vmovdqa %xmm9, %xmm13
4542 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm8, %xmm1
4543 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm8, %xmm26
4544 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
4545 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm1
4546 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4547 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7]
4548 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm4[6,7]
4549 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm1
4550 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm5, %xmm24
4551 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm18, %xmm5
4552 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm2
4553 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4554 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm21, %xmm4
4555 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm4, %xmm2
4556 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm3, %xmm4
4557 ; AVX512F-SLOW-NEXT: vmovdqa %xmm3, %xmm11
4558 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
4559 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3]
4560 ; AVX512F-SLOW-NEXT: vpsrlq $8, %zmm16, %zmm2
4561 ; AVX512F-SLOW-NEXT: vpmovqb %zmm2, %xmm2
4562 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
4563 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4564 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm18
4565 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm0 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
4566 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm19, %xmm3
4567 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm1
4568 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm2
4569 ; AVX512F-SLOW-NEXT: vmovdqa %xmm7, %xmm9
4570 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4571 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
4572 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm22, %xmm8
4573 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm4
4574 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm20, %xmm7
4575 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm7, %xmm14
4576 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm14[0],xmm4[0],xmm14[1],xmm4[1],xmm14[2],xmm4[2],xmm14[3],xmm4[3]
4577 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
4578 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
4579 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5,6],ymm1[7]
4580 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm4 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
4581 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm6, %xmm14
4582 ; AVX512F-SLOW-NEXT: vmovdqa %xmm6, %xmm12
4583 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm10, %xmm4
4584 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm14[0],xmm4[1],xmm14[1],xmm4[2],xmm14[2],xmm4[3],xmm14[3]
4585 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm14 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
4586 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm13, %xmm15
4587 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm26, %xmm6
4588 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm6, %xmm14
4589 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
4590 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
4591 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
4592 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm14[0,1,2,3,4],ymm4[5],ymm14[6,7]
4593 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5],ymm1[6,7]
4594 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm24, %xmm4
4595 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
4596 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm0
4597 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm5, %xmm20
4598 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
4599 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm21, %xmm6
4600 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm4
4601 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm2
4602 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
4603 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3]
4604 ; AVX512F-SLOW-NEXT: vpsrlq $16, %zmm16, %zmm2
4605 ; AVX512F-SLOW-NEXT: vpmovqb %zmm2, %xmm2
4606 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3]
4607 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
4608 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm19
4609 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm0 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
4610 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm1
4611 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm3, %xmm25
4612 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm9, %xmm2
4613 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4614 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
4615 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm4
4616 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm7, %xmm14
4617 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm7, %xmm22
4618 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm14[0],xmm4[0],xmm14[1],xmm4[1],xmm14[2],xmm4[2],xmm14[3],xmm4[3]
4619 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
4620 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
4621 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5,6],ymm1[7]
4622 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm4 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
4623 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm12, %xmm14
4624 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm12, %xmm27
4625 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm10, %xmm4
4626 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm14[0],xmm4[1],xmm14[1],xmm4[2],xmm14[2],xmm4[3],xmm14[3]
4627 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm14 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
4628 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm13, %xmm15
4629 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm26, %xmm12
4630 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm12, %xmm14
4631 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
4632 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
4633 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
4634 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm14[0,1,2,3,4],ymm4[5],ymm14[6,7]
4635 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5],ymm1[6,7]
4636 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm24, %xmm5
4637 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm4
4638 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm20, %xmm15
4639 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm15, %xmm0
4640 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
4641 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm4
4642 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm2
4643 ; AVX512F-SLOW-NEXT: vmovdqa %xmm11, %xmm6
4644 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
4645 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3]
4646 ; AVX512F-SLOW-NEXT: vpsrlq $24, %zmm16, %zmm2
4647 ; AVX512F-SLOW-NEXT: vpmovqb %zmm2, %xmm2
4648 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3]
4649 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
4650 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm20
4651 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
4652 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm25, %xmm11
4653 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm1
4654 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm2
4655 ; AVX512F-SLOW-NEXT: vmovdqa %xmm9, %xmm7
4656 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4657 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
4658 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm4
4659 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm22, %xmm9
4660 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm9, %xmm14
4661 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm14[0],xmm4[0],xmm14[1],xmm4[1],xmm14[2],xmm4[2],xmm14[3],xmm4[3]
4662 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
4663 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
4664 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5,6],ymm1[7]
4665 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm4 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
4666 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm27, %xmm0
4667 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm0, %xmm14
4668 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm10, %xmm4
4669 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm10, %xmm22
4670 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm14[0],xmm4[1],xmm14[1],xmm4[2],xmm14[2],xmm4[3],xmm14[3]
4671 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm14 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
4672 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm13, %xmm0
4673 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm13, %xmm25
4674 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm12, %xmm14
4675 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3]
4676 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
4677 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4678 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm4[5],ymm0[6,7]
4679 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
4680 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm1
4681 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm15, %xmm3
4682 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
4683 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm21, %xmm10
4684 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm3
4685 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm2
4686 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm6, %xmm23
4687 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
4688 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3]
4689 ; AVX512F-SLOW-NEXT: vpsrlq $32, %zmm16, %zmm2
4690 ; AVX512F-SLOW-NEXT: vpmovqb %zmm2, %xmm2
4691 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
4692 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4693 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm21
4694 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm5 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
4695 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm11, %xmm1
4696 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm7, %xmm2
4697 ; AVX512F-SLOW-NEXT: vmovdqa %xmm7, %xmm13
4698 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4699 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
4700 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm3
4701 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm8, %xmm26
4702 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm9, %xmm4
4703 ; AVX512F-SLOW-NEXT: vmovdqa %xmm9, %xmm14
4704 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
4705 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
4706 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
4707 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5,6],ymm1[7]
4708 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
4709 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm27, %xmm6
4710 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm4
4711 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm22, %xmm7
4712 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm7, %xmm3
4713 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
4714 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm4 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
4715 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm25, %xmm9
4716 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm9, %xmm0
4717 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm12, %xmm4
4718 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
4719 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
4720 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4721 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm3[5],ymm0[6,7]
4722 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
4723 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm24, %xmm8
4724 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm8, %xmm1
4725 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm15, %xmm3
4726 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
4727 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm3
4728 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm10, %xmm24
4729 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm23, %xmm4
4730 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm4, %xmm2
4731 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
4732 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3]
4733 ; AVX512F-SLOW-NEXT: vpsrlq $40, %zmm16, %zmm2
4734 ; AVX512F-SLOW-NEXT: vpmovqb %zmm2, %xmm2
4735 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
4736 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4737 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm22
4738 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm10 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
4739 ; AVX512F-SLOW-NEXT: vpshufb %xmm10, %xmm11, %xmm1
4740 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm11, %xmm23
4741 ; AVX512F-SLOW-NEXT: vpshufb %xmm10, %xmm13, %xmm2
4742 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
4743 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
4744 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm26, %xmm11
4745 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm3
4746 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm14, %xmm5
4747 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm14, %xmm25
4748 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
4749 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
4750 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
4751 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5,6],ymm1[7]
4752 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
4753 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm5
4754 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm7, %xmm3
4755 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
4756 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm5 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
4757 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm9, %xmm0
4758 ; AVX512F-SLOW-NEXT: vmovdqa %xmm9, %xmm14
4759 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm12, %xmm5
4760 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
4761 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
4762 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4763 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm3[5],ymm0[6,7]
4764 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
4765 ; AVX512F-SLOW-NEXT: vpshufb %xmm10, %xmm8, %xmm1
4766 ; AVX512F-SLOW-NEXT: vmovdqa %xmm8, %xmm9
4767 ; AVX512F-SLOW-NEXT: vpshufb %xmm10, %xmm15, %xmm3
4768 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
4769 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm24, %xmm10
4770 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm3
4771 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm4, %xmm2
4772 ; AVX512F-SLOW-NEXT: vmovdqa %xmm4, %xmm8
4773 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
4774 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3]
4775 ; AVX512F-SLOW-NEXT: vpsrlq $48, %zmm16, %zmm2
4776 ; AVX512F-SLOW-NEXT: vpmovqb %zmm2, %xmm2
4777 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
4778 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
4779 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm1 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
4780 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm23, %xmm2
4781 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm2, %xmm2
4782 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm13, %xmm3
4783 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
4784 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
4785 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm4
4786 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm25, %xmm5
4787 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
4788 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
4789 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
4790 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
4791 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4,5,6],ymm2[7]
4792 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm4 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
4793 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm6, %xmm5
4794 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm7, %xmm4
4795 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
4796 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm5 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
4797 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm14, %xmm7
4798 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm12, %xmm5
4799 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1],xmm5[2],xmm7[2],xmm5[3],xmm7[3]
4800 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
4801 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
4802 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2,3,4],ymm4[5],ymm5[6,7]
4803 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4,5],ymm2[6,7]
4804 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm9, %xmm4
4805 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm15, %xmm1
4806 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
4807 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm10, %xmm4
4808 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm8, %xmm3
4809 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
4810 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
4811 ; AVX512F-SLOW-NEXT: vpsrlq $56, %zmm16, %zmm3
4812 ; AVX512F-SLOW-NEXT: vpmovqb %zmm3, %xmm3
4813 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1],xmm1[2,3]
4814 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
4815 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm17, (%rsi)
4816 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm18, (%rdx)
4817 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm19, (%rcx)
4818 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm20, (%r8)
4819 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm21, (%r9)
4820 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
4821 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm22, (%rax)
4822 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
4823 ; AVX512F-SLOW-NEXT: vmovdqa %ymm0, (%rax)
4824 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
4825 ; AVX512F-SLOW-NEXT: vmovdqa %ymm1, (%rax)
4826 ; AVX512F-SLOW-NEXT: vzeroupper
4827 ; AVX512F-SLOW-NEXT: retq
4828 ;
4829 ; AVX512F-FAST-LABEL: load_i8_stride8_vf32:
4830 ; AVX512F-FAST: # %bb.0:
4831 ; AVX512F-FAST-NEXT: vmovdqa64 (%rdi), %zmm18
4832 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm14 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
4833 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [0,2,2,3,0,2,4,6]
4834 ; AVX512F-FAST-NEXT: vmovdqa64 224(%rdi), %ymm16
4835 ; AVX512F-FAST-NEXT: vpermd %ymm16, %ymm0, %ymm4
4836 ; AVX512F-FAST-NEXT: vpshufb %ymm14, %ymm4, %ymm1
4837 ; AVX512F-FAST-NEXT: vmovdqa64 192(%rdi), %ymm17
4838 ; AVX512F-FAST-NEXT: vpermd %ymm17, %ymm0, %ymm6
4839 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u]
4840 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5,6],ymm1[7]
4841 ; AVX512F-FAST-NEXT: vmovdqa 160(%rdi), %ymm13
4842 ; AVX512F-FAST-NEXT: vpermd %ymm13, %ymm0, %ymm9
4843 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u]
4844 ; AVX512F-FAST-NEXT: vmovdqa 128(%rdi), %ymm15
4845 ; AVX512F-FAST-NEXT: vpermd %ymm15, %ymm0, %ymm11
4846 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u,u,u,u,u]
4847 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm2[5],ymm0[6,7]
4848 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
4849 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm3 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
4850 ; AVX512F-FAST-NEXT: vmovdqa 112(%rdi), %xmm1
4851 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm1, %xmm5
4852 ; AVX512F-FAST-NEXT: vmovdqa %xmm1, %xmm2
4853 ; AVX512F-FAST-NEXT: vmovdqa 96(%rdi), %xmm1
4854 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm1, %xmm3
4855 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
4856 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm8 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
4857 ; AVX512F-FAST-NEXT: vmovdqa 80(%rdi), %xmm12
4858 ; AVX512F-FAST-NEXT: vpshufb %xmm8, %xmm12, %xmm10
4859 ; AVX512F-FAST-NEXT: vmovdqa 64(%rdi), %xmm5
4860 ; AVX512F-FAST-NEXT: vpshufb %xmm8, %xmm5, %xmm8
4861 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm10[0],xmm8[1],xmm10[1],xmm8[2],xmm10[2],xmm8[3],xmm10[3]
4862 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1,2],xmm7[3]
4863 ; AVX512F-FAST-NEXT: vpmovqb %zmm18, %xmm8
4864 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1],xmm7[2,3]
4865 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm7[0,1,2,3],ymm0[4,5,6,7]
4866 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm0, %ymm22
4867 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29]
4868 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u]
4869 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm7[0,1,2,3,4,5,6],ymm0[7]
4870 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
4871 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm9, %ymm7
4872 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm3, %ymm27
4873 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
4874 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm11, %ymm8
4875 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm3, %ymm28
4876 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0,1,2,3,4],ymm7[5],ymm8[6,7]
4877 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm7[0,1,2,3,4,5],ymm0[6,7]
4878 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm7 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
4879 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm2, %xmm8
4880 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm1, %xmm7
4881 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
4882 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm8 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
4883 ; AVX512F-FAST-NEXT: vpshufb %xmm8, %xmm12, %xmm10
4884 ; AVX512F-FAST-NEXT: vpshufb %xmm8, %xmm5, %xmm8
4885 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm10[0],xmm8[1],xmm10[1],xmm8[2],xmm10[2],xmm8[3],xmm10[3]
4886 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1,2],xmm7[3]
4887 ; AVX512F-FAST-NEXT: vpsrlq $8, %zmm18, %zmm8
4888 ; AVX512F-FAST-NEXT: vpmovqb %zmm8, %xmm8
4889 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1],xmm7[2,3]
4890 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm7[0,1,2,3],ymm0[4,5,6,7]
4891 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm0, %ymm20
4892 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
4893 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm4, %ymm0
4894 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm3, %ymm29
4895 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
4896 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm6, %ymm7
4897 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm3, %ymm30
4898 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm7[0,1,2,3,4,5,6],ymm0[7]
4899 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
4900 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm9, %ymm7
4901 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm3, %ymm31
4902 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
4903 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm11, %ymm8
4904 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm3, %ymm26
4905 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0,1,2,3,4],ymm7[5],ymm8[6,7]
4906 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm7[0,1,2,3,4,5],ymm0[6,7]
4907 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm7 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
4908 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm2, %xmm8
4909 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm1, %xmm7
4910 ; AVX512F-FAST-NEXT: vmovdqa %xmm1, %xmm3
4911 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
4912 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm8 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
4913 ; AVX512F-FAST-NEXT: vpshufb %xmm8, %xmm12, %xmm10
4914 ; AVX512F-FAST-NEXT: vpshufb %xmm8, %xmm5, %xmm8
4915 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm10[0],xmm8[1],xmm10[1],xmm8[2],xmm10[2],xmm8[3],xmm10[3]
4916 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1,2],xmm7[3]
4917 ; AVX512F-FAST-NEXT: vpsrlq $16, %zmm18, %zmm8
4918 ; AVX512F-FAST-NEXT: vpmovqb %zmm8, %xmm8
4919 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1],xmm7[2,3]
4920 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm7[0,1,2,3],ymm0[4,5,6,7]
4921 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm0, %ymm21
4922 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
4923 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm4, %ymm0
4924 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm1, %ymm25
4925 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
4926 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm6, %ymm4
4927 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm1, %ymm24
4928 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3,4,5,6],ymm0[7]
4929 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
4930 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm9, %ymm4
4931 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm1, %ymm23
4932 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm8 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
4933 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm11, %ymm6
4934 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm6[0,1,2,3,4],ymm4[5],ymm6[6,7]
4935 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3,4,5],ymm0[6,7]
4936 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm4 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
4937 ; AVX512F-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm6
4938 ; AVX512F-FAST-NEXT: vpshufb %xmm4, %xmm3, %xmm4
4939 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xmm6[3]
4940 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm6 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
4941 ; AVX512F-FAST-NEXT: vpshufb %xmm6, %xmm12, %xmm9
4942 ; AVX512F-FAST-NEXT: vpshufb %xmm6, %xmm5, %xmm6
4943 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm9[0],xmm6[1],xmm9[1],xmm6[2],xmm9[2],xmm6[3],xmm9[3]
4944 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0,1,2],xmm4[3]
4945 ; AVX512F-FAST-NEXT: vpsrlq $24, %zmm18, %zmm6
4946 ; AVX512F-FAST-NEXT: vpmovqb %zmm6, %xmm6
4947 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0,1],xmm4[2,3]
4948 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
4949 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm0, %ymm19
4950 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [1,3,2,3,1,3,5,7]
4951 ; AVX512F-FAST-NEXT: vpermd %ymm16, %ymm0, %ymm11
4952 ; AVX512F-FAST-NEXT: vpshufb %ymm14, %ymm11, %ymm4
4953 ; AVX512F-FAST-NEXT: vpermd %ymm17, %ymm0, %ymm9
4954 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u]
4955 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm14[0,1,2,3,4,5,6],ymm4[7]
4956 ; AVX512F-FAST-NEXT: vpermd %ymm13, %ymm0, %ymm13
4957 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u]
4958 ; AVX512F-FAST-NEXT: vpermd %ymm15, %ymm0, %ymm1
4959 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm15 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u,u,u,u,u]
4960 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0,1,2,3,4],ymm14[5],ymm15[6,7]
4961 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm14[0,1,2,3,4,5],ymm4[6,7]
4962 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm14 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
4963 ; AVX512F-FAST-NEXT: vmovdqa %xmm2, %xmm6
4964 ; AVX512F-FAST-NEXT: vpshufb %xmm14, %xmm2, %xmm15
4965 ; AVX512F-FAST-NEXT: vpshufb %xmm14, %xmm3, %xmm14
4966 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
4967 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm15 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
4968 ; AVX512F-FAST-NEXT: vpshufb %xmm15, %xmm12, %xmm0
4969 ; AVX512F-FAST-NEXT: vpshufb %xmm15, %xmm5, %xmm15
4970 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
4971 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm14[3]
4972 ; AVX512F-FAST-NEXT: vpsrlq $32, %zmm18, %zmm14
4973 ; AVX512F-FAST-NEXT: vpmovqb %zmm14, %xmm14
4974 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm14[0,1],xmm0[2,3]
4975 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm15 = ymm0[0,1,2,3],ymm4[4,5,6,7]
4976 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29]
4977 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u]
4978 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3,4,5,6],ymm0[7]
4979 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm27, %ymm2
4980 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm13, %ymm4
4981 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm28, %ymm2
4982 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm1, %ymm14
4983 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm14[0,1,2,3,4],ymm4[5],ymm14[6,7]
4984 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4,5],ymm0[6,7]
4985 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm4 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
4986 ; AVX512F-FAST-NEXT: vpshufb %xmm4, %xmm6, %xmm14
4987 ; AVX512F-FAST-NEXT: vmovdqa %xmm3, %xmm7
4988 ; AVX512F-FAST-NEXT: vpshufb %xmm4, %xmm3, %xmm4
4989 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm14[0],xmm4[1],xmm14[1],xmm4[2],xmm14[2],xmm4[3],xmm14[3]
4990 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm14 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
4991 ; AVX512F-FAST-NEXT: vpshufb %xmm14, %xmm12, %xmm0
4992 ; AVX512F-FAST-NEXT: vpshufb %xmm14, %xmm5, %xmm14
4993 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3]
4994 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm4[3]
4995 ; AVX512F-FAST-NEXT: vpsrlq $40, %zmm18, %zmm4
4996 ; AVX512F-FAST-NEXT: vpmovqb %zmm4, %xmm4
4997 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm4[0,1],xmm0[2,3]
4998 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm0[0,1,2,3],ymm2[4,5,6,7]
4999 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm29, %ymm0
5000 ; AVX512F-FAST-NEXT: vpshufb %ymm0, %ymm11, %ymm0
5001 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm30, %ymm2
5002 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm9, %ymm2
5003 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5,6],ymm0[7]
5004 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm31, %ymm2
5005 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm13, %ymm2
5006 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm26, %ymm4
5007 ; AVX512F-FAST-NEXT: vpshufb %ymm4, %ymm1, %ymm14
5008 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm14[0,1,2,3,4],ymm2[5],ymm14[6,7]
5009 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm2[0,1,2,3,4,5],ymm0[6,7]
5010 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm2 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
5011 ; AVX512F-FAST-NEXT: vpshufb %xmm2, %xmm6, %xmm14
5012 ; AVX512F-FAST-NEXT: vpshufb %xmm2, %xmm3, %xmm2
5013 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm14[0],xmm2[1],xmm14[1],xmm2[2],xmm14[2],xmm2[3],xmm14[3]
5014 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm14 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
5015 ; AVX512F-FAST-NEXT: vpshufb %xmm14, %xmm12, %xmm0
5016 ; AVX512F-FAST-NEXT: vpshufb %xmm14, %xmm5, %xmm14
5017 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3]
5018 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3]
5019 ; AVX512F-FAST-NEXT: vpsrlq $48, %zmm18, %zmm2
5020 ; AVX512F-FAST-NEXT: vpmovqb %zmm2, %xmm2
5021 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3]
5022 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
5023 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm25, %ymm2
5024 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm11, %ymm2
5025 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm24, %ymm3
5026 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm9, %ymm3
5027 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
5028 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm23, %ymm3
5029 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm13, %ymm3
5030 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm1, %ymm1
5031 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm3[5],ymm1[6,7]
5032 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
5033 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm2 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
5034 ; AVX512F-FAST-NEXT: vpshufb %xmm2, %xmm6, %xmm3
5035 ; AVX512F-FAST-NEXT: vpshufb %xmm2, %xmm7, %xmm2
5036 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
5037 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm3 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
5038 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm12, %xmm8
5039 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm5, %xmm3
5040 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm8[0],xmm3[1],xmm8[1],xmm3[2],xmm8[2],xmm3[3],xmm8[3]
5041 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3]
5042 ; AVX512F-FAST-NEXT: vpsrlq $56, %zmm18, %zmm3
5043 ; AVX512F-FAST-NEXT: vpmovqb %zmm3, %xmm3
5044 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3]
5045 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
5046 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm22, (%rsi)
5047 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm20, (%rdx)
5048 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm21, (%rcx)
5049 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm19, (%r8)
5050 ; AVX512F-FAST-NEXT: vmovdqa %ymm15, (%r9)
5051 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
5052 ; AVX512F-FAST-NEXT: vmovdqa %ymm10, (%rax)
5053 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
5054 ; AVX512F-FAST-NEXT: vmovdqa %ymm0, (%rax)
5055 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
5056 ; AVX512F-FAST-NEXT: vmovdqa %ymm1, (%rax)
5057 ; AVX512F-FAST-NEXT: vzeroupper
5058 ; AVX512F-FAST-NEXT: retq
5059 ;
5060 ; AVX512BW-SLOW-LABEL: load_i8_stride8_vf32:
5061 ; AVX512BW-SLOW: # %bb.0:
5062 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
5063 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
5064 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r11
5065 ; AVX512BW-SLOW-NEXT: vmovdqa64 (%rdi), %zmm16
5066 ; AVX512BW-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm0
5067 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm0, %xmm0
5068 ; AVX512BW-SLOW-NEXT: vmovdqa 240(%rdi), %xmm2
5069 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm4 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
5070 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm2, %xmm1
5071 ; AVX512BW-SLOW-NEXT: vmovdqa 224(%rdi), %xmm6
5072 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm6, %xmm3
5073 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
5074 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
5075 ; AVX512BW-SLOW-NEXT: vmovdqa 208(%rdi), %xmm7
5076 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm5 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
5077 ; AVX512BW-SLOW-NEXT: vpshufb %xmm5, %xmm7, %xmm3
5078 ; AVX512BW-SLOW-NEXT: vmovdqa 192(%rdi), %xmm9
5079 ; AVX512BW-SLOW-NEXT: vpshufb %xmm5, %xmm9, %xmm8
5080 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3]
5081 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
5082 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5,6],ymm1[7]
5083 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
5084 ; AVX512BW-SLOW-NEXT: vmovdqa 128(%rdi), %ymm3
5085 ; AVX512BW-SLOW-NEXT: vpmovqb %ymm3, %xmm3
5086 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
5087 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4],ymm0[5],ymm3[6,7]
5088 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
5089 ; AVX512BW-SLOW-NEXT: vmovdqa 112(%rdi), %xmm1
5090 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm1, %xmm8
5091 ; AVX512BW-SLOW-NEXT: vmovdqa 96(%rdi), %xmm3
5092 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm3, %xmm4
5093 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm4[0],xmm8[0],xmm4[1],xmm8[1],xmm4[2],xmm8[2],xmm4[3],xmm8[3]
5094 ; AVX512BW-SLOW-NEXT: vmovdqa 80(%rdi), %xmm4
5095 ; AVX512BW-SLOW-NEXT: vpshufb %xmm5, %xmm4, %xmm11
5096 ; AVX512BW-SLOW-NEXT: vmovdqa 64(%rdi), %xmm8
5097 ; AVX512BW-SLOW-NEXT: vpshufb %xmm5, %xmm8, %xmm5
5098 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm11[0],xmm5[1],xmm11[1],xmm5[2],xmm11[2],xmm5[3],xmm11[3]
5099 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1,2],xmm10[3]
5100 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm16, %xmm10
5101 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm10[0,1],xmm5[2,3]
5102 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0,1,2,3],ymm0[4,5,6,7]
5103 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm0, %ymm20
5104 ; AVX512BW-SLOW-NEXT: vmovdqa64 128(%rdi), %xmm19
5105 ; AVX512BW-SLOW-NEXT: vmovdqa 144(%rdi), %xmm11
5106 ; AVX512BW-SLOW-NEXT: vmovdqa 160(%rdi), %xmm12
5107 ; AVX512BW-SLOW-NEXT: vmovdqa 176(%rdi), %xmm13
5108 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
5109 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm5
5110 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm14
5111 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm14[0],xmm5[0],xmm14[1],xmm5[1],xmm14[2],xmm5[2],xmm14[3],xmm5[3]
5112 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
5113 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm14 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
5114 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm7, %xmm15
5115 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm9, %xmm17
5116 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm17[0],xmm15[0],xmm17[1],xmm15[1],xmm17[2],xmm15[2],xmm17[3],xmm15[3]
5117 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
5118 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm15[0,1,2,3,4,5,6],ymm5[7]
5119 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm15 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
5120 ; AVX512BW-SLOW-NEXT: vpshufb %xmm15, %xmm13, %xmm17
5121 ; AVX512BW-SLOW-NEXT: vpshufb %xmm15, %xmm12, %xmm15
5122 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm15[0],xmm17[0],xmm15[1],xmm17[1],xmm15[2],xmm17[2],xmm15[3],xmm17[3]
5123 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
5124 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm17 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
5125 ; AVX512BW-SLOW-NEXT: vpshufb %xmm17, %xmm11, %xmm18
5126 ; AVX512BW-SLOW-NEXT: vpshufb %xmm17, %xmm19, %xmm17
5127 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm17 = xmm17[0],xmm18[0],xmm17[1],xmm18[1],xmm17[2],xmm18[2],xmm17[3],xmm18[3]
5128 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm17, %ymm0, %ymm10
5129 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3,4],ymm15[5],ymm10[6,7]
5130 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm10[0,1,2,3,4,5],ymm5[6,7]
5131 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm10
5132 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm0
5133 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm10[0],xmm0[1],xmm10[1],xmm0[2],xmm10[2],xmm0[3],xmm10[3]
5134 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm4, %xmm10
5135 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm8, %xmm14
5136 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3]
5137 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1,2],xmm0[3]
5138 ; AVX512BW-SLOW-NEXT: vpsrlq $8, %zmm16, %zmm10
5139 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm10, %xmm10
5140 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1],xmm0[2,3]
5141 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
5142 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm0, %ymm21
5143 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
5144 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm5
5145 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm10
5146 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm10[0],xmm5[0],xmm10[1],xmm5[1],xmm10[2],xmm5[2],xmm10[3],xmm5[3]
5147 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
5148 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm10 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
5149 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm7, %xmm14
5150 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm9, %xmm15
5151 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
5152 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
5153 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm14[0,1,2,3,4,5,6],ymm5[7]
5154 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm14 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
5155 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm13, %xmm15
5156 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm12, %xmm14
5157 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
5158 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
5159 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm15 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
5160 ; AVX512BW-SLOW-NEXT: vpshufb %xmm15, %xmm11, %xmm17
5161 ; AVX512BW-SLOW-NEXT: vpshufb %xmm15, %xmm19, %xmm15
5162 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm15[0],xmm17[0],xmm15[1],xmm17[1],xmm15[2],xmm17[2],xmm15[3],xmm17[3]
5163 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
5164 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0,1,2,3,4],ymm14[5],ymm15[6,7]
5165 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm14[0,1,2,3,4,5],ymm5[6,7]
5166 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm14
5167 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm0
5168 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm14[0],xmm0[1],xmm14[1],xmm0[2],xmm14[2],xmm0[3],xmm14[3]
5169 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm4, %xmm14
5170 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm8, %xmm10
5171 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm14[0],xmm10[1],xmm14[1],xmm10[2],xmm14[2],xmm10[3],xmm14[3]
5172 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1,2],xmm0[3]
5173 ; AVX512BW-SLOW-NEXT: vpsrlq $16, %zmm16, %zmm10
5174 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm10, %xmm10
5175 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1],xmm0[2,3]
5176 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
5177 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm0, %ymm22
5178 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
5179 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm5
5180 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm10
5181 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm10[0],xmm5[0],xmm10[1],xmm5[1],xmm10[2],xmm5[2],xmm10[3],xmm5[3]
5182 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
5183 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm10 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
5184 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm7, %xmm14
5185 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm9, %xmm15
5186 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
5187 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
5188 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm14[0,1,2,3,4,5,6],ymm5[7]
5189 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm14 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
5190 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm13, %xmm15
5191 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm12, %xmm14
5192 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
5193 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
5194 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm15 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
5195 ; AVX512BW-SLOW-NEXT: vpshufb %xmm15, %xmm11, %xmm17
5196 ; AVX512BW-SLOW-NEXT: vpshufb %xmm15, %xmm19, %xmm15
5197 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm15[0],xmm17[0],xmm15[1],xmm17[1],xmm15[2],xmm17[2],xmm15[3],xmm17[3]
5198 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
5199 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0,1,2,3,4],ymm14[5],ymm15[6,7]
5200 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm14[0,1,2,3,4,5],ymm5[6,7]
5201 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm14
5202 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm0
5203 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm14[0],xmm0[1],xmm14[1],xmm0[2],xmm14[2],xmm0[3],xmm14[3]
5204 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm4, %xmm14
5205 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm8, %xmm10
5206 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm14[0],xmm10[1],xmm14[1],xmm10[2],xmm14[2],xmm10[3],xmm14[3]
5207 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1,2],xmm0[3]
5208 ; AVX512BW-SLOW-NEXT: vpsrlq $24, %zmm16, %zmm10
5209 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm10, %xmm10
5210 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1],xmm0[2,3]
5211 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
5212 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm0, %ymm23
5213 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
5214 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm5
5215 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm10
5216 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm10[0],xmm5[0],xmm10[1],xmm5[1],xmm10[2],xmm5[2],xmm10[3],xmm5[3]
5217 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
5218 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm10 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
5219 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm7, %xmm14
5220 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm9, %xmm17
5221 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm17[0],xmm14[0],xmm17[1],xmm14[1],xmm17[2],xmm14[2],xmm17[3],xmm14[3]
5222 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
5223 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm14[0,1,2,3,4,5,6],ymm5[7]
5224 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm14 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
5225 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm13, %xmm17
5226 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm12, %xmm14
5227 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm17[0],xmm14[1],xmm17[1],xmm14[2],xmm17[2],xmm14[3],xmm17[3]
5228 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
5229 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm17 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
5230 ; AVX512BW-SLOW-NEXT: vpshufb %xmm17, %xmm11, %xmm18
5231 ; AVX512BW-SLOW-NEXT: vpshufb %xmm17, %xmm19, %xmm17
5232 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm17 = xmm17[0],xmm18[0],xmm17[1],xmm18[1],xmm17[2],xmm18[2],xmm17[3],xmm18[3]
5233 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm17, %ymm0, %ymm15
5234 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0,1,2,3,4],ymm14[5],ymm15[6,7]
5235 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm14[0,1,2,3,4,5],ymm5[6,7]
5236 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm14
5237 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm0
5238 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm14[0],xmm0[1],xmm14[1],xmm0[2],xmm14[2],xmm0[3],xmm14[3]
5239 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm4, %xmm14
5240 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm8, %xmm10
5241 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm14[0],xmm10[1],xmm14[1],xmm10[2],xmm14[2],xmm10[3],xmm14[3]
5242 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1,2],xmm0[3]
5243 ; AVX512BW-SLOW-NEXT: vpsrlq $32, %zmm16, %zmm10
5244 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm10, %xmm10
5245 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1],xmm0[2,3]
5246 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
5247 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm0, %ymm24
5248 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
5249 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm5
5250 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm10
5251 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm10[0],xmm5[0],xmm10[1],xmm5[1],xmm10[2],xmm5[2],xmm10[3],xmm5[3]
5252 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
5253 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm10 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
5254 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm7, %xmm15
5255 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm9, %xmm17
5256 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm17[0],xmm15[0],xmm17[1],xmm15[1],xmm17[2],xmm15[2],xmm17[3],xmm15[3]
5257 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
5258 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm15[0,1,2,3,4,5,6],ymm5[7]
5259 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm15 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
5260 ; AVX512BW-SLOW-NEXT: vpshufb %xmm15, %xmm13, %xmm17
5261 ; AVX512BW-SLOW-NEXT: vpshufb %xmm15, %xmm12, %xmm15
5262 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm15[0],xmm17[0],xmm15[1],xmm17[1],xmm15[2],xmm17[2],xmm15[3],xmm17[3]
5263 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
5264 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm17 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
5265 ; AVX512BW-SLOW-NEXT: vpshufb %xmm17, %xmm11, %xmm18
5266 ; AVX512BW-SLOW-NEXT: vpshufb %xmm17, %xmm19, %xmm17
5267 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm17 = xmm17[0],xmm18[0],xmm17[1],xmm18[1],xmm17[2],xmm18[2],xmm17[3],xmm18[3]
5268 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm17, %ymm0, %ymm14
5269 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0,1,2,3,4],ymm15[5],ymm14[6,7]
5270 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm14[0,1,2,3,4,5],ymm5[6,7]
5271 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm14
5272 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm0
5273 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm14[0],xmm0[1],xmm14[1],xmm0[2],xmm14[2],xmm0[3],xmm14[3]
5274 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm4, %xmm14
5275 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm8, %xmm10
5276 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm14[0],xmm10[1],xmm14[1],xmm10[2],xmm14[2],xmm10[3],xmm14[3]
5277 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1,2],xmm0[3]
5278 ; AVX512BW-SLOW-NEXT: vpsrlq $40, %zmm16, %zmm10
5279 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm10, %xmm10
5280 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1],xmm0[2,3]
5281 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
5282 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm0, %ymm25
5283 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
5284 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm10
5285 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm14
5286 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3]
5287 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
5288 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm14 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
5289 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm7, %xmm15
5290 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm9, %xmm17
5291 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm17[0],xmm15[0],xmm17[1],xmm15[1],xmm17[2],xmm15[2],xmm17[3],xmm15[3]
5292 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
5293 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm10 = ymm15[0,1,2,3,4,5,6],ymm10[7]
5294 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm15 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
5295 ; AVX512BW-SLOW-NEXT: vpshufb %xmm15, %xmm13, %xmm17
5296 ; AVX512BW-SLOW-NEXT: vpshufb %xmm15, %xmm12, %xmm15
5297 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm15[0],xmm17[0],xmm15[1],xmm17[1],xmm15[2],xmm17[2],xmm15[3],xmm17[3]
5298 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm0, %ymm15
5299 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm17 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
5300 ; AVX512BW-SLOW-NEXT: vpshufb %xmm17, %xmm11, %xmm18
5301 ; AVX512BW-SLOW-NEXT: vpshufb %xmm17, %xmm19, %xmm17
5302 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm17 = xmm17[0],xmm18[0],xmm17[1],xmm18[1],xmm17[2],xmm18[2],xmm17[3],xmm18[3]
5303 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm17, %ymm0, %ymm5
5304 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4],ymm15[5],ymm5[6,7]
5305 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5],ymm10[6,7]
5306 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm10
5307 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm0
5308 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm10[0],xmm0[1],xmm10[1],xmm0[2],xmm10[2],xmm0[3],xmm10[3]
5309 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm4, %xmm10
5310 ; AVX512BW-SLOW-NEXT: vpshufb %xmm14, %xmm8, %xmm14
5311 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3]
5312 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1,2],xmm0[3]
5313 ; AVX512BW-SLOW-NEXT: vpsrlq $48, %zmm16, %zmm10
5314 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm10, %xmm10
5315 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1],xmm0[2,3]
5316 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm5[4,5,6,7]
5317 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm5 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
5318 ; AVX512BW-SLOW-NEXT: vpshufb %xmm5, %xmm2, %xmm2
5319 ; AVX512BW-SLOW-NEXT: vpshufb %xmm5, %xmm6, %xmm6
5320 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
5321 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
5322 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm6 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
5323 ; AVX512BW-SLOW-NEXT: vpshufb %xmm6, %xmm7, %xmm7
5324 ; AVX512BW-SLOW-NEXT: vpshufb %xmm6, %xmm9, %xmm9
5325 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3]
5326 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
5327 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm7[0,1,2,3,4,5,6],ymm2[7]
5328 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm7 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
5329 ; AVX512BW-SLOW-NEXT: vpshufb %xmm7, %xmm13, %xmm9
5330 ; AVX512BW-SLOW-NEXT: vpshufb %xmm7, %xmm12, %xmm7
5331 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm9[0],xmm7[1],xmm9[1],xmm7[2],xmm9[2],xmm7[3],xmm9[3]
5332 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
5333 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm9 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
5334 ; AVX512BW-SLOW-NEXT: vpshufb %xmm9, %xmm11, %xmm10
5335 ; AVX512BW-SLOW-NEXT: vpshufb %xmm9, %xmm19, %xmm9
5336 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
5337 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
5338 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm9[0,1,2,3,4],ymm7[5],ymm9[6,7]
5339 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm7[0,1,2,3,4,5],ymm2[6,7]
5340 ; AVX512BW-SLOW-NEXT: vpshufb %xmm5, %xmm1, %xmm1
5341 ; AVX512BW-SLOW-NEXT: vpshufb %xmm5, %xmm3, %xmm3
5342 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
5343 ; AVX512BW-SLOW-NEXT: vpshufb %xmm6, %xmm4, %xmm3
5344 ; AVX512BW-SLOW-NEXT: vpshufb %xmm6, %xmm8, %xmm4
5345 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
5346 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
5347 ; AVX512BW-SLOW-NEXT: vpsrlq $56, %zmm16, %zmm3
5348 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm3, %xmm3
5349 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1],xmm1[2,3]
5350 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
5351 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm20, (%rsi)
5352 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm21, (%rdx)
5353 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm22, (%rcx)
5354 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm23, (%r8)
5355 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm24, (%r9)
5356 ; AVX512BW-SLOW-NEXT: vmovdqa64 %ymm25, (%r11)
5357 ; AVX512BW-SLOW-NEXT: vmovdqa %ymm0, (%r10)
5358 ; AVX512BW-SLOW-NEXT: vmovdqa %ymm1, (%rax)
5359 ; AVX512BW-SLOW-NEXT: vzeroupper
5360 ; AVX512BW-SLOW-NEXT: retq
5361 ;
5362 ; AVX512BW-FAST-LABEL: load_i8_stride8_vf32:
5363 ; AVX512BW-FAST: # %bb.0:
5364 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
5365 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
5366 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r11
5367 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rdi), %zmm0
5368 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm12 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
5369 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [0,2,2,3,0,2,4,6]
5370 ; AVX512BW-FAST-NEXT: vmovdqa 224(%rdi), %ymm9
5371 ; AVX512BW-FAST-NEXT: vpermd %ymm9, %ymm1, %ymm4
5372 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm4, %ymm2
5373 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm30 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
5374 ; AVX512BW-FAST-NEXT: vmovdqa 192(%rdi), %ymm11
5375 ; AVX512BW-FAST-NEXT: vpermd %ymm11, %ymm1, %ymm26
5376 ; AVX512BW-FAST-NEXT: vpshufb %ymm30, %ymm26, %ymm3
5377 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
5378 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm31 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
5379 ; AVX512BW-FAST-NEXT: vmovdqa 160(%rdi), %ymm13
5380 ; AVX512BW-FAST-NEXT: vpermd %ymm13, %ymm1, %ymm27
5381 ; AVX512BW-FAST-NEXT: vpshufb %ymm31, %ymm27, %ymm3
5382 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm14 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
5383 ; AVX512BW-FAST-NEXT: vmovdqa 128(%rdi), %ymm15
5384 ; AVX512BW-FAST-NEXT: vpermd %ymm15, %ymm1, %ymm28
5385 ; AVX512BW-FAST-NEXT: vpshufb %ymm14, %ymm28, %ymm1
5386 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm3[5],ymm1[6,7]
5387 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm1[0,1,2,3,4,5],ymm2[6,7]
5388 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm3 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
5389 ; AVX512BW-FAST-NEXT: vmovdqa 112(%rdi), %xmm1
5390 ; AVX512BW-FAST-NEXT: vpshufb %xmm3, %xmm1, %xmm5
5391 ; AVX512BW-FAST-NEXT: vmovdqa 96(%rdi), %xmm2
5392 ; AVX512BW-FAST-NEXT: vpshufb %xmm3, %xmm2, %xmm3
5393 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
5394 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm16 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
5395 ; AVX512BW-FAST-NEXT: vmovdqa 80(%rdi), %xmm3
5396 ; AVX512BW-FAST-NEXT: vpshufb %xmm16, %xmm3, %xmm17
5397 ; AVX512BW-FAST-NEXT: vmovdqa 64(%rdi), %xmm5
5398 ; AVX512BW-FAST-NEXT: vpshufb %xmm16, %xmm5, %xmm16
5399 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm16[0],xmm17[0],xmm16[1],xmm17[1],xmm16[2],xmm17[2],xmm16[3],xmm17[3]
5400 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1,2],xmm7[3]
5401 ; AVX512BW-FAST-NEXT: vpmovqb %zmm0, %xmm8
5402 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1],xmm7[2,3]
5403 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
5404 ; AVX512BW-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5405 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm16 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
5406 ; AVX512BW-FAST-NEXT: vpshufb %ymm16, %ymm4, %ymm6
5407 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm17 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
5408 ; AVX512BW-FAST-NEXT: vpshufb %ymm17, %ymm26, %ymm7
5409 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5,6],ymm6[7]
5410 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm18 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
5411 ; AVX512BW-FAST-NEXT: vpshufb %ymm18, %ymm27, %ymm7
5412 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm19 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
5413 ; AVX512BW-FAST-NEXT: vpshufb %ymm19, %ymm28, %ymm8
5414 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0,1,2,3,4],ymm7[5],ymm8[6,7]
5415 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4,5],ymm6[6,7]
5416 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm7 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
5417 ; AVX512BW-FAST-NEXT: vpshufb %xmm7, %xmm1, %xmm8
5418 ; AVX512BW-FAST-NEXT: vpshufb %xmm7, %xmm2, %xmm7
5419 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
5420 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm8 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
5421 ; AVX512BW-FAST-NEXT: vpshufb %xmm8, %xmm3, %xmm20
5422 ; AVX512BW-FAST-NEXT: vpshufb %xmm8, %xmm5, %xmm8
5423 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm20[0],xmm8[1],xmm20[1],xmm8[2],xmm20[2],xmm8[3],xmm20[3]
5424 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1,2],xmm7[3]
5425 ; AVX512BW-FAST-NEXT: vpsrlq $8, %zmm0, %zmm8
5426 ; AVX512BW-FAST-NEXT: vpmovqb %zmm8, %xmm8
5427 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0,1],xmm7[2,3]
5428 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5,6,7]
5429 ; AVX512BW-FAST-NEXT: vmovdqa64 %ymm6, %ymm29
5430 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm20 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
5431 ; AVX512BW-FAST-NEXT: vpshufb %ymm20, %ymm4, %ymm7
5432 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm21 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
5433 ; AVX512BW-FAST-NEXT: vpshufb %ymm21, %ymm26, %ymm8
5434 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0,1,2,3,4,5,6],ymm7[7]
5435 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm22 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
5436 ; AVX512BW-FAST-NEXT: vpshufb %ymm22, %ymm27, %ymm8
5437 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm23 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
5438 ; AVX512BW-FAST-NEXT: vpshufb %ymm23, %ymm28, %ymm10
5439 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm10[0,1,2,3,4],ymm8[5],ymm10[6,7]
5440 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0,1,2,3,4,5],ymm7[6,7]
5441 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm8 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
5442 ; AVX512BW-FAST-NEXT: vpshufb %xmm8, %xmm1, %xmm10
5443 ; AVX512BW-FAST-NEXT: vpshufb %xmm8, %xmm2, %xmm8
5444 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm10[0],xmm8[1],xmm10[1],xmm8[2],xmm10[2],xmm8[3],xmm10[3]
5445 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm10 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
5446 ; AVX512BW-FAST-NEXT: vpshufb %xmm10, %xmm3, %xmm24
5447 ; AVX512BW-FAST-NEXT: vpshufb %xmm10, %xmm5, %xmm10
5448 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm24[0],xmm10[1],xmm24[1],xmm10[2],xmm24[2],xmm10[3],xmm24[3]
5449 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm10[0,1,2],xmm8[3]
5450 ; AVX512BW-FAST-NEXT: vpsrlq $16, %zmm0, %zmm10
5451 ; AVX512BW-FAST-NEXT: vpmovqb %zmm10, %xmm10
5452 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm10[0,1],xmm8[2,3]
5453 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0,1,2,3],ymm7[4,5,6,7]
5454 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm24 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
5455 ; AVX512BW-FAST-NEXT: vpshufb %ymm24, %ymm4, %ymm4
5456 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm25 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
5457 ; AVX512BW-FAST-NEXT: vpshufb %ymm25, %ymm26, %ymm8
5458 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3,4,5,6],ymm4[7]
5459 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm26 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
5460 ; AVX512BW-FAST-NEXT: vpshufb %ymm26, %ymm27, %ymm8
5461 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm27 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
5462 ; AVX512BW-FAST-NEXT: vpshufb %ymm27, %ymm28, %ymm10
5463 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm10[0,1,2,3,4],ymm8[5],ymm10[6,7]
5464 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3,4,5],ymm4[6,7]
5465 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm8 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
5466 ; AVX512BW-FAST-NEXT: vpshufb %xmm8, %xmm1, %xmm10
5467 ; AVX512BW-FAST-NEXT: vpshufb %xmm8, %xmm2, %xmm8
5468 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm10[0],xmm8[1],xmm10[1],xmm8[2],xmm10[2],xmm8[3],xmm10[3]
5469 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm10 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
5470 ; AVX512BW-FAST-NEXT: vpshufb %xmm10, %xmm3, %xmm28
5471 ; AVX512BW-FAST-NEXT: vpshufb %xmm10, %xmm5, %xmm10
5472 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm28[0],xmm10[1],xmm28[1],xmm10[2],xmm28[2],xmm10[3],xmm28[3]
5473 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm10[0,1,2],xmm8[3]
5474 ; AVX512BW-FAST-NEXT: vpsrlq $24, %zmm0, %zmm10
5475 ; AVX512BW-FAST-NEXT: vpmovqb %zmm10, %xmm10
5476 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm10[0,1],xmm8[2,3]
5477 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3],ymm4[4,5,6,7]
5478 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [1,3,2,3,1,3,5,7]
5479 ; AVX512BW-FAST-NEXT: vpermd %ymm9, %ymm8, %ymm9
5480 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm9, %ymm10
5481 ; AVX512BW-FAST-NEXT: vpermd %ymm11, %ymm8, %ymm11
5482 ; AVX512BW-FAST-NEXT: vpshufb %ymm30, %ymm11, %ymm12
5483 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5,6],ymm10[7]
5484 ; AVX512BW-FAST-NEXT: vpermd %ymm13, %ymm8, %ymm10
5485 ; AVX512BW-FAST-NEXT: vpshufb %ymm31, %ymm10, %ymm13
5486 ; AVX512BW-FAST-NEXT: vpermd %ymm15, %ymm8, %ymm15
5487 ; AVX512BW-FAST-NEXT: vpshufb %ymm14, %ymm15, %ymm8
5488 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4],ymm13[5],ymm8[6,7]
5489 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4,5],ymm12[6,7]
5490 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm12 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
5491 ; AVX512BW-FAST-NEXT: vpshufb %xmm12, %xmm1, %xmm13
5492 ; AVX512BW-FAST-NEXT: vpshufb %xmm12, %xmm2, %xmm12
5493 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3]
5494 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm13 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
5495 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm3, %xmm14
5496 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm5, %xmm13
5497 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
5498 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm13[0,1,2],xmm12[3]
5499 ; AVX512BW-FAST-NEXT: vpsrlq $32, %zmm0, %zmm13
5500 ; AVX512BW-FAST-NEXT: vpmovqb %zmm13, %xmm13
5501 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm13[0,1],xmm12[2,3]
5502 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm12[0,1,2,3],ymm8[4,5,6,7]
5503 ; AVX512BW-FAST-NEXT: vpshufb %ymm16, %ymm9, %ymm12
5504 ; AVX512BW-FAST-NEXT: vpshufb %ymm17, %ymm11, %ymm13
5505 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm13[0,1,2,3,4,5,6],ymm12[7]
5506 ; AVX512BW-FAST-NEXT: vpshufb %ymm18, %ymm10, %ymm13
5507 ; AVX512BW-FAST-NEXT: vpshufb %ymm19, %ymm15, %ymm14
5508 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm13 = ymm14[0,1,2,3,4],ymm13[5],ymm14[6,7]
5509 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm13[0,1,2,3,4,5],ymm12[6,7]
5510 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm13 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
5511 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm1, %xmm14
5512 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm2, %xmm13
5513 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
5514 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm14 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
5515 ; AVX512BW-FAST-NEXT: vpshufb %xmm14, %xmm3, %xmm16
5516 ; AVX512BW-FAST-NEXT: vpshufb %xmm14, %xmm5, %xmm14
5517 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm16[0],xmm14[1],xmm16[1],xmm14[2],xmm16[2],xmm14[3],xmm16[3]
5518 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm13 = xmm14[0,1,2],xmm13[3]
5519 ; AVX512BW-FAST-NEXT: vpsrlq $40, %zmm0, %zmm14
5520 ; AVX512BW-FAST-NEXT: vpmovqb %zmm14, %xmm14
5521 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm13 = xmm14[0,1],xmm13[2,3]
5522 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm13[0,1,2,3],ymm12[4,5,6,7]
5523 ; AVX512BW-FAST-NEXT: vpshufb %ymm20, %ymm9, %ymm13
5524 ; AVX512BW-FAST-NEXT: vpshufb %ymm21, %ymm11, %ymm14
5525 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm13 = ymm14[0,1,2,3,4,5,6],ymm13[7]
5526 ; AVX512BW-FAST-NEXT: vpshufb %ymm22, %ymm10, %ymm14
5527 ; AVX512BW-FAST-NEXT: vpshufb %ymm23, %ymm15, %ymm6
5528 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4],ymm14[5],ymm6[6,7]
5529 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],ymm13[6,7]
5530 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm13 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
5531 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm1, %xmm14
5532 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm2, %xmm13
5533 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
5534 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm14 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
5535 ; AVX512BW-FAST-NEXT: vpshufb %xmm14, %xmm3, %xmm16
5536 ; AVX512BW-FAST-NEXT: vpshufb %xmm14, %xmm5, %xmm14
5537 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm16[0],xmm14[1],xmm16[1],xmm14[2],xmm16[2],xmm14[3],xmm16[3]
5538 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm13 = xmm14[0,1,2],xmm13[3]
5539 ; AVX512BW-FAST-NEXT: vpsrlq $48, %zmm0, %zmm14
5540 ; AVX512BW-FAST-NEXT: vpmovqb %zmm14, %xmm14
5541 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm13 = xmm14[0,1],xmm13[2,3]
5542 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm13[0,1,2,3],ymm6[4,5,6,7]
5543 ; AVX512BW-FAST-NEXT: vpshufb %ymm24, %ymm9, %ymm9
5544 ; AVX512BW-FAST-NEXT: vpshufb %ymm25, %ymm11, %ymm11
5545 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm11[0,1,2,3,4,5,6],ymm9[7]
5546 ; AVX512BW-FAST-NEXT: vpshufb %ymm26, %ymm10, %ymm10
5547 ; AVX512BW-FAST-NEXT: vpshufb %ymm27, %ymm15, %ymm11
5548 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0,1,2,3,4],ymm10[5],ymm11[6,7]
5549 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0,1,2,3,4,5],ymm9[6,7]
5550 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm10 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
5551 ; AVX512BW-FAST-NEXT: vpshufb %xmm10, %xmm1, %xmm1
5552 ; AVX512BW-FAST-NEXT: vpshufb %xmm10, %xmm2, %xmm2
5553 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
5554 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm2 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
5555 ; AVX512BW-FAST-NEXT: vpshufb %xmm2, %xmm3, %xmm3
5556 ; AVX512BW-FAST-NEXT: vpshufb %xmm2, %xmm5, %xmm2
5557 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
5558 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3]
5559 ; AVX512BW-FAST-NEXT: vpsrlq $56, %zmm0, %zmm0
5560 ; AVX512BW-FAST-NEXT: vpmovqb %zmm0, %xmm0
5561 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
5562 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm9[4,5,6,7]
5563 ; AVX512BW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
5564 ; AVX512BW-FAST-NEXT: vmovaps %ymm1, (%rsi)
5565 ; AVX512BW-FAST-NEXT: vmovdqa64 %ymm29, (%rdx)
5566 ; AVX512BW-FAST-NEXT: vmovdqa %ymm7, (%rcx)
5567 ; AVX512BW-FAST-NEXT: vmovdqa %ymm4, (%r8)
5568 ; AVX512BW-FAST-NEXT: vmovdqa %ymm8, (%r9)
5569 ; AVX512BW-FAST-NEXT: vmovdqa %ymm12, (%r11)
5570 ; AVX512BW-FAST-NEXT: vmovdqa %ymm6, (%r10)
5571 ; AVX512BW-FAST-NEXT: vmovdqa %ymm0, (%rax)
5572 ; AVX512BW-FAST-NEXT: vzeroupper
5573 ; AVX512BW-FAST-NEXT: retq
5574 %wide.vec = load <256 x i8>, ptr %in.vec, align 64
5575 %strided.vec0 = shufflevector <256 x i8> %wide.vec, <256 x i8> poison, <32 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 56, i32 64, i32 72, i32 80, i32 88, i32 96, i32 104, i32 112, i32 120, i32 128, i32 136, i32 144, i32 152, i32 160, i32 168, i32 176, i32 184, i32 192, i32 200, i32 208, i32 216, i32 224, i32 232, i32 240, i32 248>
5576 %strided.vec1 = shufflevector <256 x i8> %wide.vec, <256 x i8> poison, <32 x i32> <i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 57, i32 65, i32 73, i32 81, i32 89, i32 97, i32 105, i32 113, i32 121, i32 129, i32 137, i32 145, i32 153, i32 161, i32 169, i32 177, i32 185, i32 193, i32 201, i32 209, i32 217, i32 225, i32 233, i32 241, i32 249>
5577 %strided.vec2 = shufflevector <256 x i8> %wide.vec, <256 x i8> poison, <32 x i32> <i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 50, i32 58, i32 66, i32 74, i32 82, i32 90, i32 98, i32 106, i32 114, i32 122, i32 130, i32 138, i32 146, i32 154, i32 162, i32 170, i32 178, i32 186, i32 194, i32 202, i32 210, i32 218, i32 226, i32 234, i32 242, i32 250>
5578 %strided.vec3 = shufflevector <256 x i8> %wide.vec, <256 x i8> poison, <32 x i32> <i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 51, i32 59, i32 67, i32 75, i32 83, i32 91, i32 99, i32 107, i32 115, i32 123, i32 131, i32 139, i32 147, i32 155, i32 163, i32 171, i32 179, i32 187, i32 195, i32 203, i32 211, i32 219, i32 227, i32 235, i32 243, i32 251>
5579 %strided.vec4 = shufflevector <256 x i8> %wide.vec, <256 x i8> poison, <32 x i32> <i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 52, i32 60, i32 68, i32 76, i32 84, i32 92, i32 100, i32 108, i32 116, i32 124, i32 132, i32 140, i32 148, i32 156, i32 164, i32 172, i32 180, i32 188, i32 196, i32 204, i32 212, i32 220, i32 228, i32 236, i32 244, i32 252>
5580 %strided.vec5 = shufflevector <256 x i8> %wide.vec, <256 x i8> poison, <32 x i32> <i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 53, i32 61, i32 69, i32 77, i32 85, i32 93, i32 101, i32 109, i32 117, i32 125, i32 133, i32 141, i32 149, i32 157, i32 165, i32 173, i32 181, i32 189, i32 197, i32 205, i32 213, i32 221, i32 229, i32 237, i32 245, i32 253>
5581 %strided.vec6 = shufflevector <256 x i8> %wide.vec, <256 x i8> poison, <32 x i32> <i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 54, i32 62, i32 70, i32 78, i32 86, i32 94, i32 102, i32 110, i32 118, i32 126, i32 134, i32 142, i32 150, i32 158, i32 166, i32 174, i32 182, i32 190, i32 198, i32 206, i32 214, i32 222, i32 230, i32 238, i32 246, i32 254>
5582 %strided.vec7 = shufflevector <256 x i8> %wide.vec, <256 x i8> poison, <32 x i32> <i32 7, i32 15, i32 23, i32 31, i32 39, i32 47, i32 55, i32 63, i32 71, i32 79, i32 87, i32 95, i32 103, i32 111, i32 119, i32 127, i32 135, i32 143, i32 151, i32 159, i32 167, i32 175, i32 183, i32 191, i32 199, i32 207, i32 215, i32 223, i32 231, i32 239, i32 247, i32 255>
5583 store <32 x i8> %strided.vec0, ptr %out.vec0, align 64
5584 store <32 x i8> %strided.vec1, ptr %out.vec1, align 64
5585 store <32 x i8> %strided.vec2, ptr %out.vec2, align 64
5586 store <32 x i8> %strided.vec3, ptr %out.vec3, align 64
5587 store <32 x i8> %strided.vec4, ptr %out.vec4, align 64
5588 store <32 x i8> %strided.vec5, ptr %out.vec5, align 64
5589 store <32 x i8> %strided.vec6, ptr %out.vec6, align 64
5590 store <32 x i8> %strided.vec7, ptr %out.vec7, align 64
5591 ret void
5592 }
5594 define void @load_i8_stride8_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4, ptr %out.vec5, ptr %out.vec6, ptr %out.vec7) nounwind {
5595 ; SSE-LABEL: load_i8_stride8_vf64:
5596 ; SSE: # %bb.0:
5597 ; SSE-NEXT: subq $2040, %rsp # imm = 0x7F8
5598 ; SSE-NEXT: movdqa 64(%rdi), %xmm6
5599 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5600 ; SSE-NEXT: movdqa 80(%rdi), %xmm8
5601 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5602 ; SSE-NEXT: movdqa 96(%rdi), %xmm11
5603 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5604 ; SSE-NEXT: movdqa 128(%rdi), %xmm5
5605 ; SSE-NEXT: movdqa 144(%rdi), %xmm10
5606 ; SSE-NEXT: movdqa 160(%rdi), %xmm7
5607 ; SSE-NEXT: movdqa 176(%rdi), %xmm13
5608 ; SSE-NEXT: movdqa 192(%rdi), %xmm3
5609 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5610 ; SSE-NEXT: movdqa 208(%rdi), %xmm2
5611 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5612 ; SSE-NEXT: movdqa 224(%rdi), %xmm9
5613 ; SSE-NEXT: movdqa 240(%rdi), %xmm12
5614 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,0,0,0,255,0,0,0]
5615 ; SSE-NEXT: movdqa %xmm12, %xmm0
5616 ; SSE-NEXT: pand %xmm4, %xmm0
5617 ; SSE-NEXT: movdqa %xmm9, %xmm1
5618 ; SSE-NEXT: pand %xmm4, %xmm1
5619 ; SSE-NEXT: packuswb %xmm0, %xmm1
5620 ; SSE-NEXT: packuswb %xmm1, %xmm0
5621 ; SSE-NEXT: movdqa %xmm2, %xmm1
5622 ; SSE-NEXT: pand %xmm4, %xmm1
5623 ; SSE-NEXT: movdqa %xmm3, %xmm2
5624 ; SSE-NEXT: pand %xmm4, %xmm2
5625 ; SSE-NEXT: packuswb %xmm1, %xmm2
5626 ; SSE-NEXT: packuswb %xmm2, %xmm2
5627 ; SSE-NEXT: packuswb %xmm0, %xmm2
5628 ; SSE-NEXT: movdqa %xmm13, %xmm0
5629 ; SSE-NEXT: pand %xmm4, %xmm0
5630 ; SSE-NEXT: movdqa %xmm7, %xmm1
5631 ; SSE-NEXT: pand %xmm4, %xmm1
5632 ; SSE-NEXT: packuswb %xmm0, %xmm1
5633 ; SSE-NEXT: movdqa %xmm10, %xmm0
5634 ; SSE-NEXT: pand %xmm4, %xmm0
5635 ; SSE-NEXT: movdqa %xmm5, %xmm3
5636 ; SSE-NEXT: pand %xmm4, %xmm3
5637 ; SSE-NEXT: packuswb %xmm0, %xmm3
5638 ; SSE-NEXT: movdqa 112(%rdi), %xmm14
5639 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5640 ; SSE-NEXT: packuswb %xmm1, %xmm0
5641 ; SSE-NEXT: packuswb %xmm3, %xmm3
5642 ; SSE-NEXT: packuswb %xmm0, %xmm3
5643 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,3],xmm2[0,3]
5644 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5645 ; SSE-NEXT: movdqa %xmm14, %xmm0
5646 ; SSE-NEXT: pand %xmm4, %xmm0
5647 ; SSE-NEXT: movdqa %xmm11, %xmm1
5648 ; SSE-NEXT: pand %xmm4, %xmm1
5649 ; SSE-NEXT: packuswb %xmm0, %xmm1
5650 ; SSE-NEXT: movdqa %xmm8, %xmm0
5651 ; SSE-NEXT: pand %xmm4, %xmm0
5652 ; SSE-NEXT: movdqa %xmm6, %xmm2
5653 ; SSE-NEXT: pand %xmm4, %xmm2
5654 ; SSE-NEXT: packuswb %xmm0, %xmm2
5655 ; SSE-NEXT: packuswb %xmm1, %xmm0
5656 ; SSE-NEXT: packuswb %xmm2, %xmm2
5657 ; SSE-NEXT: packuswb %xmm0, %xmm2
5658 ; SSE-NEXT: movdqa 48(%rdi), %xmm14
5659 ; SSE-NEXT: movdqa %xmm14, %xmm0
5660 ; SSE-NEXT: pand %xmm4, %xmm0
5661 ; SSE-NEXT: movdqa 32(%rdi), %xmm15
5662 ; SSE-NEXT: movdqa %xmm15, %xmm1
5663 ; SSE-NEXT: pand %xmm4, %xmm1
5664 ; SSE-NEXT: packuswb %xmm0, %xmm1
5665 ; SSE-NEXT: movdqa 16(%rdi), %xmm8
5666 ; SSE-NEXT: movdqa %xmm8, %xmm0
5667 ; SSE-NEXT: pand %xmm4, %xmm0
5668 ; SSE-NEXT: movdqa (%rdi), %xmm11
5669 ; SSE-NEXT: movdqa %xmm11, %xmm3
5670 ; SSE-NEXT: pand %xmm4, %xmm3
5671 ; SSE-NEXT: packuswb %xmm0, %xmm3
5672 ; SSE-NEXT: packuswb %xmm1, %xmm0
5673 ; SSE-NEXT: packuswb %xmm3, %xmm3
5674 ; SSE-NEXT: packuswb %xmm0, %xmm3
5675 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,3],xmm2[0,3]
5676 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5677 ; SSE-NEXT: movdqa 496(%rdi), %xmm0
5678 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5679 ; SSE-NEXT: pand %xmm4, %xmm0
5680 ; SSE-NEXT: movdqa 480(%rdi), %xmm1
5681 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5682 ; SSE-NEXT: pand %xmm4, %xmm1
5683 ; SSE-NEXT: packuswb %xmm0, %xmm1
5684 ; SSE-NEXT: movdqa 464(%rdi), %xmm0
5685 ; SSE-NEXT: movdqa %xmm0, (%rsp) # 16-byte Spill
5686 ; SSE-NEXT: pand %xmm4, %xmm0
5687 ; SSE-NEXT: movdqa 448(%rdi), %xmm2
5688 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5689 ; SSE-NEXT: pand %xmm4, %xmm2
5690 ; SSE-NEXT: packuswb %xmm0, %xmm2
5691 ; SSE-NEXT: packuswb %xmm1, %xmm0
5692 ; SSE-NEXT: packuswb %xmm2, %xmm2
5693 ; SSE-NEXT: packuswb %xmm0, %xmm2
5694 ; SSE-NEXT: movdqa 432(%rdi), %xmm0
5695 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5696 ; SSE-NEXT: pand %xmm4, %xmm0
5697 ; SSE-NEXT: movdqa 416(%rdi), %xmm1
5698 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5699 ; SSE-NEXT: pand %xmm4, %xmm1
5700 ; SSE-NEXT: packuswb %xmm0, %xmm1
5701 ; SSE-NEXT: movdqa 400(%rdi), %xmm0
5702 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5703 ; SSE-NEXT: pand %xmm4, %xmm0
5704 ; SSE-NEXT: movdqa 384(%rdi), %xmm3
5705 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5706 ; SSE-NEXT: pand %xmm4, %xmm3
5707 ; SSE-NEXT: packuswb %xmm0, %xmm3
5708 ; SSE-NEXT: packuswb %xmm1, %xmm0
5709 ; SSE-NEXT: packuswb %xmm3, %xmm3
5710 ; SSE-NEXT: packuswb %xmm0, %xmm3
5711 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,3],xmm2[0,3]
5712 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5713 ; SSE-NEXT: movdqa 368(%rdi), %xmm0
5714 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5715 ; SSE-NEXT: pand %xmm4, %xmm0
5716 ; SSE-NEXT: movdqa 352(%rdi), %xmm1
5717 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5718 ; SSE-NEXT: pand %xmm4, %xmm1
5719 ; SSE-NEXT: packuswb %xmm0, %xmm1
5720 ; SSE-NEXT: movdqa 336(%rdi), %xmm0
5721 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5722 ; SSE-NEXT: pand %xmm4, %xmm0
5723 ; SSE-NEXT: movdqa 320(%rdi), %xmm2
5724 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5725 ; SSE-NEXT: pand %xmm4, %xmm2
5726 ; SSE-NEXT: packuswb %xmm0, %xmm2
5727 ; SSE-NEXT: packuswb %xmm1, %xmm0
5728 ; SSE-NEXT: packuswb %xmm2, %xmm2
5729 ; SSE-NEXT: packuswb %xmm0, %xmm2
5730 ; SSE-NEXT: movdqa 304(%rdi), %xmm0
5731 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5732 ; SSE-NEXT: pand %xmm4, %xmm0
5733 ; SSE-NEXT: movdqa 288(%rdi), %xmm1
5734 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5735 ; SSE-NEXT: pand %xmm4, %xmm1
5736 ; SSE-NEXT: packuswb %xmm0, %xmm1
5737 ; SSE-NEXT: movdqa 272(%rdi), %xmm0
5738 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5739 ; SSE-NEXT: pand %xmm4, %xmm0
5740 ; SSE-NEXT: movdqa 256(%rdi), %xmm3
5741 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5742 ; SSE-NEXT: pand %xmm3, %xmm4
5743 ; SSE-NEXT: packuswb %xmm0, %xmm4
5744 ; SSE-NEXT: packuswb %xmm1, %xmm0
5745 ; SSE-NEXT: packuswb %xmm4, %xmm4
5746 ; SSE-NEXT: packuswb %xmm0, %xmm4
5747 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,3],xmm2[0,3]
5748 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5749 ; SSE-NEXT: pxor %xmm6, %xmm6
5750 ; SSE-NEXT: movdqa %xmm10, %xmm0
5751 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5752 ; SSE-NEXT: movdqa %xmm10, %xmm1
5753 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
5754 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5755 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3],xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
5756 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5757 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
5758 ; SSE-NEXT: packuswb %xmm0, %xmm0
5759 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5760 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [65535,0,65535,65535,65535,65535,65535,65535]
5761 ; SSE-NEXT: movdqa %xmm10, %xmm1
5762 ; SSE-NEXT: pandn %xmm0, %xmm1
5763 ; SSE-NEXT: movdqa %xmm5, %xmm0
5764 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5765 ; SSE-NEXT: movdqa %xmm5, %xmm2
5766 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm6[8],xmm2[9],xmm6[9],xmm2[10],xmm6[10],xmm2[11],xmm6[11],xmm2[12],xmm6[12],xmm2[13],xmm6[13],xmm2[14],xmm6[14],xmm2[15],xmm6[15]
5767 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5768 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3],xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
5769 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5770 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
5771 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5772 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm0[1,1,1,1]
5773 ; SSE-NEXT: packuswb %xmm5, %xmm5
5774 ; SSE-NEXT: pand %xmm10, %xmm5
5775 ; SSE-NEXT: por %xmm1, %xmm5
5776 ; SSE-NEXT: movdqa %xmm13, %xmm2
5777 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5778 ; SSE-NEXT: movdqa %xmm13, %xmm1
5779 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
5780 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5781 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1],xmm2[2],xmm6[2],xmm2[3],xmm6[3],xmm2[4],xmm6[4],xmm2[5],xmm6[5],xmm2[6],xmm6[6],xmm2[7],xmm6[7]
5782 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5783 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
5784 ; SSE-NEXT: packuswb %xmm2, %xmm2
5785 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5786 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,0,2,3]
5787 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [65535,65535,65535,0,65535,65535,65535,65535]
5788 ; SSE-NEXT: movdqa %xmm13, %xmm2
5789 ; SSE-NEXT: pandn %xmm1, %xmm2
5790 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5791 ; SSE-NEXT: movdqa %xmm7, %xmm1
5792 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
5793 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5794 ; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
5795 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5796 ; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm1[0],xmm7[1],xmm1[1],xmm7[2],xmm1[2],xmm7[3],xmm1[3]
5797 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5798 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[0,1,1,3]
5799 ; SSE-NEXT: packuswb %xmm1, %xmm1
5800 ; SSE-NEXT: pand %xmm13, %xmm1
5801 ; SSE-NEXT: por %xmm2, %xmm1
5802 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
5803 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1]
5804 ; SSE-NEXT: movdqa %xmm12, %xmm1
5805 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5806 ; SSE-NEXT: movdqa %xmm12, %xmm2
5807 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm6[8],xmm2[9],xmm6[9],xmm2[10],xmm6[10],xmm2[11],xmm6[11],xmm2[12],xmm6[12],xmm2[13],xmm6[13],xmm2[14],xmm6[14],xmm2[15],xmm6[15]
5808 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5809 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3],xmm1[4],xmm6[4],xmm1[5],xmm6[5],xmm1[6],xmm6[6],xmm1[7],xmm6[7]
5810 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5811 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
5812 ; SSE-NEXT: packuswb %xmm1, %xmm1
5813 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5814 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,2]
5815 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,65535,65535,65535,65535,65535,65535,0]
5816 ; SSE-NEXT: movdqa %xmm12, %xmm2
5817 ; SSE-NEXT: pandn %xmm1, %xmm2
5818 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5819 ; SSE-NEXT: movdqa %xmm9, %xmm1
5820 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
5821 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5822 ; SSE-NEXT: punpcklbw {{.*#+}} xmm9 = xmm9[0],xmm6[0],xmm9[1],xmm6[1],xmm9[2],xmm6[2],xmm9[3],xmm6[3],xmm9[4],xmm6[4],xmm9[5],xmm6[5],xmm9[6],xmm6[6],xmm9[7],xmm6[7]
5823 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5824 ; SSE-NEXT: punpcklwd {{.*#+}} xmm9 = xmm9[0],xmm1[0],xmm9[1],xmm1[1],xmm9[2],xmm1[2],xmm9[3],xmm1[3]
5825 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5826 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm9[0,1,1,3]
5827 ; SSE-NEXT: packuswb %xmm1, %xmm1
5828 ; SSE-NEXT: pand %xmm12, %xmm1
5829 ; SSE-NEXT: por %xmm2, %xmm1
5830 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5831 ; SSE-NEXT: movdqa %xmm2, %xmm3
5832 ; SSE-NEXT: punpckhbw {{.*#+}} xmm3 = xmm3[8],xmm6[8],xmm3[9],xmm6[9],xmm3[10],xmm6[10],xmm3[11],xmm6[11],xmm3[12],xmm6[12],xmm3[13],xmm6[13],xmm3[14],xmm6[14],xmm3[15],xmm6[15]
5833 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5834 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1],xmm2[2],xmm6[2],xmm2[3],xmm6[3],xmm2[4],xmm6[4],xmm2[5],xmm6[5],xmm2[6],xmm6[6],xmm2[7],xmm6[7]
5835 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5836 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
5837 ; SSE-NEXT: packuswb %xmm2, %xmm3
5838 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5839 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [65535,65535,65535,65535,65535,0,65535,65535]
5840 ; SSE-NEXT: movdqa %xmm9, %xmm2
5841 ; SSE-NEXT: pandn %xmm3, %xmm2
5842 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5843 ; SSE-NEXT: movdqa %xmm3, %xmm4
5844 ; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm6[8],xmm4[9],xmm6[9],xmm4[10],xmm6[10],xmm4[11],xmm6[11],xmm4[12],xmm6[12],xmm4[13],xmm6[13],xmm4[14],xmm6[14],xmm4[15],xmm6[15]
5845 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5846 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3],xmm3[4],xmm6[4],xmm3[5],xmm6[5],xmm3[6],xmm6[6],xmm3[7],xmm6[7]
5847 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5848 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
5849 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5850 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,1,1]
5851 ; SSE-NEXT: packuswb %xmm3, %xmm3
5852 ; SSE-NEXT: pand %xmm9, %xmm3
5853 ; SSE-NEXT: por %xmm2, %xmm3
5854 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[2,2,2,2]
5855 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
5856 ; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm5[0],xmm2[1]
5857 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5858 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5859 ; SSE-NEXT: movdqa %xmm8, %xmm1
5860 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
5861 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5862 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3],xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
5863 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5864 ; SSE-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm1[0],xmm8[1],xmm1[1],xmm8[2],xmm1[2],xmm8[3],xmm1[3]
; SSE-NEXT: packuswb %xmm8, %xmm8
; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm8, %xmm1
; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm11, %xmm2
; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm6[8],xmm2[9],xmm6[9],xmm2[10],xmm6[10],xmm2[11],xmm6[11],xmm2[12],xmm6[12],xmm2[13],xmm6[13],xmm2[14],xmm6[14],xmm2[15],xmm6[15]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm6[0],xmm11[1],xmm6[1],xmm11[2],xmm6[2],xmm11[3],xmm6[3],xmm11[4],xmm6[4],xmm11[5],xmm6[5],xmm11[6],xmm6[6],xmm11[7],xmm6[7]
; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm2[0],xmm11[1],xmm2[1],xmm11[2],xmm2[2],xmm11[3],xmm2[3]
; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[1,1,1,1]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm14, %xmm1
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm14 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3],xmm14[4],xmm6[4],xmm14[5],xmm6[5],xmm14[6],xmm6[6],xmm14[7],xmm6[7]
; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm1[0],xmm14[1],xmm1[1],xmm14[2],xmm1[2],xmm14[3],xmm1[3]
; SSE-NEXT: packuswb %xmm14, %xmm14
; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm14[0,0,2,3]
; SSE-NEXT: movdqa %xmm13, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm15, %xmm1
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm15 = xmm15[0],xmm6[0],xmm15[1],xmm6[1],xmm15[2],xmm6[2],xmm15[3],xmm6[3],xmm15[4],xmm6[4],xmm15[5],xmm6[5],xmm15[6],xmm6[6],xmm15[7],xmm6[7]
; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm15 = xmm15[0],xmm1[0],xmm15[1],xmm1[1],xmm15[2],xmm1[2],xmm15[3],xmm1[3]
; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm15[0,1,1,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm13, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm6[8],xmm2[9],xmm6[9],xmm2[10],xmm6[10],xmm2[11],xmm6[11],xmm2[12],xmm6[12],xmm2[13],xmm6[13],xmm2[14],xmm6[14],xmm2[15],xmm6[15]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3],xmm1[4],xmm6[4],xmm1[5],xmm6[5],xmm1[6],xmm6[6],xmm1[7],xmm6[7]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,2]
; SSE-NEXT: movdqa %xmm12, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
; SSE-NEXT: movdqa %xmm3, %xmm1
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3],xmm3[4],xmm6[4],xmm3[5],xmm6[5],xmm3[6],xmm6[6],xmm3[7],xmm6[7]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,1,1,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm12, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
; SSE-NEXT: movdqa %xmm2, %xmm3
; SSE-NEXT: punpckhbw {{.*#+}} xmm3 = xmm3[8],xmm6[8],xmm3[9],xmm6[9],xmm3[10],xmm6[10],xmm3[11],xmm6[11],xmm3[12],xmm6[12],xmm3[13],xmm6[13],xmm3[14],xmm6[14],xmm3[15],xmm6[15]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1],xmm2[2],xmm6[2],xmm2[3],xmm6[3],xmm2[4],xmm6[4],xmm2[5],xmm6[5],xmm2[6],xmm6[6],xmm2[7],xmm6[7]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
; SSE-NEXT: packuswb %xmm2, %xmm3
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm9, %xmm2
; SSE-NEXT: pandn %xmm3, %xmm2
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
; SSE-NEXT: movdqa %xmm3, %xmm4
; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm6[8],xmm4[9],xmm6[9],xmm4[10],xmm6[10],xmm4[11],xmm6[11],xmm4[12],xmm6[12],xmm4[13],xmm6[13],xmm4[14],xmm6[14],xmm4[15],xmm6[15]
; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3],xmm3[4],xmm6[4],xmm3[5],xmm6[5],xmm3[6],xmm6[6],xmm3[7],xmm6[7]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,1,1]
; SSE-NEXT: packuswb %xmm3, %xmm3
; SSE-NEXT: pand %xmm9, %xmm3
; SSE-NEXT: por %xmm2, %xmm3
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; SSE-NEXT: movdqa %xmm14, %xmm0
; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm6[8],xmm0[9],xmm6[9],xmm0[10],xmm6[10],xmm0[11],xmm6[11],xmm0[12],xmm6[12],xmm0[13],xmm6[13],xmm0[14],xmm6[14],xmm0[15],xmm6[15]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm14 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3],xmm14[4],xmm6[4],xmm14[5],xmm6[5],xmm14[6],xmm6[6],xmm14[7],xmm6[7]
; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3]
; SSE-NEXT: packuswb %xmm14, %xmm14
; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm14, %xmm1
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movdqa %xmm0, %xmm2
; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm6[8],xmm2[9],xmm6[9],xmm2[10],xmm6[10],xmm2[11],xmm6[11],xmm2[12],xmm6[12],xmm2[13],xmm6[13],xmm2[14],xmm6[14],xmm2[15],xmm6[15]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3],xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; SSE-NEXT: movdqa %xmm14, %xmm1
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm14 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3],xmm14[4],xmm6[4],xmm14[5],xmm6[5],xmm14[6],xmm6[6],xmm14[7],xmm6[7]
; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm14 = xmm14[0],xmm1[0],xmm14[1],xmm1[1],xmm14[2],xmm1[2],xmm14[3],xmm1[3]
; SSE-NEXT: packuswb %xmm14, %xmm14
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm14[0,0,2,3]
; SSE-NEXT: movdqa %xmm13, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
; SSE-NEXT: movdqa %xmm3, %xmm1
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3],xmm3[4],xmm6[4],xmm3[5],xmm6[5],xmm3[6],xmm6[6],xmm3[7],xmm6[7]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,1,1,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm13, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm6[8],xmm2[9],xmm6[9],xmm2[10],xmm6[10],xmm2[11],xmm6[11],xmm2[12],xmm6[12],xmm2[13],xmm6[13],xmm2[14],xmm6[14],xmm2[15],xmm6[15]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3],xmm1[4],xmm6[4],xmm1[5],xmm6[5],xmm1[6],xmm6[6],xmm1[7],xmm6[7]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,2]
; SSE-NEXT: movdqa %xmm12, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
; SSE-NEXT: movdqa %xmm3, %xmm1
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3],xmm3[4],xmm6[4],xmm3[5],xmm6[5],xmm3[6],xmm6[6],xmm3[7],xmm6[7]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,1,1,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm12, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: movdqa (%rsp), %xmm2 # 16-byte Reload
; SSE-NEXT: movdqa %xmm2, %xmm3
; SSE-NEXT: punpckhbw {{.*#+}} xmm3 = xmm3[8],xmm6[8],xmm3[9],xmm6[9],xmm3[10],xmm6[10],xmm3[11],xmm6[11],xmm3[12],xmm6[12],xmm3[13],xmm6[13],xmm3[14],xmm6[14],xmm3[15],xmm6[15]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1],xmm2[2],xmm6[2],xmm2[3],xmm6[3],xmm2[4],xmm6[4],xmm2[5],xmm6[5],xmm2[6],xmm6[6],xmm2[7],xmm6[7]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
; SSE-NEXT: packuswb %xmm2, %xmm3
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm9, %xmm2
; SSE-NEXT: pandn %xmm3, %xmm2
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
; SSE-NEXT: movdqa %xmm3, %xmm4
; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm6[8],xmm4[9],xmm6[9],xmm4[10],xmm6[10],xmm4[11],xmm6[11],xmm4[12],xmm6[12],xmm4[13],xmm6[13],xmm4[14],xmm6[14],xmm4[15],xmm6[15]
; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3],xmm3[4],xmm6[4],xmm3[5],xmm6[5],xmm3[6],xmm6[6],xmm3[7],xmm6[7]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,1,1]
; SSE-NEXT: packuswb %xmm3, %xmm3
; SSE-NEXT: pand %xmm9, %xmm3
; SSE-NEXT: por %xmm2, %xmm3
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
; SSE-NEXT: movdqa %xmm8, %xmm0
; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm6[8],xmm0[9],xmm6[9],xmm0[10],xmm6[10],xmm0[11],xmm6[11],xmm0[12],xmm6[12],xmm0[13],xmm6[13],xmm0[14],xmm6[14],xmm0[15],xmm6[15]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3],xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm0[0],xmm8[1],xmm0[1],xmm8[2],xmm0[2],xmm8[3],xmm0[3]
; SSE-NEXT: packuswb %xmm8, %xmm8
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm8, %xmm1
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movdqa %xmm0, %xmm2
; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm6[8],xmm2[9],xmm6[9],xmm2[10],xmm6[10],xmm2[11],xmm6[11],xmm2[12],xmm6[12],xmm2[13],xmm6[13],xmm2[14],xmm6[14],xmm2[15],xmm6[15]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3],xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
; SSE-NEXT: movdqa %xmm4, %xmm1
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xmm6[3],xmm4[4],xmm6[4],xmm4[5],xmm6[5],xmm4[6],xmm6[6],xmm4[7],xmm6[7]
; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
; SSE-NEXT: packuswb %xmm4, %xmm4
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[0,0,2,3]
; SSE-NEXT: movdqa %xmm13, %xmm5
; SSE-NEXT: pandn %xmm1, %xmm5
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
; SSE-NEXT: movdqa %xmm2, %xmm1
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1],xmm2[2],xmm6[2],xmm2[3],xmm6[3],xmm2[4],xmm6[4],xmm2[5],xmm6[5],xmm2[6],xmm6[6],xmm2[7],xmm6[7]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,1,1,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm13, %xmm1
; SSE-NEXT: por %xmm5, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm6[8],xmm2[9],xmm6[9],xmm2[10],xmm6[10],xmm2[11],xmm6[11],xmm2[12],xmm6[12],xmm2[13],xmm6[13],xmm2[14],xmm6[14],xmm2[15],xmm6[15]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3],xmm1[4],xmm6[4],xmm1[5],xmm6[5],xmm1[6],xmm6[6],xmm1[7],xmm6[7]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,2]
; SSE-NEXT: movdqa %xmm12, %xmm5
; SSE-NEXT: pandn %xmm1, %xmm5
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
; SSE-NEXT: movdqa %xmm11, %xmm1
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm6[0],xmm11[1],xmm6[1],xmm11[2],xmm6[2],xmm11[3],xmm6[3],xmm11[4],xmm6[4],xmm11[5],xmm6[5],xmm11[6],xmm6[6],xmm11[7],xmm6[7]
; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm11 = xmm11[0],xmm1[0],xmm11[1],xmm1[1],xmm11[2],xmm1[2],xmm11[3],xmm1[3]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm11[0,1,1,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm12, %xmm1
; SSE-NEXT: por %xmm5, %xmm1
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
; SSE-NEXT: movdqa %xmm5, %xmm2
; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm6[8],xmm2[9],xmm6[9],xmm2[10],xmm6[10],xmm2[11],xmm6[11],xmm2[12],xmm6[12],xmm2[13],xmm6[13],xmm2[14],xmm6[14],xmm2[15],xmm6[15]
; SSE-NEXT: movdqa %xmm2, %xmm15
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3],xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
; SSE-NEXT: movdqa %xmm3, %xmm2
; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm6[8],xmm2[9],xmm6[9],xmm2[10],xmm6[10],xmm2[11],xmm6[11],xmm2[12],xmm6[12],xmm2[13],xmm6[13],xmm2[14],xmm6[14],xmm2[15],xmm6[15]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm3, %xmm7
; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm15[0],xmm5[1],xmm15[1],xmm5[2],xmm15[2],xmm5[3],xmm15[3]
; SSE-NEXT: packuswb %xmm5, %xmm6
; SSE-NEXT: movdqa %xmm9, %xmm5
; SSE-NEXT: pandn %xmm6, %xmm5
; SSE-NEXT: punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm2[0],xmm7[1],xmm2[1],xmm7[2],xmm2[2],xmm7[3],xmm2[3]
; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm7[1,1,1,1]
; SSE-NEXT: packuswb %xmm15, %xmm15
; SSE-NEXT: pand %xmm9, %xmm15
; SSE-NEXT: por %xmm5, %xmm15
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm15[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm1[2],xmm5[3],xmm1[3]
; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm0[0],xmm5[1]
; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,255,255,255,255]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,1,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: movdqa %xmm10, %xmm5
; SSE-NEXT: pandn %xmm0, %xmm5
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[1,3,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm5, %xmm0
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,7,5]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: movdqa %xmm13, %xmm15
; SSE-NEXT: pandn %xmm5, %xmm15
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,7,5,6,7]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm13, %xmm5
; SSE-NEXT: por %xmm15, %xmm5
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,7,5]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: movdqa %xmm12, %xmm15
; SSE-NEXT: pandn %xmm5, %xmm15
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,7,5,6,7]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm12, %xmm5
; SSE-NEXT: por %xmm15, %xmm5
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm1[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm15 = xmm15[0,1,1,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm15, %xmm15
; SSE-NEXT: movdqa %xmm9, %xmm7
; SSE-NEXT: pandn %xmm15, %xmm7
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm1[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm15 = xmm15[1,3,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm15, %xmm15
; SSE-NEXT: pand %xmm9, %xmm15
; SSE-NEXT: por %xmm7, %xmm15
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm15[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm5[2],xmm7[3],xmm5[3]
; SSE-NEXT: movsd {{.*#+}} xmm7 = xmm0[0],xmm7[1]
; SSE-NEXT: movapd %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,1,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: movdqa %xmm10, %xmm5
; SSE-NEXT: pandn %xmm0, %xmm5
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm0[1,3,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm10, %xmm1
; SSE-NEXT: por %xmm5, %xmm1
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm0[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,7,5]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: movdqa %xmm13, %xmm7
; SSE-NEXT: pandn %xmm5, %xmm7
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm0[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,7,5,6,7]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm13, %xmm5
; SSE-NEXT: por %xmm7, %xmm5
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm0[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,7,5]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: movdqa %xmm12, %xmm7
; SSE-NEXT: pandn %xmm5, %xmm7
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm0[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,7,5,6,7]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm12, %xmm5
; SSE-NEXT: por %xmm7, %xmm5
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm7[0,1,1,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm7, %xmm7
; SSE-NEXT: movdqa %xmm9, %xmm15
; SSE-NEXT: pandn %xmm7, %xmm15
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm7[1,3,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm7, %xmm7
; SSE-NEXT: pand %xmm9, %xmm7
; SSE-NEXT: por %xmm15, %xmm7
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm5[2],xmm7[3],xmm5[3]
; SSE-NEXT: movsd {{.*#+}} xmm7 = xmm1[0],xmm7[1]
; SSE-NEXT: movapd %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,1,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: movdqa %xmm10, %xmm5
; SSE-NEXT: pandn %xmm0, %xmm5
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[1,3,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm5, %xmm0
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,7,5]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: movdqa %xmm13, %xmm7
; SSE-NEXT: pandn %xmm5, %xmm7
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,7,5,6,7]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm13, %xmm5
; SSE-NEXT: por %xmm7, %xmm5
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,7,5]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: movdqa %xmm12, %xmm7
; SSE-NEXT: pandn %xmm5, %xmm7
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,7,5,6,7]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm12, %xmm5
; SSE-NEXT: por %xmm7, %xmm5
; SSE-NEXT: movdqa (%rsp), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, (%rsp) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm1[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm7[0,1,1,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm7, %xmm7
; SSE-NEXT: movdqa %xmm9, %xmm15
; SSE-NEXT: pandn %xmm7, %xmm15
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm1[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm7[1,3,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm7, %xmm7
; SSE-NEXT: pand %xmm9, %xmm7
; SSE-NEXT: por %xmm15, %xmm7
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm5[2],xmm1[3],xmm5[3]
; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,1,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: movdqa %xmm10, %xmm5
; SSE-NEXT: pandn %xmm0, %xmm5
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[1,3,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm5, %xmm0
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,7,5]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: movdqa %xmm13, %xmm7
; SSE-NEXT: pandn %xmm5, %xmm7
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,7,5,6,7]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm13, %xmm5
; SSE-NEXT: por %xmm7, %xmm5
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,7,5]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: movdqa %xmm12, %xmm7
; SSE-NEXT: pandn %xmm5, %xmm7
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,1,2,0]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,7,5,6,7]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm12, %xmm5
; SSE-NEXT: por %xmm7, %xmm5
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm1[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm7[0,1,1,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm7, %xmm7
; SSE-NEXT: movdqa %xmm9, %xmm15
; SSE-NEXT: pandn %xmm7, %xmm15
; SSE-NEXT: movdqa %xmm3, %xmm1
; SSE-NEXT: pand %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,3,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm9, %xmm1
; SSE-NEXT: por %xmm15, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm5[2],xmm1[3],xmm5[3]
; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[1,1,2,3]
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm0, %xmm1
; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[3,3,3,3]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: movdqa %xmm13, %xmm1
; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
; SSE-NEXT: # xmm5 = mem[2,2,3,3]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm13, %xmm5
; SSE-NEXT: por %xmm1, %xmm5
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: movdqa %xmm12, %xmm1
; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
; SSE-NEXT: # xmm5 = mem[2,2,3,3]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm12, %xmm5
; SSE-NEXT: por %xmm1, %xmm5
; SSE-NEXT: pshufd $244, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,3,3]
; SSE-NEXT: movdqa %xmm9, %xmm7
; SSE-NEXT: pandn %xmm1, %xmm7
; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[3,3,3,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm9, %xmm1
; SSE-NEXT: por %xmm7, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm5[2],xmm1[3],xmm5[3]
; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[1,1,2,3]
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm0, %xmm1
; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[3,3,3,3]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: movdqa %xmm13, %xmm1
; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
; SSE-NEXT: # xmm5 = mem[2,2,3,3]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm13, %xmm5
; SSE-NEXT: por %xmm1, %xmm5
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: movdqa %xmm12, %xmm1
; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
; SSE-NEXT: # xmm5 = mem[2,2,3,3]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm12, %xmm5
; SSE-NEXT: por %xmm1, %xmm5
; SSE-NEXT: pshufd $244, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,3,3]
; SSE-NEXT: movdqa %xmm9, %xmm7
; SSE-NEXT: pandn %xmm1, %xmm7
; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[3,3,3,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm9, %xmm1
; SSE-NEXT: por %xmm7, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm5[2],xmm1[3],xmm5[3]
; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[1,1,2,3]
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm0, %xmm1
; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[3,3,3,3]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: movdqa %xmm13, %xmm1
; SSE-NEXT: pandn %xmm14, %xmm1
; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
; SSE-NEXT: # xmm5 = mem[2,2,3,3]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm13, %xmm5
; SSE-NEXT: por %xmm1, %xmm5
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: movdqa %xmm12, %xmm1
; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
; SSE-NEXT: # xmm5 = mem[2,2,3,3]
; SSE-NEXT: packuswb %xmm5, %xmm5
; SSE-NEXT: pand %xmm12, %xmm5
; SSE-NEXT: por %xmm1, %xmm5
; SSE-NEXT: pshufd $244, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,3,3]
; SSE-NEXT: movdqa %xmm9, %xmm7
; SSE-NEXT: pandn %xmm1, %xmm7
; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[3,3,3,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm9, %xmm1
; SSE-NEXT: por %xmm7, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm5[2],xmm1[3],xmm5[3]
; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm8[1,1,2,3]
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm0, %xmm1
; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[3,3,3,3]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: movdqa %xmm13, %xmm1
; SSE-NEXT: pandn %xmm4, %xmm1
; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
; SSE-NEXT: # xmm2 = mem[2,2,3,3]
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: pand %xmm13, %xmm2
; SSE-NEXT: por %xmm1, %xmm2
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: movdqa %xmm12, %xmm1
; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm11[2,2,3,3]
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: pand %xmm12, %xmm2
; SSE-NEXT: por %xmm1, %xmm2
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm6[0,1,3,3]
; SSE-NEXT: movdqa %xmm9, %xmm3
; SSE-NEXT: pandn %xmm1, %xmm3
; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[3,3,3,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm9, %xmm1
; SSE-NEXT: por %xmm3, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm2[2],xmm1[3],xmm2[3]
; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
; SSE-NEXT: # xmm7 = mem[3,1,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm7[0,1,2,0,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm0, %xmm1
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[2,0,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm13, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm13, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm12, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm12, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
; SSE-NEXT: # xmm2 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,1,2,0,4,5,6,7]
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: movdqa %xmm9, %xmm3
; SSE-NEXT: pandn %xmm2, %xmm3
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
; SSE-NEXT: # xmm2 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[2,0,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: pand %xmm9, %xmm2
; SSE-NEXT: por %xmm3, %xmm2
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Folded Reload
; SSE-NEXT: # xmm14 = mem[3,1,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm14[0,1,2,0,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm0, %xmm1
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[2,0,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm13, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm13, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm12, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm12, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
; SSE-NEXT: # xmm2 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,1,2,0,4,5,6,7]
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: movdqa %xmm9, %xmm3
; SSE-NEXT: pandn %xmm2, %xmm3
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
; SSE-NEXT: # xmm11 = mem[3,1,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm11[2,0,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: pand %xmm9, %xmm2
; SSE-NEXT: por %xmm3, %xmm2
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,2,0,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm0, %xmm1
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[2,0,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm13, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm13, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm12, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm12, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd $231, (%rsp), %xmm2 # 16-byte Folded Reload
; SSE-NEXT: # xmm2 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,1,2,0,4,5,6,7]
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: movdqa %xmm9, %xmm3
; SSE-NEXT: pandn %xmm2, %xmm3
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
; SSE-NEXT: # xmm2 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm2, (%rsp) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[2,0,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: pand %xmm9, %xmm2
; SSE-NEXT: por %xmm3, %xmm2
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,2,0,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm0, %xmm1
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[2,0,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm13, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm13, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,6]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm12, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = mem[0,1,1,3]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm12, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
; SSE-NEXT: # xmm2 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,1,2,0,4,5,6,7]
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: movdqa %xmm9, %xmm3
; SSE-NEXT: pandn %xmm2, %xmm3
; SSE-NEXT: pshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
; SSE-NEXT: # xmm2 = mem[3,1,2,3]
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[2,0,2,3,4,5,6,7]
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: pand %xmm9, %xmm2
; SSE-NEXT: por %xmm3, %xmm2
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,3]
; SSE-NEXT: movdqa %xmm13, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm13, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,2]
; SSE-NEXT: movdqa %xmm12, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm12, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
; SSE-NEXT: # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: packuswb %xmm2, %xmm3
; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm9, %xmm2
; SSE-NEXT: pandn %xmm3, %xmm2
; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,1,1]
; SSE-NEXT: packuswb %xmm3, %xmm3
; SSE-NEXT: pand %xmm9, %xmm3
; SSE-NEXT: por %xmm2, %xmm3
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[2,2,2,2]
; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: packuswb %xmm2, %xmm2
; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm10, %xmm1
; SSE-NEXT: pandn %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
; SSE-NEXT: packuswb %xmm0, %xmm0
; SSE-NEXT: pand %xmm10, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,3]
; SSE-NEXT: movdqa %xmm13, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
; SSE-NEXT: packuswb %xmm1, %xmm1
; SSE-NEXT: pand %xmm13, %xmm1
; SSE-NEXT: por %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
6930 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
6931 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6932 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
6933 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
6934 ; SSE-NEXT: packuswb %xmm1, %xmm1
6935 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6936 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,2]
6937 ; SSE-NEXT: movdqa %xmm12, %xmm2
6938 ; SSE-NEXT: pandn %xmm1, %xmm2
6939 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6940 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
6941 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
6942 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6943 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
6944 ; SSE-NEXT: packuswb %xmm1, %xmm1
6945 ; SSE-NEXT: pand %xmm12, %xmm1
6946 ; SSE-NEXT: por %xmm2, %xmm1
6947 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6948 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6949 ; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
6950 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
6951 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
6952 ; SSE-NEXT: # xmm3 = xmm3[4],mem[4],xmm3[5],mem[5],xmm3[6],mem[6],xmm3[7],mem[7]
6953 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6954 ; SSE-NEXT: packuswb %xmm2, %xmm8
6955 ; SSE-NEXT: movdqa %xmm9, %xmm2
6956 ; SSE-NEXT: pandn %xmm8, %xmm2
6957 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,1,1]
6958 ; SSE-NEXT: packuswb %xmm3, %xmm3
6959 ; SSE-NEXT: pand %xmm9, %xmm3
6960 ; SSE-NEXT: por %xmm2, %xmm3
6961 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[2,2,2,2]
6962 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
6963 ; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
6964 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6965 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6966 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
6967 ; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
6968 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6969 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6970 ; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
6971 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6972 ; SSE-NEXT: packuswb %xmm0, %xmm0
6973 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6974 ; SSE-NEXT: movdqa %xmm10, %xmm1
6975 ; SSE-NEXT: pandn %xmm0, %xmm1
6976 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,1,1]
6977 ; SSE-NEXT: packuswb %xmm0, %xmm0
6978 ; SSE-NEXT: pand %xmm10, %xmm0
6979 ; SSE-NEXT: por %xmm1, %xmm0
6980 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6981 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
6982 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
6983 ; SSE-NEXT: packuswb %xmm1, %xmm1
6984 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6985 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,3]
6986 ; SSE-NEXT: movdqa %xmm13, %xmm2
6987 ; SSE-NEXT: pandn %xmm1, %xmm2
6988 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6989 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
6990 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
6991 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6992 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
6993 ; SSE-NEXT: packuswb %xmm1, %xmm1
6994 ; SSE-NEXT: pand %xmm13, %xmm1
6995 ; SSE-NEXT: por %xmm2, %xmm1
6996 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
6997 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
6998 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6999 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7000 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
7001 ; SSE-NEXT: packuswb %xmm1, %xmm1
7002 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7003 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,2]
7004 ; SSE-NEXT: movdqa %xmm12, %xmm2
7005 ; SSE-NEXT: pandn %xmm1, %xmm2
7006 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7007 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7008 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
7009 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7010 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
7011 ; SSE-NEXT: packuswb %xmm1, %xmm1
7012 ; SSE-NEXT: pand %xmm12, %xmm1
7013 ; SSE-NEXT: por %xmm2, %xmm1
7014 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7015 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7016 ; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
7017 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7018 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
7019 ; SSE-NEXT: # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
7020 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7021 ; SSE-NEXT: packuswb %xmm2, %xmm3
7022 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7023 ; SSE-NEXT: movdqa %xmm9, %xmm2
7024 ; SSE-NEXT: pandn %xmm3, %xmm2
7025 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,1,1]
7026 ; SSE-NEXT: packuswb %xmm3, %xmm3
7027 ; SSE-NEXT: pand %xmm9, %xmm3
7028 ; SSE-NEXT: por %xmm2, %xmm3
7029 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[2,2,2,2]
7030 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7031 ; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
7032 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7033 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7034 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7035 ; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
7036 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7037 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7038 ; SSE-NEXT: # xmm0 = xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
7039 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7040 ; SSE-NEXT: packuswb %xmm2, %xmm2
7041 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7042 ; SSE-NEXT: movdqa %xmm10, %xmm1
7043 ; SSE-NEXT: pandn %xmm2, %xmm1
7044 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
7045 ; SSE-NEXT: packuswb %xmm0, %xmm0
7046 ; SSE-NEXT: pand %xmm10, %xmm0
7047 ; SSE-NEXT: por %xmm1, %xmm0
7048 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7049 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7050 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
7051 ; SSE-NEXT: packuswb %xmm1, %xmm1
7052 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7053 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,3]
7054 ; SSE-NEXT: movdqa %xmm13, %xmm2
7055 ; SSE-NEXT: pandn %xmm1, %xmm2
7056 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7057 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7058 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
7059 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7060 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
7061 ; SSE-NEXT: packuswb %xmm1, %xmm1
7062 ; SSE-NEXT: pand %xmm13, %xmm1
7063 ; SSE-NEXT: por %xmm2, %xmm1
7064 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
7065 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7066 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7067 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7068 ; SSE-NEXT: # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
7069 ; SSE-NEXT: packuswb %xmm1, %xmm1
7070 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7071 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,2]
7072 ; SSE-NEXT: movdqa %xmm12, %xmm2
7073 ; SSE-NEXT: pandn %xmm1, %xmm2
7074 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7075 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Folded Reload
7076 ; SSE-NEXT: # xmm6 = xmm6[4],mem[4],xmm6[5],mem[5],xmm6[6],mem[6],xmm6[7],mem[7]
7077 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm6[0,1,1,3]
7078 ; SSE-NEXT: packuswb %xmm1, %xmm1
7079 ; SSE-NEXT: pand %xmm12, %xmm1
7080 ; SSE-NEXT: por %xmm2, %xmm1
7081 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7082 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7083 ; SSE-NEXT: # xmm2 = xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
7084 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7085 ; SSE-NEXT: punpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
7086 ; SSE-NEXT: # xmm3 = xmm3[4],mem[4],xmm3[5],mem[5],xmm3[6],mem[6],xmm3[7],mem[7]
7087 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7088 ; SSE-NEXT: packuswb %xmm2, %xmm4
7089 ; SSE-NEXT: movdqa %xmm9, %xmm2
7090 ; SSE-NEXT: pandn %xmm4, %xmm2
7091 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,1,1]
7092 ; SSE-NEXT: packuswb %xmm3, %xmm3
7093 ; SSE-NEXT: pand %xmm9, %xmm3
7094 ; SSE-NEXT: por %xmm2, %xmm3
7095 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm3[2,2,2,2]
7096 ; SSE-NEXT: punpckhdq {{.*#+}} xmm15 = xmm15[2],xmm1[2],xmm15[3],xmm1[3]
7097 ; SSE-NEXT: movsd {{.*#+}} xmm15 = xmm0[0],xmm15[1]
7098 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm14[0,1,3,1,4,5,6,7]
7099 ; SSE-NEXT: packuswb %xmm0, %xmm0
7100 ; SSE-NEXT: movdqa %xmm10, %xmm1
7101 ; SSE-NEXT: pandn %xmm0, %xmm1
7102 ; SSE-NEXT: pshuflw $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7103 ; SSE-NEXT: # xmm0 = mem[3,1,2,3,4,5,6,7]
7104 ; SSE-NEXT: packuswb %xmm0, %xmm0
7105 ; SSE-NEXT: pand %xmm10, %xmm0
7106 ; SSE-NEXT: por %xmm1, %xmm0
7107 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7108 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
7109 ; SSE-NEXT: packuswb %xmm1, %xmm1
7110 ; SSE-NEXT: movdqa %xmm13, %xmm2
7111 ; SSE-NEXT: pandn %xmm1, %xmm2
7112 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7113 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
7114 ; SSE-NEXT: packuswb %xmm1, %xmm1
7115 ; SSE-NEXT: pand %xmm13, %xmm1
7116 ; SSE-NEXT: por %xmm2, %xmm1
7117 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
7118 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7119 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7120 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
7121 ; SSE-NEXT: packuswb %xmm1, %xmm1
7122 ; SSE-NEXT: movdqa %xmm12, %xmm2
7123 ; SSE-NEXT: pandn %xmm1, %xmm2
7124 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7125 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
7126 ; SSE-NEXT: packuswb %xmm1, %xmm1
7127 ; SSE-NEXT: pand %xmm12, %xmm1
7128 ; SSE-NEXT: por %xmm2, %xmm1
7129 ; SSE-NEXT: pshuflw $116, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7130 ; SSE-NEXT: # xmm2 = mem[0,1,3,1,4,5,6,7]
7131 ; SSE-NEXT: packuswb %xmm2, %xmm2
7132 ; SSE-NEXT: movdqa %xmm9, %xmm3
7133 ; SSE-NEXT: pandn %xmm2, %xmm3
7134 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm11[3,1,2,3,4,5,6,7]
7135 ; SSE-NEXT: packuswb %xmm2, %xmm2
7136 ; SSE-NEXT: pand %xmm9, %xmm2
7137 ; SSE-NEXT: por %xmm3, %xmm2
7138 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm2[2,2,2,2]
7139 ; SSE-NEXT: punpckhdq {{.*#+}} xmm14 = xmm14[2],xmm1[2],xmm14[3],xmm1[3]
7140 ; SSE-NEXT: movsd {{.*#+}} xmm14 = xmm0[0],xmm14[1]
7141 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm7[0,1,3,1,4,5,6,7]
7142 ; SSE-NEXT: packuswb %xmm0, %xmm0
7143 ; SSE-NEXT: movdqa %xmm10, %xmm1
7144 ; SSE-NEXT: pandn %xmm0, %xmm1
7145 ; SSE-NEXT: pshuflw $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7146 ; SSE-NEXT: # xmm0 = mem[3,1,2,3,4,5,6,7]
7147 ; SSE-NEXT: packuswb %xmm0, %xmm0
7148 ; SSE-NEXT: pand %xmm10, %xmm0
7149 ; SSE-NEXT: por %xmm1, %xmm0
7150 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7151 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
7152 ; SSE-NEXT: packuswb %xmm1, %xmm1
7153 ; SSE-NEXT: movdqa %xmm13, %xmm2
7154 ; SSE-NEXT: pandn %xmm1, %xmm2
7155 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7156 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
7157 ; SSE-NEXT: packuswb %xmm1, %xmm1
7158 ; SSE-NEXT: pand %xmm13, %xmm1
7159 ; SSE-NEXT: por %xmm2, %xmm1
7160 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
7161 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7162 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7163 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
7164 ; SSE-NEXT: packuswb %xmm1, %xmm1
7165 ; SSE-NEXT: movdqa %xmm12, %xmm2
7166 ; SSE-NEXT: pandn %xmm1, %xmm2
7167 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7168 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
7169 ; SSE-NEXT: packuswb %xmm1, %xmm1
7170 ; SSE-NEXT: pand %xmm12, %xmm1
7171 ; SSE-NEXT: por %xmm2, %xmm1
7172 ; SSE-NEXT: pshuflw $116, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7173 ; SSE-NEXT: # xmm2 = mem[0,1,3,1,4,5,6,7]
7174 ; SSE-NEXT: packuswb %xmm2, %xmm2
7175 ; SSE-NEXT: movdqa %xmm9, %xmm3
7176 ; SSE-NEXT: pandn %xmm2, %xmm3
7177 ; SSE-NEXT: pshuflw $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7178 ; SSE-NEXT: # xmm2 = mem[3,1,2,3,4,5,6,7]
7179 ; SSE-NEXT: packuswb %xmm2, %xmm2
7180 ; SSE-NEXT: pand %xmm9, %xmm2
7181 ; SSE-NEXT: por %xmm3, %xmm2
7182 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm2[2,2,2,2]
7183 ; SSE-NEXT: punpckhdq {{.*#+}} xmm11 = xmm11[2],xmm1[2],xmm11[3],xmm1[3]
7184 ; SSE-NEXT: movsd {{.*#+}} xmm11 = xmm0[0],xmm11[1]
7185 ; SSE-NEXT: pshuflw $116, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7186 ; SSE-NEXT: # xmm0 = mem[0,1,3,1,4,5,6,7]
7187 ; SSE-NEXT: packuswb %xmm0, %xmm0
7188 ; SSE-NEXT: movdqa %xmm10, %xmm1
7189 ; SSE-NEXT: pandn %xmm0, %xmm1
7190 ; SSE-NEXT: pshuflw $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7191 ; SSE-NEXT: # xmm0 = mem[3,1,2,3,4,5,6,7]
7192 ; SSE-NEXT: packuswb %xmm0, %xmm0
7193 ; SSE-NEXT: pand %xmm10, %xmm0
7194 ; SSE-NEXT: por %xmm1, %xmm0
7195 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7196 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
7197 ; SSE-NEXT: packuswb %xmm1, %xmm1
7198 ; SSE-NEXT: movdqa %xmm13, %xmm2
7199 ; SSE-NEXT: pandn %xmm1, %xmm2
7200 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7201 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
7202 ; SSE-NEXT: packuswb %xmm1, %xmm1
7203 ; SSE-NEXT: pand %xmm13, %xmm1
7204 ; SSE-NEXT: por %xmm2, %xmm1
7205 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
7206 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7207 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7208 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
7209 ; SSE-NEXT: packuswb %xmm1, %xmm1
7210 ; SSE-NEXT: movdqa %xmm12, %xmm2
7211 ; SSE-NEXT: pandn %xmm1, %xmm2
7212 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7213 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
7214 ; SSE-NEXT: packuswb %xmm1, %xmm1
7215 ; SSE-NEXT: pand %xmm12, %xmm1
7216 ; SSE-NEXT: por %xmm2, %xmm1
7217 ; SSE-NEXT: pshuflw $116, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7218 ; SSE-NEXT: # xmm2 = mem[0,1,3,1,4,5,6,7]
7219 ; SSE-NEXT: packuswb %xmm2, %xmm2
7220 ; SSE-NEXT: movdqa %xmm9, %xmm3
7221 ; SSE-NEXT: pandn %xmm2, %xmm3
7222 ; SSE-NEXT: pshuflw $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7223 ; SSE-NEXT: # xmm2 = mem[3,1,2,3,4,5,6,7]
7224 ; SSE-NEXT: packuswb %xmm2, %xmm2
7225 ; SSE-NEXT: pand %xmm9, %xmm2
7226 ; SSE-NEXT: por %xmm3, %xmm2
7227 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm2[2,2,2,2]
7228 ; SSE-NEXT: punpckhdq {{.*#+}} xmm7 = xmm7[2],xmm1[2],xmm7[3],xmm1[3]
7229 ; SSE-NEXT: movsd {{.*#+}} xmm7 = xmm0[0],xmm7[1]
7230 ; SSE-NEXT: pshuflw $116, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7231 ; SSE-NEXT: # xmm0 = mem[0,1,3,1,4,5,6,7]
7232 ; SSE-NEXT: packuswb %xmm0, %xmm0
7233 ; SSE-NEXT: movdqa %xmm10, %xmm1
7234 ; SSE-NEXT: pandn %xmm0, %xmm1
7235 ; SSE-NEXT: pshuflw $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7236 ; SSE-NEXT: # xmm0 = mem[3,1,2,3,4,5,6,7]
7237 ; SSE-NEXT: packuswb %xmm0, %xmm0
7238 ; SSE-NEXT: pand %xmm10, %xmm0
7239 ; SSE-NEXT: por %xmm1, %xmm0
7240 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7241 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
7242 ; SSE-NEXT: packuswb %xmm1, %xmm1
7243 ; SSE-NEXT: movdqa %xmm13, %xmm2
7244 ; SSE-NEXT: pandn %xmm1, %xmm2
7245 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7246 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
7247 ; SSE-NEXT: packuswb %xmm1, %xmm1
7248 ; SSE-NEXT: pand %xmm13, %xmm1
7249 ; SSE-NEXT: por %xmm2, %xmm1
7250 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
7251 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7252 ; SSE-NEXT: pshufhw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7253 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,5,5,7]
7254 ; SSE-NEXT: packuswb %xmm1, %xmm1
7255 ; SSE-NEXT: movdqa %xmm12, %xmm2
7256 ; SSE-NEXT: pandn %xmm1, %xmm2
7257 ; SSE-NEXT: pshufhw $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7258 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,5,7,6,7]
7259 ; SSE-NEXT: packuswb %xmm1, %xmm1
7260 ; SSE-NEXT: pand %xmm12, %xmm1
7261 ; SSE-NEXT: por %xmm2, %xmm1
7262 ; SSE-NEXT: pshuflw $116, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7263 ; SSE-NEXT: # xmm2 = mem[0,1,3,1,4,5,6,7]
7264 ; SSE-NEXT: packuswb %xmm2, %xmm2
7265 ; SSE-NEXT: movdqa %xmm9, %xmm3
7266 ; SSE-NEXT: pandn %xmm2, %xmm3
7267 ; SSE-NEXT: pshuflw $231, (%rsp), %xmm2 # 16-byte Folded Reload
7268 ; SSE-NEXT: # xmm2 = mem[3,1,2,3,4,5,6,7]
7269 ; SSE-NEXT: packuswb %xmm2, %xmm2
7270 ; SSE-NEXT: pand %xmm9, %xmm2
7271 ; SSE-NEXT: por %xmm3, %xmm2
7272 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[2,2,2,2]
7273 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm1[2],xmm5[3],xmm1[3]
7274 ; SSE-NEXT: movsd {{.*#+}} xmm5 = xmm0[0],xmm5[1]
7275 ; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7276 ; SSE-NEXT: # xmm0 = mem[1,1,2,3]
7277 ; SSE-NEXT: movdqa %xmm10, %xmm1
7278 ; SSE-NEXT: pandn %xmm0, %xmm1
7279 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7280 ; SSE-NEXT: # xmm0 = mem[3,3,3,3]
7281 ; SSE-NEXT: packuswb %xmm0, %xmm0
7282 ; SSE-NEXT: pand %xmm10, %xmm0
7283 ; SSE-NEXT: por %xmm1, %xmm0
7284 ; SSE-NEXT: movdqa %xmm13, %xmm1
7285 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7286 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7287 ; SSE-NEXT: # xmm2 = mem[2,2,3,3]
7288 ; SSE-NEXT: packuswb %xmm2, %xmm2
7289 ; SSE-NEXT: pand %xmm13, %xmm2
7290 ; SSE-NEXT: por %xmm1, %xmm2
7291 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,1,1]
7292 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7293 ; SSE-NEXT: movdqa %xmm12, %xmm1
7294 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7295 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7296 ; SSE-NEXT: # xmm2 = mem[2,2,3,3]
7297 ; SSE-NEXT: packuswb %xmm2, %xmm2
7298 ; SSE-NEXT: pand %xmm12, %xmm2
7299 ; SSE-NEXT: por %xmm1, %xmm2
7300 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm8[0,1,3,3]
7301 ; SSE-NEXT: movdqa %xmm9, %xmm3
7302 ; SSE-NEXT: pandn %xmm1, %xmm3
7303 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7304 ; SSE-NEXT: # xmm1 = mem[3,3,3,3]
7305 ; SSE-NEXT: packuswb %xmm1, %xmm1
7306 ; SSE-NEXT: pand %xmm9, %xmm1
7307 ; SSE-NEXT: por %xmm3, %xmm1
7308 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm1[2,2,2,2]
7309 ; SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
7310 ; SSE-NEXT: movsd {{.*#+}} xmm3 = xmm0[0],xmm3[1]
7311 ; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7312 ; SSE-NEXT: # xmm0 = mem[1,1,2,3]
7313 ; SSE-NEXT: movdqa %xmm10, %xmm1
7314 ; SSE-NEXT: pandn %xmm0, %xmm1
7315 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7316 ; SSE-NEXT: # xmm0 = mem[3,3,3,3]
7317 ; SSE-NEXT: packuswb %xmm0, %xmm0
7318 ; SSE-NEXT: pand %xmm10, %xmm0
7319 ; SSE-NEXT: por %xmm1, %xmm0
7320 ; SSE-NEXT: movdqa %xmm13, %xmm1
7321 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7322 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7323 ; SSE-NEXT: # xmm2 = mem[2,2,3,3]
7324 ; SSE-NEXT: packuswb %xmm2, %xmm2
7325 ; SSE-NEXT: pand %xmm13, %xmm2
7326 ; SSE-NEXT: por %xmm1, %xmm2
7327 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,1,1]
7328 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7329 ; SSE-NEXT: movdqa %xmm12, %xmm8
7330 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
7331 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7332 ; SSE-NEXT: # xmm2 = mem[2,2,3,3]
7333 ; SSE-NEXT: packuswb %xmm2, %xmm1
7334 ; SSE-NEXT: pand %xmm12, %xmm1
7335 ; SSE-NEXT: por %xmm8, %xmm1
7336 ; SSE-NEXT: pshufd $244, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7337 ; SSE-NEXT: # xmm2 = mem[0,1,3,3]
7338 ; SSE-NEXT: movdqa %xmm9, %xmm8
7339 ; SSE-NEXT: pandn %xmm2, %xmm8
7340 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
7341 ; SSE-NEXT: # xmm2 = mem[3,3,3,3]
7342 ; SSE-NEXT: packuswb %xmm2, %xmm2
7343 ; SSE-NEXT: pand %xmm9, %xmm2
7344 ; SSE-NEXT: por %xmm8, %xmm2
7345 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,2,2,2]
7346 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7347 ; SSE-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
7348 ; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7349 ; SSE-NEXT: # xmm0 = mem[1,1,2,3]
7350 ; SSE-NEXT: movdqa %xmm10, %xmm1
7351 ; SSE-NEXT: pandn %xmm0, %xmm1
7352 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7353 ; SSE-NEXT: # xmm0 = mem[3,3,3,3]
7354 ; SSE-NEXT: packuswb %xmm0, %xmm0
7355 ; SSE-NEXT: pand %xmm10, %xmm0
7356 ; SSE-NEXT: por %xmm1, %xmm0
7357 ; SSE-NEXT: movdqa %xmm13, %xmm1
7358 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7359 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
7360 ; SSE-NEXT: # xmm8 = mem[2,2,3,3]
7361 ; SSE-NEXT: packuswb %xmm8, %xmm8
7362 ; SSE-NEXT: pand %xmm13, %xmm8
7363 ; SSE-NEXT: por %xmm1, %xmm8
7364 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm8[1,1,1,1]
7365 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
7366 ; SSE-NEXT: movdqa %xmm12, %xmm1
7367 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7368 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm6[2,2,3,3]
7369 ; SSE-NEXT: packuswb %xmm8, %xmm8
7370 ; SSE-NEXT: pand %xmm12, %xmm8
7371 ; SSE-NEXT: por %xmm1, %xmm8
7372 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[0,1,3,3]
7373 ; SSE-NEXT: movdqa %xmm9, %xmm4
7374 ; SSE-NEXT: pandn %xmm1, %xmm4
7375 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
7376 ; SSE-NEXT: # xmm1 = mem[3,3,3,3]
7377 ; SSE-NEXT: packuswb %xmm1, %xmm1
7378 ; SSE-NEXT: pand %xmm9, %xmm1
7379 ; SSE-NEXT: por %xmm4, %xmm1
7380 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
7381 ; SSE-NEXT: punpckhdq {{.*#+}} xmm1 = xmm1[2],xmm8[2],xmm1[3],xmm8[3]
7382 ; SSE-NEXT: movsd {{.*#+}} xmm1 = xmm0[0],xmm1[1]
7383 ; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7384 ; SSE-NEXT: # xmm0 = mem[1,1,2,3]
7385 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
7386 ; SSE-NEXT: # xmm4 = mem[3,3,3,3]
7387 ; SSE-NEXT: packuswb %xmm4, %xmm4
7388 ; SSE-NEXT: pand %xmm10, %xmm4
7389 ; SSE-NEXT: pandn %xmm0, %xmm10
7390 ; SSE-NEXT: por %xmm4, %xmm10
7391 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7392 ; SSE-NEXT: # xmm0 = mem[2,2,3,3]
7393 ; SSE-NEXT: packuswb %xmm0, %xmm0
7394 ; SSE-NEXT: pand %xmm13, %xmm0
7395 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
7396 ; SSE-NEXT: por %xmm0, %xmm13
7397 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm13[1,1,1,1]
7398 ; SSE-NEXT: punpckldq {{.*#+}} xmm10 = xmm10[0],xmm0[0],xmm10[1],xmm0[1]
7399 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7400 ; SSE-NEXT: # xmm0 = mem[2,2,3,3]
7401 ; SSE-NEXT: packuswb %xmm0, %xmm0
7402 ; SSE-NEXT: pand %xmm12, %xmm0
7403 ; SSE-NEXT: pandn {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
7404 ; SSE-NEXT: por %xmm0, %xmm12
7405 ; SSE-NEXT: pshufd $244, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
7406 ; SSE-NEXT: # xmm0 = mem[0,1,3,3]
7407 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
7408 ; SSE-NEXT: # xmm4 = mem[3,3,3,3]
7409 ; SSE-NEXT: packuswb %xmm4, %xmm4
7410 ; SSE-NEXT: pand %xmm9, %xmm4
7411 ; SSE-NEXT: pandn %xmm0, %xmm9
7412 ; SSE-NEXT: por %xmm4, %xmm9
7413 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[2,2,2,2]
7414 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm12[2],xmm0[3],xmm12[3]
7415 ; SSE-NEXT: movsd {{.*#+}} xmm0 = xmm10[0],xmm0[1]
7416 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7417 ; SSE-NEXT: movaps %xmm4, 32(%rsi)
7418 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7419 ; SSE-NEXT: movaps %xmm4, 48(%rsi)
7420 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7421 ; SSE-NEXT: movaps %xmm4, (%rsi)
7422 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7423 ; SSE-NEXT: movaps %xmm4, 16(%rsi)
7424 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7425 ; SSE-NEXT: movaps %xmm4, 32(%rdx)
7426 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7427 ; SSE-NEXT: movaps %xmm4, 48(%rdx)
7428 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7429 ; SSE-NEXT: movaps %xmm4, (%rdx)
7430 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7431 ; SSE-NEXT: movaps %xmm4, 16(%rdx)
7432 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7433 ; SSE-NEXT: movaps %xmm4, 32(%rcx)
7434 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7435 ; SSE-NEXT: movaps %xmm4, 48(%rcx)
7436 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7437 ; SSE-NEXT: movaps %xmm4, (%rcx)
7438 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7439 ; SSE-NEXT: movaps %xmm4, 16(%rcx)
7440 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7441 ; SSE-NEXT: movaps %xmm4, 32(%r8)
7442 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7443 ; SSE-NEXT: movaps %xmm4, 48(%r8)
7444 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7445 ; SSE-NEXT: movaps %xmm4, (%r8)
7446 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7447 ; SSE-NEXT: movaps %xmm4, 16(%r8)
7448 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7449 ; SSE-NEXT: movaps %xmm4, 32(%r9)
7450 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7451 ; SSE-NEXT: movaps %xmm4, 48(%r9)
7452 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7453 ; SSE-NEXT: movaps %xmm4, (%r9)
7454 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7455 ; SSE-NEXT: movaps %xmm4, 16(%r9)
7456 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
7457 ; SSE-NEXT: movapd %xmm15, 32(%rax)
7458 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7459 ; SSE-NEXT: movaps %xmm4, 48(%rax)
7460 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7461 ; SSE-NEXT: movaps %xmm4, (%rax)
7462 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7463 ; SSE-NEXT: movaps %xmm4, 16(%rax)
7464 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
7465 ; SSE-NEXT: movapd %xmm5, 48(%rax)
7466 ; SSE-NEXT: movapd %xmm7, 32(%rax)
7467 ; SSE-NEXT: movapd %xmm11, 16(%rax)
7468 ; SSE-NEXT: movapd %xmm14, (%rax)
7469 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
7470 ; SSE-NEXT: movapd %xmm0, 48(%rax)
7471 ; SSE-NEXT: movapd %xmm1, 32(%rax)
7472 ; SSE-NEXT: movapd %xmm2, 16(%rax)
7473 ; SSE-NEXT: movapd %xmm3, (%rax)
7474 ; SSE-NEXT: addq $2040, %rsp # imm = 0x7F8
7475 ; SSE-NEXT: retq
7476 ;
7477 ; AVX1-ONLY-LABEL: load_i8_stride8_vf64:
7478 ; AVX1-ONLY: # %bb.0:
7479 ; AVX1-ONLY-NEXT: subq $808, %rsp # imm = 0x328
7480 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
7481 ; AVX1-ONLY-NEXT: vmovdqa 368(%rdi), %xmm0
7482 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7483 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm0
7484 ; AVX1-ONLY-NEXT: vmovdqa 352(%rdi), %xmm13
7485 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm13, %xmm1
7486 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7487 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
7488 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm1 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
7489 ; AVX1-ONLY-NEXT: vmovdqa 336(%rdi), %xmm4
7490 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm4, %xmm2
7491 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, %xmm15
7492 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7493 ; AVX1-ONLY-NEXT: vmovdqa 320(%rdi), %xmm4
7494 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7495 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm4, %xmm4
7496 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
7497 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm2[0,1,2,3,4,5],xmm0[6,7]
7498 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
7499 ; AVX1-ONLY-NEXT: vmovdqa 304(%rdi), %xmm12
7500 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm12, %xmm0
7501 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7502 ; AVX1-ONLY-NEXT: vmovdqa 288(%rdi), %xmm5
7503 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7504 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm5
7505 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
7506 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm9 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
7507 ; AVX1-ONLY-NEXT: vmovdqa 272(%rdi), %xmm11
7508 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm11, %xmm6
7509 ; AVX1-ONLY-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7510 ; AVX1-ONLY-NEXT: vmovdqa 256(%rdi), %xmm10
7511 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm10, %xmm7
7512 ; AVX1-ONLY-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7513 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
7514 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
7515 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
7516 ; AVX1-ONLY-NEXT: vmovdqa 496(%rdi), %xmm0
7517 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7518 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm5
7519 ; AVX1-ONLY-NEXT: vmovdqa 480(%rdi), %xmm0
7520 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7521 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm6
7522 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7523 ; AVX1-ONLY-NEXT: vmovdqa 464(%rdi), %xmm0
7524 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7525 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm0, %xmm6
7526 ; AVX1-ONLY-NEXT: vmovdqa 448(%rdi), %xmm0
7527 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7528 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm0, %xmm7
7529 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
7530 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1,2,3,4,5],xmm5[6,7]
7531 ; AVX1-ONLY-NEXT: vmovdqa 432(%rdi), %xmm0
7532 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7533 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm0, %xmm6
7534 ; AVX1-ONLY-NEXT: vmovdqa 416(%rdi), %xmm14
7535 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm14, %xmm7
7536 ; AVX1-ONLY-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7537 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
7538 ; AVX1-ONLY-NEXT: vmovdqa 400(%rdi), %xmm0
7539 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
7540 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm0, %xmm7
7541 ; AVX1-ONLY-NEXT: vmovdqa 384(%rdi), %xmm0
7542 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7543 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm0, %xmm8
7544 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
7545 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm7[0,1],xmm6[2,3],xmm7[4,5,6,7]
7546 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
7547 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
7548 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
7549 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
7550 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7551 ; AVX1-ONLY-NEXT: vmovdqa 112(%rdi), %xmm0
7552 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7553 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm4
7554 ; AVX1-ONLY-NEXT: vmovdqa 96(%rdi), %xmm0
7555 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7556 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm5
7557 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
7558 ; AVX1-ONLY-NEXT: vmovdqa 80(%rdi), %xmm0
7559 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7560 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm0, %xmm5
7561 ; AVX1-ONLY-NEXT: vmovdqa 64(%rdi), %xmm0
7562 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7563 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm0, %xmm6
7564 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7565 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3,4,5],xmm4[6,7]
7566 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm0
7567 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7568 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm5
7569 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7570 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm5
7571 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm0, %xmm6
7572 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7573 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm0
7574 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7575 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm6
7576 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7577 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm6, %xmm6
7578 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm0, %xmm7
7579 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
7580 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
7581 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
7582 ; AVX1-ONLY-NEXT: vmovdqa 240(%rdi), %xmm0
7583 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7584 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm5
7585 ; AVX1-ONLY-NEXT: vmovdqa 224(%rdi), %xmm0
7586 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7587 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm3
7588 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
7589 ; AVX1-ONLY-NEXT: vmovdqa 208(%rdi), %xmm0
7590 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7591 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm0, %xmm5
7592 ; AVX1-ONLY-NEXT: vmovdqa 192(%rdi), %xmm0
7593 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7594 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm0, %xmm1
7595 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
7596 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm3[6,7]
7597 ; AVX1-ONLY-NEXT: vmovdqa 176(%rdi), %xmm0
7598 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7599 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm0, %xmm3
7600 ; AVX1-ONLY-NEXT: vmovdqa 160(%rdi), %xmm0
7601 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7602 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm0, %xmm2
7603 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
7604 ; AVX1-ONLY-NEXT: vmovdqa 144(%rdi), %xmm0
7605 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7606 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm0, %xmm3
7607 ; AVX1-ONLY-NEXT: vmovdqa 128(%rdi), %xmm0
7608 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7609 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm0, %xmm0
7610 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
7611 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm2[2,3],xmm0[4,5,6,7]
7612 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7613 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
7614 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7615 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
7616 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7617 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm6 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
7618 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7619 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm0, %xmm0
7620 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm13, %xmm1
7621 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
7622 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm1 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
7623 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm15, %xmm3
7624 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
7625 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm13, %xmm4
7626 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
7627 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5],xmm0[6,7]
7628 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
7629 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm12, %xmm3
7630 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
7631 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm7, %xmm5
7632 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
7633 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
7634 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm11, %xmm0
7635 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm10, %xmm15
7636 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
7637 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm5[2,3],xmm0[4,5,6,7]
7638 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm4[4,5,6,7]
7639 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
7640 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm12, %xmm4
7641 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
7642 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm9, %xmm5
7643 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
7644 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
7645 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm10, %xmm5
7646 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
7647 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm11, %xmm15
7648 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm15[0],xmm5[0],xmm15[1],xmm5[1],xmm15[2],xmm5[2],xmm15[3],xmm5[3]
7649 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3,4,5],xmm4[6,7]
7650 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
7651 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm8, %xmm5
7652 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm14, %xmm15
7653 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm15[0],xmm5[0],xmm15[1],xmm5[1],xmm15[2],xmm5[2],xmm15[3],xmm5[3]
7654 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm14 # 16-byte Reload
7655 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm14, %xmm15
7656 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
7657 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm14, %xmm14
7658 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
7659 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm14[0,1],xmm5[2,3],xmm14[4,5,6,7]
7660 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
7661 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
7662 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm5[0,1,2,3,4,5],ymm4[6,7]
7663 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
7664 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7665 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
7666 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm0, %xmm0
7667 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7668 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm4, %xmm4
7669 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
7670 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7671 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm4, %xmm4
7672 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7673 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm5, %xmm5
7674 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
7675 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm4[0,1,2,3,4,5],xmm0[6,7]
7676 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7677 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm4, %xmm4
7678 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7679 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm5
7680 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
7681 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7682 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm5
7683 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
7684 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm14, %xmm14
7685 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm14[0],xmm5[0],xmm14[1],xmm5[1],xmm14[2],xmm5[2],xmm14[3],xmm5[3]
7686 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3],xmm5[4,5,6,7]
7687 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm4[0,1,2,3],xmm0[4,5,6,7]
7688 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7689 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm4, %xmm4
7690 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7691 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm5, %xmm5
7692 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
7693 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7694 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm5, %xmm5
7695 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7696 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm1
7697 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
7698 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm4[6,7]
7699 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7700 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm4, %xmm4
7701 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7702 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm2
7703 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
7704 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7705 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm4, %xmm4
7706 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7707 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm3
7708 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
7709 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3],xmm3[4,5,6,7]
7710 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7711 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm2
7712 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
7713 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
7714 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7715 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
7716 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7717 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm1, %xmm1
7718 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7719 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm2, %xmm2
7720 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7721 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm1 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
7722 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7723 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm3, %xmm3
7724 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm13, %xmm4
7725 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
7726 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5],xmm2[6,7]
7727 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
7728 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7729 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm3, %xmm3
7730 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm7, %xmm5
7731 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
7732 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
7733 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
7734 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm7, %xmm6
7735 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
7736 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm13, %xmm14
7737 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7738 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
7739 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
7740 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm12, %xmm5
7741 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm9, %xmm6
7742 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7743 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm10, %xmm6
7744 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm11, %xmm14
7745 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7746 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1,2,3,4,5],xmm5[6,7]
7747 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm8, %xmm6
7748 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
7749 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm9, %xmm14
7750 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7751 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm10 # 16-byte Reload
7752 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm10, %xmm14
7753 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
7754 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm11, %xmm15
7755 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
7756 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm14[0,1],xmm6[2,3],xmm14[4,5,6,7]
7757 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
7758 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
7759 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
7760 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
7761 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7762 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
7763 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm12, %xmm4
7764 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
7765 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm8, %xmm5
7766 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
7767 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7768 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm5, %xmm5
7769 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7770 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm6
7771 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7772 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3,4,5],xmm4[6,7]
7773 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7774 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm5
7775 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7776 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm6, %xmm6
7777 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7778 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7779 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm6, %xmm6
7780 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
7781 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm13, %xmm14
7782 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7783 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
7784 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
7785 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7786 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm5, %xmm5
7787 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7788 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm6, %xmm0
7789 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1],xmm0[2],xmm5[2],xmm0[3],xmm5[3]
7790 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7791 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm5, %xmm5
7792 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7793 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm1
7794 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
7795 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3,4,5],xmm0[6,7]
7796 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7797 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm1, %xmm1
7798 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7799 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm2
7800 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7801 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7802 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm2, %xmm2
7803 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7804 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm3
7805 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
7806 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3],xmm2[4,5,6,7]
7807 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
7808 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7809 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
7810 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
7811 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7812 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
7813 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
7814 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm13, %xmm1
7815 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7816 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm2, %xmm2
7817 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7818 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm1 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
7819 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7820 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm3, %xmm3
7821 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7822 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm4, %xmm4
7823 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
7824 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5],xmm2[6,7]
7825 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
7826 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7827 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm3, %xmm3
7828 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7829 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm5
7830 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
7831 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
7832 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm7, %xmm6
7833 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
7834 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm7, %xmm14
7835 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7836 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
7837 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
7838 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7839 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm5, %xmm5
7840 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7841 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm6, %xmm6
7842 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7843 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7844 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm6
7845 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
7846 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm7, %xmm14
7847 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7848 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1,2,3,4,5],xmm5[6,7]
7849 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7850 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm6, %xmm6
7851 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm9, %xmm14
7852 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7853 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm10, %xmm14
7854 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm11, %xmm15
7855 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
7856 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm14[0,1],xmm6[2,3],xmm14[4,5,6,7]
7857 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
7858 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
7859 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
7860 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
7861 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7862 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm12, %xmm4
7863 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm8, %xmm5
7864 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
7865 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
7866 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm9, %xmm5
7867 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
7868 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm10, %xmm6
7869 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7870 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3,4,5],xmm4[6,7]
7871 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
7872 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm12, %xmm5
7873 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
7874 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm6
7875 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7876 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7877 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm6, %xmm6
7878 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
7879 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm7, %xmm14
7880 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7881 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
7882 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
7883 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7884 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm5, %xmm5
7885 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7886 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm6, %xmm0
7887 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1],xmm0[2],xmm5[2],xmm0[3],xmm5[3]
7888 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7889 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm5, %xmm5
7890 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7891 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm1
7892 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
7893 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3,4,5],xmm0[6,7]
7894 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7895 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm1, %xmm1
7896 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7897 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm2
7898 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7899 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7900 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm2, %xmm2
7901 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7902 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm3
7903 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
7904 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3],xmm2[4,5,6,7]
7905 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
7906 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
7907 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
7908 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
7909 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7910 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
7911 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm13, %xmm1
7912 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7913 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm2, %xmm2
7914 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7915 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm1 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
7916 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7917 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm3, %xmm3
7918 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7919 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm4, %xmm4
7920 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
7921 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5],xmm2[6,7]
7922 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
7923 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
7924 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm7, %xmm3
7925 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7926 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm5
7927 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
7928 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
7929 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7930 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm6, %xmm6
7931 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
7932 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm8, %xmm14
7933 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7934 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
7935 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
7936 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7937 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm5, %xmm5
7938 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7939 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm6, %xmm6
7940 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7941 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7942 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm6
7943 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
7944 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm13, %xmm14
7945 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7946 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1,2,3,4,5],xmm5[6,7]
7947 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7948 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm6, %xmm6
7949 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
7950 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm13, %xmm14
7951 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7952 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm13 # 16-byte Reload
7953 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm13, %xmm14
7954 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
7955 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm13, %xmm15
7956 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
7957 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm14[0,1],xmm6[2,3],xmm14[4,5,6,7]
7958 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
7959 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
7960 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
7961 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
7962 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7963 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7964 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm4, %xmm4
7965 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7966 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm5, %xmm5
7967 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
7968 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm9, %xmm5
7969 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm10, %xmm6
7970 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7971 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3,4,5],xmm4[6,7]
7972 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm12, %xmm5
7973 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm6
7974 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
7975 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
7976 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm10, %xmm6
7977 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
7978 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm9, %xmm14
7979 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
7980 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
7981 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
7982 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
7983 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm11, %xmm5
7984 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
7985 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm12, %xmm0
7986 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1],xmm0[2],xmm5[2],xmm0[3],xmm5[3]
7987 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
7988 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm13, %xmm5
7989 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7990 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm1
7991 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
7992 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3,4,5],xmm0[6,7]
7993 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7994 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm1, %xmm1
7995 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7996 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm2
7997 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
7998 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7999 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm2, %xmm2
8000 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8001 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm3
8002 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
8003 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3],xmm2[4,5,6,7]
8004 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
8005 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
8006 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
8007 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
8008 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8009 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
8010 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8011 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm1, %xmm1
8012 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
8013 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm2, %xmm2
8014 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
8015 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm1 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
8016 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
8017 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm3, %xmm3
8018 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8019 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm4, %xmm4
8020 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
8021 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5],xmm2[6,7]
8022 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
8023 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm7, %xmm3
8024 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8025 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8026 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
8027 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
8028 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8029 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm6, %xmm6
8030 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm8, %xmm14
8031 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8032 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
8033 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
8034 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8035 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm5, %xmm5
8036 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8037 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm6, %xmm6
8038 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8039 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8040 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm6
8041 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8042 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm7, %xmm14
8043 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8044 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1,2,3,4,5],xmm5[6,7]
8045 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8046 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm6, %xmm6
8047 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8048 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm7, %xmm14
8049 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8050 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm7 # 16-byte Reload
8051 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm7, %xmm14
8052 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8053 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm7, %xmm15
8054 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
8055 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm14[0,1],xmm6[2,3],xmm14[4,5,6,7]
8056 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
8057 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
8058 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
8059 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
8060 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8061 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8062 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm4, %xmm4
8063 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8064 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm5, %xmm5
8065 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8066 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8067 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm5, %xmm5
8068 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8069 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm6
8070 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8071 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3,4,5],xmm4[6,7]
8072 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8073 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8074 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8075 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm6, %xmm6
8076 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8077 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm10, %xmm6
8078 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm9, %xmm14
8079 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8080 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
8081 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
8082 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm11, %xmm5
8083 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm12, %xmm0
8084 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1],xmm0[2],xmm5[2],xmm0[3],xmm5[3]
8085 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm13, %xmm5
8086 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
8087 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm13, %xmm1
8088 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
8089 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3,4,5],xmm0[6,7]
8090 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8091 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm7, %xmm1
8092 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
8093 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm8, %xmm2
8094 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
8095 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
8096 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm9, %xmm2
8097 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
8098 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm10, %xmm3
8099 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
8100 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3],xmm2[4,5,6,7]
8101 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
8102 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
8103 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
8104 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
8105 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8106 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
8107 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8108 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm1, %xmm1
8109 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8110 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm11, %xmm2
8111 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
8112 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm1 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
8113 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
8114 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm12, %xmm3
8115 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8116 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm4, %xmm4
8117 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
8118 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5],xmm2[6,7]
8119 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
8120 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
8121 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm3, %xmm3
8122 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8123 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8124 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
8125 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
8126 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8127 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm6, %xmm6
8128 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8129 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm14, %xmm14
8130 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8131 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
8132 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
8133 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8134 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm5, %xmm5
8135 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8136 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm6, %xmm6
8137 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8138 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8139 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm6
8140 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8141 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm14, %xmm14
8142 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8143 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1,2,3,4,5],xmm5[6,7]
8144 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8145 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm6, %xmm6
8146 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8147 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm14, %xmm14
8148 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8149 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm14 # 16-byte Reload
8150 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm14, %xmm14
8151 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
8152 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm15, %xmm15
8153 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
8154 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm14[0,1],xmm6[2,3],xmm14[4,5,6,7]
8155 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm5
8156 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
8157 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
8158 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
8159 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8160 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8161 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm4, %xmm4
8162 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8163 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm5, %xmm5
8164 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8165 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8166 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm5, %xmm5
8167 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8168 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm6
8169 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8170 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3,4,5],xmm4[6,7]
8171 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8172 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8173 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8174 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm6, %xmm6
8175 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8176 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8177 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm6, %xmm6
8178 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8179 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm14, %xmm14
8180 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8181 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5,6,7]
8182 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
8183 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8184 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm5, %xmm5
8185 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8186 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm6, %xmm0
8187 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1],xmm0[2],xmm5[2],xmm0[3],xmm5[3]
8188 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8189 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm5, %xmm5
8190 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm13, %xmm1
8191 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
8192 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3,4,5],xmm0[6,7]
8193 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm7, %xmm1
8194 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm8, %xmm2
8195 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
8196 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm9, %xmm2
8197 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm10, %xmm3
8198 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
8199 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3],xmm2[4,5,6,7]
8200 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
8201 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm1
8202 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
8203 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm4[0,1,2,3],ymm0[4,5,6,7]
8204 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm3 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
8205 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8206 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm0
8207 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm11, %xmm1
8208 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
8209 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm2 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
8210 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm12, %xmm1
8211 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8212 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm4, %xmm4
8213 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
8214 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm1[0,1,2,3,4,5],xmm0[6,7]
8215 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm1 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
8216 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8217 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm0, %xmm0
8218 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8219 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm6, %xmm6
8220 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
8221 ; AVX1-ONLY-NEXT: vbroadcastss {{.*#+}} xmm0 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
8222 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8223 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm11, %xmm14
8224 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8225 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm11, %xmm15
8226 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
8227 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm14[0,1],xmm6[2,3],xmm14[4,5,6,7]
8228 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm6[0,1,2,3],xmm4[4,5,6,7]
8229 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8230 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm6, %xmm6
8231 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8232 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm11, %xmm14
8233 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8234 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8235 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm14
8236 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8237 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm15
8238 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
8239 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm14[0,1,2,3,4,5],xmm6[6,7]
8240 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8241 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm11, %xmm14
8242 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8243 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm11, %xmm15
8244 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
8245 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm11 # 16-byte Reload
8246 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm11, %xmm15
8247 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8248 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm11, %xmm13
8249 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm15[0],xmm13[1],xmm15[1],xmm13[2],xmm15[2],xmm13[3],xmm15[3]
8250 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1],xmm14[2,3],xmm13[4,5,6,7]
8251 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm0, %ymm6
8252 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm0, %ymm13
8253 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm6 = ymm13[0,1,2,3,4,5],ymm6[6,7]
8254 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
8255 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8256 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm6, %xmm6
8257 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8258 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm11, %xmm13
8259 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm13[0],xmm6[0],xmm13[1],xmm6[1],xmm13[2],xmm6[2],xmm13[3],xmm6[3]
8260 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8261 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm13
8262 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8263 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm14
8264 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
8265 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm13[0,1,2,3,4,5],xmm6[6,7]
8266 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8267 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm11, %xmm13
8268 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8269 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm11, %xmm14
8270 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
8271 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8272 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm11, %xmm14
8273 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8274 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm11, %xmm15
8275 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
8276 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm13 = xmm14[0,1],xmm13[2,3],xmm14[4,5,6,7]
8277 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm13[0,1,2,3],xmm6[4,5,6,7]
8278 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8279 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm11, %xmm13
8280 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8281 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm11, %xmm3
8282 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm13[0],xmm3[1],xmm13[1],xmm3[2],xmm13[2],xmm3[3],xmm13[3]
8283 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8284 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm12
8285 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8286 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm11, %xmm2
8287 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm12[0],xmm2[1],xmm12[1],xmm2[2],xmm12[2],xmm2[3],xmm12[3]
8288 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5],xmm3[6,7]
8289 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm7, %xmm3
8290 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm8, %xmm1
8291 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
8292 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm9, %xmm3
8293 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm10, %xmm0
8294 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
8295 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6,7]
8296 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm1
8297 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
8298 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
8299 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
8300 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8301 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rsi)
8302 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8303 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 32(%rsi)
8304 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8305 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rdx)
8306 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8307 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 32(%rdx)
8308 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8309 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rcx)
8310 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8311 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 32(%rcx)
8312 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8313 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%r8)
8314 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8315 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 32(%r8)
8316 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8317 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%r9)
8318 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8319 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 32(%r9)
8320 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
8321 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8322 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rax)
8323 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8324 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 32(%rax)
8325 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
8326 ; AVX1-ONLY-NEXT: vmovaps %ymm5, (%rax)
8327 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8328 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 32(%rax)
8329 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
8330 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
8331 ; AVX1-ONLY-NEXT: vmovaps %ymm4, 32(%rax)
8332 ; AVX1-ONLY-NEXT: addq $808, %rsp # imm = 0x328
8333 ; AVX1-ONLY-NEXT: vzeroupper
8334 ; AVX1-ONLY-NEXT: retq
8335 ;
8336 ; AVX2-SLOW-LABEL: load_i8_stride8_vf64:
8337 ; AVX2-SLOW: # %bb.0:
8338 ; AVX2-SLOW-NEXT: subq $840, %rsp # imm = 0x348
8339 ; AVX2-SLOW-NEXT: vmovdqa 368(%rdi), %xmm0
8340 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8341 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
8342 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm0
8343 ; AVX2-SLOW-NEXT: vmovdqa 352(%rdi), %xmm13
8344 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm13, %xmm1
8345 ; AVX2-SLOW-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8346 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
8347 ; AVX2-SLOW-NEXT: vmovdqa 336(%rdi), %xmm4
8348 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
8349 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm1
8350 ; AVX2-SLOW-NEXT: vmovdqa %xmm4, %xmm15
8351 ; AVX2-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8352 ; AVX2-SLOW-NEXT: vmovdqa 320(%rdi), %xmm4
8353 ; AVX2-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8354 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm4
8355 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
8356 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
8357 ; AVX2-SLOW-NEXT: vmovdqa 304(%rdi), %xmm0
8358 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8359 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm9 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
8360 ; AVX2-SLOW-NEXT: vpshufb %xmm9, %xmm0, %xmm1
8361 ; AVX2-SLOW-NEXT: vmovdqa 288(%rdi), %xmm11
8362 ; AVX2-SLOW-NEXT: vpshufb %xmm9, %xmm11, %xmm5
8363 ; AVX2-SLOW-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8364 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
8365 ; AVX2-SLOW-NEXT: vmovdqa 272(%rdi), %xmm10
8366 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm1 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
8367 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm6
8368 ; AVX2-SLOW-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8369 ; AVX2-SLOW-NEXT: vmovdqa 256(%rdi), %xmm8
8370 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm8, %xmm7
8371 ; AVX2-SLOW-NEXT: vmovdqa %xmm8, %xmm12
8372 ; AVX2-SLOW-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8373 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
8374 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
8375 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
8376 ; AVX2-SLOW-NEXT: vmovdqa 496(%rdi), %xmm0
8377 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8378 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm5
8379 ; AVX2-SLOW-NEXT: vmovdqa 480(%rdi), %xmm0
8380 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8381 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm6
8382 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8383 ; AVX2-SLOW-NEXT: vmovdqa 464(%rdi), %xmm0
8384 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8385 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm6
8386 ; AVX2-SLOW-NEXT: vmovdqa 448(%rdi), %xmm0
8387 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8388 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm7
8389 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
8390 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8391 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
8392 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
8393 ; AVX2-SLOW-NEXT: vmovdqa 432(%rdi), %xmm0
8394 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8395 ; AVX2-SLOW-NEXT: vpshufb %xmm9, %xmm0, %xmm6
8396 ; AVX2-SLOW-NEXT: vmovdqa 416(%rdi), %xmm0
8397 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8398 ; AVX2-SLOW-NEXT: vpshufb %xmm9, %xmm0, %xmm7
8399 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
8400 ; AVX2-SLOW-NEXT: vmovdqa 400(%rdi), %xmm14
8401 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm7
8402 ; AVX2-SLOW-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8403 ; AVX2-SLOW-NEXT: vmovdqa 384(%rdi), %xmm0
8404 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8405 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm0, %xmm8
8406 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
8407 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
8408 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
8409 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4],ymm6[5],ymm7[6,7]
8410 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
8411 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
8412 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8413 ; AVX2-SLOW-NEXT: vmovdqa 112(%rdi), %xmm8
8414 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm4
8415 ; AVX2-SLOW-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8416 ; AVX2-SLOW-NEXT: vmovdqa 96(%rdi), %xmm0
8417 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8418 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm5
8419 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8420 ; AVX2-SLOW-NEXT: vmovdqa 80(%rdi), %xmm0
8421 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8422 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm5
8423 ; AVX2-SLOW-NEXT: vmovdqa 64(%rdi), %xmm0
8424 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8425 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm6
8426 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8427 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
8428 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm0
8429 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8430 ; AVX2-SLOW-NEXT: vmovdqa 48(%rdi), %xmm5
8431 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8432 ; AVX2-SLOW-NEXT: vpshufb %xmm9, %xmm5, %xmm5
8433 ; AVX2-SLOW-NEXT: vpshufb %xmm9, %xmm0, %xmm6
8434 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8435 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm0
8436 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8437 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdi), %xmm6
8438 ; AVX2-SLOW-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8439 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm6
8440 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm0, %xmm7
8441 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
8442 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
8443 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
8444 ; AVX2-SLOW-NEXT: vmovdqa 240(%rdi), %xmm0
8445 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8446 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm5
8447 ; AVX2-SLOW-NEXT: vmovdqa 224(%rdi), %xmm0
8448 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8449 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm2
8450 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
8451 ; AVX2-SLOW-NEXT: vmovdqa 208(%rdi), %xmm0
8452 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8453 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm5
8454 ; AVX2-SLOW-NEXT: vmovdqa 192(%rdi), %xmm0
8455 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
8456 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm3
8457 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
8458 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
8459 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
8460 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
8461 ; AVX2-SLOW-NEXT: vmovdqa 176(%rdi), %xmm0
8462 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8463 ; AVX2-SLOW-NEXT: vpshufb %xmm9, %xmm0, %xmm3
8464 ; AVX2-SLOW-NEXT: vmovdqa 160(%rdi), %xmm0
8465 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8466 ; AVX2-SLOW-NEXT: vpshufb %xmm9, %xmm0, %xmm0
8467 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
8468 ; AVX2-SLOW-NEXT: vmovdqa 144(%rdi), %xmm3
8469 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8470 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm3, %xmm3
8471 ; AVX2-SLOW-NEXT: vmovdqa 128(%rdi), %xmm5
8472 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8473 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm1
8474 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
8475 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8476 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
8477 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
8478 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
8479 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
8480 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8481 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
8482 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8483 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm0
8484 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm13, %xmm1
8485 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
8486 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm6 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
8487 ; AVX2-SLOW-NEXT: vpshufb %xmm6, %xmm15, %xmm3
8488 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
8489 ; AVX2-SLOW-NEXT: vpshufb %xmm6, %xmm13, %xmm4
8490 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
8491 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm3[0,1,2],xmm1[3]
8492 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
8493 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8494 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm1
8495 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm5
8496 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
8497 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm1 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
8498 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm0
8499 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm12, %xmm15
8500 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
8501 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm5[1],xmm0[2,3]
8502 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1],xmm4[2,3]
8503 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
8504 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm9, %xmm4
8505 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
8506 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm5
8507 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8508 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8509 ; AVX2-SLOW-NEXT: vpshufb %xmm6, %xmm11, %xmm5
8510 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
8511 ; AVX2-SLOW-NEXT: vpshufb %xmm6, %xmm12, %xmm15
8512 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm15[0],xmm5[0],xmm15[1],xmm5[1],xmm15[2],xmm5[2],xmm15[3],xmm5[3]
8513 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
8514 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8515 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2,3,4,5,6],ymm4[7]
8516 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8517 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
8518 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8519 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm7, %xmm15
8520 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm15[0],xmm5[0],xmm15[1],xmm5[1],xmm15[2],xmm5[2],xmm15[3],xmm5[3]
8521 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm15
8522 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8523 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm14
8524 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
8525 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8526 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
8527 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm14[0,1,2,3,4],ymm5[5],ymm14[6,7]
8528 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2,3,4,5],ymm4[6,7]
8529 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
8530 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8531 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm0
8532 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
8533 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm4
8534 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
8535 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8536 ; AVX2-SLOW-NEXT: vpshufb %xmm6, %xmm7, %xmm4
8537 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8538 ; AVX2-SLOW-NEXT: vpshufb %xmm6, %xmm5, %xmm5
8539 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8540 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm4[0,1,2],xmm0[3]
8541 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8542 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm4
8543 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8544 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
8545 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8546 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8547 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm5
8548 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8549 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm14
8550 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm14[0],xmm5[0],xmm14[1],xmm5[1],xmm14[2],xmm5[2],xmm14[3],xmm5[3]
8551 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0],xmm4[1],xmm5[2,3]
8552 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm4[0,1],xmm0[2,3]
8553 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8554 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm4, %xmm4
8555 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8556 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm2
8557 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
8558 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8559 ; AVX2-SLOW-NEXT: vpshufb %xmm6, %xmm4, %xmm4
8560 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm5 # 16-byte Reload
8561 ; AVX2-SLOW-NEXT: vpshufb %xmm6, %xmm5, %xmm5
8562 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8563 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
8564 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
8565 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4,5,6],ymm2[7]
8566 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8567 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm4
8568 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8569 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm3
8570 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
8571 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8572 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm4, %xmm4
8573 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8574 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm1
8575 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
8576 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
8577 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
8578 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm3[5],ymm1[6,7]
8579 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
8580 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
8581 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8582 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
8583 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8584 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm0
8585 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8586 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm1, %xmm1
8587 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
8588 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
8589 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8590 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm1, %xmm1
8591 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm13, %xmm4
8592 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
8593 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
8594 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
8595 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
8596 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm1
8597 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8598 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm5
8599 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
8600 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm1 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
8601 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8602 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm6
8603 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8604 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm14
8605 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8606 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
8607 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
8608 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm9, %xmm5
8609 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm6
8610 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8611 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm6
8612 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm12, %xmm14
8613 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8614 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8615 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
8616 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
8617 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
8618 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm9, %xmm6
8619 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
8620 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm14
8621 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8622 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8623 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm11, %xmm14
8624 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8625 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm11, %xmm15
8626 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
8627 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
8628 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
8629 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4],ymm6[5],ymm14[6,7]
8630 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
8631 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
8632 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8633 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8634 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm4, %xmm4
8635 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm5
8636 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8637 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm7, %xmm5
8638 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
8639 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm8, %xmm6
8640 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8641 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
8642 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8643 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm5
8644 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8645 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm6
8646 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8647 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8648 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm6
8649 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8650 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm11, %xmm14
8651 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8652 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
8653 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
8654 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8655 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8656 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8657 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm2
8658 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
8659 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8660 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
8661 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm6 # 16-byte Reload
8662 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm3
8663 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
8664 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
8665 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
8666 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
8667 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
8668 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm3
8669 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8670 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm0
8671 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
8672 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
8673 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm3, %xmm3
8674 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8675 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm1
8676 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
8677 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8678 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
8679 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
8680 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
8681 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
8682 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8683 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
8684 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8685 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm0
8686 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8687 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm1, %xmm1
8688 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
8689 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
8690 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8691 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm1, %xmm1
8692 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8693 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm4
8694 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
8695 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
8696 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
8697 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm1
8698 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
8699 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm5
8700 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
8701 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm1 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
8702 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
8703 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm13, %xmm6
8704 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8705 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm14
8706 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8707 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
8708 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
8709 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8710 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8711 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8712 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm6
8713 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8714 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8715 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm6
8716 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8717 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm14, %xmm14
8718 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8719 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8720 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
8721 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
8722 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm9, %xmm6
8723 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm14
8724 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8725 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
8726 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm9, %xmm14
8727 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
8728 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm15
8729 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
8730 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
8731 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
8732 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4],ymm6[5],ymm14[6,7]
8733 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
8734 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
8735 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8736 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8737 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm4, %xmm4
8738 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8739 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8740 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8741 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8742 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
8743 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm8, %xmm6
8744 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8745 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
8746 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
8747 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm8, %xmm5
8748 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm6
8749 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8750 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8751 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm6
8752 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8753 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm14
8754 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8755 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
8756 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
8757 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8758 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8759 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8760 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm2
8761 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
8762 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8763 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
8764 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm6 # 16-byte Reload
8765 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm3
8766 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
8767 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
8768 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
8769 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
8770 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
8771 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm3
8772 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8773 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm0
8774 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
8775 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
8776 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm3, %xmm3
8777 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8778 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm1
8779 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
8780 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8781 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
8782 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
8783 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
8784 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
8785 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8786 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
8787 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm0
8788 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8789 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm1, %xmm1
8790 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
8791 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
8792 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8793 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm1, %xmm1
8794 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8795 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm4
8796 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
8797 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
8798 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
8799 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8800 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
8801 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm5
8802 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
8803 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm1 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
8804 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm13, %xmm6
8805 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8806 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm14
8807 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8808 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
8809 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
8810 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8811 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8812 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8813 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm6
8814 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8815 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8816 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm6
8817 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8818 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm7, %xmm14
8819 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8820 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8821 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
8822 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
8823 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8824 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm6
8825 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8826 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm14
8827 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8828 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm9, %xmm14
8829 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm15
8830 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
8831 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
8832 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
8833 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4],ymm6[5],ymm14[6,7]
8834 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
8835 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
8836 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8837 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8838 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm7, %xmm4
8839 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8840 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8841 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8842 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8843 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
8844 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8845 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm6
8846 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8847 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
8848 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm8, %xmm5
8849 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8850 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm6
8851 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8852 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
8853 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm9, %xmm6
8854 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
8855 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm8, %xmm14
8856 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8857 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
8858 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
8859 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
8860 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm5
8861 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8862 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm2
8863 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
8864 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
8865 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm12, %xmm5
8866 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm13 # 16-byte Reload
8867 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm13, %xmm3
8868 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
8869 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
8870 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
8871 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
8872 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
8873 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm3
8874 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8875 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm0
8876 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
8877 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
8878 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm3, %xmm3
8879 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8880 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm1
8881 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
8882 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8883 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
8884 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
8885 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
8886 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
8887 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8888 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
8889 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8890 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm0
8891 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8892 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm1, %xmm1
8893 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
8894 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
8895 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8896 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm1, %xmm1
8897 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8898 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm4
8899 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
8900 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
8901 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
8902 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
8903 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
8904 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8905 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm5
8906 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
8907 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm1 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
8908 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8909 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm6
8910 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8911 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm14
8912 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8913 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
8914 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
8915 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8916 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8917 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8918 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm6
8919 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8920 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8921 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm6
8922 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8923 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm14, %xmm14
8924 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8925 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8926 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
8927 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
8928 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8929 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm6
8930 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8931 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm14, %xmm14
8932 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8933 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
8934 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm14
8935 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
8936 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm15, %xmm15
8937 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
8938 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
8939 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
8940 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4],ymm6[5],ymm14[6,7]
8941 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
8942 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
8943 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8944 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm7, %xmm4
8945 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8946 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8947 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
8948 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8949 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
8950 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8951 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm6
8952 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8953 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
8954 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
8955 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm5
8956 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8957 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm6
8958 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
8959 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm9, %xmm6
8960 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm8, %xmm14
8961 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
8962 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
8963 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
8964 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm5
8965 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm2
8966 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
8967 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm12, %xmm5
8968 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm13, %xmm3
8969 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
8970 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
8971 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
8972 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
8973 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
8974 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm3
8975 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
8976 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm0
8977 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
8978 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
8979 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm8, %xmm3
8980 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
8981 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm9, %xmm1
8982 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
8983 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8984 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
8985 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
8986 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
8987 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
8988 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8989 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
8990 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8991 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm0
8992 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
8993 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm1
8994 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
8995 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
8996 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
8997 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm1
8998 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8999 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm4
9000 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
9001 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
9002 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
9003 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9004 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
9005 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9006 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm5
9007 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
9008 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm1 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
9009 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9010 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm6
9011 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
9012 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm12, %xmm14
9013 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
9014 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
9015 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
9016 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9017 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
9018 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9019 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm6
9020 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
9021 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9022 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm6
9023 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
9024 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm14, %xmm14
9025 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
9026 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
9027 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
9028 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
9029 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9030 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm6
9031 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
9032 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm14, %xmm14
9033 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
9034 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
9035 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm14
9036 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
9037 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm15, %xmm15
9038 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
9039 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
9040 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
9041 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4],ymm6[5],ymm14[6,7]
9042 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
9043 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
9044 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9045 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9046 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm4, %xmm4
9047 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9048 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
9049 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
9050 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9051 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
9052 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9053 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm6
9054 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
9055 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
9056 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9057 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm5
9058 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9059 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm6
9060 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
9061 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9062 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm6
9063 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
9064 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm14
9065 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
9066 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
9067 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
9068 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9069 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
9070 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9071 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm2
9072 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
9073 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9074 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm5
9075 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm6 # 16-byte Reload
9076 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm3
9077 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
9078 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
9079 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
9080 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
9081 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm3
9082 ; AVX2-SLOW-NEXT: vmovdqa %xmm13, %xmm5
9083 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm0
9084 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
9085 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm8, %xmm3
9086 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm9, %xmm1
9087 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
9088 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
9089 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
9090 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
9091 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
9092 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
9093 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9094 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
9095 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9096 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm0
9097 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm10, %xmm1
9098 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
9099 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
9100 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm11, %xmm2
9101 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9102 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
9103 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
9104 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm2[0,1,2],xmm1[3]
9105 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm1 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
9106 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
9107 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm2, %xmm2
9108 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9109 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm6
9110 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
9111 ; AVX2-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
9112 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9113 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm14
9114 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm15
9115 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
9116 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm6 = xmm14[0],xmm6[1],xmm14[2,3]
9117 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0,1],xmm4[2,3]
9118 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9119 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm6
9120 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9121 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm10, %xmm14
9122 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
9123 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9124 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm14
9125 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9126 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm15
9127 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
9128 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
9129 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
9130 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4,5,6],ymm6[7]
9131 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9132 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm14
9133 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9134 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm15
9135 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
9136 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9137 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm15
9138 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9139 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm13
9140 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm15[0],xmm13[1],xmm15[1],xmm13[2],xmm15[2],xmm13[3],xmm15[3]
9141 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
9142 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
9143 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3,4],ymm14[5],ymm13[6,7]
9144 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm6 = ymm13[0,1,2,3,4,5],ymm6[6,7]
9145 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
9146 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9147 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm6
9148 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9149 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm10, %xmm13
9150 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm13[0],xmm6[0],xmm13[1],xmm6[1],xmm13[2],xmm6[2],xmm13[3],xmm6[3]
9151 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9152 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm13
9153 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9154 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm14
9155 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
9156 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm6 = xmm13[0,1,2],xmm6[3]
9157 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9158 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm13
9159 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9160 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm14
9161 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
9162 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9163 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm14
9164 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9165 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm10, %xmm15
9166 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
9167 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm13 = xmm14[0],xmm13[1],xmm14[2,3]
9168 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} xmm6 = xmm13[0,1],xmm6[2,3]
9169 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9170 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm10, %xmm13
9171 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9172 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm10, %xmm3
9173 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm13[0],xmm3[1],xmm13[1],xmm3[2],xmm13[2],xmm3[3],xmm13[3]
9174 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9175 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm12
9176 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm10 # 16-byte Reload
9177 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm0
9178 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm12[0],xmm0[1],xmm12[1],xmm0[2],xmm12[2],xmm0[3],xmm12[3]
9179 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
9180 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
9181 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5,6],ymm3[7]
9182 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm3
9183 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm1
9184 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
9185 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm3
9186 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm9, %xmm2
9187 ; AVX2-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
9188 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
9189 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
9190 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4],ymm1[5],ymm2[6,7]
9191 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
9192 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
9193 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9194 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%rsi)
9195 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9196 ; AVX2-SLOW-NEXT: vmovaps %ymm1, 32(%rsi)
9197 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9198 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%rdx)
9199 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9200 ; AVX2-SLOW-NEXT: vmovaps %ymm1, 32(%rdx)
9201 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9202 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%rcx)
9203 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9204 ; AVX2-SLOW-NEXT: vmovaps %ymm1, 32(%rcx)
9205 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9206 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%r8)
9207 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9208 ; AVX2-SLOW-NEXT: vmovaps %ymm1, 32(%r8)
9209 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9210 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%r9)
9211 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9212 ; AVX2-SLOW-NEXT: vmovaps %ymm1, 32(%r9)
9213 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
9214 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9215 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%rax)
9216 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9217 ; AVX2-SLOW-NEXT: vmovaps %ymm1, 32(%rax)
9218 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
9219 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9220 ; AVX2-SLOW-NEXT: vmovaps %ymm1, (%rax)
9221 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9222 ; AVX2-SLOW-NEXT: vmovaps %ymm1, 32(%rax)
9223 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
9224 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, (%rax)
9225 ; AVX2-SLOW-NEXT: vmovdqa %ymm4, 32(%rax)
9226 ; AVX2-SLOW-NEXT: addq $840, %rsp # imm = 0x348
9227 ; AVX2-SLOW-NEXT: vzeroupper
9228 ; AVX2-SLOW-NEXT: retq
9229 ;
9230 ; AVX2-FAST-LABEL: load_i8_stride8_vf64:
9231 ; AVX2-FAST: # %bb.0:
9232 ; AVX2-FAST-NEXT: subq $904, %rsp # imm = 0x388
9233 ; AVX2-FAST-NEXT: vmovdqa 368(%rdi), %xmm0
9234 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9235 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm1 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
9236 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
9237 ; AVX2-FAST-NEXT: vmovdqa 352(%rdi), %xmm2
9238 ; AVX2-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9239 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm2, %xmm2
9240 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
9241 ; AVX2-FAST-NEXT: vmovdqa 336(%rdi), %xmm15
9242 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm3 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
9243 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm15, %xmm2
9244 ; AVX2-FAST-NEXT: vmovdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9245 ; AVX2-FAST-NEXT: vmovdqa 320(%rdi), %xmm4
9246 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9247 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm4, %xmm4
9248 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
9249 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3]
9250 ; AVX2-FAST-NEXT: vmovdqa 304(%rdi), %xmm14
9251 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm2 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
9252 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm14, %xmm4
9253 ; AVX2-FAST-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9254 ; AVX2-FAST-NEXT: vmovdqa 288(%rdi), %xmm5
9255 ; AVX2-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9256 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm5, %xmm5
9257 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
9258 ; AVX2-FAST-NEXT: vmovdqa 272(%rdi), %xmm5
9259 ; AVX2-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9260 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm8 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
9261 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm5, %xmm5
9262 ; AVX2-FAST-NEXT: vmovdqa 256(%rdi), %xmm6
9263 ; AVX2-FAST-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9264 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm6, %xmm6
9265 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
9266 ; AVX2-FAST-NEXT: vmovdqa 448(%rdi), %ymm6
9267 ; AVX2-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9268 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0],xmm4[1],xmm5[2,3]
9269 ; AVX2-FAST-NEXT: vmovdqa 480(%rdi), %ymm5
9270 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9271 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm4[0,1],xmm0[2,3]
9272 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [0,2,2,3,0,2,4,6]
9273 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm0, %ymm4
9274 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9275 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28]
9276 ; AVX2-FAST-NEXT: vpermd %ymm6, %ymm0, %ymm5
9277 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9278 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm7 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
9279 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm5, %ymm5
9280 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm5[0,1,2,3,4,5,6],ymm4[7]
9281 ; AVX2-FAST-NEXT: vmovdqa 384(%rdi), %ymm5
9282 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9283 ; AVX2-FAST-NEXT: vmovdqa 416(%rdi), %ymm4
9284 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9285 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm0, %ymm13
9286 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u]
9287 ; AVX2-FAST-NEXT: vmovdqu %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9288 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm0, %ymm5
9289 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9290 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm6 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
9291 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm5, %ymm12
9292 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm11 = ymm12[0,1,2,3,4],ymm11[5],ymm12[6,7]
9293 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0,1,2,3,4,5],ymm10[6,7]
9294 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0,1,2,3],ymm10[4,5,6,7]
9295 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9296 ; AVX2-FAST-NEXT: vmovdqa 112(%rdi), %xmm4
9297 ; AVX2-FAST-NEXT: vmovdqa %xmm4, (%rsp) # 16-byte Spill
9298 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm4, %xmm9
9299 ; AVX2-FAST-NEXT: vmovdqa 96(%rdi), %xmm4
9300 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9301 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm4, %xmm1
9302 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm9[0],xmm1[1],xmm9[1],xmm1[2],xmm9[2],xmm1[3],xmm9[3]
9303 ; AVX2-FAST-NEXT: vmovdqa 80(%rdi), %xmm4
9304 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9305 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm4, %xmm9
9306 ; AVX2-FAST-NEXT: vmovdqa 64(%rdi), %xmm4
9307 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9308 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm4, %xmm3
9309 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm9[0],xmm3[1],xmm9[1],xmm3[2],xmm9[2],xmm3[3],xmm9[3]
9310 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
9311 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm4
9312 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9313 ; AVX2-FAST-NEXT: vmovdqa 48(%rdi), %xmm3
9314 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9315 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm3, %xmm3
9316 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm4, %xmm2
9317 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
9318 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm4
9319 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9320 ; AVX2-FAST-NEXT: vmovdqa 16(%rdi), %xmm3
9321 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9322 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm3, %xmm3
9323 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm4, %xmm8
9324 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3]
9325 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2,3]
9326 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm2[0,1],xmm1[2,3]
9327 ; AVX2-FAST-NEXT: vmovdqa 192(%rdi), %ymm2
9328 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9329 ; AVX2-FAST-NEXT: vmovdqa 224(%rdi), %ymm1
9330 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9331 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm1
9332 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9333 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28]
9334 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm0, %ymm2
9335 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9336 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm10
9337 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5,6],ymm1[7]
9338 ; AVX2-FAST-NEXT: vmovdqa 128(%rdi), %ymm1
9339 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9340 ; AVX2-FAST-NEXT: vmovdqa 160(%rdi), %ymm2
9341 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9342 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm0, %ymm2
9343 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9344 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm1
9345 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9346 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u]
9347 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm1, %ymm12
9348 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm12[0,1,2,3,4],ymm0[5],ymm12[6,7]
9349 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm10[6,7]
9350 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
9351 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9352 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm8 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
9353 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9354 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm6, %xmm0
9355 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
9356 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm7, %xmm10
9357 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm10[0],xmm0[0],xmm10[1],xmm0[1],xmm10[2],xmm0[2],xmm10[3],xmm0[3]
9358 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm10 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
9359 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm15, %xmm12
9360 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
9361 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm2, %xmm11
9362 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
9363 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm11[0,1,2],xmm0[3]
9364 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm11 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
9365 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm14, %xmm12
9366 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
9367 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm15, %xmm14
9368 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
9369 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm14 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
9370 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9371 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm0, %xmm0
9372 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9373 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm4, %xmm9
9374 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm9[0],xmm0[0],xmm9[1],xmm0[1],xmm9[2],xmm0[2],xmm9[3],xmm0[3]
9375 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm12[1],xmm0[2,3]
9376 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
9377 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
9378 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29]
9379 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
9380 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u]
9381 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm9[0,1,2,3,4,5,6],ymm1[7]
9382 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u]
9383 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
9384 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u,u,u,u,u]
9385 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm12[0,1,2,3,4],ymm9[5],ymm12[6,7]
9386 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm9[0,1,2,3,4,5],ymm1[6,7]
9387 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
9388 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9389 ; AVX2-FAST-NEXT: vmovdqa (%rsp), %xmm0 # 16-byte Reload
9390 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm0, %xmm0
9391 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
9392 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm13, %xmm1
9393 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
9394 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9395 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm1, %xmm1
9396 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9397 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm5, %xmm8
9398 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm8[0],xmm1[0],xmm8[1],xmm1[1],xmm8[2],xmm1[2],xmm8[3],xmm1[3]
9399 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
9400 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9401 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm1, %xmm1
9402 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
9403 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm8, %xmm8
9404 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm8[0],xmm1[0],xmm8[1],xmm1[1],xmm8[2],xmm1[2],xmm8[3],xmm1[3]
9405 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
9406 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm8, %xmm8
9407 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
9408 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm9, %xmm9
9409 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
9410 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm8[0],xmm1[1],xmm8[2,3]
9411 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
9412 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9413 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29]
9414 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
9415 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u]
9416 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm8[0,1,2,3,4,5,6],ymm1[7]
9417 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
9418 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u]
9419 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
9420 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u,u,u,u,u]
9421 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3,4],ymm8[5],ymm9[6,7]
9422 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm8[0,1,2,3,4,5],ymm1[6,7]
9423 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
9424 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9425 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm8 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
9426 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm6, %xmm0
9427 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm7, %xmm1
9428 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
9429 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm1 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
9430 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9431 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm6, %xmm9
9432 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm2, %xmm10
9433 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
9434 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm9[0,1,2],xmm0[3]
9435 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm9 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
9436 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
9437 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm2, %xmm10
9438 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm15, %xmm11
9439 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
9440 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm11 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
9441 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
9442 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm7, %xmm12
9443 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm4, %xmm14
9444 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
9445 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm10 = xmm12[0],xmm10[1],xmm12[2,3]
9446 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1],xmm0[2,3]
9447 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30]
9448 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
9449 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u]
9450 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm12[0,1,2,3,4,5,6],ymm10[7]
9451 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
9452 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u,u,u,u,u]
9453 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
9454 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u,u,u,u,u,u,u,u,u]
9455 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm14[0,1,2,3,4],ymm12[5],ymm14[6,7]
9456 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm12[0,1,2,3,4,5],ymm10[6,7]
9457 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm10[4,5,6,7]
9458 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9459 ; AVX2-FAST-NEXT: vmovdqa (%rsp), %xmm4 # 16-byte Reload
9460 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm4, %xmm0
9461 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm13, %xmm8
9462 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm8[0],xmm0[0],xmm8[1],xmm0[1],xmm8[2],xmm0[2],xmm8[3],xmm0[3]
9463 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
9464 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm13, %xmm8
9465 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm5, %xmm1
9466 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm8[0],xmm1[1],xmm8[1],xmm1[2],xmm8[2],xmm1[3],xmm8[3]
9467 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
9468 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
9469 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm15, %xmm1
9470 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9471 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm3, %xmm8
9472 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm8[0],xmm1[0],xmm8[1],xmm1[1],xmm8[2],xmm1[2],xmm8[3],xmm1[3]
9473 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9474 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm5, %xmm8
9475 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9476 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm5, %xmm9
9477 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
9478 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm8[0],xmm1[1],xmm8[2,3]
9479 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
9480 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9481 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30]
9482 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
9483 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u]
9484 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm8[0,1,2,3,4,5,6],ymm1[7]
9485 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
9486 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u,u,u,u,u]
9487 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
9488 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u,u,u,u,u,u,u,u,u]
9489 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3,4],ymm8[5],ymm9[6,7]
9490 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm8[0,1,2,3,4,5],ymm1[6,7]
9491 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
9492 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9493 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm5 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
9494 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9495 ; AVX2-FAST-NEXT: vpshufb %xmm5, %xmm0, %xmm0
9496 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9497 ; AVX2-FAST-NEXT: vpshufb %xmm5, %xmm1, %xmm1
9498 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
9499 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm1 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
9500 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm6, %xmm9
9501 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9502 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm5, %xmm10
9503 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
9504 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm9[0,1,2],xmm0[3]
9505 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm9 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
9506 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm2, %xmm10
9507 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
9508 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm8, %xmm11
9509 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
9510 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm11 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
9511 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm7, %xmm12
9512 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
9513 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm2, %xmm14
9514 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
9515 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm10 = xmm12[0],xmm10[1],xmm12[2,3]
9516 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1],xmm0[2,3]
9517 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
9518 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31]
9519 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
9520 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31,u,u,u,u]
9521 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm12[0,1,2,3,4,5,6],ymm10[7]
9522 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm14 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
9523 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
9524 ; AVX2-FAST-NEXT: vpshufb %ymm14, %ymm6, %ymm7
9525 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm2 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
9526 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
9527 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm6, %ymm6
9528 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4],ymm7[5],ymm6[6,7]
9529 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5],ymm10[6,7]
9530 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm6[4,5,6,7]
9531 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9532 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm6 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
9533 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm4, %xmm0
9534 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9535 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm4, %xmm6
9536 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
9537 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm13, %xmm6
9538 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
9539 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm13, %xmm1
9540 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
9541 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
9542 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm15, %xmm1
9543 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm3, %xmm6
9544 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm6[0],xmm1[0],xmm6[1],xmm1[1],xmm6[2],xmm1[2],xmm6[3],xmm1[3]
9545 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9546 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm3, %xmm6
9547 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9548 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm3, %xmm7
9549 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
9550 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm6[0],xmm1[1],xmm6[2,3]
9551 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
9552 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9553 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31]
9554 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
9555 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,19,23,27,31,u,u,u,u]
9556 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5,6],ymm1[7]
9557 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
9558 ; AVX2-FAST-NEXT: vpshufb %ymm14, %ymm3, %ymm3
9559 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
9560 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm4, %ymm2
9561 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm3[5],ymm2[6,7]
9562 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
9563 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
9564 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9565 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm1 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
9566 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9567 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
9568 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
9569 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm14, %xmm1
9570 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
9571 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm6 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
9572 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9573 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm1, %xmm1
9574 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm5, %xmm2
9575 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
9576 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
9577 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm7 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
9578 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9579 ; AVX2-FAST-NEXT: vpshufb %xmm7, %xmm1, %xmm1
9580 ; AVX2-FAST-NEXT: vpshufb %xmm7, %xmm8, %xmm2
9581 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
9582 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm8 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
9583 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
9584 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm2, %xmm2
9585 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9586 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm3, %xmm3
9587 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
9588 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2,3]
9589 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm1[0,1],xmm0[2,3]
9590 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [1,3,2,3,1,3,5,7]
9591 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm1 # 32-byte Folded Reload
9592 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9593 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
9594 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9595 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm15 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
9596 ; AVX2-FAST-NEXT: vpshufb %ymm15, %ymm1, %ymm3
9597 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u]
9598 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm4[0,1,2,3,4,5,6],ymm3[7]
9599 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm1 # 32-byte Folded Reload
9600 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9601 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
9602 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9603 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u]
9604 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
9605 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm2, %ymm12
9606 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm11 = ymm12[0,1,2,3,4],ymm11[5],ymm12[6,7]
9607 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0,1,2,3,4,5],ymm10[6,7]
9608 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm9[0,1,2,3],ymm10[4,5,6,7]
9609 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9610 ; AVX2-FAST-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
9611 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm3 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
9612 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm1, %xmm9
9613 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
9614 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm2, %xmm5
9615 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm9[0],xmm5[1],xmm9[1],xmm5[2],xmm9[2],xmm5[3],xmm9[3]
9616 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
9617 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm2, %xmm9
9618 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm13, %xmm6
9619 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm9[0],xmm6[1],xmm9[1],xmm6[2],xmm9[2],xmm6[3],xmm9[3]
9620 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0,1,2],xmm5[3]
9621 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9622 ; AVX2-FAST-NEXT: vpshufb %xmm7, %xmm3, %xmm6
9623 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9624 ; AVX2-FAST-NEXT: vpshufb %xmm7, %xmm4, %xmm7
9625 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
9626 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9627 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm4, %xmm7
9628 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
9629 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm9, %xmm8
9630 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
9631 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0],xmm6[1],xmm7[2,3]
9632 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm6[0,1],xmm5[2,3]
9633 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
9634 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9635 ; AVX2-FAST-NEXT: vpshufb %ymm15, %ymm5, %ymm7
9636 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
9637 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9638 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u]
9639 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5,6],ymm7[7]
9640 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm5 # 32-byte Folded Reload
9641 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9642 ; AVX2-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
9643 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9644 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u]
9645 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,20,24,28,u,u,u,u,u,u,u,u,u,u,u,u]
9646 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0,1,2,3,4],ymm10[5],ymm11[6,7]
9647 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0,1,2,3,4,5],ymm9[6,7]
9648 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm9[4,5,6,7]
9649 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9650 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm8 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
9651 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9652 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm0, %xmm9
9653 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm14, %xmm10
9654 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
9655 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm10 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
9656 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9657 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm5, %xmm11
9658 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9659 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm6, %xmm12
9660 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
9661 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm11[0,1,2],xmm9[3]
9662 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm11 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
9663 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9664 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm0, %xmm12
9665 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
9666 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm7, %xmm13
9667 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
9668 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm13 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
9669 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9670 ; AVX2-FAST-NEXT: vpshufb %xmm13, %xmm0, %xmm14
9671 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9672 ; AVX2-FAST-NEXT: vpshufb %xmm13, %xmm0, %xmm15
9673 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
9674 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm14[0],xmm12[1],xmm14[2,3]
9675 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm12[0,1],xmm9[2,3]
9676 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9677 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29]
9678 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9679 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u]
9680 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm14[0,1,2,3,4,5,6],ymm12[7]
9681 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9682 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u]
9683 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9684 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm15 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u,u,u,u,u]
9685 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0,1,2,3,4],ymm14[5],ymm15[6,7]
9686 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm14[0,1,2,3,4,5],ymm12[6,7]
9687 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm12[4,5,6,7]
9688 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9689 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm1, %xmm9
9690 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9691 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm0, %xmm8
9692 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
9693 ; AVX2-FAST-NEXT: vmovdqa %xmm2, %xmm1
9694 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm2, %xmm9
9695 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
9696 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm2, %xmm10
9697 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
9698 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1,2],xmm8[3]
9699 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm3, %xmm9
9700 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9701 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm3, %xmm10
9702 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
9703 ; AVX2-FAST-NEXT: vpshufb %xmm13, %xmm4, %xmm10
9704 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9705 ; AVX2-FAST-NEXT: vpshufb %xmm13, %xmm4, %xmm11
9706 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
9707 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0],xmm9[1],xmm10[2,3]
9708 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
9709 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
9710 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29]
9711 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
9712 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u]
9713 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0,1,2,3,4,5,6],ymm9[7]
9714 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
9715 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u]
9716 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
9717 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u,u,u,u,u]
9718 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0,1,2,3,4],ymm10[5],ymm11[6,7]
9719 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0,1,2,3,4,5],ymm9[6,7]
9720 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm9[4,5,6,7]
9721 ; AVX2-FAST-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9722 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm8 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
9723 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
9724 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm9, %xmm9
9725 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
9726 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm10, %xmm10
9727 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
9728 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm10 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
9729 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm5, %xmm11
9730 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm6, %xmm12
9731 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
9732 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm11[0,1,2],xmm9[3]
9733 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm11 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
9734 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9735 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm6, %xmm12
9736 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm7, %xmm13
9737 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
9738 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm13 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
9739 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
9740 ; AVX2-FAST-NEXT: vpshufb %xmm13, %xmm7, %xmm14
9741 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
9742 ; AVX2-FAST-NEXT: vpshufb %xmm13, %xmm5, %xmm15
9743 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
9744 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm14[0],xmm12[1],xmm14[2,3]
9745 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm12[0,1],xmm9[2,3]
9746 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
9747 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30]
9748 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
9749 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u]
9750 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm14[0,1,2,3,4,5,6],ymm12[7]
9751 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm5 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
9752 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
9753 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm14, %ymm14
9754 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm0 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
9755 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
9756 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm15, %ymm15
9757 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0,1,2,3,4],ymm14[5],ymm15[6,7]
9758 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm14[0,1,2,3,4,5],ymm12[6,7]
9759 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm12[4,5,6,7]
9760 ; AVX2-FAST-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9761 ; AVX2-FAST-NEXT: vmovdqa (%rsp), %xmm9 # 16-byte Reload
9762 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm9, %xmm9
9763 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9764 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm0, %xmm8
9765 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
9766 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm1, %xmm9
9767 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm2, %xmm10
9768 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
9769 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1,2],xmm8[3]
9770 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9771 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm0, %xmm9
9772 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm3, %xmm10
9773 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
9774 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
9775 ; AVX2-FAST-NEXT: vpshufb %xmm13, %xmm0, %xmm10
9776 ; AVX2-FAST-NEXT: vpshufb %xmm13, %xmm4, %xmm11
9777 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
9778 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0],xmm9[1],xmm10[2,3]
9779 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
9780 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9781 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30]
9782 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9783 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u]
9784 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0,1,2,3,4,5,6],ymm9[7]
9785 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9786 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm1, %ymm10
9787 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9788 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,22,26,30,u,u,u,u,u,u,u,u,u,u,u,u]
9789 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0,1,2,3,4],ymm10[5],ymm11[6,7]
9790 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0,1,2,3,4,5],ymm9[6,7]
9791 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm8[0,1,2,3],ymm9[4,5,6,7]
9792 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9793 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm9 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
9794 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9795 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm1, %xmm10
9796 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9797 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm1, %xmm11
9798 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
9799 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm11 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
9800 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9801 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm1, %xmm12
9802 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9803 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm1, %xmm13
9804 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
9805 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm10 = xmm12[0,1,2],xmm10[3]
9806 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm12 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
9807 ; AVX2-FAST-NEXT: vpshufb %xmm12, %xmm6, %xmm13
9808 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9809 ; AVX2-FAST-NEXT: vpshufb %xmm12, %xmm1, %xmm14
9810 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
9811 ; AVX2-FAST-NEXT: vpbroadcastw {{.*#+}} xmm14 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
9812 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm7, %xmm15
9813 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
9814 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm1, %xmm8
9815 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm15[0],xmm8[1],xmm15[1],xmm8[2],xmm15[2],xmm8[3],xmm15[3]
9816 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm8[0],xmm13[1],xmm8[2,3]
9817 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3]
9818 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm5 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
9819 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9820 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm1, %ymm2
9821 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm6 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
9822 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9823 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm1, %ymm1
9824 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5,6],ymm2[7]
9825 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm7 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
9826 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
9827 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm2
9828 ; AVX2-FAST-NEXT: vpbroadcastd {{.*#+}} ymm10 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
9829 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
9830 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm3, %ymm3
9831 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
9832 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
9833 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm8[0,1,2,3],ymm1[4,5,6,7]
9834 ; AVX2-FAST-NEXT: vmovdqa (%rsp), %xmm2 # 16-byte Reload
9835 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm2, %xmm2
9836 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9837 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm3, %xmm3
9838 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
9839 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9840 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm3, %xmm3
9841 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9842 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm4, %xmm4
9843 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
9844 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3]
9845 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
9846 ; AVX2-FAST-NEXT: vpshufb %xmm12, %xmm3, %xmm3
9847 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9848 ; AVX2-FAST-NEXT: vpshufb %xmm12, %xmm4, %xmm4
9849 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
9850 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
9851 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm4, %xmm4
9852 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
9853 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm8, %xmm8
9854 ; AVX2-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm8[0],xmm4[0],xmm8[1],xmm4[1],xmm8[2],xmm4[2],xmm8[3],xmm4[3]
9855 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm3 = xmm4[0],xmm3[1],xmm4[2,3]
9856 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3]
9857 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm0, %ymm3
9858 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9859 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm0, %ymm4
9860 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5,6],ymm3[7]
9861 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9862 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm0, %ymm4
9863 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
9864 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm0, %ymm0
9865 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm4[5],ymm0[6,7]
9866 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm3[6,7]
9867 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, (%rsi)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 32(%rsi)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, (%rdx)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 32(%rdx)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, (%rcx)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 32(%rcx)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, (%r8)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 32(%r8)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, (%r9)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 32(%r9)
; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, (%rax)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 32(%rax)
; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, (%rax)
; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FAST-NEXT: vmovaps %ymm2, 32(%rax)
; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FAST-NEXT: vmovdqa %ymm0, (%rax)
; AVX2-FAST-NEXT: vmovdqa %ymm1, 32(%rax)
; AVX2-FAST-NEXT: addq $904, %rsp # imm = 0x388
; AVX2-FAST-NEXT: vzeroupper
; AVX2-FAST-NEXT: retq
;
; AVX2-FAST-PERLANE-LABEL: load_i8_stride8_vf64:
; AVX2-FAST-PERLANE: # %bb.0:
; AVX2-FAST-PERLANE-NEXT: subq $840, %rsp # imm = 0x348
; AVX2-FAST-PERLANE-NEXT: vmovdqa 368(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa 352(%rdi), %xmm13
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm13, %xmm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 336(%rdi), %xmm4
; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, %xmm15
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 320(%rdi), %xmm4
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm4
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 304(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm9 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm9, %xmm0, %xmm1
; AVX2-FAST-PERLANE-NEXT: vmovdqa 288(%rdi), %xmm11
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm9, %xmm11, %xmm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 272(%rdi), %xmm10
; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm1 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm10, %xmm6
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 256(%rdi), %xmm8
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm8, %xmm7
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm8, %xmm12
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 496(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa 480(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm6
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 464(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm6
; AVX2-FAST-PERLANE-NEXT: vmovdqa 448(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm7
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 432(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm9, %xmm0, %xmm6
; AVX2-FAST-PERLANE-NEXT: vmovdqa 416(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm9, %xmm0, %xmm7
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 400(%rdi), %xmm14
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm14, %xmm7
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 384(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm0, %xmm8
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0,1,2,3,4],ymm6[5],ymm7[6,7]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 112(%rdi), %xmm8
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm8, %xmm4
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 96(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm5
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 80(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa 64(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm6
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rdi), %xmm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm9, %xmm5, %xmm5
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm9, %xmm0, %xmm6
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdi), %xmm6
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm6, %xmm6
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm0, %xmm7
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 240(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa 224(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm2
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 208(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa 192(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm3
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 176(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm9, %xmm0, %xmm3
; AVX2-FAST-PERLANE-NEXT: vmovdqa 160(%rdi), %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm9, %xmm0, %xmm0
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa 144(%rdi), %xmm3
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm3, %xmm3
; AVX2-FAST-PERLANE-NEXT: vmovdqa 128(%rdi), %xmm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm5, %xmm1
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm13, %xmm1
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm6 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm15, %xmm3
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm13, %xmm4
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm3[0,1,2],xmm1[3]
; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm1
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm11, %xmm5
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm1 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm10, %xmm0
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm12, %xmm15
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm5[1],xmm0[2,3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1],xmm4[2,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm9, %xmm4
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm5
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm11, %xmm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm12, %xmm15
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm15[0],xmm5[0],xmm15[1],xmm5[1],xmm15[2],xmm5[2],xmm15[3],xmm5[3]
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2,3,4,5,6],ymm4[7]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm7, %xmm15
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm15[0],xmm5[0],xmm15[1],xmm5[1],xmm15[2],xmm5[2],xmm15[3],xmm5[3]
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm14, %xmm15
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm7, %xmm14
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3]
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm14[0,1,2,3,4],ymm5[5],ymm14[6,7]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2,3,4,5],ymm4[6,7]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm4[4,5,6,7]
; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm8, %xmm0
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm8, %xmm4
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm7, %xmm4
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm5, %xmm5
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm0 = xmm4[0,1,2],xmm0[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm4
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm5
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm5, %xmm5
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm14, %xmm14
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm14[0],xmm5[0],xmm14[1],xmm5[1],xmm14[2],xmm5[2],xmm14[3],xmm5[3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0],xmm4[1],xmm5[2,3]
; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm0 = xmm4[0,1],xmm0[2,3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm4, %xmm4
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm2
; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm4, %xmm4
; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm5 # 16-byte Reload
; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm5, %xmm5
10131 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
10132 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
10133 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
10134 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4,5,6],ymm2[7]
10135 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
10136 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm4
10137 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10138 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm3
10139 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
10140 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
10141 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm4, %xmm4
10142 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10143 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm5, %xmm1
10144 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
10145 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
10146 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
10147 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm3[5],ymm1[6,7]
10148 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6,7]
10149 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
10150 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10151 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
10152 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10153 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
10154 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10155 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm1, %xmm1
10156 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
10157 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
10158 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10159 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm1, %xmm1
10160 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm13, %xmm4
10161 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
10162 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
10163 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
10164 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
10165 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm13, %xmm1
10166 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10167 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm5
10168 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
10169 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm1 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
10170 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10171 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm6, %xmm6
10172 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
10173 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm14, %xmm14
10174 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10175 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
10176 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
10177 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm9, %xmm5
10178 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm6
10179 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10180 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm11, %xmm6
10181 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm12, %xmm14
10182 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10183 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
10184 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10185 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
10186 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
10187 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm9, %xmm6
10188 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10189 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm14
10190 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10191 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
10192 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm11, %xmm14
10193 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
10194 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm11, %xmm15
10195 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
10196 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10197 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
10198 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4],ymm6[5],ymm14[6,7]
10199 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
10200 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
10201 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10202 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
10203 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm4, %xmm4
10204 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm8, %xmm5
10205 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
10206 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm7, %xmm5
10207 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
10208 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm8, %xmm6
10209 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10210 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
10211 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10212 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm5
10213 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
10214 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm7, %xmm6
10215 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10216 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10217 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm6, %xmm6
10218 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
10219 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm11, %xmm14
10220 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10221 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
10222 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
10223 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10224 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm5
10225 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10226 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm6, %xmm2
10227 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
10228 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10229 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm5
10230 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm6 # 16-byte Reload
10231 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm3
10232 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
10233 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
10234 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
10235 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
10236 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
10237 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm3, %xmm3
10238 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10239 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm0
10240 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
10241 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
10242 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm3, %xmm3
10243 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10244 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm5, %xmm1
10245 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
10246 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
10247 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
10248 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
10249 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
10250 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
10251 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10252 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
10253 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
10254 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm11, %xmm0
10255 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10256 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm1, %xmm1
10257 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
10258 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
10259 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10260 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm1, %xmm1
10261 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
10262 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm4
10263 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
10264 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
10265 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
10266 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm13, %xmm1
10267 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
10268 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm12, %xmm5
10269 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
10270 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm1 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
10271 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
10272 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm13, %xmm6
10273 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
10274 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm14, %xmm14
10275 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10276 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
10277 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
10278 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10279 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm5
10280 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10281 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm6, %xmm6
10282 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10283 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10284 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm6
10285 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
10286 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm14, %xmm14
10287 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10288 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
10289 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10290 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
10291 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm9, %xmm6
10292 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm14
10293 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10294 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
10295 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm9, %xmm14
10296 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10297 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm10, %xmm15
10298 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
10299 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10300 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
10301 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4],ymm6[5],ymm14[6,7]
10302 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
10303 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
10304 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10305 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
10306 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm4, %xmm4
10307 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10308 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm5
10309 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
10310 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10311 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm5
10312 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm8, %xmm6
10313 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10314 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
10315 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
10316 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm8, %xmm5
10317 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm7, %xmm6
10318 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10319 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10320 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm6, %xmm6
10321 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
10322 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm7, %xmm14
10323 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10324 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
10325 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
10326 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10327 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm5
10328 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10329 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm6, %xmm2
10330 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
10331 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10332 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm5
10333 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm6 # 16-byte Reload
10334 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm3
10335 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
10336 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
10337 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
10338 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
10339 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
10340 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm3, %xmm3
10341 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10342 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm0
10343 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
10344 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
10345 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm3, %xmm3
10346 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10347 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm5, %xmm1
10348 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
10349 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
10350 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
10351 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
10352 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
10353 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
10354 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10355 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
10356 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm11, %xmm0
10357 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10358 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm1, %xmm1
10359 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
10360 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
10361 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10362 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm1, %xmm1
10363 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
10364 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm4
10365 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
10366 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
10367 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
10368 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10369 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm1, %xmm1
10370 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm12, %xmm5
10371 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
10372 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm1 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
10373 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm13, %xmm6
10374 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
10375 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm7, %xmm14
10376 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10377 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
10378 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
10379 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10380 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm5
10381 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10382 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm6, %xmm6
10383 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10384 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10385 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm6
10386 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
10387 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm7, %xmm14
10388 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10389 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
10390 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10391 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
10392 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10393 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm6, %xmm6
10394 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
10395 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm7, %xmm14
10396 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10397 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm9, %xmm14
10398 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm10, %xmm15
10399 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
10400 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10401 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
10402 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4],ymm6[5],ymm14[6,7]
10403 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
10404 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
10405 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10406 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
10407 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm7, %xmm4
10408 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10409 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm5
10410 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
10411 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10412 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm5
10413 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10414 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm6
10415 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10416 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
10417 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm8, %xmm5
10418 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10419 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm6, %xmm6
10420 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10421 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
10422 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm9, %xmm6
10423 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
10424 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm8, %xmm14
10425 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10426 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
10427 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
10428 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10429 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm5
10430 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
10431 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm11, %xmm2
10432 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
10433 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
10434 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm12, %xmm5
10435 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm13 # 16-byte Reload
10436 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm13, %xmm3
10437 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
10438 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
10439 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
10440 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
10441 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
10442 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm3, %xmm3
10443 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10444 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm0
10445 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
10446 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
10447 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm3, %xmm3
10448 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10449 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm5, %xmm1
10450 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
10451 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
10452 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
10453 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
10454 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
10455 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
10456 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10457 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
10458 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10459 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
10460 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10461 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm1, %xmm1
10462 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
10463 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
10464 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10465 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm1, %xmm1
10466 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
10467 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm4
10468 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
10469 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
10470 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
10471 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10472 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm1, %xmm1
10473 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10474 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm5
10475 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
10476 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm1 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
10477 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10478 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm6, %xmm6
10479 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
10480 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm14, %xmm14
10481 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10482 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
10483 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
10484 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10485 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm5
10486 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10487 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm6, %xmm6
10488 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10489 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10490 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm6
10491 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
10492 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm14, %xmm14
10493 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10494 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
10495 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10496 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
10497 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10498 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm6, %xmm6
10499 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
10500 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm14, %xmm14
10501 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10502 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
10503 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm14, %xmm14
10504 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10505 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm15, %xmm15
10506 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
10507 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10508 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
10509 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4],ymm6[5],ymm14[6,7]
10510 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
10511 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
10512 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10513 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm7, %xmm4
10514 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10515 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm5
10516 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
10517 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10518 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm5
10519 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10520 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm6
10521 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10522 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
10523 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10524 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm5
10525 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10526 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm6, %xmm6
10527 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10528 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm9, %xmm6
10529 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm8, %xmm14
10530 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10531 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
10532 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
10533 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm5
10534 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm11, %xmm2
10535 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
10536 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm12, %xmm5
10537 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm13, %xmm3
10538 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
10539 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
10540 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
10541 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
10542 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
10543 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm13, %xmm3
10544 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
10545 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm7, %xmm0
10546 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
10547 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
10548 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm8, %xmm3
10549 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
10550 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm9, %xmm1
10551 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
10552 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
10553 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
10554 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
10555 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
10556 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
10557 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10558 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
10559 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10560 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
10561 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10562 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm1
10563 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
10564 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
10565 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
10566 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm11, %xmm1
10567 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
10568 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm4
10569 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
10570 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm1[0,1,2],xmm0[3]
10571 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
10572 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10573 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm1, %xmm1
10574 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10575 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm5
10576 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
10577 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm1 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
10578 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10579 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm6, %xmm6
10580 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
10581 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm12, %xmm14
10582 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10583 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
10584 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
10585 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10586 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm5
10587 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10588 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm6, %xmm6
10589 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10590 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10591 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm6
10592 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
10593 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm14, %xmm14
10594 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10595 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
10596 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10597 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5,6],ymm5[7]
10598 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10599 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm6, %xmm6
10600 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
10601 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm14, %xmm14
10602 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10603 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
10604 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm14, %xmm14
10605 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
10606 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm15, %xmm15
10607 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
10608 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10609 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
10610 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4],ymm6[5],ymm14[6,7]
10611 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4,5],ymm5[6,7]
10612 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
10613 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10614 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
10615 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm4, %xmm4
10616 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10617 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm5
10618 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
10619 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10620 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm5
10621 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10622 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm6
10623 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10624 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
10625 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10626 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm5
10627 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10628 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm6, %xmm6
10629 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10630 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10631 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm6, %xmm6
10632 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
10633 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm14, %xmm14
10634 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10635 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3]
10636 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
10637 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10638 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm5, %xmm5
10639 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10640 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm6, %xmm2
10641 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
10642 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
10643 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm5
10644 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm6 # 16-byte Reload
10645 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm3
10646 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
10647 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
10648 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
10649 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
10650 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm13, %xmm3
10651 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm13, %xmm5
10652 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm7, %xmm0
10653 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
10654 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm8, %xmm3
10655 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm9, %xmm1
10656 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
10657 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
10658 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
10659 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5],ymm1[6,7]
10660 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
10661 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
10662 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
10663 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm3 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
10664 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10665 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm0, %xmm0
10666 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm10, %xmm1
10667 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
10668 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm0 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
10669 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm11, %xmm2
10670 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
10671 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm4, %xmm4
10672 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
10673 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm2[0,1,2],xmm1[3]
10674 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm1 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
10675 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
10676 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm2, %xmm2
10677 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10678 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm6, %xmm6
10679 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
10680 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastw {{.*#+}} xmm2 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
10681 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10682 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm14
10683 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm12, %xmm15
10684 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
10685 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm6 = xmm14[0],xmm6[1],xmm14[2,3]
10686 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm4 = xmm6[0,1],xmm4[2,3]
10687 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10688 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm6
10689 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10690 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm10, %xmm14
10691 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm6[0],xmm14[1],xmm6[1],xmm14[2],xmm6[2],xmm14[3],xmm6[3]
10692 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10693 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm14
10694 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10695 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm15
10696 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
10697 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10698 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
10699 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm14[0,1,2,3,4,5,6],ymm6[7]
10700 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10701 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm10, %xmm14
10702 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10703 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm10, %xmm15
10704 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
10705 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10706 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm15
10707 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10708 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm13
10709 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm15[0],xmm13[1],xmm15[1],xmm13[2],xmm15[2],xmm13[3],xmm15[3]
10710 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
10711 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
10712 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3,4],ymm14[5],ymm13[6,7]
10713 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm13[0,1,2,3,4,5],ymm6[6,7]
10714 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
10715 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
10716 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm6, %xmm6
10717 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10718 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm10, %xmm13
10719 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm13[0],xmm6[0],xmm13[1],xmm6[1],xmm13[2],xmm6[2],xmm13[3],xmm6[3]
10720 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10721 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm13
10722 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10723 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm14
10724 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
10725 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm6 = xmm13[0,1,2],xmm6[3]
10726 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10727 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm10, %xmm13
10728 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10729 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm10, %xmm14
10730 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
10731 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10732 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm14
10733 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10734 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm10, %xmm15
10735 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
10736 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm13 = xmm14[0],xmm13[1],xmm14[2,3]
10737 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} xmm6 = xmm13[0,1],xmm6[2,3]
10738 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10739 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm10, %xmm13
10740 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10741 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm10, %xmm3
10742 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm13[0],xmm3[1],xmm13[1],xmm3[2],xmm13[2],xmm3[3],xmm13[3]
10743 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
10744 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm12
10745 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm10 # 16-byte Reload
10746 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm0
10747 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm12[0],xmm0[1],xmm12[1],xmm0[2],xmm12[2],xmm0[3],xmm12[3]
10748 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
10749 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
10750 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5,6],ymm3[7]
10751 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm5, %xmm3
10752 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm7, %xmm1
10753 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
10754 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm8, %xmm3
10755 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm9, %xmm2
10756 ; AVX2-FAST-PERLANE-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
10757 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
10758 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
10759 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4],ymm1[5],ymm2[6,7]
10760 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
10761 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
10762 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10763 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%rsi)
10764 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10765 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 32(%rsi)
10766 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10767 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%rdx)
10768 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10769 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 32(%rdx)
10770 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10771 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%rcx)
10772 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10773 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 32(%rcx)
10774 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10775 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%r8)
10776 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10777 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 32(%r8)
10778 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10779 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%r9)
10780 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10781 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 32(%r9)
10782 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
10783 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10784 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%rax)
10785 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10786 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 32(%rax)
10787 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
10788 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10789 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, (%rax)
10790 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
10791 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 32(%rax)
10792 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
10793 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, (%rax)
10794 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, 32(%rax)
10795 ; AVX2-FAST-PERLANE-NEXT: addq $840, %rsp # imm = 0x348
10796 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
10797 ; AVX2-FAST-PERLANE-NEXT: retq
10799 ; AVX512F-SLOW-LABEL: load_i8_stride8_vf64:
10800 ; AVX512F-SLOW: # %bb.0:
10801 ; AVX512F-SLOW-NEXT: subq $520, %rsp # imm = 0x208
10802 ; AVX512F-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm17
10803 ; AVX512F-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm0
10804 ; AVX512F-SLOW-NEXT: vpmovqb %zmm0, %xmm2
10805 ; AVX512F-SLOW-NEXT: vmovdqa 496(%rdi), %xmm3
10806 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm0 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
10807 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm1
10808 ; AVX512F-SLOW-NEXT: vmovdqa %xmm3, %xmm7
10809 ; AVX512F-SLOW-NEXT: vmovdqa 480(%rdi), %xmm4
10810 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm3
10811 ; AVX512F-SLOW-NEXT: vmovdqa %xmm4, %xmm8
10812 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
10813 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
10814 ; AVX512F-SLOW-NEXT: vmovdqa 464(%rdi), %xmm5
10815 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm1 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
10816 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm4
10817 ; AVX512F-SLOW-NEXT: vmovdqa %xmm5, %xmm11
10818 ; AVX512F-SLOW-NEXT: vmovdqa 448(%rdi), %xmm15
10819 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm15, %xmm5
10820 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
10821 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
10822 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5,6],ymm3[7]
10823 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
10824 ; AVX512F-SLOW-NEXT: vmovdqa 384(%rdi), %ymm4
10825 ; AVX512F-SLOW-NEXT: vpmovqb %ymm4, %xmm4
10826 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
10827 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4],ymm2[5],ymm4[6,7]
10828 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
10829 ; AVX512F-SLOW-NEXT: vmovdqa 368(%rdi), %xmm4
10830 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm3
10831 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm4, %xmm18
10832 ; AVX512F-SLOW-NEXT: vmovdqa 352(%rdi), %xmm5
10833 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm4
10834 ; AVX512F-SLOW-NEXT: vmovdqa %xmm5, %xmm14
10835 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
10836 ; AVX512F-SLOW-NEXT: vmovdqa 336(%rdi), %xmm4
10837 ; AVX512F-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10838 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm4, %xmm4
10839 ; AVX512F-SLOW-NEXT: vmovdqa 320(%rdi), %xmm6
10840 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm5
10841 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm6, %xmm19
10842 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
10843 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm3 = xmm4[0,1,2],xmm3[3]
10844 ; AVX512F-SLOW-NEXT: vpmovqb %zmm17, %xmm4
10845 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm3 = xmm4[0,1],xmm3[2,3]
10846 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm3, %zmm0, %zmm3
10847 ; AVX512F-SLOW-NEXT: movb $-64, %al
10848 ; AVX512F-SLOW-NEXT: kmovw %eax, %k1
10849 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm3 {%k1}
10850 ; AVX512F-SLOW-NEXT: vmovdqa 240(%rdi), %xmm4
10851 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm2
10852 ; AVX512F-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10853 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm4, %xmm25
10854 ; AVX512F-SLOW-NEXT: vmovdqa 224(%rdi), %xmm4
10855 ; AVX512F-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10856 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
10857 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
10858 ; AVX512F-SLOW-NEXT: vmovdqa 208(%rdi), %xmm5
10859 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm4
10860 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm5, %xmm29
10861 ; AVX512F-SLOW-NEXT: vmovdqa 192(%rdi), %xmm10
10862 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm5
10863 ; AVX512F-SLOW-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10864 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
10865 ; AVX512F-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm5
10866 ; AVX512F-SLOW-NEXT: vpmovqb %zmm5, %xmm5
10867 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
10868 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
10869 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4,5,6],ymm2[7]
10870 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm4
10871 ; AVX512F-SLOW-NEXT: vmovdqa 128(%rdi), %ymm5
10872 ; AVX512F-SLOW-NEXT: vpmovqb %ymm5, %xmm5
10873 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
10874 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2,3,4],ymm4[5],ymm5[6,7]
10875 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4,5],ymm2[6,7]
10876 ; AVX512F-SLOW-NEXT: vmovdqa 112(%rdi), %xmm4
10877 ; AVX512F-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10878 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
10879 ; AVX512F-SLOW-NEXT: vmovdqa 96(%rdi), %xmm5
10880 ; AVX512F-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10881 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm0
10882 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
10883 ; AVX512F-SLOW-NEXT: vmovdqa 80(%rdi), %xmm5
10884 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm4
10885 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm5, %xmm30
10886 ; AVX512F-SLOW-NEXT: vmovdqa 64(%rdi), %xmm5
10887 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm1
10888 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm5, %xmm28
10889 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
10890 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
10891 ; AVX512F-SLOW-NEXT: vmovdqa64 (%rdi), %zmm16
10892 ; AVX512F-SLOW-NEXT: vpmovqb %zmm16, %xmm1
10893 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
10894 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
10895 ; AVX512F-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm0
10896 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10897 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm9 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
10898 ; AVX512F-SLOW-NEXT: vpshufb %xmm9, %xmm7, %xmm0
10899 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm7, %xmm24
10900 ; AVX512F-SLOW-NEXT: vpshufb %xmm9, %xmm8, %xmm2
10901 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm8, %xmm21
10902 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
10903 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm8 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
10904 ; AVX512F-SLOW-NEXT: vpshufb %xmm8, %xmm11, %xmm3
10905 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm11, %xmm27
10906 ; AVX512F-SLOW-NEXT: vpshufb %xmm8, %xmm15, %xmm4
10907 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
10908 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
10909 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
10910 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm3[0,1,2,3,4,5,6],ymm2[7]
10911 ; AVX512F-SLOW-NEXT: vmovdqa 416(%rdi), %xmm0
10912 ; AVX512F-SLOW-NEXT: vmovdqa 432(%rdi), %xmm11
10913 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
10914 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm2
10915 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm5
10916 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm0, %xmm22
10917 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm2[0],xmm5[1],xmm2[1],xmm5[2],xmm2[2],xmm5[3],xmm2[3]
10918 ; AVX512F-SLOW-NEXT: vmovdqa 384(%rdi), %xmm0
10919 ; AVX512F-SLOW-NEXT: vmovdqa 400(%rdi), %xmm12
10920 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
10921 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm6
10922 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm7
10923 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm0, %xmm31
10924 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
10925 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
10926 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
10927 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2,3,4],ymm5[5],ymm6[6,7]
10928 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2,3,4,5],ymm4[6,7]
10929 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm18, %xmm0
10930 ; AVX512F-SLOW-NEXT: vpshufb %xmm9, %xmm0, %xmm5
10931 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm18, %xmm23
10932 ; AVX512F-SLOW-NEXT: vpshufb %xmm9, %xmm14, %xmm6
10933 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm14, %xmm26
10934 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10935 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10936 ; AVX512F-SLOW-NEXT: vpshufb %xmm8, %xmm0, %xmm6
10937 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm19, %xmm14
10938 ; AVX512F-SLOW-NEXT: vpshufb %xmm8, %xmm14, %xmm7
10939 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
10940 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0,1,2],xmm5[3]
10941 ; AVX512F-SLOW-NEXT: vpsrlq $8, %zmm17, %zmm6
10942 ; AVX512F-SLOW-NEXT: vpmovqb %zmm6, %xmm6
10943 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3]
10944 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm5, %zmm0, %zmm19
10945 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm19 {%k1}
10946 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm25, %xmm0
10947 ; AVX512F-SLOW-NEXT: vpshufb %xmm9, %xmm0, %xmm4
10948 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
10949 ; AVX512F-SLOW-NEXT: vpshufb %xmm9, %xmm0, %xmm5
10950 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
10951 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm29, %xmm0
10952 ; AVX512F-SLOW-NEXT: vpshufb %xmm8, %xmm0, %xmm5
10953 ; AVX512F-SLOW-NEXT: vpshufb %xmm8, %xmm10, %xmm6
10954 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
10955 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
10956 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
10957 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm5[0,1,2,3,4,5,6],ymm4[7]
10958 ; AVX512F-SLOW-NEXT: vmovdqa 160(%rdi), %xmm0
10959 ; AVX512F-SLOW-NEXT: vmovdqa 176(%rdi), %xmm5
10960 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm4
10961 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm5, %xmm25
10962 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm0, %xmm3
10963 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm0, %xmm20
10964 ; AVX512F-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10965 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
10966 ; AVX512F-SLOW-NEXT: vmovdqa 128(%rdi), %xmm4
10967 ; AVX512F-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10968 ; AVX512F-SLOW-NEXT: vmovdqa 144(%rdi), %xmm0
10969 ; AVX512F-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10970 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm0
10971 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm4, %xmm2
10972 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
10973 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm2
10974 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
10975 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm2[5],ymm0[6,7]
10976 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
10977 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
10978 ; AVX512F-SLOW-NEXT: vpshufb %xmm9, %xmm1, %xmm1
10979 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
10980 ; AVX512F-SLOW-NEXT: vpshufb %xmm9, %xmm2, %xmm2
10981 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
10982 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm30, %xmm7
10983 ; AVX512F-SLOW-NEXT: vpshufb %xmm8, %xmm7, %xmm2
10984 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm28, %xmm3
10985 ; AVX512F-SLOW-NEXT: vpshufb %xmm8, %xmm3, %xmm3
10986 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
10987 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3]
10988 ; AVX512F-SLOW-NEXT: vpsrlq $8, %zmm16, %zmm2
10989 ; AVX512F-SLOW-NEXT: vpmovqb %zmm2, %xmm2
10990 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
10991 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
10992 ; AVX512F-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm19, %zmm0
10993 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
10994 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm4 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
10995 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm24, %xmm13
10996 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
10997 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm13, %xmm1
10998 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm21, %xmm10
10999 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11000 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm10, %xmm2
11001 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
11002 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
11003 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm27, %xmm0
11004 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm0, %xmm3
11005 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm15, %xmm8
11006 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm15, %xmm18
11007 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3]
11008 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11009 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11010 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm3[0,1,2,3,4,5,6],ymm2[7]
11011 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
11012 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm2
11013 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm11, %xmm24
11014 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm22, %xmm11
11015 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm9
11016 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm2[0],xmm9[1],xmm2[1],xmm9[2],xmm2[2],xmm9[3],xmm2[3]
11017 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
11018 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm0
11019 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm12, %xmm30
11020 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm31, %xmm5
11021 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm15
11022 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
11023 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11024 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
11025 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm9[5],ymm0[6,7]
11026 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm8[6,7]
11027 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm23, %xmm12
11028 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11029 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm12, %xmm8
11030 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm26, %xmm5
11031 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm5, %xmm9
11032 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
11033 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
11034 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm9
11035 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm15
11036 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm14, %xmm21
11037 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11038 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1,2],xmm8[3]
11039 ; AVX512F-SLOW-NEXT: vpsrlq $16, %zmm17, %zmm9
11040 ; AVX512F-SLOW-NEXT: vpmovqb %zmm9, %xmm9
11041 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
11042 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm8, %zmm0, %zmm8
11043 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm8 {%k1}
11044 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
11045 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm0, %xmm0
11046 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
11047 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm14, %xmm9
11048 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm9[0],xmm0[0],xmm9[1],xmm0[1],xmm9[2],xmm0[2],xmm9[3],xmm0[3]
11049 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm29, %xmm6
11050 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm9
11051 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11052 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm15, %xmm15
11053 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11054 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
11055 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11056 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm9[0,1,2,3,4,5,6],ymm0[7]
11057 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm25, %xmm9
11058 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm9
11059 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm20, %xmm15
11060 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm15, %xmm3
11061 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm9[0],xmm3[1],xmm9[1],xmm3[2],xmm9[2],xmm3[3],xmm9[3]
11062 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
11063 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm9, %xmm9
11064 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11065 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm15, %xmm2
11066 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm9[0],xmm2[1],xmm9[1],xmm2[2],xmm9[2],xmm2[3],xmm9[3]
11067 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11068 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11069 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm3[5],ymm2[6,7]
11070 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
11071 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
11072 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm2, %xmm2
11073 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
11074 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm3, %xmm3
11075 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
11076 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm3
11077 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm7, %xmm29
11078 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm28, %xmm7
11079 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm28, (%rsp) # 16-byte Spill
11080 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm1
11081 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
11082 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm1[0,1,2],xmm2[3]
11083 ; AVX512F-SLOW-NEXT: vpsrlq $16, %zmm16, %zmm2
11084 ; AVX512F-SLOW-NEXT: vpmovqb %zmm2, %xmm2
11085 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
11086 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11087 ; AVX512F-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm8, %zmm0
11088 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11089 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm0 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
11090 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm1
11091 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm2
11092 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
11093 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
11094 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm27, %xmm10
11095 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11096 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm3
11097 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm18, %xmm4
11098 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm4, %xmm4
11099 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
11100 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11101 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11102 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm3[0,1,2,3,4,5,6],ymm2[7]
11103 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
11104 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm24, %xmm3
11105 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm3, %xmm3
11106 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm8
11107 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm22, %xmm28
11108 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3]
11109 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
11110 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm30, %xmm9
11111 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm9
11112 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm31, %xmm11
11113 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm15
11114 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11115 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
11116 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11117 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3,4],ymm8[5],ymm9[6,7]
11118 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3,4,5],ymm4[6,7]
11119 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm8
11120 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm26, %xmm13
11121 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11122 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm9
11123 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
11124 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm9
11125 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm21, %xmm5
11126 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm15
11127 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11128 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1,2],xmm8[3]
11129 ; AVX512F-SLOW-NEXT: vpsrlq $24, %zmm17, %zmm9
11130 ; AVX512F-SLOW-NEXT: vpmovqb %zmm9, %xmm9
11131 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
11132 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm8, %zmm0, %zmm8
11133 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm8 {%k1}
11134 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
11135 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
11136 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm14, %xmm9
11137 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm14, %xmm22
11138 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm9[0],xmm4[0],xmm9[1],xmm4[1],xmm9[2],xmm4[2],xmm9[3],xmm4[3]
11139 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm9
11140 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm6, %xmm26
11141 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
11142 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm15
11143 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11144 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
11145 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11146 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0,1,2,3,4,5,6],ymm4[7]
11147 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm25, %xmm14
11148 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm14, %xmm9
11149 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
11150 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm2
11151 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm9[0],xmm2[1],xmm9[1],xmm2[2],xmm9[2],xmm2[3],xmm9[3]
11152 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
11153 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm9
11154 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
11155 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm6, %xmm3
11156 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm9[0],xmm3[1],xmm9[1],xmm3[2],xmm9[2],xmm3[3],xmm9[3]
11157 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11158 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11159 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
11160 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm4[6,7]
11161 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
11162 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm3, %xmm3
11163 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
11164 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm0
11165 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
11166 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm29, %xmm3
11167 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm3, %xmm3
11168 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm1
11169 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
11170 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
11171 ; AVX512F-SLOW-NEXT: vpsrlq $24, %zmm16, %zmm1
11172 ; AVX512F-SLOW-NEXT: vpmovqb %zmm1, %xmm1
11173 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
11174 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
11175 ; AVX512F-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm8, %zmm0
11176 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11177 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm0 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
11178 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11179 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
11180 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
11181 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm2
11182 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
11183 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
11184 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm3
11185 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm18, %xmm10
11186 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11187 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm4
11188 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
11189 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11190 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11191 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm3[0,1,2,3,4,5,6],ymm2[7]
11192 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
11193 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm24, %xmm11
11194 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm3
11195 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm28, %xmm7
11196 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm7, %xmm8
11197 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3]
11198 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
11199 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm30, %xmm7
11200 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm7, %xmm9
11201 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm31, %xmm12
11202 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm12, %xmm15
11203 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11204 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
11205 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11206 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3,4],ymm8[5],ymm9[6,7]
11207 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3,4,5],ymm4[6,7]
11208 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
11209 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm8
11210 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm9
11211 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
11212 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
11213 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm9
11214 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm15
11215 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm5, %xmm23
11216 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11217 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1,2],xmm8[3]
11218 ; AVX512F-SLOW-NEXT: vpsrlq $32, %zmm17, %zmm9
11219 ; AVX512F-SLOW-NEXT: vpmovqb %zmm9, %xmm9
11220 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
11221 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm8, %zmm0, %zmm8
11222 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm8 {%k1}
11223 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
11224 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm4
11225 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm22, %xmm5
11226 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm9
11227 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm9[0],xmm4[0],xmm9[1],xmm4[1],xmm9[2],xmm4[2],xmm9[3],xmm4[3]
11228 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm26, %xmm5
11229 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm9
11230 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
11231 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm15
11232 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11233 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
11234 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11235 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0,1,2,3,4,5,6],ymm4[7]
11236 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm14, %xmm9
11237 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm25, %xmm20
11238 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
11239 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm2
11240 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm9[0],xmm2[1],xmm9[1],xmm2[2],xmm9[2],xmm2[3],xmm9[3]
11241 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
11242 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm9
11243 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
11244 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm14, %xmm3
11245 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm9[0],xmm3[1],xmm9[1],xmm3[2],xmm9[2],xmm3[3],xmm9[3]
11246 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11247 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11248 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
11249 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm4[6,7]
11250 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
11251 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm3
11252 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm0
11253 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm6, %xmm25
11254 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
11255 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm29, %xmm3
11256 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm3, %xmm3
11257 ; AVX512F-SLOW-NEXT: vmovdqa (%rsp), %xmm4 # 16-byte Reload
11258 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm4, %xmm1
11259 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
11260 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
11261 ; AVX512F-SLOW-NEXT: vpsrlq $32, %zmm16, %zmm1
11262 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm16, %zmm18
11263 ; AVX512F-SLOW-NEXT: vpmovqb %zmm1, %xmm1
11264 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
11265 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
11266 ; AVX512F-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm8, %zmm0
11267 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11268 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm0 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
11269 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
11270 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
11271 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
11272 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm2
11273 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
11274 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm1 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
11275 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
11276 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm3, %xmm3
11277 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm4
11278 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
11279 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11280 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11281 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm3[0,1,2,3,4,5,6],ymm2[7]
11282 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
11283 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm11, %xmm3
11284 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm24, %xmm27
11285 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm28, %xmm5
11286 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm8
11287 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3]
11288 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
11289 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm30, %xmm10
11290 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm10, %xmm9
11291 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm12, %xmm15
11292 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm12, %xmm31
11293 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11294 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
11295 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11296 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3,4],ymm8[5],ymm9[6,7]
11297 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3,4,5],ymm4[6,7]
11298 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
11299 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm8
11300 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
11301 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm9
11302 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
11303 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
11304 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm9
11305 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm23, %xmm11
11306 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm11, %xmm15
11307 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11308 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1,2],xmm8[3]
11309 ; AVX512F-SLOW-NEXT: vpsrlq $40, %zmm17, %zmm9
11310 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm17, %zmm23
11311 ; AVX512F-SLOW-NEXT: vpmovqb %zmm9, %xmm9
11312 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
11313 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm8, %zmm0, %zmm8
11314 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm8 {%k1}
11315 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm4
11316 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm13, %xmm30
11317 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm22, %xmm6
11318 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm9
11319 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm9[0],xmm4[0],xmm9[1],xmm4[1],xmm9[2],xmm4[2],xmm9[3],xmm4[3]
11320 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm26, %xmm13
11321 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm13, %xmm9
11322 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
11323 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm15
11324 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11325 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
11326 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11327 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0,1,2,3,4,5,6],ymm4[7]
11328 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm20, %xmm9
11329 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm9, %xmm9
11330 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
11331 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm2
11332 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm9[0],xmm2[1],xmm9[1],xmm2[2],xmm9[2],xmm2[3],xmm9[3]
11333 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
11334 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm15, %xmm9
11335 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm15, %xmm28
11336 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm14, %xmm3
11337 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm14, %xmm16
11338 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm9[0],xmm3[1],xmm9[1],xmm3[2],xmm9[2],xmm3[3],xmm9[3]
11339 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11340 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11341 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
11342 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm4[6,7]
11343 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm3
11344 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm7, %xmm24
11345 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm25, %xmm4
11346 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm0
11347 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
11348 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm29, %xmm7
11349 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm3
11350 ; AVX512F-SLOW-NEXT: vmovdqa (%rsp), %xmm4 # 16-byte Reload
11351 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm4, %xmm1
11352 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
11353 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
11354 ; AVX512F-SLOW-NEXT: vpsrlq $40, %zmm18, %zmm1
11355 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm18, %zmm26
11356 ; AVX512F-SLOW-NEXT: vpmovqb %zmm1, %xmm1
11357 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
11358 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
11359 ; AVX512F-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm8, %zmm0
11360 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11361 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm0 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
11362 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
11363 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm1
11364 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm2, %xmm29
11365 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
11366 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm2
11367 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
11368 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm1 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
11369 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
11370 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm3, %xmm3
11371 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
11372 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm4, %xmm4
11373 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
11374 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11375 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11376 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm3[0,1,2,3,4,5,6],ymm2[7]
11377 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
11378 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm27, %xmm3
11379 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm3, %xmm3
11380 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm8
11381 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm5, %xmm21
11382 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3]
11383 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
11384 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm10, %xmm9
11385 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm10, %xmm18
11386 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm31, %xmm5
11387 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm15
11388 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11389 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
11390 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11391 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3,4],ymm8[5],ymm9[6,7]
11392 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3,4,5],ymm4[6,7]
11393 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
11394 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm8
11395 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
11396 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm9, %xmm9
11397 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
11398 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
11399 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm9
11400 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm11, %xmm15
11401 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm11, %xmm22
11402 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11403 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1,2],xmm8[3]
11404 ; AVX512F-SLOW-NEXT: vpsrlq $48, %zmm17, %zmm9
11405 ; AVX512F-SLOW-NEXT: vpmovqb %zmm9, %xmm9
11406 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
11407 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm8, %zmm0, %zmm8
11408 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm8 {%k1}
11409 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm30, %xmm4
11410 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
11411 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
11412 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm9, %xmm9
11413 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm9[0],xmm4[0],xmm9[1],xmm4[1],xmm9[2],xmm4[2],xmm9[3],xmm4[3]
11414 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm13, %xmm9
11415 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm13, %xmm19
11416 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm15
11417 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm6, %xmm17
11418 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
11419 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
11420 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11421 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0,1,2,3,4,5,6],ymm4[7]
11422 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm20, %xmm6
11423 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm9
11424 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm2
11425 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm12, %xmm20
11426 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm9[0],xmm2[1],xmm9[1],xmm2[2],xmm9[2],xmm2[3],xmm9[3]
11427 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm28, %xmm14
11428 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm14, %xmm9
11429 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm16, %xmm13
11430 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm13, %xmm3
11431 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm9[0],xmm3[1],xmm9[1],xmm3[2],xmm9[2],xmm3[3],xmm9[3]
11432 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11433 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11434 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
11435 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm4[6,7]
11436 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm24, %xmm15
11437 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm15, %xmm3
11438 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm25, %xmm12
11439 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm0
11440 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
11441 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm3
11442 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm7, %xmm16
11443 ; AVX512F-SLOW-NEXT: vmovdqa (%rsp), %xmm7 # 16-byte Reload
11444 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm1
11445 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
11446 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
11447 ; AVX512F-SLOW-NEXT: vpsrlq $48, %zmm26, %zmm1
11448 ; AVX512F-SLOW-NEXT: vpmovqb %zmm1, %xmm1
11449 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
11450 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
11451 ; AVX512F-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm8, %zmm24
11452 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm0 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
11453 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm29, %xmm1
11454 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
11455 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
11456 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm2, %xmm2
11457 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
11458 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
11459 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
11460 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm3, %xmm3
11461 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
11462 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm4, %xmm4
11463 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
11464 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11465 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11466 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm3[0,1,2,3,4,5,6],ymm2[7]
11467 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm2 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
11468 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm27, %xmm3
11469 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm3, %xmm3
11470 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm21, %xmm8
11471 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm8
11472 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3]
11473 ; AVX512F-SLOW-NEXT: vpbroadcastd {{.*#+}} xmm3 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
11474 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm18, %xmm9
11475 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm9
11476 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm31, %xmm11
11477 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm11, %xmm11
11478 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
11479 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
11480 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11481 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3,4],ymm8[5],ymm9[6,7]
11482 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm8[0,1,2,3,4,5],ymm4[6,7]
11483 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm8
11484 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
11485 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm9
11486 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
11487 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm9
11488 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm22, %xmm5
11489 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm11
11490 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
11491 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1,2],xmm8[3]
11492 ; AVX512F-SLOW-NEXT: vpsrlq $56, %zmm23, %zmm9
11493 ; AVX512F-SLOW-NEXT: vpmovqb %zmm9, %xmm9
11494 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,3]
11495 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm8, %zmm0, %zmm8
11496 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm8 {%k1}
11497 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm30, %xmm4
11498 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm4, %xmm4
11499 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
11500 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm9
11501 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm9[0],xmm4[0],xmm9[1],xmm4[1],xmm9[2],xmm4[2],xmm9[3],xmm4[3]
11502 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm19, %xmm5
11503 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm9
11504 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm17, %xmm5
11505 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm5, %xmm11
11506 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
11507 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
11508 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
11509 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0,1,2,3,4,5,6],ymm4[7]
11510 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm6
11511 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm20, %xmm5
11512 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm2
11513 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1],xmm2[2],xmm6[2],xmm2[3],xmm6[3]
11514 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm14, %xmm5
11515 ; AVX512F-SLOW-NEXT: vpshufb %xmm3, %xmm13, %xmm3
11516 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
11517 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
11518 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
11519 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
11520 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm4[6,7]
11521 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm15, %xmm3
11522 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm0
11523 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
11524 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm16, %xmm3
11525 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm3, %xmm3
11526 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm1
11527 ; AVX512F-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
11528 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
11529 ; AVX512F-SLOW-NEXT: vpsrlq $56, %zmm26, %zmm1
11530 ; AVX512F-SLOW-NEXT: vpmovqb %zmm1, %xmm1
11531 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3]
11532 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
11533 ; AVX512F-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm8, %zmm0
11534 ; AVX512F-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11535 ; AVX512F-SLOW-NEXT: vmovaps %zmm1, (%rsi)
11536 ; AVX512F-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11537 ; AVX512F-SLOW-NEXT: vmovaps %zmm1, (%rdx)
11538 ; AVX512F-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11539 ; AVX512F-SLOW-NEXT: vmovaps %zmm1, (%rcx)
11540 ; AVX512F-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11541 ; AVX512F-SLOW-NEXT: vmovaps %zmm1, (%r8)
11542 ; AVX512F-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11543 ; AVX512F-SLOW-NEXT: vmovaps %zmm1, (%r9)
11544 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
11545 ; AVX512F-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
11546 ; AVX512F-SLOW-NEXT: vmovaps %zmm1, (%rax)
11547 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
11548 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm24, (%rax)
11549 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
11550 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, (%rax)
11551 ; AVX512F-SLOW-NEXT: addq $520, %rsp # imm = 0x208
11552 ; AVX512F-SLOW-NEXT: vzeroupper
11553 ; AVX512F-SLOW-NEXT: retq
11555 ; AVX512F-FAST-LABEL: load_i8_stride8_vf64:
11556 ; AVX512F-FAST: # %bb.0:
11557 ; AVX512F-FAST-NEXT: subq $440, %rsp # imm = 0x1B8
11558 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm2 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
11559 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [0,2,2,3,0,2,4,6]
11560 ; AVX512F-FAST-NEXT: vmovdqa 480(%rdi), %ymm1
11561 ; AVX512F-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11562 ; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm3
11563 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm3, %ymm1
11564 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm3, %ymm19
11565 ; AVX512F-FAST-NEXT: vmovdqa %ymm2, %ymm14
11566 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
11567 ; AVX512F-FAST-NEXT: vmovdqa 448(%rdi), %ymm2
11568 ; AVX512F-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11569 ; AVX512F-FAST-NEXT: vpermd %ymm2, %ymm0, %ymm4
11570 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm4, %ymm2
11571 ; AVX512F-FAST-NEXT: vmovdqa %ymm3, %ymm9
11572 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5,6],ymm1[7]
11573 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm2 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
11574 ; AVX512F-FAST-NEXT: vmovdqa 416(%rdi), %ymm3
11575 ; AVX512F-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11576 ; AVX512F-FAST-NEXT: vpermd %ymm3, %ymm0, %ymm12
11577 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm12, %ymm3
11578 ; AVX512F-FAST-NEXT: vmovdqa %ymm2, %ymm11
11579 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm10 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
11580 ; AVX512F-FAST-NEXT: vmovdqa 384(%rdi), %ymm5
11581 ; AVX512F-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11582 ; AVX512F-FAST-NEXT: vpermd %ymm5, %ymm0, %ymm13
11583 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm13, %ymm5
11584 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0,1,2,3,4],ymm3[5],ymm5[6,7]
11585 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
11586 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm3 = [0,0,0,8,0,0,0,8,0,0,0,8,0,0,0,8]
11587 ; AVX512F-FAST-NEXT: vmovdqa 368(%rdi), %xmm2
11588 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm2, %xmm5
11589 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm2, %xmm30
11590 ; AVX512F-FAST-NEXT: vmovdqa 352(%rdi), %xmm2
11591 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm2, %xmm6
11592 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm2, %xmm25
11593 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
11594 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm5 = [0,8,0,0,0,8,0,0,0,8,0,0,0,8,0,0]
11595 ; AVX512F-FAST-NEXT: vmovdqa 336(%rdi), %xmm2
11596 ; AVX512F-FAST-NEXT: vpshufb %xmm5, %xmm2, %xmm7
11597 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm2, %xmm24
11598 ; AVX512F-FAST-NEXT: vmovdqa 320(%rdi), %xmm2
11599 ; AVX512F-FAST-NEXT: vpshufb %xmm5, %xmm2, %xmm8
11600 ; AVX512F-FAST-NEXT: vmovdqa %xmm2, %xmm15
11601 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
11602 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0,1,2],xmm6[3]
11603 ; AVX512F-FAST-NEXT: vmovdqa64 256(%rdi), %zmm2
11604 ; AVX512F-FAST-NEXT: vpmovqb %zmm2, %xmm7
11605 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm2, %zmm28
11606 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0,1],xmm6[2,3]
11607 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm6, %zmm0, %zmm16
11608 ; AVX512F-FAST-NEXT: movb $-64, %al
11609 ; AVX512F-FAST-NEXT: kmovw %eax, %k1
11610 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm16 {%k1}
11611 ; AVX512F-FAST-NEXT: vmovdqa 224(%rdi), %ymm1
11612 ; AVX512F-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11613 ; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm2
11614 ; AVX512F-FAST-NEXT: vpshufb %ymm14, %ymm2, %ymm1
11615 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm2, %ymm23
11616 ; AVX512F-FAST-NEXT: vmovdqa 192(%rdi), %ymm2
11617 ; AVX512F-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11618 ; AVX512F-FAST-NEXT: vpermd %ymm2, %ymm0, %ymm2
11619 ; AVX512F-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm6
11620 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm2, %ymm27
11621 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4,5,6],ymm1[7]
11622 ; AVX512F-FAST-NEXT: vmovdqa 160(%rdi), %ymm1
11623 ; AVX512F-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11624 ; AVX512F-FAST-NEXT: vmovdqa 128(%rdi), %ymm2
11625 ; AVX512F-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11626 ; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm1
11627 ; AVX512F-FAST-NEXT: vpshufb %ymm11, %ymm1, %ymm7
11628 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm1, %ymm18
11629 ; AVX512F-FAST-NEXT: vpermd %ymm2, %ymm0, %ymm0
11630 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm0, %ymm8
11631 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm0, %ymm17
11632 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm8[0,1,2,3,4],ymm7[5],ymm8[6,7]
11633 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm7[0,1,2,3,4,5],ymm6[6,7]
11634 ; AVX512F-FAST-NEXT: vmovdqa 112(%rdi), %xmm10
11635 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm10, %xmm7
11636 ; AVX512F-FAST-NEXT: vmovdqa 96(%rdi), %xmm0
11637 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm0, %xmm3
11638 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm0, %xmm20
11639 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm7[0],xmm3[1],xmm7[1],xmm3[2],xmm7[2],xmm3[3],xmm7[3]
11640 ; AVX512F-FAST-NEXT: vmovdqa 80(%rdi), %xmm2
11641 ; AVX512F-FAST-NEXT: vpshufb %xmm5, %xmm2, %xmm0
11642 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm2, %xmm31
11643 ; AVX512F-FAST-NEXT: vmovdqa 64(%rdi), %xmm2
11644 ; AVX512F-FAST-NEXT: vpshufb %xmm5, %xmm2, %xmm5
11645 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm2, %xmm22
11646 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
11647 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm3[3]
11648 ; AVX512F-FAST-NEXT: vmovdqa64 (%rdi), %zmm29
11649 ; AVX512F-FAST-NEXT: vpmovqb %zmm29, %xmm3
11650 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm3[0,1],xmm0[2,3]
11651 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
11652 ; AVX512F-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm16, %zmm0
11653 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11654 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm6 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
11655 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm19, %ymm11
11656 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm11, %ymm0
11657 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm7 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
11658 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm4, %ymm1
11659 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm4, %ymm26
11660 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5,6],ymm0[7]
11661 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u]
11662 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm12, %ymm19
11663 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm2 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
11664 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm13, %ymm3
11665 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm13, %ymm21
11666 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm2, %ymm16
11667 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4],ymm1[5],ymm3[6,7]
11668 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11669 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm1 = [0,0,1,9,0,0,1,9,0,0,1,9,0,0,1,9]
11670 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm30, %xmm13
11671 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm13, %xmm3
11672 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm25, %xmm14
11673 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm14, %xmm5
11674 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
11675 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm5 = [1,9,0,0,1,9,0,0,1,9,0,0,1,9,0,0]
11676 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm24, %xmm9
11677 ; AVX512F-FAST-NEXT: vpshufb %xmm5, %xmm9, %xmm0
11678 ; AVX512F-FAST-NEXT: vmovdqa %xmm15, %xmm12
11679 ; AVX512F-FAST-NEXT: vpshufb %xmm5, %xmm15, %xmm15
11680 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
11681 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm3[3]
11682 ; AVX512F-FAST-NEXT: vpsrlq $8, %zmm28, %zmm3
11683 ; AVX512F-FAST-NEXT: vpmovqb %zmm3, %xmm3
11684 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm0 = xmm3[0,1],xmm0[2,3]
11685 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
11686 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0 {%k1}
11687 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm23, %ymm8
11688 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm8, %ymm2
11689 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm27, %ymm4
11690 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm4, %ymm3
11691 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5,6],ymm2[7]
11692 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm18, %ymm7
11693 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,21,25,29,u,u,u,u,u,u,u,u]
11694 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm17, %ymm6
11695 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm16, %ymm15
11696 ; AVX512F-FAST-NEXT: vpshufb %ymm15, %ymm6, %ymm15
11697 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm15[0,1,2,3,4],ymm3[5],ymm15[6,7]
11698 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4,5],ymm2[6,7]
11699 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm10, %xmm3
11700 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm10, %xmm16
11701 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm20, %xmm10
11702 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm10, %xmm1
11703 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
11704 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm31, %xmm15
11705 ; AVX512F-FAST-NEXT: vpshufb %xmm5, %xmm15, %xmm3
11706 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm22, %xmm15
11707 ; AVX512F-FAST-NEXT: vpshufb %xmm5, %xmm15, %xmm5
11708 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
11709 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
11710 ; AVX512F-FAST-NEXT: vpsrlq $8, %zmm29, %zmm3
11711 ; AVX512F-FAST-NEXT: vpmovqb %zmm3, %xmm3
11712 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1],xmm1[2,3]
11713 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
11714 ; AVX512F-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm0, %zmm0
11715 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11716 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
11717 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm11, %ymm0
11718 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm11, %ymm20
11719 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm1, %ymm23
11720 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm2 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
11721 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm26, %ymm11
11722 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm11, %ymm1
11723 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm2, %ymm24
11724 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5,6],ymm0[7]
11725 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm2 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
11726 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm19, %ymm1
11727 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm1, %ymm1
11728 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm2, %ymm25
11729 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
11730 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm21, %ymm2
11731 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm2, %ymm2
11732 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm3, %ymm26
11733 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4],ymm1[5],ymm2[6,7]
11734 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11735 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm1 = [0,0,2,10,0,0,2,10,0,0,2,10,0,0,2,10]
11736 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm13, %xmm2
11737 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm14, %xmm3
11738 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
11739 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm3 = [2,10,0,0,2,10,0,0,2,10,0,0,2,10,0,0]
11740 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm9, %xmm5
11741 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm9, %xmm17
11742 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm12, %xmm15
11743 ; AVX512F-FAST-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
11744 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm15[0],xmm5[0],xmm15[1],xmm5[1],xmm15[2],xmm5[2],xmm15[3],xmm5[3]
11745 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm5[0,1,2],xmm2[3]
11746 ; AVX512F-FAST-NEXT: vpsrlq $16, %zmm28, %zmm5
11747 ; AVX512F-FAST-NEXT: vpmovqb %zmm5, %xmm5
11748 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm5[0,1],xmm2[2,3]
11749 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm2
11750 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm2 {%k1}
11751 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm23, %ymm0
11752 ; AVX512F-FAST-NEXT: vpshufb %ymm0, %ymm8, %ymm0
11753 ; AVX512F-FAST-NEXT: vmovdqa %ymm8, %ymm9
11754 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm24, %ymm5
11755 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm4, %ymm5
11756 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0,1,2,3,4,5,6],ymm0[7]
11757 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm25, %ymm4
11758 ; AVX512F-FAST-NEXT: vpshufb %ymm4, %ymm7, %ymm5
11759 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm18, %ymm25
11760 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm26, %ymm4
11761 ; AVX512F-FAST-NEXT: vpshufb %ymm4, %ymm6, %ymm15
11762 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm6, %ymm26
11763 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm15[0,1,2,3,4],ymm5[5],ymm15[6,7]
11764 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0,1,2,3,4,5],ymm0[6,7]
11765 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm16, %xmm6
11766 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm6, %xmm5
11767 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm10, %xmm1
11768 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm10, %xmm16
11769 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
11770 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm31, %xmm7
11771 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm7, %xmm5
11772 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm22, %xmm8
11773 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm8, %xmm3
11774 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
11775 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
11776 ; AVX512F-FAST-NEXT: vpsrlq $16, %zmm29, %zmm3
11777 ; AVX512F-FAST-NEXT: vpmovqb %zmm3, %xmm3
11778 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1],xmm1[2,3]
11779 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11780 ; AVX512F-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
11781 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11782 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
11783 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm20, %ymm0
11784 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm0
11785 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm1, %ymm18
11786 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm2 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
11787 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm11, %ymm1
11788 ; AVX512F-FAST-NEXT: vmovdqa %ymm2, %ymm11
11789 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5,6],ymm0[7]
11790 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm10 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
11791 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm19, %ymm1
11792 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm1, %ymm1
11793 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm4 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
11794 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm21, %ymm2
11795 ; AVX512F-FAST-NEXT: vpshufb %ymm4, %ymm2, %ymm2
11796 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4],ymm1[5],ymm2[6,7]
11797 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
11798 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm1 = [0,0,3,11,0,0,3,11,0,0,3,11,0,0,3,11]
11799 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm13, %xmm2
11800 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm14, %xmm3
11801 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
11802 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm3 = [3,11,0,0,3,11,0,0,3,11,0,0,3,11,0,0]
11803 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm17, %xmm15
11804 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm15, %xmm5
11805 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm12, %xmm12
11806 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm12[0],xmm5[0],xmm12[1],xmm5[1],xmm12[2],xmm5[2],xmm12[3],xmm5[3]
11807 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm5[0,1,2],xmm2[3]
11808 ; AVX512F-FAST-NEXT: vpsrlq $24, %zmm28, %zmm5
11809 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm28, %zmm24
11810 ; AVX512F-FAST-NEXT: vpmovqb %zmm5, %xmm5
11811 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm5[0,1],xmm2[2,3]
11812 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm2
11813 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm2 {%k1}
11814 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm18, %ymm0
11815 ; AVX512F-FAST-NEXT: vpshufb %ymm0, %ymm9, %ymm0
11816 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm27, %ymm5
11817 ; AVX512F-FAST-NEXT: vpshufb %ymm11, %ymm5, %ymm5
11818 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0,1,2,3,4,5,6],ymm0[7]
11819 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm25, %ymm5
11820 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm5, %ymm5
11821 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm26, %ymm9
11822 ; AVX512F-FAST-NEXT: vpshufb %ymm4, %ymm9, %ymm4
11823 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3,4],ymm5[5],ymm4[6,7]
11824 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3,4,5],ymm0[6,7]
11825 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm6, %xmm4
11826 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm6, %xmm21
11827 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm16, %xmm5
11828 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm5, %xmm1
11829 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
11830 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm7, %xmm4
11831 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm31, %xmm16
11832 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm8, %xmm3
11833 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm22, %xmm6
11834 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
11835 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
11836 ; AVX512F-FAST-NEXT: vpsrlq $24, %zmm29, %zmm3
11837 ; AVX512F-FAST-NEXT: vpmovqb %zmm3, %xmm3
11838 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1],xmm1[2,3]
11839 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
11840 ; AVX512F-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
11841 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11842 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [1,3,2,3,1,3,5,7]
11843 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
11844 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm17 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
11845 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm17, %ymm1
11846 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm2, %ymm1
11847 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm2, %ymm28
11848 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm3 # 32-byte Folded Reload
11849 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm18 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
11850 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm18, %ymm2
11851 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm3, %ymm2
11852 ; AVX512F-FAST-NEXT: vmovdqa %ymm3, %ymm4
11853 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5,6],ymm1[7]
11854 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
11855 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm7 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
11856 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm9
11857 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm2, %ymm19
11858 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
11859 ; AVX512F-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
11860 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm8 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
11861 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm2, %ymm10
11862 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm10[0,1,2,3,4],ymm9[5],ymm10[6,7]
11863 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm9[0,1,2,3,4,5],ymm1[6,7]
11864 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm11 = [0,0,4,12,0,0,4,12,0,0,4,12,0,0,4,12]
11865 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm13, %xmm9
11866 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm30, %xmm20
11867 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm14, %xmm10
11868 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm14, %xmm22
11869 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
11870 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm12 = [4,12,0,0,4,12,0,0,4,12,0,0,4,12,0,0]
11871 ; AVX512F-FAST-NEXT: vpshufb %xmm12, %xmm15, %xmm10
11872 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm15, %xmm23
11873 ; AVX512F-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
11874 ; AVX512F-FAST-NEXT: vpshufb %xmm12, %xmm2, %xmm13
11875 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm13[0],xmm10[0],xmm13[1],xmm10[1],xmm13[2],xmm10[2],xmm13[3],xmm10[3]
11876 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0,1,2],xmm9[3]
11877 ; AVX512F-FAST-NEXT: vpsrlq $32, %zmm24, %zmm10
11878 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm24, %zmm25
11879 ; AVX512F-FAST-NEXT: vpmovqb %zmm10, %xmm10
11880 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0,1],xmm9[2,3]
11881 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm9, %zmm0, %zmm13
11882 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm13 {%k1}
11883 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
11884 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm17, %ymm1
11885 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm2, %ymm1
11886 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm2, %ymm27
11887 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm3 # 32-byte Folded Reload
11888 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm18, %ymm2
11889 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm3, %ymm15
11890 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm3, %ymm26
11891 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm15 = ymm15[0,1,2,3,4,5,6],ymm1[7]
11892 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm2 # 32-byte Folded Reload
11893 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm1
11894 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm2, %ymm31
11895 ; AVX512F-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
11896 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm14
11897 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm0, %ymm30
11898 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm14[0,1,2,3,4],ymm1[5],ymm14[6,7]
11899 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm15[6,7]
11900 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm21, %xmm10
11901 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm10, %xmm14
11902 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm5, %xmm11
11903 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm5, %xmm24
11904 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm14[0],xmm11[1],xmm14[1],xmm11[2],xmm14[2],xmm11[3],xmm14[3]
11905 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm16, %xmm8
11906 ; AVX512F-FAST-NEXT: vpshufb %xmm12, %xmm8, %xmm14
11907 ; AVX512F-FAST-NEXT: vpshufb %xmm12, %xmm6, %xmm12
11908 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm6, %xmm21
11909 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm12[0],xmm14[0],xmm12[1],xmm14[1],xmm12[2],xmm14[2],xmm12[3],xmm14[3]
11910 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm11 = xmm12[0,1,2],xmm11[3]
11911 ; AVX512F-FAST-NEXT: vpsrlq $32, %zmm29, %zmm12
11912 ; AVX512F-FAST-NEXT: vpmovqb %zmm12, %xmm12
11913 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm11 = xmm12[0,1],xmm11[2,3]
11914 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm11[0,1,2,3],ymm1[4,5,6,7]
11915 ; AVX512F-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm13, %zmm0
11916 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
11917 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
11918 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm28, %ymm0
11919 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm0, %ymm1
11920 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm5 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
11921 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm4, %ymm11
11922 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm4, %ymm16
11923 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm11[0,1,2,3,4,5,6],ymm1[7]
11924 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm6 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
11925 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm19, %ymm0
11926 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm0, %ymm11
11927 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm18 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
11928 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm18, %ymm7
11929 ; AVX512F-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
11930 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm12
11931 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm11 = ymm12[0,1,2,3,4],ymm11[5],ymm12[6,7]
11932 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm11[0,1,2,3,4,5],ymm1[6,7]
11933 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm11 = [0,0,5,13,0,0,5,13,0,0,5,13,0,0,5,13]
11934 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm20, %xmm2
11935 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm2, %xmm12
11936 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm20, %xmm19
11937 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm22, %xmm4
11938 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm4, %xmm13
11939 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
11940 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm13 = [5,13,0,0,5,13,0,0,5,13,0,0,5,13,0,0]
11941 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm23, %xmm9
11942 ; AVX512F-FAST-NEXT: vpshufb %xmm13, %xmm9, %xmm14
11943 ; AVX512F-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
11944 ; AVX512F-FAST-NEXT: vpshufb %xmm13, %xmm7, %xmm15
11945 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
11946 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm14[0,1,2],xmm12[3]
11947 ; AVX512F-FAST-NEXT: vpsrlq $40, %zmm25, %zmm14
11948 ; AVX512F-FAST-NEXT: vpmovqb %zmm14, %xmm14
11949 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm14[0,1],xmm12[2,3]
11950 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm12, %zmm0, %zmm12
11951 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm12 {%k1}
11952 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm27, %ymm1
11953 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm1, %ymm1
11954 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm26, %ymm2
11955 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm14
11956 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm14[0,1,2,3,4,5,6],ymm1[7]
11957 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm31, %ymm3
11958 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm3, %ymm14
11959 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm18, %ymm3
11960 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm30, %ymm2
11961 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm2, %ymm15
11962 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0,1,2,3,4],ymm14[5],ymm15[6,7]
11963 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm14[0,1,2,3,4,5],ymm1[6,7]
11964 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm10, %xmm14
11965 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm10, %xmm20
11966 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm24, %xmm2
11967 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm2, %xmm11
11968 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm14[0],xmm11[1],xmm14[1],xmm11[2],xmm14[2],xmm11[3],xmm14[3]
11969 ; AVX512F-FAST-NEXT: vpshufb %xmm13, %xmm8, %xmm14
11970 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm8, %xmm22
11971 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm21, %xmm2
11972 ; AVX512F-FAST-NEXT: vpshufb %xmm13, %xmm2, %xmm13
11973 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
11974 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm11 = xmm13[0,1,2],xmm11[3]
11975 ; AVX512F-FAST-NEXT: vpsrlq $40, %zmm29, %zmm13
11976 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm29, %zmm18
11977 ; AVX512F-FAST-NEXT: vpmovqb %zmm13, %xmm13
11978 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm11 = xmm13[0,1],xmm11[2,3]
11979 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm11[0,1,2,3],ymm1[4,5,6,7]
11980 ; AVX512F-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm12, %zmm29
11981 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
11982 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm28, %ymm10
11983 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm10, %ymm1
11984 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm21 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
11985 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm16, %ymm5
11986 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm21, %ymm6
11987 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm5, %ymm11
11988 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm11[0,1,2,3,4,5,6],ymm1[7]
11989 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm6 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
11990 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm0, %ymm11
11991 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm0, %ymm17
11992 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm8 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
11993 ; AVX512F-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
11994 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm12
11995 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm11 = ymm12[0,1,2,3,4],ymm11[5],ymm12[6,7]
11996 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm11[0,1,2,3,4,5],ymm1[6,7]
11997 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm11 = [0,0,6,14,0,0,6,14,0,0,6,14,0,0,6,14]
11998 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm19, %xmm0
11999 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm0, %xmm12
12000 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm4, %xmm13
12001 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm4, %xmm23
12002 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
12003 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm13 = [6,14,0,0,6,14,0,0,6,14,0,0,6,14,0,0]
12004 ; AVX512F-FAST-NEXT: vpshufb %xmm13, %xmm9, %xmm14
12005 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm9, %xmm28
12006 ; AVX512F-FAST-NEXT: vpshufb %xmm13, %xmm7, %xmm15
12007 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm7, %xmm16
12008 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
12009 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm14[0,1,2],xmm12[3]
12010 ; AVX512F-FAST-NEXT: vpsrlq $48, %zmm25, %zmm14
12011 ; AVX512F-FAST-NEXT: vpmovqb %zmm14, %xmm14
12012 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm14[0,1],xmm12[2,3]
12013 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm12, %zmm0, %zmm12
12014 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm12 {%k1}
12015 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm27, %ymm0
12016 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm0, %ymm1
12017 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm26, %ymm9
12018 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm21, %ymm3
12019 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm9, %ymm14
12020 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm14[0,1,2,3,4,5,6],ymm1[7]
12021 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm31, %ymm3
12022 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm3, %ymm14
12023 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm30, %ymm3
12024 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm3, %ymm15
12025 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm14 = ymm15[0,1,2,3,4],ymm14[5],ymm15[6,7]
12026 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm14[0,1,2,3,4,5],ymm1[6,7]
12027 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm20, %xmm8
12028 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm8, %xmm14
12029 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm24, %xmm15
12030 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm15, %xmm11
12031 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm14[0],xmm11[1],xmm14[1],xmm11[2],xmm14[2],xmm11[3],xmm14[3]
12032 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm22, %xmm3
12033 ; AVX512F-FAST-NEXT: vpshufb %xmm13, %xmm3, %xmm14
12034 ; AVX512F-FAST-NEXT: vpshufb %xmm13, %xmm2, %xmm13
12035 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm2, %xmm20
12036 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
12037 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm11 = xmm13[0,1,2],xmm11[3]
12038 ; AVX512F-FAST-NEXT: vpsrlq $48, %zmm18, %zmm13
12039 ; AVX512F-FAST-NEXT: vpmovqb %zmm13, %xmm13
12040 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm11 = xmm13[0,1],xmm11[2,3]
12041 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm11[0,1,2,3],ymm1[4,5,6,7]
12042 ; AVX512F-FAST-NEXT: vinserti64x4 $0, %ymm1, %zmm12, %zmm21
12043 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm13 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
12044 ; AVX512F-FAST-NEXT: vpshufb %ymm13, %ymm10, %ymm6
12045 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm14 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
12046 ; AVX512F-FAST-NEXT: vpshufb %ymm14, %ymm5, %ymm5
12047 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5,6],ymm6[7]
12048 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
12049 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm17, %ymm2
12050 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm2, %ymm3
12051 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} ymm7 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
12052 ; AVX512F-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
12053 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm4
12054 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4],ymm3[5],ymm4[6,7]
12055 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm5[6,7]
12056 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm4 = [0,0,7,15,0,0,7,15,0,0,7,15,0,0,7,15]
12057 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm19, %xmm2
12058 ; AVX512F-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm5
12059 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm23, %xmm2
12060 ; AVX512F-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm6
12061 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
12062 ; AVX512F-FAST-NEXT: vpbroadcastd {{.*#+}} xmm6 = [7,15,0,0,7,15,0,0,7,15,0,0,7,15,0,0]
12063 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm28, %xmm2
12064 ; AVX512F-FAST-NEXT: vpshufb %xmm6, %xmm2, %xmm11
12065 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm16, %xmm2
12066 ; AVX512F-FAST-NEXT: vpshufb %xmm6, %xmm2, %xmm12
12067 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
12068 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm5 = xmm11[0,1,2],xmm5[3]
12069 ; AVX512F-FAST-NEXT: vpsrlq $56, %zmm25, %zmm11
12070 ; AVX512F-FAST-NEXT: vpmovqb %zmm11, %xmm11
12071 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm5 = xmm11[0,1],xmm5[2,3]
12072 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm5, %zmm0, %zmm5
12073 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm5 {%k1}
12074 ; AVX512F-FAST-NEXT: vpshufb %ymm13, %ymm0, %ymm3
12075 ; AVX512F-FAST-NEXT: vpshufb %ymm14, %ymm9, %ymm9
12076 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm9[0,1,2,3,4,5,6],ymm3[7]
12077 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm31, %ymm0
12078 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm2
12079 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm30, %ymm0
12080 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm0, %ymm0
12081 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm2[5],ymm0[6,7]
12082 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm3[6,7]
12083 ; AVX512F-FAST-NEXT: vpshufb %xmm4, %xmm8, %xmm2
12084 ; AVX512F-FAST-NEXT: vpshufb %xmm4, %xmm15, %xmm3
12085 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
12086 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm22, %xmm1
12087 ; AVX512F-FAST-NEXT: vpshufb %xmm6, %xmm1, %xmm3
12088 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm20, %xmm1
12089 ; AVX512F-FAST-NEXT: vpshufb %xmm6, %xmm1, %xmm4
12090 ; AVX512F-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
12091 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3]
12092 ; AVX512F-FAST-NEXT: vpsrlq $56, %zmm18, %zmm3
12093 ; AVX512F-FAST-NEXT: vpmovqb %zmm3, %xmm3
12094 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3]
12095 ; AVX512F-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
12096 ; AVX512F-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm5, %zmm0
12097 ; AVX512F-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12098 ; AVX512F-FAST-NEXT: vmovaps %zmm1, (%rsi)
12099 ; AVX512F-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12100 ; AVX512F-FAST-NEXT: vmovaps %zmm1, (%rdx)
12101 ; AVX512F-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12102 ; AVX512F-FAST-NEXT: vmovaps %zmm1, (%rcx)
12103 ; AVX512F-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12104 ; AVX512F-FAST-NEXT: vmovaps %zmm1, (%r8)
12105 ; AVX512F-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
12106 ; AVX512F-FAST-NEXT: vmovaps %zmm1, (%r9)
12107 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
12108 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm29, (%rax)
12109 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
12110 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm21, (%rax)
12111 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
12112 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm0, (%rax)
12113 ; AVX512F-FAST-NEXT: addq $440, %rsp # imm = 0x1B8
12114 ; AVX512F-FAST-NEXT: vzeroupper
12115 ; AVX512F-FAST-NEXT: retq
12116 ;
12117 ; AVX512BW-SLOW-LABEL: load_i8_stride8_vf64:
12118 ; AVX512BW-SLOW: # %bb.0:
12119 ; AVX512BW-SLOW-NEXT: subq $744, %rsp # imm = 0x2E8
12120 ; AVX512BW-SLOW-NEXT: vmovdqa64 (%rdi), %zmm5
12121 ; AVX512BW-SLOW-NEXT: vmovdqa64 128(%rdi), %zmm0
12122 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm0, %xmm0
12123 ; AVX512BW-SLOW-NEXT: vmovdqa64 256(%rdi), %zmm1
12124 ; AVX512BW-SLOW-NEXT: vmovdqa64 384(%rdi), %zmm2
12125 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm2, %xmm2
12126 ; AVX512BW-SLOW-NEXT: vmovdqa 496(%rdi), %xmm4
12127 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm12 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
12128 ; AVX512BW-SLOW-NEXT: vpshufb %xmm12, %xmm4, %xmm3
12129 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm4, %xmm24
12130 ; AVX512BW-SLOW-NEXT: vmovdqa 480(%rdi), %xmm6
12131 ; AVX512BW-SLOW-NEXT: vpshufb %xmm12, %xmm6, %xmm4
12132 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm6, %xmm25
12133 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
12134 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
12135 ; AVX512BW-SLOW-NEXT: vmovdqa 464(%rdi), %xmm6
12136 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm19 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
12137 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm6, %xmm4
12138 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm6, %xmm26
12139 ; AVX512BW-SLOW-NEXT: vmovdqa 448(%rdi), %xmm7
12140 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm7, %xmm6
12141 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm7, %xmm30
12142 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
12143 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12144 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5,6],ymm3[7]
12145 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
12146 ; AVX512BW-SLOW-NEXT: vmovdqa 384(%rdi), %ymm4
12147 ; AVX512BW-SLOW-NEXT: vpmovqb %ymm4, %xmm4
12148 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12149 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4],ymm2[5],ymm4[6,7]
12150 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm2[0,1,2,3,4,5],ymm3[6,7]
12151 ; AVX512BW-SLOW-NEXT: vmovdqa 368(%rdi), %xmm2
12152 ; AVX512BW-SLOW-NEXT: vpshufb %xmm12, %xmm2, %xmm4
12153 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm2, %xmm31
12154 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12155 ; AVX512BW-SLOW-NEXT: vmovdqa64 352(%rdi), %xmm27
12156 ; AVX512BW-SLOW-NEXT: vpshufb %xmm12, %xmm27, %xmm6
12157 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm6[0],xmm4[0],xmm6[1],xmm4[1],xmm6[2],xmm4[2],xmm6[3],xmm4[3]
12158 ; AVX512BW-SLOW-NEXT: vmovdqa 336(%rdi), %xmm2
12159 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm2, %xmm11
12160 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm2, %xmm22
12161 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12162 ; AVX512BW-SLOW-NEXT: vmovdqa 320(%rdi), %xmm2
12163 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm2, %xmm15
12164 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm2, %xmm9
12165 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm15[0],xmm11[0],xmm15[1],xmm11[1],xmm15[2],xmm11[2],xmm15[3],xmm11[3]
12166 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0,1,2],xmm10[3]
12167 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm1, %xmm11
12168 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm1, %zmm15
12169 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0,1],xmm10[2,3]
12170 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm10, %zmm0, %zmm20
12171 ; AVX512BW-SLOW-NEXT: movb $-64, %al
12172 ; AVX512BW-SLOW-NEXT: kmovd %eax, %k1
12173 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm0, %zmm20 {%k1}
12174 ; AVX512BW-SLOW-NEXT: vmovdqa64 240(%rdi), %xmm28
12175 ; AVX512BW-SLOW-NEXT: vpshufb %xmm12, %xmm28, %xmm7
12176 ; AVX512BW-SLOW-NEXT: vmovdqa 224(%rdi), %xmm1
12177 ; AVX512BW-SLOW-NEXT: vpshufb %xmm12, %xmm1, %xmm10
12178 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm1, %xmm18
12179 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12180 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm7 = xmm10[0],xmm7[0],xmm10[1],xmm7[1],xmm10[2],xmm7[2],xmm10[3],xmm7[3]
12181 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
12182 ; AVX512BW-SLOW-NEXT: vmovdqa64 208(%rdi), %xmm17
12183 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm17, %xmm10
12184 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12185 ; AVX512BW-SLOW-NEXT: vmovdqa 192(%rdi), %xmm8
12186 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm8, %xmm16
12187 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12188 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm16[0],xmm10[0],xmm16[1],xmm10[1],xmm16[2],xmm10[2],xmm16[3],xmm10[3]
12189 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
12190 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm7 = ymm10[0,1,2,3,4,5,6],ymm7[7]
12191 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
12192 ; AVX512BW-SLOW-NEXT: vmovdqa 128(%rdi), %ymm10
12193 ; AVX512BW-SLOW-NEXT: vpmovqb %ymm10, %xmm10
12194 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
12195 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm10[0,1,2,3,4],ymm0[5],ymm10[6,7]
12196 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3,4,5],ymm7[6,7]
12197 ; AVX512BW-SLOW-NEXT: vmovdqa 112(%rdi), %xmm0
12198 ; AVX512BW-SLOW-NEXT: vpshufb %xmm12, %xmm0, %xmm16
12199 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm0, %xmm10
12200 ; AVX512BW-SLOW-NEXT: vmovdqa 96(%rdi), %xmm0
12201 ; AVX512BW-SLOW-NEXT: vpshufb %xmm12, %xmm0, %xmm12
12202 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm0, %xmm14
12203 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm12[0],xmm16[0],xmm12[1],xmm16[1],xmm12[2],xmm16[2],xmm12[3],xmm16[3]
12204 ; AVX512BW-SLOW-NEXT: vmovdqa 80(%rdi), %xmm2
12205 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12206 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm2, %xmm21
12207 ; AVX512BW-SLOW-NEXT: vmovdqa 64(%rdi), %xmm2
12208 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12209 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm2, %xmm19
12210 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm19[0],xmm21[0],xmm19[1],xmm21[1],xmm19[2],xmm21[2],xmm19[3],xmm21[3]
12211 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm13[0,1,2],xmm0[3]
12212 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm5, %xmm13
12213 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm13[0,1],xmm0[2,3]
12214 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12215 ; AVX512BW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm20, %zmm0
12216 ; AVX512BW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12217 ; AVX512BW-SLOW-NEXT: vmovdqa 160(%rdi), %xmm6
12218 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12219 ; AVX512BW-SLOW-NEXT: vmovdqa 384(%rdi), %xmm7
12220 ; AVX512BW-SLOW-NEXT: vmovdqa 400(%rdi), %xmm3
12221 ; AVX512BW-SLOW-NEXT: vmovdqa64 416(%rdi), %xmm20
12222 ; AVX512BW-SLOW-NEXT: vmovdqa64 432(%rdi), %xmm29
12223 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm4 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
12224 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm24, %xmm19
12225 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm24, %xmm1
12226 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm25, %xmm11
12227 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm25, %xmm13
12228 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm13[0],xmm1[0],xmm13[1],xmm1[1],xmm13[2],xmm1[2],xmm13[3],xmm1[3]
12229 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
12230 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm13 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
12231 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm26, %xmm12
12232 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm26, %xmm24
12233 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm30, %xmm16
12234 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12235 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm30, %xmm25
12236 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm24 = xmm25[0],xmm24[0],xmm25[1],xmm24[1],xmm25[2],xmm24[2],xmm25[3],xmm24[3]
12237 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm24, %ymm0, %ymm2
12238 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5,6],ymm1[7]
12239 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm26 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
12240 ; AVX512BW-SLOW-NEXT: vpshufb %xmm26, %xmm29, %xmm24
12241 ; AVX512BW-SLOW-NEXT: vpshufb %xmm26, %xmm20, %xmm25
12242 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm20, %xmm23
12243 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm24 = xmm25[0],xmm24[0],xmm25[1],xmm24[1],xmm25[2],xmm24[2],xmm25[3],xmm24[3]
12244 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm24, %ymm0, %ymm2
12245 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm30 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
12246 ; AVX512BW-SLOW-NEXT: vpshufb %xmm30, %xmm3, %xmm24
12247 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm3, %xmm21
12248 ; AVX512BW-SLOW-NEXT: vpshufb %xmm30, %xmm7, %xmm25
12249 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm24 = xmm25[0],xmm24[0],xmm25[1],xmm24[1],xmm25[2],xmm24[2],xmm25[3],xmm24[3]
12250 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm24, %ymm0, %ymm3
12251 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
12252 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
12253 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm31, %xmm2
12254 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm27, %xmm3
12255 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
12256 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm22, %xmm3
12257 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm9, %xmm24
12258 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm9, %xmm31
12259 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12260 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm24[0],xmm3[0],xmm24[1],xmm3[1],xmm24[2],xmm3[2],xmm24[3],xmm3[3]
12261 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3]
12262 ; AVX512BW-SLOW-NEXT: vpsrlq $8, %zmm15, %zmm3
12263 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm15, %zmm22
12264 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm3, %xmm3
12265 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3]
12266 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm2
12267 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm2 {%k1}
12268 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm28, %xmm9
12269 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm28, %xmm1
12270 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm18, %xmm3
12271 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
12272 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm17, %xmm3
12273 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm8, %xmm24
12274 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm24[0],xmm3[0],xmm24[1],xmm3[1],xmm24[2],xmm3[2],xmm24[3],xmm3[3]
12275 ; AVX512BW-SLOW-NEXT: vmovdqa 176(%rdi), %xmm0
12276 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12277 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
12278 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
12279 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5,6],ymm1[7]
12280 ; AVX512BW-SLOW-NEXT: vpshufb %xmm26, %xmm0, %xmm3
12281 ; AVX512BW-SLOW-NEXT: vpshufb %xmm26, %xmm6, %xmm25
12282 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm25[0],xmm3[0],xmm25[1],xmm3[1],xmm25[2],xmm3[2],xmm25[3],xmm3[3]
12283 ; AVX512BW-SLOW-NEXT: vmovdqa 128(%rdi), %xmm6
12284 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12285 ; AVX512BW-SLOW-NEXT: vmovdqa 144(%rdi), %xmm0
12286 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12287 ; AVX512BW-SLOW-NEXT: vpshufb %xmm30, %xmm0, %xmm0
12288 ; AVX512BW-SLOW-NEXT: vpshufb %xmm30, %xmm6, %xmm30
12289 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm30[0],xmm0[0],xmm30[1],xmm0[1],xmm30[2],xmm0[2],xmm30[3],xmm0[3]
12290 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
12291 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
12292 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm3[5],ymm0[6,7]
12293 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
12294 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm10, %xmm1
12295 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12296 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm14, %xmm3
12297 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
12298 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12299 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm15, %xmm3
12300 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm28 # 16-byte Reload
12301 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm28, %xmm4
12302 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
12303 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
12304 ; AVX512BW-SLOW-NEXT: vpsrlq $8, %zmm5, %zmm3
12305 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm3, %xmm3
12306 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1],xmm1[2,3]
12307 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12308 ; AVX512BW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
12309 ; AVX512BW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12310 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm1 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
12311 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm19, %xmm2
12312 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm19, %xmm20
12313 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm11, %xmm3
12314 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
12315 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
12316 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm12, %xmm4
12317 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm12, %xmm25
12318 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12319 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm16, %xmm13
12320 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm13[0],xmm4[0],xmm13[1],xmm4[1],xmm13[2],xmm4[2],xmm13[3],xmm4[3]
12321 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
12322 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12323 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4,5,6],ymm2[7]
12324 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm4 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
12325 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm29, %xmm24
12326 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm29, %xmm13
12327 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm23, %xmm30
12328 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm23, %xmm16
12329 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12330 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm30[0],xmm13[0],xmm30[1],xmm13[1],xmm30[2],xmm13[2],xmm30[3],xmm13[3]
12331 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm30 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
12332 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm21, %xmm18
12333 ; AVX512BW-SLOW-NEXT: vpshufb %xmm30, %xmm21, %xmm0
12334 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm7, %xmm17
12335 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12336 ; AVX512BW-SLOW-NEXT: vpshufb %xmm30, %xmm7, %xmm19
12337 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm19[0],xmm0[0],xmm19[1],xmm0[1],xmm19[2],xmm0[2],xmm19[3],xmm0[3]
12338 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
12339 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
12340 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm13[5],ymm0[6,7]
12341 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
12342 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
12343 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm12, %xmm2
12344 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm27, %xmm13
12345 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm13[0],xmm2[0],xmm13[1],xmm2[1],xmm13[2],xmm2[2],xmm13[3],xmm2[3]
12346 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
12347 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm8, %xmm13
12348 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm31, %xmm19
12349 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm19[0],xmm13[0],xmm19[1],xmm13[1],xmm19[2],xmm13[2],xmm19[3],xmm13[3]
12350 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm2 = xmm13[0,1,2],xmm2[3]
12351 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm22, %zmm6
12352 ; AVX512BW-SLOW-NEXT: vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12353 ; AVX512BW-SLOW-NEXT: vpsrlq $16, %zmm22, %zmm13
12354 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm13, %xmm13
12355 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm2 = xmm13[0,1],xmm2[2,3]
12356 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm2
12357 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm2 {%k1}
12358 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm9, %xmm0
12359 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm9, %xmm26
12360 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12361 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm21 # 16-byte Reload
12362 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm21, %xmm13
12363 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm13[0],xmm0[0],xmm13[1],xmm0[1],xmm13[2],xmm0[2],xmm13[3],xmm0[3]
12364 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm22 # 16-byte Reload
12365 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm22, %xmm13
12366 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm29 # 16-byte Reload
12367 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm29, %xmm19
12368 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm19[0],xmm13[0],xmm19[1],xmm13[1],xmm19[2],xmm13[2],xmm19[3],xmm13[3]
12369 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
12370 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
12371 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm13[0,1,2,3,4,5,6],ymm0[7]
12372 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
12373 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm9, %xmm13
12374 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
12375 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm7, %xmm4
12376 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm13[0],xmm4[1],xmm13[1],xmm4[2],xmm13[2],xmm4[3],xmm13[3]
12377 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm31 # 16-byte Reload
12378 ; AVX512BW-SLOW-NEXT: vpshufb %xmm30, %xmm31, %xmm13
12379 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm23 # 16-byte Reload
12380 ; AVX512BW-SLOW-NEXT: vpshufb %xmm30, %xmm23, %xmm19
12381 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm19[0],xmm13[0],xmm19[1],xmm13[1],xmm19[2],xmm13[2],xmm19[3],xmm13[3]
12382 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12383 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
12384 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm13[0,1,2,3,4],ymm4[5],ymm13[6,7]
12385 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3,4,5],ymm0[6,7]
12386 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm4
12387 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm1
12388 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
12389 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm15, %xmm4
12390 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm28, %xmm3
12391 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
12392 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
12393 ; AVX512BW-SLOW-NEXT: vpsrlq $16, %zmm5, %zmm3
12394 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm5, %zmm28
12395 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm3, %xmm3
12396 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1],xmm1[2,3]
12397 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
12398 ; AVX512BW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
12399 ; AVX512BW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12400 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
12401 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12402 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm20, %xmm1
12403 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm11, %xmm2
12404 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
12405 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
12406 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm25, %xmm3
12407 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
12408 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm4, %xmm4
12409 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
12410 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
12411 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
12412 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5,6],ymm1[7]
12413 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
12414 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm24, %xmm4
12415 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm24, (%rsp) # 16-byte Spill
12416 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm16, %xmm19
12417 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm19[0],xmm4[0],xmm19[1],xmm4[1],xmm19[2],xmm4[2],xmm19[3],xmm4[3]
12418 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm19 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
12419 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm18, %xmm30
12420 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm17, %xmm13
12421 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm30[0],xmm13[1],xmm30[1],xmm13[2],xmm30[2],xmm13[3],xmm30[3]
12422 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12423 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
12424 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm13[0,1,2,3,4],ymm4[5],ymm13[6,7]
12425 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5],ymm1[6,7]
12426 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm12, %xmm10
12427 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm4
12428 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm27, %xmm13
12429 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm13[0],xmm4[0],xmm13[1],xmm4[1],xmm13[2],xmm4[2],xmm13[3],xmm4[3]
12430 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm13
12431 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
12432 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm8, %xmm30
12433 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm30[0],xmm13[0],xmm30[1],xmm13[1],xmm30[2],xmm13[2],xmm30[3],xmm13[3]
12434 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm13[0,1,2],xmm4[3]
12435 ; AVX512BW-SLOW-NEXT: vpsrlq $24, %zmm6, %zmm13
12436 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm13, %xmm13
12437 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm13[0,1],xmm4[2,3]
12438 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm4, %zmm0, %zmm4
12439 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm4 {%k1}
12440 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm26, %xmm1
12441 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm21, %xmm13
12442 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm13[0],xmm1[0],xmm13[1],xmm1[1],xmm13[2],xmm1[2],xmm13[3],xmm1[3]
12443 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm22, %xmm13
12444 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm22, %xmm21
12445 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm29, %xmm30
12446 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm29, %xmm22
12447 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm30[0],xmm13[0],xmm30[1],xmm13[1],xmm30[2],xmm13[2],xmm30[3],xmm13[3]
12448 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
12449 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
12450 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm13[0,1,2,3,4,5,6],ymm1[7]
12451 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm13
12452 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm9, %xmm25
12453 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm7, %xmm3
12454 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm7, %xmm9
12455 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm13[0],xmm3[1],xmm13[1],xmm3[2],xmm13[2],xmm3[3],xmm13[3]
12456 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm31, %xmm13
12457 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm23, %xmm19
12458 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm19[0],xmm13[0],xmm19[1],xmm13[1],xmm19[2],xmm13[2],xmm19[3],xmm13[3]
12459 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
12460 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
12461 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm13[0,1,2,3,4],ymm3[5],ymm13[6,7]
12462 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
12463 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
12464 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm3
12465 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm14, %xmm0
12466 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm14, %xmm23
12467 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
12468 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm15, %xmm3
12469 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm29 # 16-byte Reload
12470 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm29, %xmm2
12471 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
12472 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3]
12473 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm28, %zmm17
12474 ; AVX512BW-SLOW-NEXT: vpsrlq $24, %zmm28, %zmm2
12475 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm2, %xmm2
12476 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3]
12477 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12478 ; AVX512BW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
12479 ; AVX512BW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12480 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
12481 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm20, %xmm1
12482 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm11, %xmm3
12483 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm11, %xmm28
12484 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12485 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
12486 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
12487 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
12488 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm12, %xmm4
12489 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm16 # 16-byte Reload
12490 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm16, %xmm13
12491 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm13[0],xmm4[0],xmm13[1],xmm4[1],xmm13[2],xmm4[2],xmm13[3],xmm4[3]
12492 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
12493 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12494 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5,6],ymm1[7]
12495 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm4 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
12496 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm24, %xmm13
12497 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm26 # 16-byte Reload
12498 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm26, %xmm19
12499 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm19[0],xmm13[0],xmm19[1],xmm13[1],xmm19[2],xmm13[2],xmm19[3],xmm13[3]
12500 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm19 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
12501 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm18, %xmm30
12502 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm18, %xmm24
12503 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm18 # 16-byte Reload
12504 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm18, %xmm2
12505 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm30[0],xmm2[1],xmm30[1],xmm2[2],xmm30[2],xmm2[3],xmm30[3]
12506 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
12507 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
12508 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm13[5],ymm2[6,7]
12509 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
12510 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm10, %xmm6
12511 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm10, %xmm2
12512 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm27, %xmm7
12513 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm27, %xmm13
12514 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm13[0],xmm2[0],xmm13[1],xmm2[1],xmm13[2],xmm2[2],xmm13[3],xmm2[3]
12515 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
12516 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm10, %xmm13
12517 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm8, %xmm27
12518 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm8, %xmm30
12519 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm30[0],xmm13[0],xmm30[1],xmm13[1],xmm30[2],xmm13[2],xmm30[3],xmm13[3]
12520 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm2 = xmm13[0,1,2],xmm2[3]
12521 ; AVX512BW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
12522 ; AVX512BW-SLOW-NEXT: vpsrlq $32, %zmm10, %zmm13
12523 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm13, %xmm13
12524 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm2 = xmm13[0,1],xmm2[2,3]
12525 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm2
12526 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm2 {%k1}
12527 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
12528 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm14, %xmm1
12529 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
12530 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm11, %xmm13
12531 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm13[0],xmm1[0],xmm13[1],xmm1[1],xmm13[2],xmm1[2],xmm13[3],xmm1[3]
12532 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm21, %xmm13
12533 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm22, %xmm30
12534 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm30[0],xmm13[0],xmm30[1],xmm13[1],xmm30[2],xmm13[2],xmm30[3],xmm13[3]
12535 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
12536 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
12537 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm13[0,1,2,3,4,5,6],ymm1[7]
12538 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm25, %xmm13
12539 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm9, %xmm4
12540 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm13[0],xmm4[1],xmm13[1],xmm4[2],xmm13[2],xmm4[3],xmm13[3]
12541 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm31, %xmm13
12542 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
12543 ; AVX512BW-SLOW-NEXT: vpshufb %xmm19, %xmm8, %xmm19
12544 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm19[0],xmm13[0],xmm19[1],xmm13[1],xmm19[2],xmm13[2],xmm19[3],xmm13[3]
12545 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12546 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
12547 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm13[0,1,2,3,4],ymm4[5],ymm13[6,7]
12548 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5],ymm1[6,7]
12549 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm5, %xmm4
12550 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm23, %xmm0
12551 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
12552 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm15, %xmm4
12553 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm29, %xmm3
12554 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
12555 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm3[0,1,2],xmm0[3]
12556 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm17, %zmm20
12557 ; AVX512BW-SLOW-NEXT: vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12558 ; AVX512BW-SLOW-NEXT: vpsrlq $32, %zmm17, %zmm3
12559 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm3, %xmm3
12560 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm3[0,1],xmm0[2,3]
12561 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12562 ; AVX512BW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
12563 ; AVX512BW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12564 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
12565 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
12566 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
12567 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm28, %xmm2
12568 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
12569 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
12570 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm3
12571 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm16, %xmm4
12572 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
12573 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
12574 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
12575 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5,6],ymm1[7]
12576 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
12577 ; AVX512BW-SLOW-NEXT: vmovdqa (%rsp), %xmm15 # 16-byte Reload
12578 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm15, %xmm4
12579 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm26, %xmm13
12580 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm13[0],xmm4[0],xmm13[1],xmm4[1],xmm13[2],xmm4[2],xmm13[3],xmm4[3]
12581 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm13 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
12582 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm24, %xmm19
12583 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm24, %xmm26
12584 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
12585 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm18, %xmm30
12586 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm19 = xmm30[0],xmm19[0],xmm30[1],xmm19[1],xmm30[2],xmm19[2],xmm30[3],xmm19[3]
12587 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12588 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm19, %ymm0, %ymm5
12589 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2,3,4],ymm4[5],ymm5[6,7]
12590 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5],ymm1[6,7]
12591 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm4
12592 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm6, %xmm24
12593 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm5
12594 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
12595 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
12596 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm5, %xmm5
12597 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm27, %xmm19
12598 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm19[0],xmm5[0],xmm19[1],xmm5[1],xmm19[2],xmm5[2],xmm19[3],xmm5[3]
12599 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3]
12600 ; AVX512BW-SLOW-NEXT: vpsrlq $40, %zmm10, %zmm5
12601 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm5, %xmm5
12602 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3]
12603 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm4, %zmm0, %zmm4
12604 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm4 {%k1}
12605 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm14, %xmm1
12606 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm11, %xmm5
12607 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm11, %xmm17
12608 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
12609 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm21, %xmm5
12610 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm22, %xmm19
12611 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm19[0],xmm5[0],xmm19[1],xmm5[1],xmm19[2],xmm5[2],xmm19[3],xmm5[3]
12612 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
12613 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
12614 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm5[0,1,2,3,4,5,6],ymm1[7]
12615 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm25, %xmm5
12616 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm9, %xmm3
12617 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
12618 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm31 # 16-byte Reload
12619 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm31, %xmm5
12620 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm8, %xmm13
12621 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm13[0],xmm5[0],xmm13[1],xmm5[1],xmm13[2],xmm5[2],xmm13[3],xmm5[3]
12622 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
12623 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
12624 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0,1,2,3,4],ymm3[5],ymm5[6,7]
12625 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
12626 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
12627 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm11, %xmm3
12628 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm23, %xmm0
12629 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
12630 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
12631 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm3
12632 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm29, %xmm2
12633 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
12634 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3]
12635 ; AVX512BW-SLOW-NEXT: vpsrlq $40, %zmm20, %zmm2
12636 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm2, %xmm2
12637 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3]
12638 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12639 ; AVX512BW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm4, %zmm0
12640 ; AVX512BW-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12641 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm0 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
12642 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm28 # 16-byte Reload
12643 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm28, %xmm1
12644 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
12645 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm8, %xmm2
12646 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
12647 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm2 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
12648 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm12, %xmm4
12649 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm16, %xmm5
12650 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
12651 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
12652 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12653 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5,6],ymm1[7]
12654 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm4 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
12655 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm15, %xmm5
12656 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
12657 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm15, %xmm13
12658 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm13[0],xmm5[0],xmm13[1],xmm5[1],xmm13[2],xmm5[2],xmm13[3],xmm5[3]
12659 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm13 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
12660 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm26, %xmm19
12661 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm18, %xmm30
12662 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm19 = xmm30[0],xmm19[0],xmm30[1],xmm19[1],xmm30[2],xmm19[2],xmm30[3],xmm19[3]
12663 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
12664 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm19, %ymm0, %ymm3
12665 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4],ymm5[5],ymm3[6,7]
12666 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3,4,5],ymm1[6,7]
12667 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm24, %xmm3
12668 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm5
12669 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
12670 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm27 # 16-byte Reload
12671 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm27, %xmm5
12672 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm30 # 16-byte Reload
12673 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm30, %xmm19
12674 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm19[0],xmm5[0],xmm19[1],xmm5[1],xmm19[2],xmm5[2],xmm19[3],xmm5[3]
12675 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm3 = xmm5[0,1,2],xmm3[3]
12676 ; AVX512BW-SLOW-NEXT: vpsrlq $48, %zmm10, %zmm5
12677 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm5, %xmm5
12678 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm3 = xmm5[0,1],xmm3[2,3]
12679 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm3, %zmm0, %zmm3
12680 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm3 {%k1}
12681 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm14, %xmm1
12682 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm17, %xmm5
12683 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
12684 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm21, %xmm5
12685 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm22, %xmm19
12686 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm19[0],xmm5[0],xmm19[1],xmm5[1],xmm19[2],xmm5[2],xmm19[3],xmm5[3]
12687 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
12688 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
12689 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm5[0,1,2,3,4,5,6],ymm1[7]
12690 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm25, %xmm5
12691 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm9, %xmm4
12692 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm9, %xmm18
12693 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
12694 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm31, %xmm5
12695 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm20 # 16-byte Reload
12696 ; AVX512BW-SLOW-NEXT: vpshufb %xmm13, %xmm20, %xmm13
12697 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm13[0],xmm5[0],xmm13[1],xmm5[1],xmm13[2],xmm5[2],xmm13[3],xmm5[3]
12698 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12699 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
12700 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2,3,4],ymm4[5],ymm5[6,7]
12701 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0,1,2,3,4,5],ymm1[6,7]
12702 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm11, %xmm4
12703 ; AVX512BW-SLOW-NEXT: vpshufb %xmm0, %xmm23, %xmm0
12704 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3]
12705 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm6, %xmm4
12706 ; AVX512BW-SLOW-NEXT: vmovdqa64 %xmm6, %xmm26
12707 ; AVX512BW-SLOW-NEXT: vpshufb %xmm2, %xmm29, %xmm2
12708 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
12709 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3]
12710 ; AVX512BW-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
12711 ; AVX512BW-SLOW-NEXT: vpsrlq $48, %zmm19, %zmm2
12712 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm2, %xmm2
12713 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm0 = xmm2[0,1],xmm0[2,3]
12714 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
12715 ; AVX512BW-SLOW-NEXT: vinserti64x4 $0, %ymm0, %zmm3, %zmm0
12716 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm1 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
12717 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm28, %xmm2
12718 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm8, %xmm3
12719 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
12720 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm3 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
12721 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm12, %xmm4
12722 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm16, %xmm5
12723 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
12724 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
12725 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12726 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4,5,6],ymm2[7]
12727 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm4 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
12728 ; AVX512BW-SLOW-NEXT: vmovdqa (%rsp), %xmm5 # 16-byte Reload
12729 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm5, %xmm5
12730 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm15, %xmm8
12731 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm8[0],xmm5[0],xmm8[1],xmm5[1],xmm8[2],xmm5[2],xmm8[3],xmm5[3]
12732 ; AVX512BW-SLOW-NEXT: vpbroadcastw {{.*#+}} xmm8 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
12733 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
12734 ; AVX512BW-SLOW-NEXT: vpshufb %xmm8, %xmm6, %xmm9
12735 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
12736 ; AVX512BW-SLOW-NEXT: vpshufb %xmm8, %xmm6, %xmm13
12737 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm13[0],xmm9[0],xmm13[1],xmm9[1],xmm13[2],xmm9[2],xmm13[3],xmm9[3]
12738 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
12739 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
12740 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm9[0,1,2,3,4],ymm5[5],ymm9[6,7]
12741 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm5[0,1,2,3,4,5],ymm2[6,7]
12742 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm24, %xmm5
12743 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm9
12744 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm9[0],xmm5[0],xmm9[1],xmm5[1],xmm9[2],xmm5[2],xmm9[3],xmm5[3]
12745 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm27, %xmm9
12746 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm30, %xmm6
12747 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm9[0],xmm6[1],xmm9[1],xmm6[2],xmm9[2],xmm6[3],xmm9[3]
12748 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0,1,2],xmm5[3]
12749 ; AVX512BW-SLOW-NEXT: vpsrlq $56, %zmm10, %zmm6
12750 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm6, %xmm6
12751 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3]
12752 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm5, %zmm0, %zmm5
12753 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm5 {%k1}
12754 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm2
12755 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm17, %xmm6
12756 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
12757 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm21, %xmm6
12758 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm22, %xmm9
12759 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm9[0],xmm6[0],xmm9[1],xmm6[1],xmm9[2],xmm6[2],xmm9[3],xmm6[3]
12760 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
12761 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
12762 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm6[0,1,2,3,4,5,6],ymm2[7]
12763 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm25, %xmm6
12764 ; AVX512BW-SLOW-NEXT: vpshufb %xmm4, %xmm18, %xmm4
12765 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xmm6[3]
12766 ; AVX512BW-SLOW-NEXT: vpshufb %xmm8, %xmm31, %xmm6
12767 ; AVX512BW-SLOW-NEXT: vpshufb %xmm8, %xmm20, %xmm8
12768 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3]
12769 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
12770 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
12771 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm4 = ymm6[0,1,2,3,4],ymm4[5],ymm6[6,7]
12772 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3,4,5],ymm2[6,7]
12773 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm11, %xmm4
12774 ; AVX512BW-SLOW-NEXT: vpshufb %xmm1, %xmm23, %xmm1
12775 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
12776 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm26, %xmm4
12777 ; AVX512BW-SLOW-NEXT: vpshufb %xmm3, %xmm29, %xmm3
12778 ; AVX512BW-SLOW-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
12779 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3]
12780 ; AVX512BW-SLOW-NEXT: vpsrlq $56, %zmm19, %zmm3
12781 ; AVX512BW-SLOW-NEXT: vpmovqb %zmm3, %xmm3
12782 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} xmm1 = xmm3[0,1],xmm1[2,3]
12783 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
12784 ; AVX512BW-SLOW-NEXT: vinserti64x4 $0, %ymm1, %zmm5, %zmm1
12785 ; AVX512BW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12786 ; AVX512BW-SLOW-NEXT: vmovaps %zmm2, (%rsi)
12787 ; AVX512BW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12788 ; AVX512BW-SLOW-NEXT: vmovaps %zmm2, (%rdx)
12789 ; AVX512BW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12790 ; AVX512BW-SLOW-NEXT: vmovaps %zmm2, (%rcx)
12791 ; AVX512BW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12792 ; AVX512BW-SLOW-NEXT: vmovaps %zmm2, (%r8)
12793 ; AVX512BW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12794 ; AVX512BW-SLOW-NEXT: vmovaps %zmm2, (%r9)
12795 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
12796 ; AVX512BW-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
12797 ; AVX512BW-SLOW-NEXT: vmovaps %zmm2, (%rax)
12798 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
12799 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm0, (%rax)
12800 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
12801 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm1, (%rax)
12802 ; AVX512BW-SLOW-NEXT: addq $744, %rsp # imm = 0x2E8
12803 ; AVX512BW-SLOW-NEXT: vzeroupper
12804 ; AVX512BW-SLOW-NEXT: retq
12805 ;
12806 ; AVX512BW-FAST-LABEL: load_i8_stride8_vf64:
12807 ; AVX512BW-FAST: # %bb.0:
12808 ; AVX512BW-FAST-NEXT: subq $328, %rsp # imm = 0x148
12809 ; AVX512BW-FAST-NEXT: vmovdqa64 256(%rdi), %zmm18
12810 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm2 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
12811 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [0,2,2,3,0,2,4,6]
12812 ; AVX512BW-FAST-NEXT: vmovdqa 480(%rdi), %ymm1
12813 ; AVX512BW-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12814 ; AVX512BW-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm30
12815 ; AVX512BW-FAST-NEXT: vpshufb %ymm2, %ymm30, %ymm1
12816 ; AVX512BW-FAST-NEXT: vmovdqa %ymm2, %ymm11
12817 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
12818 ; AVX512BW-FAST-NEXT: vmovdqa 448(%rdi), %ymm2
12819 ; AVX512BW-FAST-NEXT: vmovdqu %ymm2, (%rsp) # 32-byte Spill
12820 ; AVX512BW-FAST-NEXT: vpermd %ymm2, %ymm0, %ymm31
12821 ; AVX512BW-FAST-NEXT: vpshufb %ymm3, %ymm31, %ymm2
12822 ; AVX512BW-FAST-NEXT: vmovdqa %ymm3, %ymm8
12823 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5,6],ymm1[7]
12824 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm3 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
12825 ; AVX512BW-FAST-NEXT: vmovdqa 416(%rdi), %ymm2
12826 ; AVX512BW-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12827 ; AVX512BW-FAST-NEXT: vpermd %ymm2, %ymm0, %ymm19
12828 ; AVX512BW-FAST-NEXT: vpshufb %ymm3, %ymm19, %ymm2
12829 ; AVX512BW-FAST-NEXT: vmovdqa %ymm3, %ymm9
12830 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm5 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
12831 ; AVX512BW-FAST-NEXT: vmovdqa64 384(%rdi), %ymm29
12832 ; AVX512BW-FAST-NEXT: vpermd %ymm29, %ymm0, %ymm14
12833 ; AVX512BW-FAST-NEXT: vpshufb %ymm5, %ymm14, %ymm3
12834 ; AVX512BW-FAST-NEXT: vmovdqa %ymm5, %ymm10
12835 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3,4],ymm2[5],ymm3[6,7]
12836 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
12837 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm7 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
12838 ; AVX512BW-FAST-NEXT: vmovdqa64 368(%rdi), %xmm21
12839 ; AVX512BW-FAST-NEXT: vpshufb %xmm7, %xmm21, %xmm2
12840 ; AVX512BW-FAST-NEXT: vmovdqa 352(%rdi), %xmm4
12841 ; AVX512BW-FAST-NEXT: vpshufb %xmm7, %xmm4, %xmm3
12842 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
12843 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm23 = [0,8,0,8,0,8,0,8,0,8,0,8,0,8,0,8]
12844 ; AVX512BW-FAST-NEXT: vmovdqa 336(%rdi), %xmm12
12845 ; AVX512BW-FAST-NEXT: vpshufb %xmm23, %xmm12, %xmm5
12846 ; AVX512BW-FAST-NEXT: vmovdqa64 320(%rdi), %xmm28
12847 ; AVX512BW-FAST-NEXT: vpshufb %xmm23, %xmm28, %xmm6
12848 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
12849 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm5[0,1,2],xmm2[3]
12850 ; AVX512BW-FAST-NEXT: vpmovqb %zmm18, %xmm5
12851 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm5[0,1],xmm2[2,3]
12852 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm20
12853 ; AVX512BW-FAST-NEXT: movb $-64, %al
12854 ; AVX512BW-FAST-NEXT: kmovd %eax, %k1
12855 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm20 {%k1}
12856 ; AVX512BW-FAST-NEXT: vmovdqa 224(%rdi), %ymm1
12857 ; AVX512BW-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12858 ; AVX512BW-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm17
12859 ; AVX512BW-FAST-NEXT: vpshufb %ymm11, %ymm17, %ymm1
12860 ; AVX512BW-FAST-NEXT: vmovdqa 192(%rdi), %ymm2
12861 ; AVX512BW-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12862 ; AVX512BW-FAST-NEXT: vpermd %ymm2, %ymm0, %ymm3
12863 ; AVX512BW-FAST-NEXT: vpshufb %ymm8, %ymm3, %ymm5
12864 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4,5,6],ymm1[7]
12865 ; AVX512BW-FAST-NEXT: vmovdqa 160(%rdi), %ymm1
12866 ; AVX512BW-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
12867 ; AVX512BW-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm2
12868 ; AVX512BW-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm6
12869 ; AVX512BW-FAST-NEXT: vmovdqa64 128(%rdi), %ymm27
12870 ; AVX512BW-FAST-NEXT: vpermd %ymm27, %ymm0, %ymm16
12871 ; AVX512BW-FAST-NEXT: vpshufb %ymm10, %ymm16, %ymm8
12872 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0,1,2,3,4],ymm6[5],ymm8[6,7]
12873 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm6[0,1,2,3,4,5],ymm5[6,7]
12874 ; AVX512BW-FAST-NEXT: vmovdqa64 112(%rdi), %xmm26
12875 ; AVX512BW-FAST-NEXT: vpshufb %xmm7, %xmm26, %xmm8
12876 ; AVX512BW-FAST-NEXT: vmovdqa64 96(%rdi), %xmm24
12877 ; AVX512BW-FAST-NEXT: vpshufb %xmm7, %xmm24, %xmm7
12878 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm10 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
12879 ; AVX512BW-FAST-NEXT: vmovdqa64 80(%rdi), %xmm22
12880 ; AVX512BW-FAST-NEXT: vpshufb %xmm23, %xmm22, %xmm25
12881 ; AVX512BW-FAST-NEXT: vmovdqa 64(%rdi), %xmm8
12882 ; AVX512BW-FAST-NEXT: vpshufb %xmm23, %xmm8, %xmm23
12883 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm23[0],xmm25[0],xmm23[1],xmm25[1],xmm23[2],xmm25[2],xmm23[3],xmm25[3]
12884 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm15 = xmm15[0,1,2],xmm10[3]
12885 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rdi), %zmm10
12886 ; AVX512BW-FAST-NEXT: vpmovqb %zmm10, %xmm13
12887 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm13 = xmm13[0,1],xmm15[2,3]
12888 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
12889 ; AVX512BW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm20, %zmm0
12890 ; AVX512BW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12891 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm9 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
12892 ; AVX512BW-FAST-NEXT: vpshufb %ymm9, %ymm30, %ymm0
12893 ; AVX512BW-FAST-NEXT: vmovdqa %ymm9, %ymm11
12894 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm9 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
12895 ; AVX512BW-FAST-NEXT: vpshufb %ymm9, %ymm31, %ymm13
12896 ; AVX512BW-FAST-NEXT: vmovdqa %ymm9, %ymm6
12897 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm13[0,1,2,3,4,5,6],ymm0[7]
12898 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm9 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
12899 ; AVX512BW-FAST-NEXT: vpshufb %ymm9, %ymm19, %ymm13
12900 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
12901 ; AVX512BW-FAST-NEXT: vpshufb %ymm1, %ymm14, %ymm15
12902 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm13 = ymm15[0,1,2,3,4],ymm13[5],ymm15[6,7]
12903 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm13[0,1,2,3,4,5],ymm0[6,7]
12904 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm13 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
12905 ; AVX512BW-FAST-NEXT: vmovdqa64 %xmm21, %xmm5
12906 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm21, %xmm15
12907 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm4, %xmm23
12908 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm23[0],xmm15[0],xmm23[1],xmm15[1],xmm23[2],xmm15[2],xmm23[3],xmm15[3]
12909 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm23 = [1,9,1,9,1,9,1,9,1,9,1,9,1,9,1,9]
12910 ; AVX512BW-FAST-NEXT: vmovdqa %xmm12, %xmm7
12911 ; AVX512BW-FAST-NEXT: vpshufb %xmm23, %xmm12, %xmm25
12912 ; AVX512BW-FAST-NEXT: vpshufb %xmm23, %xmm28, %xmm20
12913 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm20[0],xmm25[0],xmm20[1],xmm25[1],xmm20[2],xmm25[2],xmm20[3],xmm25[3]
12914 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm12[0,1,2],xmm15[3]
12915 ; AVX512BW-FAST-NEXT: vpsrlq $8, %zmm18, %zmm15
12916 ; AVX512BW-FAST-NEXT: vpmovqb %zmm15, %xmm15
12917 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm15[0,1],xmm12[2,3]
12918 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm12, %zmm0, %zmm12
12919 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm12 {%k1}
12920 ; AVX512BW-FAST-NEXT: vpshufb %ymm11, %ymm17, %ymm0
12921 ; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm3, %ymm15
12922 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm15[0,1,2,3,4,5,6],ymm0[7]
12923 ; AVX512BW-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm15
12924 ; AVX512BW-FAST-NEXT: vpshufb %ymm1, %ymm16, %ymm11
12925 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3,4],ymm15[5],ymm11[6,7]
12926 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm11[0,1,2,3,4,5],ymm0[6,7]
12927 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm26, %xmm11
12928 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm24, %xmm13
12929 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3]
12930 ; AVX512BW-FAST-NEXT: vpshufb %xmm23, %xmm22, %xmm13
12931 ; AVX512BW-FAST-NEXT: vpshufb %xmm23, %xmm8, %xmm15
12932 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm13 = xmm15[0],xmm13[0],xmm15[1],xmm13[1],xmm15[2],xmm13[2],xmm15[3],xmm13[3]
12933 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm11 = xmm13[0,1,2],xmm11[3]
12934 ; AVX512BW-FAST-NEXT: vpsrlq $8, %zmm10, %zmm13
12935 ; AVX512BW-FAST-NEXT: vpmovqb %zmm13, %xmm13
12936 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm11 = xmm13[0,1],xmm11[2,3]
12937 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
12938 ; AVX512BW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
12939 ; AVX512BW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12940 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm9 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
12941 ; AVX512BW-FAST-NEXT: vpshufb %ymm9, %ymm30, %ymm0
12942 ; AVX512BW-FAST-NEXT: vmovdqa64 %ymm9, %ymm21
12943 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm6 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
12944 ; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm31, %ymm11
12945 ; AVX512BW-FAST-NEXT: vmovdqa64 %ymm6, %ymm25
12946 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm11[0,1,2,3,4,5,6],ymm0[7]
12947 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm6 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
12948 ; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm19, %ymm11
12949 ; AVX512BW-FAST-NEXT: vmovdqa %ymm6, %ymm9
12950 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm6 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
12951 ; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm14, %ymm12
12952 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm11 = ymm12[0,1,2,3,4],ymm11[5],ymm12[6,7]
12953 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm11[0,1,2,3,4,5],ymm0[6,7]
12954 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm11 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
12955 ; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm5, %xmm12
12956 ; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm4, %xmm13
12957 ; AVX512BW-FAST-NEXT: vmovdqa %xmm4, %xmm1
12958 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
12959 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm13 = [2,10,2,10,2,10,2,10,2,10,2,10,2,10,2,10]
12960 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm7, %xmm15
12961 ; AVX512BW-FAST-NEXT: vmovdqa64 %xmm7, %xmm23
12962 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm28, %xmm20
12963 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm20[0],xmm15[0],xmm20[1],xmm15[1],xmm20[2],xmm15[2],xmm20[3],xmm15[3]
12964 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm15[0,1,2],xmm12[3]
12965 ; AVX512BW-FAST-NEXT: vpsrlq $16, %zmm18, %zmm15
12966 ; AVX512BW-FAST-NEXT: vpmovqb %zmm15, %xmm15
12967 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm15[0,1],xmm12[2,3]
12968 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm12, %zmm0, %zmm12
12969 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm12 {%k1}
12970 ; AVX512BW-FAST-NEXT: vpshufb %ymm21, %ymm17, %ymm0
12971 ; AVX512BW-FAST-NEXT: vpshufb %ymm25, %ymm3, %ymm15
12972 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm15[0,1,2,3,4,5,6],ymm0[7]
12973 ; AVX512BW-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm15
12974 ; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm16, %ymm9
12975 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4],ymm15[5],ymm9[6,7]
12976 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm9[0,1,2,3,4,5],ymm0[6,7]
12977 ; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm26, %xmm9
12978 ; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm24, %xmm11
12979 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
12980 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm22, %xmm11
12981 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm8, %xmm13
12982 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3]
12983 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm11[0,1,2],xmm9[3]
12984 ; AVX512BW-FAST-NEXT: vpsrlq $16, %zmm10, %zmm11
12985 ; AVX512BW-FAST-NEXT: vpmovqb %zmm11, %xmm11
12986 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm11[0,1],xmm9[2,3]
12987 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
12988 ; AVX512BW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm12, %zmm0
12989 ; AVX512BW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
12990 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm4 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
12991 ; AVX512BW-FAST-NEXT: vpshufb %ymm4, %ymm30, %ymm0
12992 ; AVX512BW-FAST-NEXT: vmovdqa %ymm4, %ymm6
12993 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm4 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
12994 ; AVX512BW-FAST-NEXT: vpshufb %ymm4, %ymm31, %ymm9
12995 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm9[0,1,2,3,4,5,6],ymm0[7]
12996 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm31 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
12997 ; AVX512BW-FAST-NEXT: vpshufb %ymm31, %ymm19, %ymm9
12998 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm30 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
12999 ; AVX512BW-FAST-NEXT: vpshufb %ymm30, %ymm14, %ymm11
13000 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm11[0,1,2,3,4],ymm9[5],ymm11[6,7]
13001 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm9[0,1,2,3,4,5],ymm0[6,7]
13002 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm9 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
13003 ; AVX512BW-FAST-NEXT: vmovdqa64 %xmm5, %xmm19
13004 ; AVX512BW-FAST-NEXT: vpshufb %xmm9, %xmm5, %xmm11
13005 ; AVX512BW-FAST-NEXT: vmovdqa64 %xmm1, %xmm20
13006 ; AVX512BW-FAST-NEXT: vpshufb %xmm9, %xmm1, %xmm12
13007 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
13008 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm12 = [3,11,3,11,3,11,3,11,3,11,3,11,3,11,3,11]
13009 ; AVX512BW-FAST-NEXT: vpshufb %xmm12, %xmm7, %xmm14
13010 ; AVX512BW-FAST-NEXT: vpshufb %xmm12, %xmm28, %xmm15
13011 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
13012 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm11 = xmm14[0,1,2],xmm11[3]
13013 ; AVX512BW-FAST-NEXT: vpsrlq $24, %zmm18, %zmm14
13014 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm18, %zmm25
13015 ; AVX512BW-FAST-NEXT: vpmovqb %zmm14, %xmm14
13016 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm11 = xmm14[0,1],xmm11[2,3]
13017 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm11, %zmm0, %zmm11
13018 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm11 {%k1}
13019 ; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm17, %ymm0
13020 ; AVX512BW-FAST-NEXT: vpshufb %ymm4, %ymm3, %ymm3
13021 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5,6],ymm0[7]
13022 ; AVX512BW-FAST-NEXT: vpshufb %ymm31, %ymm2, %ymm2
13023 ; AVX512BW-FAST-NEXT: vpshufb %ymm30, %ymm16, %ymm1
13024 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm2[5],ymm1[6,7]
13025 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
13026 ; AVX512BW-FAST-NEXT: vpshufb %xmm9, %xmm26, %xmm1
13027 ; AVX512BW-FAST-NEXT: vpshufb %xmm9, %xmm24, %xmm2
13028 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
13029 ; AVX512BW-FAST-NEXT: vpshufb %xmm12, %xmm22, %xmm2
13030 ; AVX512BW-FAST-NEXT: vpshufb %xmm12, %xmm8, %xmm3
13031 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
13032 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3]
13033 ; AVX512BW-FAST-NEXT: vpsrlq $24, %zmm10, %zmm2
13034 ; AVX512BW-FAST-NEXT: vpmovqb %zmm2, %xmm2
13035 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3]
13036 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
13037 ; AVX512BW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm11, %zmm0
13038 ; AVX512BW-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13039 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [1,3,2,3,1,3,5,7]
13040 ; AVX512BW-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm6 # 32-byte Folded Reload
13041 ; AVX512BW-FAST-NEXT: vpermd (%rsp), %ymm3, %ymm7 # 32-byte Folded Reload
13042 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm13 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
13043 ; AVX512BW-FAST-NEXT: vpshufb %ymm13, %ymm6, %ymm2
13044 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
13045 ; AVX512BW-FAST-NEXT: vpshufb %ymm1, %ymm7, %ymm9
13046 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5,6],ymm2[7]
13047 ; AVX512BW-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm5 # 32-byte Folded Reload
13048 ; AVX512BW-FAST-NEXT: vpermd %ymm29, %ymm3, %ymm14
13049 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm0 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
13050 ; AVX512BW-FAST-NEXT: vpshufb %ymm0, %ymm5, %ymm11
13051 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm4 = [0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12,0,4,8,12]
13052 ; AVX512BW-FAST-NEXT: vpshufb %ymm4, %ymm14, %ymm12
13053 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm11 = ymm12[0,1,2,3,4],ymm11[5],ymm12[6,7]
13054 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm11[0,1,2,3,4,5],ymm9[6,7]
13055 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm11 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
13056 ; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm19, %xmm12
13057 ; AVX512BW-FAST-NEXT: vmovdqa64 %xmm19, %xmm16
13058 ; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm20, %xmm15
13059 ; AVX512BW-FAST-NEXT: vmovdqa64 %xmm20, %xmm17
13060 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm15[0],xmm12[0],xmm15[1],xmm12[1],xmm15[2],xmm12[2],xmm15[3],xmm12[3]
13061 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm20 = [4,12,4,12,4,12,4,12,4,12,4,12,4,12,4,12]
13062 ; AVX512BW-FAST-NEXT: vpshufb %xmm20, %xmm23, %xmm15
13063 ; AVX512BW-FAST-NEXT: vpshufb %xmm20, %xmm28, %xmm29
13064 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm15 = xmm29[0],xmm15[0],xmm29[1],xmm15[1],xmm29[2],xmm15[2],xmm29[3],xmm15[3]
13065 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm15[0,1,2],xmm12[3]
13066 ; AVX512BW-FAST-NEXT: vpsrlq $32, %zmm18, %zmm15
13067 ; AVX512BW-FAST-NEXT: vpmovqb %zmm15, %xmm15
13068 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm12 = xmm15[0,1],xmm12[2,3]
13069 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm12, %zmm0, %zmm12
13070 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm12 {%k1}
13071 ; AVX512BW-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm19 # 32-byte Folded Reload
13072 ; AVX512BW-FAST-NEXT: vpshufb %ymm13, %ymm19, %ymm9
13073 ; AVX512BW-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm18 # 32-byte Folded Reload
13074 ; AVX512BW-FAST-NEXT: vpshufb %ymm1, %ymm18, %ymm15
13075 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm15[0,1,2,3,4,5,6],ymm9[7]
13076 ; AVX512BW-FAST-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm15 # 32-byte Folded Reload
13077 ; AVX512BW-FAST-NEXT: vpermd %ymm27, %ymm3, %ymm21
13078 ; AVX512BW-FAST-NEXT: vpshufb %ymm0, %ymm15, %ymm3
13079 ; AVX512BW-FAST-NEXT: vpshufb %ymm4, %ymm21, %ymm13
13080 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm13[0,1,2,3,4],ymm3[5],ymm13[6,7]
13081 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm9[6,7]
13082 ; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm26, %xmm9
13083 ; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm24, %xmm11
13084 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
13085 ; AVX512BW-FAST-NEXT: vpshufb %xmm20, %xmm22, %xmm11
13086 ; AVX512BW-FAST-NEXT: vpshufb %xmm20, %xmm8, %xmm13
13087 ; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3]
13088 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm11[0,1,2],xmm9[3]
13089 ; AVX512BW-FAST-NEXT: vpsrlq $32, %zmm10, %zmm11
13090 ; AVX512BW-FAST-NEXT: vpmovqb %zmm11, %xmm11
13091 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm11[0,1],xmm9[2,3]
13092 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm9[0,1,2,3],ymm3[4,5,6,7]
13093 ; AVX512BW-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm12, %zmm29
13094 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm4 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
13095 ; AVX512BW-FAST-NEXT: vpshufb %ymm4, %ymm6, %ymm9
13096 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm0 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
13097 ; AVX512BW-FAST-NEXT: vpshufb %ymm0, %ymm7, %ymm11
13098 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm11[0,1,2,3,4,5,6],ymm9[7]
13099 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm2 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
13100 ; AVX512BW-FAST-NEXT: vpshufb %ymm2, %ymm5, %ymm11
13101 ; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13,1,5,9,13]
13102 ; AVX512BW-FAST-NEXT: vpshufb %ymm1, %ymm14, %ymm12
13103 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm11 = ymm12[0,1,2,3,4],ymm11[5],ymm12[6,7]
13104 ; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm11[0,1,2,3,4,5],ymm9[6,7]
13105 ; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm11 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm16, %xmm12
; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm17, %xmm13
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm13 = [5,13,5,13,5,13,5,13,5,13,5,13,5,13,5,13]
; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm23, %xmm20
; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm28, %xmm27
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm27[0],xmm20[0],xmm27[1],xmm20[1],xmm27[2],xmm20[2],xmm27[3],xmm20[3]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm3 = xmm3[0,1,2],xmm12[3]
; AVX512BW-FAST-NEXT: vmovdqa64 %zmm25, %zmm27
; AVX512BW-FAST-NEXT: vpsrlq $40, %zmm25, %zmm12
; AVX512BW-FAST-NEXT: vpmovqb %zmm12, %xmm12
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm3 = xmm12[0,1],xmm3[2,3]
; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm3, %zmm0, %zmm3
; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm3 {%k1}
; AVX512BW-FAST-NEXT: vpshufb %ymm4, %ymm19, %ymm9
; AVX512BW-FAST-NEXT: vpshufb %ymm0, %ymm18, %ymm12
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm12[0,1,2,3,4,5,6],ymm9[7]
; AVX512BW-FAST-NEXT: vpshufb %ymm2, %ymm15, %ymm12
; AVX512BW-FAST-NEXT: vpshufb %ymm1, %ymm21, %ymm4
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3,4],ymm12[5],ymm4[6,7]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm9[6,7]
; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm26, %xmm9
; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm24, %xmm11
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm22, %xmm11
; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm8, %xmm12
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm11[0,1,2],xmm9[3]
; AVX512BW-FAST-NEXT: vpsrlq $40, %zmm10, %zmm11
; AVX512BW-FAST-NEXT: vpmovqb %zmm11, %xmm11
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm11[0,1],xmm9[2,3]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0,1,2,3],ymm4[4,5,6,7]
; AVX512BW-FAST-NEXT: vinserti64x4 $0, %ymm4, %zmm3, %zmm20
; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm0 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
; AVX512BW-FAST-NEXT: vpshufb %ymm0, %ymm6, %ymm3
; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm1 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
; AVX512BW-FAST-NEXT: vpshufb %ymm1, %ymm7, %ymm4
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5,6],ymm3[7]
; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm2 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
; AVX512BW-FAST-NEXT: vpshufb %ymm2, %ymm5, %ymm4
; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm25 = [2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14,2,6,10,14]
; AVX512BW-FAST-NEXT: vpshufb %ymm25, %ymm14, %ymm9
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0,1,2,3,4],ymm4[5],ymm9[6,7]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5],ymm3[6,7]
; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm4 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
; AVX512BW-FAST-NEXT: vpshufb %xmm4, %xmm16, %xmm9
; AVX512BW-FAST-NEXT: vpshufb %xmm4, %xmm17, %xmm11
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm11 = [6,14,6,14,6,14,6,14,6,14,6,14,6,14,6,14]
; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm23, %xmm12
; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm28, %xmm13
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm12[0,1,2],xmm9[3]
; AVX512BW-FAST-NEXT: vpsrlq $48, %zmm27, %zmm12
; AVX512BW-FAST-NEXT: vpmovqb %zmm12, %xmm12
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm9 = xmm12[0,1],xmm9[2,3]
; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm9, %zmm0, %zmm9
; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm9 {%k1}
; AVX512BW-FAST-NEXT: vpshufb %ymm0, %ymm19, %ymm3
; AVX512BW-FAST-NEXT: vpshufb %ymm1, %ymm18, %ymm12
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm12[0,1,2,3,4,5,6],ymm3[7]
; AVX512BW-FAST-NEXT: vpshufb %ymm2, %ymm15, %ymm12
; AVX512BW-FAST-NEXT: vpshufb %ymm25, %ymm21, %ymm13
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm12 = ymm13[0,1,2,3,4],ymm12[5],ymm13[6,7]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm12[0,1,2,3,4,5],ymm3[6,7]
; AVX512BW-FAST-NEXT: vpshufb %xmm4, %xmm26, %xmm12
; AVX512BW-FAST-NEXT: vpshufb %xmm4, %xmm24, %xmm4
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm12[0],xmm4[1],xmm12[1],xmm4[2],xmm12[2],xmm4[3],xmm12[3]
; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm22, %xmm12
; AVX512BW-FAST-NEXT: vpshufb %xmm11, %xmm8, %xmm11
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm4 = xmm11[0,1,2],xmm4[3]
; AVX512BW-FAST-NEXT: vpsrlq $48, %zmm10, %zmm11
; AVX512BW-FAST-NEXT: vpmovqb %zmm11, %xmm11
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm4 = xmm11[0,1],xmm4[2,3]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
; AVX512BW-FAST-NEXT: vinserti64x4 $0, %ymm3, %zmm9, %zmm3
; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm12 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm6, %ymm0
; AVX512BW-FAST-NEXT: vpbroadcastd {{.*#+}} ymm6 = [3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15,3,7,11,15]
; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm7, %ymm1
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5,6],ymm0[7]
; AVX512BW-FAST-NEXT: vpshufb %ymm31, %ymm5, %ymm1
; AVX512BW-FAST-NEXT: vpshufb %ymm30, %ymm14, %ymm2
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4],ymm1[5],ymm2[6,7]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm1 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
; AVX512BW-FAST-NEXT: vpshufb %xmm1, %xmm16, %xmm2
; AVX512BW-FAST-NEXT: vpshufb %xmm1, %xmm17, %xmm4
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
; AVX512BW-FAST-NEXT: vpbroadcastw {{.*#+}} xmm4 = [7,15,7,15,7,15,7,15,7,15,7,15,7,15,7,15]
; AVX512BW-FAST-NEXT: vpshufb %xmm4, %xmm23, %xmm9
; AVX512BW-FAST-NEXT: vpshufb %xmm4, %xmm28, %xmm11
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm9[0,1,2],xmm2[3]
; AVX512BW-FAST-NEXT: vpsrlq $56, %zmm27, %zmm9
; AVX512BW-FAST-NEXT: vpmovqb %zmm9, %xmm9
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm2 = xmm9[0,1],xmm2[2,3]
; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm2
; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm2 {%k1}
; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm19, %ymm0
; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm18, %ymm9
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm9[0,1,2,3,4,5,6],ymm0[7]
; AVX512BW-FAST-NEXT: vpshufb %ymm31, %ymm15, %ymm9
; AVX512BW-FAST-NEXT: vpshufb %ymm30, %ymm21, %ymm11
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm9 = ymm11[0,1,2,3,4],ymm9[5],ymm11[6,7]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm9[0,1,2,3,4,5],ymm0[6,7]
; AVX512BW-FAST-NEXT: vpshufb %xmm1, %xmm26, %xmm5
; AVX512BW-FAST-NEXT: vpshufb %xmm1, %xmm24, %xmm1
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
; AVX512BW-FAST-NEXT: vpshufb %xmm4, %xmm22, %xmm5
; AVX512BW-FAST-NEXT: vpshufb %xmm4, %xmm8, %xmm4
; AVX512BW-FAST-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1,2],xmm1[3]
; AVX512BW-FAST-NEXT: vpsrlq $56, %zmm10, %zmm4
; AVX512BW-FAST-NEXT: vpmovqb %zmm4, %xmm4
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} xmm1 = xmm4[0,1],xmm1[2,3]
; AVX512BW-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
; AVX512BW-FAST-NEXT: vinserti64x4 $0, %ymm0, %zmm2, %zmm0
; AVX512BW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-FAST-NEXT: vmovaps %zmm1, (%rsi)
; AVX512BW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-FAST-NEXT: vmovaps %zmm1, (%rdx)
; AVX512BW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-FAST-NEXT: vmovaps %zmm1, (%rcx)
; AVX512BW-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
; AVX512BW-FAST-NEXT: vmovaps %zmm1, (%r8)
; AVX512BW-FAST-NEXT: vmovdqa64 %zmm29, (%r9)
; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FAST-NEXT: vmovdqa64 %zmm20, (%rax)
; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FAST-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FAST-NEXT: vmovdqa64 %zmm0, (%rax)
; AVX512BW-FAST-NEXT: addq $328, %rsp # imm = 0x148
; AVX512BW-FAST-NEXT: vzeroupper
; AVX512BW-FAST-NEXT: retq
  %wide.vec = load <512 x i8>, ptr %in.vec, align 64
  %strided.vec0 = shufflevector <512 x i8> %wide.vec, <512 x i8> poison, <64 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 56, i32 64, i32 72, i32 80, i32 88, i32 96, i32 104, i32 112, i32 120, i32 128, i32 136, i32 144, i32 152, i32 160, i32 168, i32 176, i32 184, i32 192, i32 200, i32 208, i32 216, i32 224, i32 232, i32 240, i32 248, i32 256, i32 264, i32 272, i32 280, i32 288, i32 296, i32 304, i32 312, i32 320, i32 328, i32 336, i32 344, i32 352, i32 360, i32 368, i32 376, i32 384, i32 392, i32 400, i32 408, i32 416, i32 424, i32 432, i32 440, i32 448, i32 456, i32 464, i32 472, i32 480, i32 488, i32 496, i32 504>
  %strided.vec1 = shufflevector <512 x i8> %wide.vec, <512 x i8> poison, <64 x i32> <i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 57, i32 65, i32 73, i32 81, i32 89, i32 97, i32 105, i32 113, i32 121, i32 129, i32 137, i32 145, i32 153, i32 161, i32 169, i32 177, i32 185, i32 193, i32 201, i32 209, i32 217, i32 225, i32 233, i32 241, i32 249, i32 257, i32 265, i32 273, i32 281, i32 289, i32 297, i32 305, i32 313, i32 321, i32 329, i32 337, i32 345, i32 353, i32 361, i32 369, i32 377, i32 385, i32 393, i32 401, i32 409, i32 417, i32 425, i32 433, i32 441, i32 449, i32 457, i32 465, i32 473, i32 481, i32 489, i32 497, i32 505>
  %strided.vec2 = shufflevector <512 x i8> %wide.vec, <512 x i8> poison, <64 x i32> <i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 50, i32 58, i32 66, i32 74, i32 82, i32 90, i32 98, i32 106, i32 114, i32 122, i32 130, i32 138, i32 146, i32 154, i32 162, i32 170, i32 178, i32 186, i32 194, i32 202, i32 210, i32 218, i32 226, i32 234, i32 242, i32 250, i32 258, i32 266, i32 274, i32 282, i32 290, i32 298, i32 306, i32 314, i32 322, i32 330, i32 338, i32 346, i32 354, i32 362, i32 370, i32 378, i32 386, i32 394, i32 402, i32 410, i32 418, i32 426, i32 434, i32 442, i32 450, i32 458, i32 466, i32 474, i32 482, i32 490, i32 498, i32 506>
  %strided.vec3 = shufflevector <512 x i8> %wide.vec, <512 x i8> poison, <64 x i32> <i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 51, i32 59, i32 67, i32 75, i32 83, i32 91, i32 99, i32 107, i32 115, i32 123, i32 131, i32 139, i32 147, i32 155, i32 163, i32 171, i32 179, i32 187, i32 195, i32 203, i32 211, i32 219, i32 227, i32 235, i32 243, i32 251, i32 259, i32 267, i32 275, i32 283, i32 291, i32 299, i32 307, i32 315, i32 323, i32 331, i32 339, i32 347, i32 355, i32 363, i32 371, i32 379, i32 387, i32 395, i32 403, i32 411, i32 419, i32 427, i32 435, i32 443, i32 451, i32 459, i32 467, i32 475, i32 483, i32 491, i32 499, i32 507>
  %strided.vec4 = shufflevector <512 x i8> %wide.vec, <512 x i8> poison, <64 x i32> <i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 52, i32 60, i32 68, i32 76, i32 84, i32 92, i32 100, i32 108, i32 116, i32 124, i32 132, i32 140, i32 148, i32 156, i32 164, i32 172, i32 180, i32 188, i32 196, i32 204, i32 212, i32 220, i32 228, i32 236, i32 244, i32 252, i32 260, i32 268, i32 276, i32 284, i32 292, i32 300, i32 308, i32 316, i32 324, i32 332, i32 340, i32 348, i32 356, i32 364, i32 372, i32 380, i32 388, i32 396, i32 404, i32 412, i32 420, i32 428, i32 436, i32 444, i32 452, i32 460, i32 468, i32 476, i32 484, i32 492, i32 500, i32 508>
  %strided.vec5 = shufflevector <512 x i8> %wide.vec, <512 x i8> poison, <64 x i32> <i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 53, i32 61, i32 69, i32 77, i32 85, i32 93, i32 101, i32 109, i32 117, i32 125, i32 133, i32 141, i32 149, i32 157, i32 165, i32 173, i32 181, i32 189, i32 197, i32 205, i32 213, i32 221, i32 229, i32 237, i32 245, i32 253, i32 261, i32 269, i32 277, i32 285, i32 293, i32 301, i32 309, i32 317, i32 325, i32 333, i32 341, i32 349, i32 357, i32 365, i32 373, i32 381, i32 389, i32 397, i32 405, i32 413, i32 421, i32 429, i32 437, i32 445, i32 453, i32 461, i32 469, i32 477, i32 485, i32 493, i32 501, i32 509>
  %strided.vec6 = shufflevector <512 x i8> %wide.vec, <512 x i8> poison, <64 x i32> <i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 54, i32 62, i32 70, i32 78, i32 86, i32 94, i32 102, i32 110, i32 118, i32 126, i32 134, i32 142, i32 150, i32 158, i32 166, i32 174, i32 182, i32 190, i32 198, i32 206, i32 214, i32 222, i32 230, i32 238, i32 246, i32 254, i32 262, i32 270, i32 278, i32 286, i32 294, i32 302, i32 310, i32 318, i32 326, i32 334, i32 342, i32 350, i32 358, i32 366, i32 374, i32 382, i32 390, i32 398, i32 406, i32 414, i32 422, i32 430, i32 438, i32 446, i32 454, i32 462, i32 470, i32 478, i32 486, i32 494, i32 502, i32 510>
  %strided.vec7 = shufflevector <512 x i8> %wide.vec, <512 x i8> poison, <64 x i32> <i32 7, i32 15, i32 23, i32 31, i32 39, i32 47, i32 55, i32 63, i32 71, i32 79, i32 87, i32 95, i32 103, i32 111, i32 119, i32 127, i32 135, i32 143, i32 151, i32 159, i32 167, i32 175, i32 183, i32 191, i32 199, i32 207, i32 215, i32 223, i32 231, i32 239, i32 247, i32 255, i32 263, i32 271, i32 279, i32 287, i32 295, i32 303, i32 311, i32 319, i32 327, i32 335, i32 343, i32 351, i32 359, i32 367, i32 375, i32 383, i32 391, i32 399, i32 407, i32 415, i32 423, i32 431, i32 439, i32 447, i32 455, i32 463, i32 471, i32 479, i32 487, i32 495, i32 503, i32 511>
  store <64 x i8> %strided.vec0, ptr %out.vec0, align 64
  store <64 x i8> %strided.vec1, ptr %out.vec1, align 64
  store <64 x i8> %strided.vec2, ptr %out.vec2, align 64
  store <64 x i8> %strided.vec3, ptr %out.vec3, align 64
  store <64 x i8> %strided.vec4, ptr %out.vec4, align 64
  store <64 x i8> %strided.vec5, ptr %out.vec5, align 64
  store <64 x i8> %strided.vec6, ptr %out.vec6, align 64
  store <64 x i8> %strided.vec7, ptr %out.vec7, align 64
  ret void
}
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
; AVX512-FAST: {{.*}}
; AVX512-SLOW: {{.*}}
; AVX512BW-ONLY-FAST: {{.*}}
; AVX512BW-ONLY-SLOW: {{.*}}
; AVX512DQ-FAST: {{.*}}
; AVX512DQ-SLOW: {{.*}}
; AVX512DQBW-FAST: {{.*}}
; AVX512DQBW-SLOW: {{.*}}
; AVX512F-ONLY-FAST: {{.*}}
; AVX512F-ONLY-SLOW: {{.*}}
; FALLBACK0: {{.*}}
; FALLBACK1: {{.*}}
; FALLBACK10: {{.*}}
; FALLBACK11: {{.*}}
; FALLBACK12: {{.*}}
; FALLBACK2: {{.*}}
; FALLBACK3: {{.*}}
; FALLBACK4: {{.*}}
; FALLBACK5: {{.*}}
; FALLBACK6: {{.*}}
; FALLBACK7: {{.*}}
; FALLBACK8: {{.*}}
; FALLBACK9: {{.*}}