; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12

; These patterns are produced by LoopVectorizer for interleaved stores.
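;
; As a rough sketch of where these patterns come from (illustrative only, not
; part of this test; the function name and signature are hypothetical), the
; stride-7 case corresponds to a scalar loop that writes seven independent
; byte streams interleaved in memory, which the LoopVectorizer turns into the
; wide shuffle-and-store sequences checked below:
;
;   /* Store seven byte streams interleaved with stride 7. */
;   void store_stride7(signed char *out, const signed char *s0,
;                      const signed char *s1, const signed char *s2,
;                      const signed char *s3, const signed char *s4,
;                      const signed char *s5, const signed char *s6, int n) {
;     for (int i = 0; i < n; ++i) {
;       out[7 * i + 0] = s0[i]; /* lane 0 of each 7-byte group */
;       out[7 * i + 1] = s1[i];
;       out[7 * i + 2] = s2[i];
;       out[7 * i + 3] = s3[i];
;       out[7 * i + 4] = s4[i];
;       out[7 * i + 5] = s5[i];
;       out[7 * i + 6] = s6[i];
;     }
;   }
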
18 define void @store_i8_stride7_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
19 ; SSE-LABEL: store_i8_stride7_vf2:
; SSE: # %bb.0:
21 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
22 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
23 ; SSE-NEXT: movdqa (%rdi), %xmm0
24 ; SSE-NEXT: movdqa (%rdx), %xmm1
25 ; SSE-NEXT: movdqa (%r8), %xmm2
26 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
27 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
28 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
29 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
30 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
31 ; SSE-NEXT: pxor %xmm1, %xmm1
32 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
33 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm0[0,1,2,3,7,5,6,7]
34 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,2,3]
35 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[3,1,0,3,4,5,6,7]
36 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,2,1,3,4,5,6,7]
37 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
38 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,1]
39 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,6,6,6,6]
40 ; SSE-NEXT: packuswb %xmm3, %xmm0
41 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
42 ; SSE-NEXT: pand %xmm3, %xmm0
43 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
44 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,0,2,1]
45 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,7,5,6,7]
46 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,2,2,3,4,5,6,7]
47 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,0,2]
48 ; SSE-NEXT: packuswb %xmm1, %xmm2
49 ; SSE-NEXT: pandn %xmm2, %xmm3
50 ; SSE-NEXT: por %xmm0, %xmm3
51 ; SSE-NEXT: pextrw $6, %xmm2, %ecx
52 ; SSE-NEXT: movw %cx, 12(%rax)
53 ; SSE-NEXT: movq %xmm3, (%rax)
54 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm3[2,3,2,3]
55 ; SSE-NEXT: movd %xmm0, 8(%rax)
; SSE-NEXT: retq
;
58 ; AVX-LABEL: store_i8_stride7_vf2:
; AVX: # %bb.0:
60 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
61 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %r10
62 ; AVX-NEXT: vmovdqa (%rdi), %xmm0
63 ; AVX-NEXT: vmovdqa (%rdx), %xmm1
64 ; AVX-NEXT: vmovdqa (%r8), %xmm2
65 ; AVX-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
66 ; AVX-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
67 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
68 ; AVX-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
69 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
70 ; AVX-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
71 ; AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1,5,9,2,6,10,14,3,7,11,u,u]
72 ; AVX-NEXT: vpextrw $6, %xmm0, 12(%rax)
73 ; AVX-NEXT: vpextrd $2, %xmm0, 8(%rax)
74 ; AVX-NEXT: vmovq %xmm0, (%rax)
; AVX-NEXT: retq
76 %in.vec0 = load <2 x i8>, ptr %in.vecptr0, align 64
77 %in.vec1 = load <2 x i8>, ptr %in.vecptr1, align 64
78 %in.vec2 = load <2 x i8>, ptr %in.vecptr2, align 64
79 %in.vec3 = load <2 x i8>, ptr %in.vecptr3, align 64
80 %in.vec4 = load <2 x i8>, ptr %in.vecptr4, align 64
81 %in.vec5 = load <2 x i8>, ptr %in.vecptr5, align 64
82 %in.vec6 = load <2 x i8>, ptr %in.vecptr6, align 64
83 %1 = shufflevector <2 x i8> %in.vec0, <2 x i8> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
84 %2 = shufflevector <2 x i8> %in.vec2, <2 x i8> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
85 %3 = shufflevector <2 x i8> %in.vec4, <2 x i8> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
86 %4 = shufflevector <4 x i8> %1, <4 x i8> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
87 %5 = shufflevector <2 x i8> %in.vec6, <2 x i8> poison, <4 x i32> <i32 0, i32 1, i32 undef, i32 undef>
88 %6 = shufflevector <4 x i8> %3, <4 x i8> %5, <6 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5>
89 %7 = shufflevector <6 x i8> %6, <6 x i8> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 undef, i32 undef>
90 %8 = shufflevector <8 x i8> %4, <8 x i8> %7, <14 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13>
91 %interleaved.vec = shufflevector <14 x i8> %8, <14 x i8> poison, <14 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 12, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11, i32 13>
92 store <14 x i8> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

96 define void @store_i8_stride7_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
97 ; SSE-LABEL: store_i8_stride7_vf4:
; SSE: # %bb.0:
99 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
100 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
101 ; SSE-NEXT: movdqa (%rdi), %xmm0
102 ; SSE-NEXT: movdqa (%rdx), %xmm3
103 ; SSE-NEXT: movdqa (%r8), %xmm5
104 ; SSE-NEXT: movdqa (%r10), %xmm2
105 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
106 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],mem[0],xmm3[1],mem[1]
107 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],mem[0],xmm5[1],mem[1]
108 ; SSE-NEXT: pxor %xmm7, %xmm7
109 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm7[0],xmm3[1],xmm7[1],xmm3[2],xmm7[2],xmm3[3],xmm7[3],xmm3[4],xmm7[4],xmm3[5],xmm7[5],xmm3[6],xmm7[6],xmm3[7],xmm7[7]
110 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[0,2,2,3]
111 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm1[0,1,3,3,4,5,6,7]
112 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm1[0,1,0,2,4,5,6,7]
113 ; SSE-NEXT: packuswb %xmm4, %xmm6
114 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
115 ; SSE-NEXT: movdqa %xmm1, %xmm4
116 ; SSE-NEXT: pandn %xmm6, %xmm4
117 ; SSE-NEXT: movdqa %xmm0, %xmm8
118 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
119 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm8[0,2,0,0]
120 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm6[0,2,2,3,4,5,6,7]
121 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[2,1,1,3]
122 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm8[1,1,1,1,4,5,6,7]
123 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,4,6]
124 ; SSE-NEXT: packuswb %xmm8, %xmm6
125 ; SSE-NEXT: pand %xmm1, %xmm6
126 ; SSE-NEXT: por %xmm4, %xmm6
127 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
128 ; SSE-NEXT: pand %xmm4, %xmm6
129 ; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1],xmm5[2],xmm7[2],xmm5[3],xmm7[3],xmm5[4],xmm7[4],xmm5[5],xmm7[5],xmm5[6],xmm7[6],xmm5[7],xmm7[7]
130 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm5[0,0,2,3]
131 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,5,5,5,5]
132 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm5[0,1,2,0]
133 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,6,4,6,7]
134 ; SSE-NEXT: packuswb %xmm7, %xmm8
135 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255]
136 ; SSE-NEXT: pand %xmm7, %xmm8
137 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm2[0,0,0,0,4,5,6,7]
138 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[0,0,0,0]
139 ; SSE-NEXT: pandn %xmm9, %xmm7
140 ; SSE-NEXT: por %xmm8, %xmm7
141 ; SSE-NEXT: pandn %xmm7, %xmm4
142 ; SSE-NEXT: por %xmm6, %xmm4
143 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[3,1,2,3]
144 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm5[0,3,1,3,4,5,6,7]
145 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm5[0,1,2,0,4,5,6,7]
146 ; SSE-NEXT: packuswb %xmm6, %xmm5
147 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm3[3,1,2,1]
148 ; SSE-NEXT: psrldq {{.*#+}} xmm3 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
149 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm6[2,0,2,3,4,5,6,7]
150 ; SSE-NEXT: packuswb %xmm3, %xmm6
151 ; SSE-NEXT: pand %xmm1, %xmm6
152 ; SSE-NEXT: pandn %xmm5, %xmm1
153 ; SSE-NEXT: por %xmm6, %xmm1
154 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,255,0,0,0,255,255,255,255,0,255,255,255,255]
155 ; SSE-NEXT: pand %xmm3, %xmm1
156 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[1,1,1,1,4,5,6,7]
157 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
158 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255]
159 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
160 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,1,2,3]
161 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,1,3,1,4,5,6,7]
162 ; SSE-NEXT: pand %xmm5, %xmm0
163 ; SSE-NEXT: pandn %xmm2, %xmm5
164 ; SSE-NEXT: por %xmm0, %xmm5
165 ; SSE-NEXT: pandn %xmm5, %xmm3
166 ; SSE-NEXT: por %xmm1, %xmm3
167 ; SSE-NEXT: movq %xmm3, 16(%rax)
168 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm3[2,3,2,3]
169 ; SSE-NEXT: movd %xmm0, 24(%rax)
170 ; SSE-NEXT: movdqa %xmm4, (%rax)
; SSE-NEXT: retq
;
173 ; AVX1-ONLY-LABEL: store_i8_stride7_vf4:
174 ; AVX1-ONLY: # %bb.0:
175 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
176 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
177 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm0
178 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm1
179 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm2
180 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
181 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
182 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
183 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
184 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],mem[0]
185 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,4,8,12],zero,zero,zero,xmm0[1,5,9,13],zero,zero,zero,xmm0[2,6]
186 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = zero,zero,zero,zero,xmm1[0,4,8],zero,zero,zero,zero,xmm1[1,5,9],zero,zero
187 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm2, %xmm2
188 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[10,14],zero,zero,zero,xmm0[3,7,11,15],zero,zero,zero,xmm0[u,u,u,u]
189 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,xmm1[2,6,10],zero,zero,zero,zero,xmm1[3,7,11,u,u,u,u]
190 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm0, %xmm0
191 ; AVX1-ONLY-NEXT: vpextrd $2, %xmm0, 24(%rax)
192 ; AVX1-ONLY-NEXT: vmovq %xmm0, 16(%rax)
193 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, (%rax)
194 ; AVX1-ONLY-NEXT: retq
;
196 ; AVX2-ONLY-LABEL: store_i8_stride7_vf4:
197 ; AVX2-ONLY: # %bb.0:
198 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
199 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
200 ; AVX2-ONLY-NEXT: vmovdqa (%rdi), %xmm0
201 ; AVX2-ONLY-NEXT: vmovdqa (%rsi), %xmm1
202 ; AVX2-ONLY-NEXT: vmovdqa (%rdx), %xmm2
203 ; AVX2-ONLY-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
204 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r9), %ymm1, %ymm1
205 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r8), %ymm0, %ymm0
206 ; AVX2-ONLY-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
207 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r10), %ymm2, %ymm1
208 ; AVX2-ONLY-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
209 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,zero,ymm0[1,5,9,13],zero,zero,zero,ymm0[2,6],zero,zero,ymm0[18,22,26],zero,zero,zero,zero,ymm0[19,23,27],zero,zero,zero,zero
210 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
211 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4,8],zero,zero,zero,zero,ymm0[1,5,9],zero,zero,ymm0[26,30],zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero
212 ; AVX2-ONLY-NEXT: vpor %ymm0, %ymm1, %ymm0
213 ; AVX2-ONLY-NEXT: vextracti128 $1, %ymm0, %xmm1
214 ; AVX2-ONLY-NEXT: vpextrd $2, %xmm1, 24(%rax)
215 ; AVX2-ONLY-NEXT: vmovq %xmm1, 16(%rax)
216 ; AVX2-ONLY-NEXT: vmovdqa %xmm0, (%rax)
217 ; AVX2-ONLY-NEXT: vzeroupper
218 ; AVX2-ONLY-NEXT: retq
;
220 ; AVX512F-LABEL: store_i8_stride7_vf4:
; AVX512F: # %bb.0:
222 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
223 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %r10
224 ; AVX512F-NEXT: vmovdqa (%rdi), %xmm0
225 ; AVX512F-NEXT: vmovdqa (%rsi), %xmm1
226 ; AVX512F-NEXT: vmovdqa (%rdx), %xmm2
227 ; AVX512F-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
228 ; AVX512F-NEXT: vinserti128 $1, (%r9), %ymm1, %ymm1
229 ; AVX512F-NEXT: vinserti128 $1, (%r8), %ymm0, %ymm0
230 ; AVX512F-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
231 ; AVX512F-NEXT: vinserti128 $1, (%r10), %ymm2, %ymm1
232 ; AVX512F-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
233 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,zero,ymm0[1,5,9,13],zero,zero,zero,ymm0[2,6],zero,zero,ymm0[18,22,26],zero,zero,zero,zero,ymm0[19,23,27,u,u,u,u]
234 ; AVX512F-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
235 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4,8],zero,zero,zero,zero,ymm0[1,5,9],zero,zero,ymm0[26,30],zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero
236 ; AVX512F-NEXT: vpor %ymm0, %ymm1, %ymm0
237 ; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm1
238 ; AVX512F-NEXT: vpextrd $2, %xmm1, 24(%rax)
239 ; AVX512F-NEXT: vmovq %xmm1, 16(%rax)
240 ; AVX512F-NEXT: vmovdqa %xmm0, (%rax)
241 ; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
244 ; AVX512BW-LABEL: store_i8_stride7_vf4:
; AVX512BW: # %bb.0:
246 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
247 ; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %r10
248 ; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
249 ; AVX512BW-NEXT: vmovdqa (%rsi), %xmm1
250 ; AVX512BW-NEXT: vmovdqa (%rdx), %xmm2
251 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
252 ; AVX512BW-NEXT: vinserti128 $1, (%r9), %ymm1, %ymm1
253 ; AVX512BW-NEXT: vinserti128 $1, (%r8), %ymm0, %ymm0
254 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
255 ; AVX512BW-NEXT: vinserti128 $1, (%r10), %ymm2, %ymm1
256 ; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[2],ymm1[2]
257 ; AVX512BW-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,zero,ymm0[1,5,9,13],zero,zero,zero,ymm0[2,6],zero,zero,ymm0[18,22,26],zero,zero,zero,zero,ymm0[19,23,27],zero,zero,zero,zero
258 ; AVX512BW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
259 ; AVX512BW-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4,8],zero,zero,zero,zero,ymm0[1,5,9],zero,zero,ymm0[26,30],zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero
260 ; AVX512BW-NEXT: vpor %ymm1, %ymm0, %ymm0
261 ; AVX512BW-NEXT: vextracti128 $1, %ymm0, %xmm1
262 ; AVX512BW-NEXT: vpextrd $2, %xmm1, 24(%rax)
263 ; AVX512BW-NEXT: vmovq %xmm1, 16(%rax)
264 ; AVX512BW-NEXT: vmovdqa %xmm0, (%rax)
265 ; AVX512BW-NEXT: vzeroupper
266 ; AVX512BW-NEXT: retq
267 %in.vec0 = load <4 x i8>, ptr %in.vecptr0, align 64
268 %in.vec1 = load <4 x i8>, ptr %in.vecptr1, align 64
269 %in.vec2 = load <4 x i8>, ptr %in.vecptr2, align 64
270 %in.vec3 = load <4 x i8>, ptr %in.vecptr3, align 64
271 %in.vec4 = load <4 x i8>, ptr %in.vecptr4, align 64
272 %in.vec5 = load <4 x i8>, ptr %in.vecptr5, align 64
273 %in.vec6 = load <4 x i8>, ptr %in.vecptr6, align 64
274 %1 = shufflevector <4 x i8> %in.vec0, <4 x i8> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
275 %2 = shufflevector <4 x i8> %in.vec2, <4 x i8> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
276 %3 = shufflevector <4 x i8> %in.vec4, <4 x i8> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
277 %4 = shufflevector <8 x i8> %1, <8 x i8> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
278 %5 = shufflevector <4 x i8> %in.vec6, <4 x i8> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef>
279 %6 = shufflevector <8 x i8> %3, <8 x i8> %5, <12 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11>
280 %7 = shufflevector <12 x i8> %6, <12 x i8> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 undef, i32 undef, i32 undef, i32 undef>
281 %8 = shufflevector <16 x i8> %4, <16 x i8> %7, <28 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27>
282 %interleaved.vec = shufflevector <28 x i8> %8, <28 x i8> poison, <28 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 24, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 25, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 26, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23, i32 27>
283 store <28 x i8> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

287 define void @store_i8_stride7_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
288 ; SSE-LABEL: store_i8_stride7_vf8:
; SSE: # %bb.0:
290 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
291 ; SSE-NEXT: movq {{.*#+}} xmm3 = mem[0],zero
292 ; SSE-NEXT: movq {{.*#+}} xmm10 = mem[0],zero
293 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
294 ; SSE-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
295 ; SSE-NEXT: movq {{.*#+}} xmm4 = mem[0],zero
296 ; SSE-NEXT: movq {{.*#+}} xmm14 = mem[0],zero
297 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
298 ; SSE-NEXT: movq {{.*#+}} xmm0 = mem[0],zero
299 ; SSE-NEXT: movq {{.*#+}} xmm5 = mem[0],zero
300 ; SSE-NEXT: punpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm0[0]
301 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[0,0,0,0,4,5,6,7]
302 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
303 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
304 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
305 ; SSE-NEXT: pand %xmm6, %xmm0
306 ; SSE-NEXT: movdqa %xmm4, %xmm7
307 ; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3],xmm7[4],xmm4[4],xmm7[5],xmm4[5],xmm7[6],xmm4[6],xmm7[7],xmm4[7]
308 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm7[0,0,2,1,4,5,6,7]
309 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,1,1,3]
310 ; SSE-NEXT: pandn %xmm8, %xmm6
311 ; SSE-NEXT: por %xmm0, %xmm6
312 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
313 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm3[0,0,2,1,4,5,6,7]
314 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
315 ; SSE-NEXT: movdqa {{.*#+}} xmm8 = [255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
316 ; SSE-NEXT: pand %xmm8, %xmm0
317 ; SSE-NEXT: punpcklbw {{.*#+}} xmm10 = xmm10[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
318 ; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm10[0,2,1,3,4,5,6,7]
319 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[0,1,1,0]
320 ; SSE-NEXT: movdqa %xmm8, %xmm12
321 ; SSE-NEXT: pandn %xmm11, %xmm12
322 ; SSE-NEXT: por %xmm0, %xmm12
323 ; SSE-NEXT: pand %xmm9, %xmm12
324 ; SSE-NEXT: pandn %xmm6, %xmm9
325 ; SSE-NEXT: por %xmm12, %xmm9
326 ; SSE-NEXT: pxor %xmm0, %xmm0
327 ; SSE-NEXT: movdqa %xmm5, %xmm12
328 ; SSE-NEXT: movdqa %xmm5, %xmm15
329 ; SSE-NEXT: punpckhbw {{.*#+}} xmm15 = xmm15[8],xmm0[8],xmm15[9],xmm0[9],xmm15[10],xmm0[10],xmm15[11],xmm0[11],xmm15[12],xmm0[12],xmm15[13],xmm0[13],xmm15[14],xmm0[14],xmm15[15],xmm0[15]
330 ; SSE-NEXT: punpcklbw {{.*#+}} xmm12 = xmm12[0],xmm0[0],xmm12[1],xmm0[1],xmm12[2],xmm0[2],xmm12[3],xmm0[3],xmm12[4],xmm0[4],xmm12[5],xmm0[5],xmm12[6],xmm0[6],xmm12[7],xmm0[7]
331 ; SSE-NEXT: movdqa %xmm12, %xmm13
332 ; SSE-NEXT: punpcklwd {{.*#+}} xmm13 = xmm13[0],xmm15[0],xmm13[1],xmm15[1],xmm13[2],xmm15[2],xmm13[3],xmm15[3]
333 ; SSE-NEXT: movdqa %xmm13, %xmm0
334 ; SSE-NEXT: pslldq {{.*#+}} xmm0 = zero,zero,xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
335 ; SSE-NEXT: movdqa %xmm13, %xmm6
336 ; SSE-NEXT: packuswb %xmm0, %xmm6
337 ; SSE-NEXT: movdqa {{.*#+}} xmm11 = [255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255]
338 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm14[0,0,0,0,4,5,6,7]
339 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
340 ; SSE-NEXT: movdqa %xmm11, %xmm14
341 ; SSE-NEXT: pandn %xmm0, %xmm14
342 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,0,2,3]
343 ; SSE-NEXT: pand %xmm11, %xmm0
344 ; SSE-NEXT: por %xmm0, %xmm14
345 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
346 ; SSE-NEXT: pand %xmm6, %xmm9
347 ; SSE-NEXT: pandn %xmm14, %xmm6
348 ; SSE-NEXT: por %xmm9, %xmm6
349 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[2,2,3,3]
350 ; SSE-NEXT: movdqa %xmm11, %xmm9
351 ; SSE-NEXT: pandn %xmm0, %xmm9
352 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[2,1,3,3,4,5,6,7]
353 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
354 ; SSE-NEXT: pand %xmm11, %xmm0
355 ; SSE-NEXT: por %xmm9, %xmm0
356 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
357 ; SSE-NEXT: movdqa %xmm1, %xmm9
358 ; SSE-NEXT: pandn %xmm0, %xmm9
359 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm10[0,1,2,3,5,6,6,7]
360 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm0[2,2,2,2]
361 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255]
362 ; SSE-NEXT: movdqa %xmm0, %xmm14
363 ; SSE-NEXT: pandn %xmm2, %xmm14
364 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm3[0,2,2,3,4,5,6,7]
365 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
366 ; SSE-NEXT: pand %xmm0, %xmm2
367 ; SSE-NEXT: por %xmm2, %xmm14
368 ; SSE-NEXT: pand %xmm1, %xmm14
369 ; SSE-NEXT: por %xmm9, %xmm14
370 ; SSE-NEXT: movdqa %xmm15, %xmm2
371 ; SSE-NEXT: punpckhwd {{.*#+}} xmm2 = xmm2[4],xmm12[4],xmm2[5],xmm12[5],xmm2[6],xmm12[6],xmm2[7],xmm12[7]
372 ; SSE-NEXT: punpckhwd {{.*#+}} xmm12 = xmm12[4],xmm15[4],xmm12[5],xmm15[5],xmm12[6],xmm15[6],xmm12[7],xmm15[7]
373 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[2,2,2,2,4,5,6,7]
374 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,4]
375 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm12[0,1,2,2,4,5,6,7]
376 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[0,1,2,1]
377 ; SSE-NEXT: packuswb %xmm2, %xmm9
378 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
379 ; SSE-NEXT: pand %xmm12, %xmm9
380 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
381 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm15[2,2,2,2,4,5,6,7]
382 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
383 ; SSE-NEXT: pandn %xmm2, %xmm12
384 ; SSE-NEXT: por %xmm9, %xmm12
385 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,0,0,0,0,255,255,255,0,0,0,0,255,255]
386 ; SSE-NEXT: pand %xmm2, %xmm12
387 ; SSE-NEXT: pandn %xmm14, %xmm2
388 ; SSE-NEXT: por %xmm2, %xmm12
389 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm13[2,2,3,3]
390 ; SSE-NEXT: psrldq {{.*#+}} xmm13 = xmm13[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
391 ; SSE-NEXT: packuswb %xmm13, %xmm2
392 ; SSE-NEXT: pand %xmm0, %xmm2
393 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm15[1,1,1,1,4,5,6,7]
394 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[0,0,0,0]
395 ; SSE-NEXT: pandn %xmm9, %xmm0
396 ; SSE-NEXT: por %xmm2, %xmm0
397 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm10[0,1,2,2]
398 ; SSE-NEXT: movdqa %xmm3, %xmm10
399 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm3[1,1,2,3,4,5,6,7]
400 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[0,0,2,1]
401 ; SSE-NEXT: pand %xmm11, %xmm9
402 ; SSE-NEXT: pandn %xmm2, %xmm11
403 ; SSE-NEXT: por %xmm9, %xmm11
404 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
405 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm3[1,1,2,2,4,5,6,7]
406 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,2,1]
407 ; SSE-NEXT: pand %xmm8, %xmm2
408 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,1,2,1]
409 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,7,5,6,4]
410 ; SSE-NEXT: pandn %xmm7, %xmm8
411 ; SSE-NEXT: por %xmm2, %xmm8
412 ; SSE-NEXT: pand %xmm1, %xmm8
413 ; SSE-NEXT: pandn %xmm11, %xmm1
414 ; SSE-NEXT: por %xmm8, %xmm1
415 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
416 ; SSE-NEXT: pand %xmm2, %xmm1
417 ; SSE-NEXT: pandn %xmm0, %xmm2
418 ; SSE-NEXT: por %xmm2, %xmm1
419 ; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3],xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
420 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,7,7,7,7]
421 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm0[2,2,2,2]
422 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
423 ; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0],xmm10[0],xmm4[1],xmm10[1],xmm4[2],xmm10[2],xmm4[3],xmm10[3],xmm4[4],xmm10[4],xmm4[5],xmm10[5],xmm4[6],xmm10[6],xmm4[7],xmm10[7]
424 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,0,0,255,255,255,255,255,255,255,255,255,255,255]
425 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,7,7,7,7]
426 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,2,2,2]
427 ; SSE-NEXT: pand %xmm0, %xmm4
428 ; SSE-NEXT: pandn %xmm2, %xmm0
429 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm5[3,1,2,3]
430 ; SSE-NEXT: por %xmm4, %xmm0
431 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm15[3,3,3,3,4,5,6,7]
432 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,255,255,255,0,0,255,255,255,255,255,255,255,255,255]
433 ; SSE-NEXT: pand %xmm4, %xmm3
434 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[1,3,2,3,4,5,6,7]
435 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
436 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,1,3,1,4,5,6,7]
437 ; SSE-NEXT: pandn %xmm2, %xmm4
438 ; SSE-NEXT: por %xmm3, %xmm4
439 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [0,255,255,255,255,0,0,0,255,255,255,255,255,255,255,255]
440 ; SSE-NEXT: pand %xmm2, %xmm0
441 ; SSE-NEXT: pandn %xmm4, %xmm2
442 ; SSE-NEXT: por %xmm0, %xmm2
443 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
444 ; SSE-NEXT: movq %xmm2, 48(%rax)
445 ; SSE-NEXT: movdqa %xmm1, 16(%rax)
446 ; SSE-NEXT: movdqa %xmm12, 32(%rax)
447 ; SSE-NEXT: movdqa %xmm6, (%rax)
; SSE-NEXT: retq
;
450 ; AVX1-ONLY-LABEL: store_i8_stride7_vf8:
451 ; AVX1-ONLY: # %bb.0:
452 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
453 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
454 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
455 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
456 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
457 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
458 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
459 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
460 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
461 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
462 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
463 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
464 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[u,u,u],zero,zero,xmm2[5,13,u,u,u],zero,zero,xmm2[6,14,u,u]
465 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm1[u,u,u,5,13],zero,zero,xmm1[u,u,u,6,14],zero,zero,xmm1[u,u]
466 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm5, %xmm4
467 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm0[4,12],zero,xmm0[u,u,u,u,5,13],zero,xmm0[u,u,u,u,6,14]
468 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = zero,zero,xmm3[4,u,u,u,u],zero,zero,xmm3[5,u,u,u,u],zero,zero
469 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm5, %xmm5
470 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm6 = [255,255,255,0,0,0,0,255,255,255,0,0,0,0,255,255]
471 ; AVX1-ONLY-NEXT: vpblendvb %xmm6, %xmm5, %xmm4, %xmm4
472 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm2[u],zero,zero,xmm2[7,15,u,u,u,u,u,u,u,u,u,u,u]
473 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = xmm1[u,7,15],zero,zero,xmm1[u,u,u,u,u,u,u,u,u,u,u]
474 ; AVX1-ONLY-NEXT: vpor %xmm5, %xmm6, %xmm5
475 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = zero,xmm0[u,u,u,u,7,15],zero,xmm0[u,u,u,u,u,u,u,u]
476 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = xmm3[6,u,u,u,u],zero,zero,xmm3[7,u,u,u,u,u,u,u,u]
477 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm7, %xmm6
478 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm7 = [0,255,255,255,255,0,0,0,0,255,255,255,255,0,0,0]
479 ; AVX1-ONLY-NEXT: # xmm7 = mem[0,0]
480 ; AVX1-ONLY-NEXT: vpblendvb %xmm7, %xmm5, %xmm6, %xmm5
481 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = zero,zero,xmm2[0,8,u,u,u],zero,zero,xmm2[1,9,u,u,u],zero,zero
482 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = xmm1[0,8],zero,zero,xmm1[u,u,u,1,9],zero,zero,xmm1[u,u,u,2,10]
483 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm7, %xmm6
484 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = xmm0[u,u,u,u,0,8],zero,xmm0[u,u,u,u,1,9],zero,xmm0[u,u]
485 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm8 = xmm3[u,u,u,u],zero,zero,xmm3[0,u,u,u,u],zero,zero,xmm3[1,u,u]
486 ; AVX1-ONLY-NEXT: vpor %xmm7, %xmm8, %xmm7
487 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm8 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
488 ; AVX1-ONLY-NEXT: vpblendvb %xmm8, %xmm6, %xmm7, %xmm6
489 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,xmm1[u,u,u,3,11],zero,zero,xmm1[u,u,u,4,12],zero,zero
490 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[2,10,u,u,u],zero,zero,xmm2[3,11,u,u,u],zero,zero,xmm2[4,12]
491 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm2, %xmm1
492 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[u,u,2,10],zero,xmm0[u,u,u,u,3,11],zero,xmm0[u,u,u,u]
493 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm3[u,u],zero,zero,xmm3[2,u,u,u,u],zero,zero,xmm3[3,u,u,u,u]
494 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm0, %xmm0
495 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm2 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
496 ; AVX1-ONLY-NEXT: vpblendvb %xmm2, %xmm1, %xmm0, %xmm0
497 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, 16(%rax)
498 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, (%rax)
499 ; AVX1-ONLY-NEXT: vmovq %xmm5, 48(%rax)
500 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, 32(%rax)
501 ; AVX1-ONLY-NEXT: retq
;
503 ; AVX2-SLOW-LABEL: store_i8_stride7_vf8:
504 ; AVX2-SLOW: # %bb.0:
505 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
506 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
507 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
508 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
509 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
510 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
511 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
512 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
513 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
514 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
515 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
516 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
517 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
518 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
519 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,2,1,3]
520 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,ymm2[5,13],zero,zero,zero,zero,zero,ymm2[6,14],zero,zero,zero,zero,zero,zero,zero,ymm2[23,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
521 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm2[2,3,0,1]
522 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,zero,zero,zero,ymm4[5,13],zero,zero,zero,zero,zero,ymm4[6,14],zero,zero,zero,ymm4[23,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
523 ; AVX2-SLOW-NEXT: vpor %ymm5, %ymm3, %ymm3
524 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,0,2]
525 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,ymm0[4],zero,zero,zero,zero,zero,zero,ymm0[5],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm0[23,31],zero,zero,zero,zero,zero,zero,zero,zero,zero
526 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm1[4,12],zero,zero,zero,zero,zero,ymm1[5,13],zero,zero,zero,zero,zero,ymm1[6,14,22],zero,zero,zero,zero,zero,zero,ymm1[23],zero,zero,zero,zero,zero,zero,zero,zero
527 ; AVX2-SLOW-NEXT: vpor %ymm5, %ymm6, %ymm5
528 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = <0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,u,u,u,u,u,u,u,u>
529 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm3, %ymm5, %ymm3
530 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[0,8],zero,zero,zero,zero,zero,ymm2[1,9],zero,zero,zero,zero,zero,ymm2[2,10,18,26],zero,zero,zero,zero,zero,ymm2[19,27],zero,zero,zero,zero,zero,ymm2[20,28]
531 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,ymm4[0,8],zero,zero,zero,zero,zero,ymm4[1,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm4[19,27],zero,zero,zero,zero,zero,ymm4[20,28],zero,zero
532 ; AVX2-SLOW-NEXT: vpor %ymm4, %ymm2, %ymm2
533 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,ymm0[0],zero,zero,zero,zero,zero,zero,ymm0[1],zero,zero,zero,zero,ymm0[18,26],zero,zero,zero,zero,zero,ymm0[19,27],zero,zero,zero,zero,zero
534 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[0,8],zero,zero,zero,zero,zero,ymm1[1,9],zero,zero,zero,zero,zero,zero,zero,ymm1[18],zero,zero,zero,zero,zero,zero,ymm1[19],zero,zero,zero,zero
535 ; AVX2-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
536 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm1 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
537 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm2, %ymm0, %ymm0
538 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, (%rax)
539 ; AVX2-SLOW-NEXT: vextracti128 $1, %ymm3, %xmm0
540 ; AVX2-SLOW-NEXT: vmovq %xmm0, 48(%rax)
541 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, 32(%rax)
542 ; AVX2-SLOW-NEXT: vzeroupper
543 ; AVX2-SLOW-NEXT: retq
;
545 ; AVX2-FAST-LABEL: store_i8_stride7_vf8:
546 ; AVX2-FAST: # %bb.0:
547 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
548 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
549 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
550 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
551 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
552 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
553 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
554 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
555 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
556 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
557 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
558 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
559 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
560 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
561 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
562 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [1,3,5,7,1,3,5,7]
563 ; AVX2-FAST-NEXT: # ymm1 = mem[0,1,0,1]
564 ; AVX2-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm1
565 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,ymm1[1,5,9,13],zero,zero,zero,ymm1[2,6,10,14],zero,zero,zero,ymm1[19,23,27,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
566 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <1,3,5,u,5,1,3,u>
567 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm3
568 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[0,4,8],zero,zero,zero,zero,ymm3[1,5,9],zero,zero,zero,zero,ymm3[2,6,18],zero,zero,zero,zero,ymm3[23,27,19],zero,zero,zero,zero,zero,zero,zero,zero
569 ; AVX2-FAST-NEXT: vpor %ymm3, %ymm1, %ymm1
570 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm2[0,8],zero,zero,zero,zero,zero,ymm2[1,9],zero,zero,zero,zero,zero,ymm2[2,10,18,26],zero,zero,zero,zero,zero,ymm2[19,27],zero,zero,zero,zero,zero,ymm2[20,28]
571 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,0,1]
572 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,ymm2[0,8],zero,zero,zero,zero,zero,ymm2[1,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm2[19,27],zero,zero,zero,zero,zero,ymm2[20,28],zero,zero
573 ; AVX2-FAST-NEXT: vpor %ymm2, %ymm3, %ymm2
574 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,2,4,6,0,2,4,6]
575 ; AVX2-FAST-NEXT: # ymm3 = mem[0,1,0,1]
576 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm0
577 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,0,4,8,u,u,u,u,1,5,9,u,u,u,u,18,22,26,u,u,u,u,19,23,27,u,u,u,u]
578 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
579 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm2, %ymm0, %ymm0
580 ; AVX2-FAST-NEXT: vmovdqa %ymm0, (%rax)
581 ; AVX2-FAST-NEXT: vextracti128 $1, %ymm1, %xmm0
582 ; AVX2-FAST-NEXT: vmovq %xmm0, 48(%rax)
583 ; AVX2-FAST-NEXT: vmovdqa %xmm1, 32(%rax)
584 ; AVX2-FAST-NEXT: vzeroupper
585 ; AVX2-FAST-NEXT: retq
;
587 ; AVX2-FAST-PERLANE-LABEL: store_i8_stride7_vf8:
588 ; AVX2-FAST-PERLANE: # %bb.0:
589 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
590 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r10
591 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
592 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
593 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
594 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
595 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
596 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
597 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
598 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
599 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
600 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
601 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
602 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
603 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,2,1,3]
604 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,ymm2[5,13],zero,zero,zero,zero,zero,ymm2[6,14],zero,zero,zero,zero,zero,zero,zero,ymm2[23,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
605 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm2[2,3,0,1]
606 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,zero,zero,zero,ymm4[5,13],zero,zero,zero,zero,zero,ymm4[6,14],zero,zero,zero,ymm4[23,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
607 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm5, %ymm3, %ymm3
608 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,0,2]
609 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,ymm0[4],zero,zero,zero,zero,zero,zero,ymm0[5],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm0[23,31],zero,zero,zero,zero,zero,zero,zero,zero,zero
610 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm6 = ymm1[4,12],zero,zero,zero,zero,zero,ymm1[5,13],zero,zero,zero,zero,zero,ymm1[6,14,22],zero,zero,zero,zero,zero,zero,ymm1[23],zero,zero,zero,zero,zero,zero,zero,zero
611 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm5, %ymm6, %ymm5
612 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = <0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,u,u,u,u,u,u,u,u>
613 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm3, %ymm5, %ymm3
614 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[0,8],zero,zero,zero,zero,zero,ymm2[1,9],zero,zero,zero,zero,zero,ymm2[2,10,18,26],zero,zero,zero,zero,zero,ymm2[19,27],zero,zero,zero,zero,zero,ymm2[20,28]
615 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,ymm4[0,8],zero,zero,zero,zero,zero,ymm4[1,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm4[19,27],zero,zero,zero,zero,zero,ymm4[20,28],zero,zero
616 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm4, %ymm2, %ymm2
617 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,ymm0[0],zero,zero,zero,zero,zero,zero,ymm0[1],zero,zero,zero,zero,ymm0[18,26],zero,zero,zero,zero,zero,ymm0[19,27],zero,zero,zero,zero,zero
618 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[0,8],zero,zero,zero,zero,zero,ymm1[1,9],zero,zero,zero,zero,zero,zero,zero,ymm1[18],zero,zero,zero,zero,zero,zero,ymm1[19],zero,zero,zero,zero
619 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm0, %ymm1, %ymm0
620 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm1 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
621 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm2, %ymm0, %ymm0
622 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, (%rax)
623 ; AVX2-FAST-PERLANE-NEXT: vextracti128 $1, %ymm3, %xmm0
624 ; AVX2-FAST-PERLANE-NEXT: vmovq %xmm0, 48(%rax)
625 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, 32(%rax)
626 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
627 ; AVX2-FAST-PERLANE-NEXT: retq
;
629 ; AVX512F-SLOW-LABEL: store_i8_stride7_vf8:
630 ; AVX512F-SLOW: # %bb.0:
631 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
632 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
633 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
634 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
635 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
636 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
637 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
638 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
639 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
640 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
641 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
642 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
643 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
644 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
645 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,2,1,3]
646 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm2[0,8],zero,zero,ymm2[u,u,u,1,9],zero,zero,ymm2[u,u,u,2,10,18,26,u,u,u],zero,zero,ymm2[19,27,u,u,u],zero,zero,ymm2[20,28]
647 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm2[u,u,u,5,13],zero,zero,ymm2[u,u,u,6,14],zero,zero,ymm2[u,u,u],zero,zero,ymm2[23,31,u,u,u,u,u,u,u,u,u,u,u]
648 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
649 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,0,1]
650 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,ymm2[0,8,u,u,u],zero,zero,ymm2[1,9,u,u,u],zero,zero,zero,zero,ymm2[u,u,u,19,27],zero,zero,ymm2[u,u,u,20,28],zero,zero
651 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u],zero,zero,ymm2[5,13,u,u,u],zero,zero,ymm2[6,14,u,u,u,23,31],zero,zero,ymm2[u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
652 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm4, %zmm2
653 ; AVX512F-SLOW-NEXT: vporq %zmm2, %zmm3, %zmm2
654 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,0,2]
655 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm0[u,u,u,u],zero,zero,ymm0[0,u,u,u,u],zero,zero,ymm0[1,u,u,u,u,18,26],zero,ymm0[u,u,u,u,19,27],zero,ymm0[u,u,u,u]
656 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,ymm0[4,u,u,u,u],zero,zero,ymm0[5,u,u,u,u],zero,zero,zero,ymm0[u,u,u,u,23,31],zero,ymm0[u,u,u,u,u,u,u,u]
657 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
658 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm1[u,u,u,u,0,8],zero,ymm1[u,u,u,u,1,9],zero,ymm1[u,u,u,u],zero,zero,ymm1[18,u,u,u,u],zero,zero,ymm1[19,u,u,u,u]
659 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[4,12],zero,ymm1[u,u,u,u,5,13],zero,ymm1[u,u,u,u,6,14,22,u,u,u,u],zero,zero,ymm1[23,u,u,u,u,u,u,u,u]
660 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm3, %zmm1
661 ; AVX512F-SLOW-NEXT: vporq %zmm0, %zmm1, %zmm0
662 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
663 ; AVX512F-SLOW-NEXT: vextracti32x4 $2, %zmm0, 32(%rax)
664 ; AVX512F-SLOW-NEXT: vextracti32x4 $3, %zmm0, %xmm1
665 ; AVX512F-SLOW-NEXT: vmovq %xmm1, 48(%rax)
666 ; AVX512F-SLOW-NEXT: vmovdqa %ymm0, (%rax)
667 ; AVX512F-SLOW-NEXT: vzeroupper
668 ; AVX512F-SLOW-NEXT: retq
;
670 ; AVX512F-FAST-LABEL: store_i8_stride7_vf8:
671 ; AVX512F-FAST: # %bb.0:
672 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
673 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
674 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
675 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
676 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
677 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
678 ; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
679 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
680 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
681 ; AVX512F-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
682 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
683 ; AVX512F-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
684 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
685 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <0,2,4,u>
686 ; AVX512F-FAST-NEXT: vpermi2q %ymm3, %ymm0, %ymm1
687 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [0,2,4,6,0,2,4,6]
688 ; AVX512F-FAST-NEXT: # ymm0 = mem[0,1,0,1]
689 ; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm0, %ymm0
690 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4,8],zero,zero,zero,zero,ymm0[1,5,9],zero,zero,zero,zero,ymm0[18,22,26],zero,zero,zero,zero,ymm0[19,23,27],zero,zero,zero,zero
691 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <1,3,5,u,5,1,3,u>
692 ; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm1
693 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[0,4,8],zero,zero,zero,zero,ymm1[1,5,9],zero,zero,zero,zero,ymm1[2,6,18],zero,zero,zero,zero,ymm1[23,27,19],zero,zero,zero,zero,zero,zero,zero,zero
694 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
695 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[0,8],zero,zero,ymm2[u,u,u,1,9],zero,zero,ymm2[u,u,u,2,10,18,26,u,u,u],zero,zero,ymm2[19,27,u,u,u],zero,zero,ymm2[20,28]
696 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm2[2,3,0,1]
697 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm3[0,8,u,u,u],zero,zero,ymm3[1,9,u,u,u],zero,zero,zero,zero,ymm3[u,u,u,19,27],zero,zero,ymm3[u,u,u,20,28],zero,zero
698 ; AVX512F-FAST-NEXT: vpternlogq $168, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm3
699 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [1,3,5,7,1,3,5,7]
700 ; AVX512F-FAST-NEXT: # ymm1 = mem[0,1,0,1]
701 ; AVX512F-FAST-NEXT: vpermd %ymm2, %ymm1, %ymm1
702 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,ymm1[1,5,9,13],zero,zero,zero,ymm1[2,6,10,14],zero,zero,zero,ymm1[19,23,27,31],zero,zero,zero,ymm1[u,u,u,u,u,u,u,u]
703 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm3, %zmm1
704 ; AVX512F-FAST-NEXT: vporq %zmm0, %zmm1, %zmm0
705 ; AVX512F-FAST-NEXT: vextracti32x4 $2, %zmm0, 32(%rax)
706 ; AVX512F-FAST-NEXT: vextracti32x4 $3, %zmm0, %xmm1
707 ; AVX512F-FAST-NEXT: vmovq %xmm1, 48(%rax)
708 ; AVX512F-FAST-NEXT: vmovdqa %ymm0, (%rax)
709 ; AVX512F-FAST-NEXT: vzeroupper
710 ; AVX512F-FAST-NEXT: retq
;
712 ; AVX512BW-SLOW-LABEL: store_i8_stride7_vf8:
713 ; AVX512BW-SLOW: # %bb.0:
714 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
715 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
716 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
717 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
718 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
719 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
720 ; AVX512BW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
721 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
722 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
723 ; AVX512BW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
724 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
725 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
726 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
727 ; AVX512BW-SLOW-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[2],ymm3[2]
728 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,2,1,3]
729 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm1, %zmm1
730 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} zmm1 = zero,zero,zero,zero,zmm1[0,8],zero,zero,zero,zero,zero,zmm1[1,9],zero,zero,zero,zero,zero,zero,zero,zmm1[18],zero,zero,zero,zero,zero,zero,zmm1[19],zero,zero,zero,zero,zmm1[36,44],zero,zero,zero,zero,zero,zmm1[37,45],zero,zero,zero,zero,zero,zmm1[38,46,54],zero,zero,zero,zero,zero,zero,zmm1[55],zero,zero,zero,zero,zero,zero,zero,zero
731 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,0,2]
732 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
733 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} zmm0 = zero,zero,zero,zero,zero,zero,zmm0[0],zero,zero,zero,zero,zero,zero,zmm0[1],zero,zero,zero,zero,zmm0[18,26],zero,zero,zero,zero,zero,zmm0[19,27],zero,zero,zero,zero,zero,zero,zero,zmm0[36],zero,zero,zero,zero,zero,zero,zmm0[37],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm0[55,63],zero,zero,zero,zero,zero,zero,zero,zero,zero
734 ; AVX512BW-SLOW-NEXT: vporq %zmm1, %zmm0, %zmm0
735 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm2, %zmm1
736 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} zmm1 = zmm1[0,8],zero,zero,zero,zero,zero,zmm1[1,9],zero,zero,zero,zero,zero,zmm1[2,10,18,26],zero,zero,zero,zero,zero,zmm1[19,27],zero,zero,zero,zero,zero,zmm1[20,28],zero,zero,zero,zmm1[37,45],zero,zero,zero,zero,zero,zmm1[38,46],zero,zero,zero,zero,zero,zero,zero,zmm1[55,63],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
737 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,0,1]
738 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm2, %zmm2
739 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} zmm2 = zero,zero,zmm2[0,8],zero,zero,zero,zero,zero,zmm2[1,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm2[19,27],zero,zero,zero,zero,zero,zmm2[20,28],zero,zero,zero,zero,zero,zero,zero,zmm2[37,45],zero,zero,zero,zero,zero,zmm2[38,46],zero,zero,zero,zmm2[55,63],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
740 ; AVX512BW-SLOW-NEXT: vporq %zmm1, %zmm2, %zmm1
741 ; AVX512BW-SLOW-NEXT: movabsq $63546854584629360, %rcx # imm = 0xE1C3870E1C3870
742 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k1
743 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm0, %zmm1 {%k1}
744 ; AVX512BW-SLOW-NEXT: vextracti32x4 $2, %zmm1, 32(%rax)
745 ; AVX512BW-SLOW-NEXT: vextracti32x4 $3, %zmm1, %xmm0
746 ; AVX512BW-SLOW-NEXT: vmovq %xmm0, 48(%rax)
747 ; AVX512BW-SLOW-NEXT: vmovdqa %ymm1, (%rax)
748 ; AVX512BW-SLOW-NEXT: vzeroupper
749 ; AVX512BW-SLOW-NEXT: retq
;
751 ; AVX512BW-FAST-LABEL: store_i8_stride7_vf8:
752 ; AVX512BW-FAST: # %bb.0:
753 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
754 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
755 ; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
756 ; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
757 ; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
758 ; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
759 ; AVX512BW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
760 ; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
761 ; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
762 ; AVX512BW-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
763 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
764 ; AVX512BW-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
765 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
766 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <0,2,4,u>
767 ; AVX512BW-FAST-NEXT: vpermi2q %ymm3, %ymm0, %ymm1
768 ; AVX512BW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [1,3,5,7,1,3,5,7]
769 ; AVX512BW-FAST-NEXT: # ymm0 = mem[0,1,0,1]
770 ; AVX512BW-FAST-NEXT: vpermd %ymm2, %ymm0, %ymm0
771 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm0[1,5,9,13],zero,zero,zero,ymm0[2,6,10,14],zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
772 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <1,3,5,u,5,1,3,u>
773 ; AVX512BW-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm3
774 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[0,4,8],zero,zero,zero,zero,ymm3[1,5,9],zero,zero,zero,zero,ymm3[2,6,18],zero,zero,zero,zero,ymm3[23,27,19],zero,zero,zero,zero,zero,zero,zero,zero
775 ; AVX512BW-FAST-NEXT: vpor %ymm0, %ymm3, %ymm0
776 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm2[0,8],zero,zero,zero,zero,zero,ymm2[1,9],zero,zero,zero,zero,zero,ymm2[2,10,18,26],zero,zero,zero,zero,zero,ymm2[19,27],zero,zero,zero,zero,zero,ymm2[20,28]
777 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,0,1]
778 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,ymm2[0,8],zero,zero,zero,zero,zero,ymm2[1,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm2[19,27],zero,zero,zero,zero,zero,ymm2[20,28],zero,zero
779 ; AVX512BW-FAST-NEXT: vpor %ymm3, %ymm2, %ymm2
780 ; AVX512BW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,2,4,6,0,2,4,6]
781 ; AVX512BW-FAST-NEXT: # ymm3 = mem[0,1,0,1]
782 ; AVX512BW-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm1
783 ; AVX512BW-FAST-NEXT: movl $236730480, %ecx # imm = 0xE1C3870
784 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
785 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm2 {%k1} = ymm1[u,u,u,u,0,4,8,u,u,u,u,1,5,9,u,u,u,u,18,22,26,u,u,u,u,19,23,27,u,u,u,u]
786 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
787 ; AVX512BW-FAST-NEXT: vmovdqa %ymm2, (%rax)
788 ; AVX512BW-FAST-NEXT: vextracti32x4 $2, %zmm0, 32(%rax)
789 ; AVX512BW-FAST-NEXT: vextracti32x4 $3, %zmm0, %xmm0
790 ; AVX512BW-FAST-NEXT: vmovq %xmm0, 48(%rax)
791 ; AVX512BW-FAST-NEXT: vzeroupper
792 ; AVX512BW-FAST-NEXT: retq
793 %in.vec0 = load <8 x i8>, ptr %in.vecptr0, align 64
794 %in.vec1 = load <8 x i8>, ptr %in.vecptr1, align 64
795 %in.vec2 = load <8 x i8>, ptr %in.vecptr2, align 64
796 %in.vec3 = load <8 x i8>, ptr %in.vecptr3, align 64
797 %in.vec4 = load <8 x i8>, ptr %in.vecptr4, align 64
798 %in.vec5 = load <8 x i8>, ptr %in.vecptr5, align 64
799 %in.vec6 = load <8 x i8>, ptr %in.vecptr6, align 64
800 %1 = shufflevector <8 x i8> %in.vec0, <8 x i8> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
801 %2 = shufflevector <8 x i8> %in.vec2, <8 x i8> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
802 %3 = shufflevector <8 x i8> %in.vec4, <8 x i8> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
803 %4 = shufflevector <16 x i8> %1, <16 x i8> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
804 %5 = shufflevector <8 x i8> %in.vec6, <8 x i8> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
805 %6 = shufflevector <16 x i8> %3, <16 x i8> %5, <24 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23>
806 %7 = shufflevector <24 x i8> %6, <24 x i8> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
807 %8 = shufflevector <32 x i8> %4, <32 x i8> %7, <56 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55>
808 %interleaved.vec = shufflevector <56 x i8> %8, <56 x i8> poison, <56 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 50, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 51, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 52, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 53, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 54, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47, i32 55>
809 store <56 x i8> %interleaved.vec, ptr %out.vec, align 64
810 ret void
811 }
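
; The IR above concatenates the seven <8 x i8> inputs into a single <56 x i8>
; value, and the final shufflevector interleaves them with stride 7: element
; 7*i+j of the stored vector is element i of input vector j.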
813 define void @store_i8_stride7_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
814 ; SSE-LABEL: store_i8_stride7_vf16:
815 ; SSE: # %bb.0:
816 ; SSE-NEXT: subq $56, %rsp
817 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
818 ; SSE-NEXT: movdqa (%rdi), %xmm3
819 ; SSE-NEXT: movdqa (%rsi), %xmm4
820 ; SSE-NEXT: movdqa (%rdx), %xmm0
821 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
822 ; SSE-NEXT: movdqa (%rcx), %xmm5
823 ; SSE-NEXT: movdqa (%r8), %xmm7
824 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
825 ; SSE-NEXT: movdqa (%r9), %xmm8
826 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,7,7,7,7]
827 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
828 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
829 ; SSE-NEXT: pand %xmm13, %xmm0
830 ; SSE-NEXT: movdqa %xmm5, %xmm1
831 ; SSE-NEXT: movdqa %xmm5, %xmm6
832 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
833 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
834 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
835 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,7,7]
836 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
837 ; SSE-NEXT: movdqa %xmm13, %xmm2
838 ; SSE-NEXT: pandn %xmm1, %xmm2
839 ; SSE-NEXT: por %xmm0, %xmm2
840 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
841 ; SSE-NEXT: movdqa %xmm0, %xmm1
842 ; SSE-NEXT: pandn %xmm2, %xmm1
843 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,7,7,7,7]
844 ; SSE-NEXT: movdqa %xmm3, %xmm12
845 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm2[2,2,2,2]
846 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
847 ; SSE-NEXT: pand %xmm10, %xmm3
848 ; SSE-NEXT: movdqa %xmm4, %xmm9
849 ; SSE-NEXT: movdqa %xmm4, %xmm5
850 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
851 ; SSE-NEXT: punpckhbw {{.*#+}} xmm9 = xmm9[8],xmm4[8],xmm9[9],xmm4[9],xmm9[10],xmm4[10],xmm9[11],xmm4[11],xmm9[12],xmm4[12],xmm9[13],xmm4[13],xmm9[14],xmm4[14],xmm9[15],xmm4[15]
852 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm9[0,1,2,3,4,6,6,7]
853 ; SSE-NEXT: movdqa %xmm9, (%rsp) # 16-byte Spill
854 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,1,3,3]
855 ; SSE-NEXT: pandn %xmm4, %xmm10
856 ; SSE-NEXT: por %xmm3, %xmm10
857 ; SSE-NEXT: pand %xmm0, %xmm10
858 ; SSE-NEXT: por %xmm1, %xmm10
859 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm7[0,1,2,3,7,7,7,7]
860 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
861 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255]
862 ; SSE-NEXT: movdqa %xmm2, %xmm4
863 ; SSE-NEXT: pandn %xmm1, %xmm4
864 ; SSE-NEXT: movdqa %xmm8, %xmm1
865 ; SSE-NEXT: movdqa %xmm8, %xmm3
866 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
867 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm8[8],xmm1[9],xmm8[9],xmm1[10],xmm8[10],xmm1[11],xmm8[11],xmm1[12],xmm8[12],xmm1[13],xmm8[13],xmm1[14],xmm8[14],xmm1[15],xmm8[15]
868 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
869 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,6,7]
870 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,3]
871 ; SSE-NEXT: pand %xmm2, %xmm1
872 ; SSE-NEXT: por %xmm4, %xmm1
873 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255]
874 ; SSE-NEXT: pand %xmm4, %xmm10
875 ; SSE-NEXT: pandn %xmm1, %xmm4
876 ; SSE-NEXT: movdqa (%rax), %xmm7
877 ; SSE-NEXT: por %xmm10, %xmm4
878 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm7[0,1,2,3,6,7,7,7]
879 ; SSE-NEXT: movdqa %xmm7, %xmm15
880 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,2]
881 ; SSE-NEXT: movdqa {{.*#+}} xmm11 = [255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
882 ; SSE-NEXT: movdqa %xmm11, %xmm7
883 ; SSE-NEXT: pandn %xmm1, %xmm7
884 ; SSE-NEXT: pand %xmm11, %xmm4
885 ; SSE-NEXT: por %xmm4, %xmm7
886 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
887 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[2,1,2,3]
888 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
889 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,0,3]
890 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
891 ; SSE-NEXT: movdqa %xmm10, %xmm4
892 ; SSE-NEXT: pandn %xmm1, %xmm4
893 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm12[3,3,3,3,4,5,6,7]
894 ; SSE-NEXT: movdqa %xmm12, %xmm5
895 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
896 ; SSE-NEXT: pand %xmm10, %xmm1
897 ; SSE-NEXT: por %xmm1, %xmm4
898 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm6[2,1,2,3]
899 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
900 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,3,0,3]
901 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[3,3,3,3,4,5,6,7]
902 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
903 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255]
904 ; SSE-NEXT: movdqa %xmm12, %xmm14
905 ; SSE-NEXT: pandn %xmm1, %xmm14
906 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
907 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm8[3,3,3,3,4,5,6,7]
908 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
909 ; SSE-NEXT: pand %xmm12, %xmm1
910 ; SSE-NEXT: por %xmm1, %xmm14
911 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255]
912 ; SSE-NEXT: pand %xmm1, %xmm4
913 ; SSE-NEXT: pandn %xmm14, %xmm1
914 ; SSE-NEXT: por %xmm4, %xmm1
915 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[2,1,2,3]
916 ; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
917 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,3,2,0]
918 ; SSE-NEXT: movdqa %xmm2, %xmm14
919 ; SSE-NEXT: pandn %xmm4, %xmm14
920 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
921 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm6[3,3,3,3,4,5,6,7]
922 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,4,4,4]
923 ; SSE-NEXT: pand %xmm2, %xmm4
924 ; SSE-NEXT: por %xmm4, %xmm14
925 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255]
926 ; SSE-NEXT: pand %xmm4, %xmm14
927 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm15[3,3,3,3,4,5,6,7]
928 ; SSE-NEXT: movdqa %xmm15, %xmm3
929 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,4,4,4]
930 ; SSE-NEXT: pandn %xmm7, %xmm4
931 ; SSE-NEXT: por %xmm14, %xmm4
932 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
933 ; SSE-NEXT: pand %xmm7, %xmm1
934 ; SSE-NEXT: pandn %xmm4, %xmm7
935 ; SSE-NEXT: por %xmm1, %xmm7
936 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
937 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm9[0,1,2,3,4,5,5,7]
938 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
939 ; SSE-NEXT: movdqa %xmm13, %xmm4
940 ; SSE-NEXT: pandn %xmm1, %xmm4
941 ; SSE-NEXT: movdqa %xmm5, %xmm15
942 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm5[0,1,2,3,6,6,6,6]
943 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
944 ; SSE-NEXT: pand %xmm13, %xmm1
945 ; SSE-NEXT: por %xmm1, %xmm4
946 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm8[0,1,2,3,6,6,6,6]
947 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
948 ; SSE-NEXT: movdqa %xmm2, %xmm7
949 ; SSE-NEXT: pandn %xmm1, %xmm7
950 ; SSE-NEXT: pshufd $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
951 ; SSE-NEXT: # xmm1 = mem[2,1,2,3]
952 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[3,1,2,0,4,5,6,7]
953 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,5,5,5]
954 ; SSE-NEXT: pand %xmm2, %xmm1
955 ; SSE-NEXT: por %xmm7, %xmm1
956 ; SSE-NEXT: pand %xmm0, %xmm1
957 ; SSE-NEXT: pandn %xmm4, %xmm0
958 ; SSE-NEXT: por %xmm1, %xmm0
959 ; SSE-NEXT: pshufd $229, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
960 ; SSE-NEXT: # xmm1 = mem[1,1,2,3]
961 ; SSE-NEXT: movdqa %xmm10, %xmm4
962 ; SSE-NEXT: pandn %xmm1, %xmm4
963 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm6[0,1,2,3,5,6,6,7]
964 ; SSE-NEXT: movdqa %xmm6, %xmm9
965 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,2]
966 ; SSE-NEXT: pand %xmm10, %xmm1
967 ; SSE-NEXT: por %xmm4, %xmm1
968 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5,6,6]
969 ; SSE-NEXT: movdqa %xmm3, %xmm6
970 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,1,3,3]
971 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
972 ; SSE-NEXT: movdqa %xmm5, %xmm7
973 ; SSE-NEXT: pandn %xmm4, %xmm7
974 ; SSE-NEXT: pand %xmm5, %xmm1
975 ; SSE-NEXT: por %xmm1, %xmm7
976 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
977 ; SSE-NEXT: pand %xmm1, %xmm0
978 ; SSE-NEXT: pandn %xmm7, %xmm1
979 ; SSE-NEXT: por %xmm0, %xmm1
980 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
981 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
982 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
983 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
984 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,0,2,1,4,5,6,7]
985 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
986 ; SSE-NEXT: movdqa %xmm5, %xmm1
987 ; SSE-NEXT: pandn %xmm0, %xmm1
988 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm8[0,0,0,0,4,5,6,7]
989 ; SSE-NEXT: movdqa %xmm8, %xmm3
990 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
991 ; SSE-NEXT: pand %xmm5, %xmm0
992 ; SSE-NEXT: por %xmm0, %xmm1
993 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
994 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
995 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
996 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,2,1,3,4,5,6,7]
997 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,0]
998 ; SSE-NEXT: movdqa %xmm11, %xmm7
999 ; SSE-NEXT: pandn %xmm0, %xmm7
1000 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm15[0,0,2,1,4,5,6,7]
1001 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
1002 ; SSE-NEXT: pand %xmm11, %xmm0
1003 ; SSE-NEXT: por %xmm0, %xmm7
1004 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
1005 ; SSE-NEXT: pand %xmm0, %xmm7
1006 ; SSE-NEXT: pandn %xmm1, %xmm0
1007 ; SSE-NEXT: por %xmm7, %xmm0
1008 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1009 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1010 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1011 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,1,3,4,5,6,7]
1012 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,1]
1013 ; SSE-NEXT: movdqa %xmm13, %xmm7
1014 ; SSE-NEXT: pandn %xmm1, %xmm7
1015 ; SSE-NEXT: movdqa %xmm9, %xmm5
1016 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm9[0,0,0,0,4,5,6,7]
1017 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1018 ; SSE-NEXT: pand %xmm13, %xmm1
1019 ; SSE-NEXT: por %xmm1, %xmm7
1020 ; SSE-NEXT: movdqa %xmm6, %xmm8
1021 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm6[0,0,0,0,4,5,6,7]
1022 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
1023 ; SSE-NEXT: movdqa %xmm2, %xmm9
1024 ; SSE-NEXT: pandn %xmm1, %xmm9
1025 ; SSE-NEXT: pand %xmm2, %xmm7
1026 ; SSE-NEXT: por %xmm7, %xmm9
1027 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
1028 ; SSE-NEXT: pand %xmm14, %xmm0
1029 ; SSE-NEXT: pandn %xmm9, %xmm14
1030 ; SSE-NEXT: por %xmm0, %xmm14
1031 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1032 ; SSE-NEXT: # xmm0 = mem[0,1,1,3]
1033 ; SSE-NEXT: movdqa %xmm10, %xmm7
1034 ; SSE-NEXT: pandn %xmm0, %xmm7
1035 ; SSE-NEXT: movdqa %xmm3, %xmm1
1036 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm3[0,1,2,3,4,5,5,7]
1037 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm0[2,1,3,2]
1038 ; SSE-NEXT: pand %xmm10, %xmm9
1039 ; SSE-NEXT: por %xmm7, %xmm9
1040 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm15[0,1,2,3,5,5,5,5]
1041 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
1042 ; SSE-NEXT: movdqa %xmm2, %xmm7
1043 ; SSE-NEXT: pandn %xmm0, %xmm7
1044 ; SSE-NEXT: pshuflw $233, (%rsp), %xmm0 # 16-byte Folded Reload
1045 ; SSE-NEXT: # xmm0 = mem[1,2,2,3,4,5,6,7]
1046 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm0[0,0,2,1]
1047 ; SSE-NEXT: pand %xmm2, %xmm6
1048 ; SSE-NEXT: por %xmm7, %xmm6
1049 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0]
1050 ; SSE-NEXT: pand %xmm0, %xmm6
1051 ; SSE-NEXT: pandn %xmm9, %xmm0
1052 ; SSE-NEXT: por %xmm6, %xmm0
1053 ; SSE-NEXT: pshuflw $233, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
1054 ; SSE-NEXT: # xmm3 = mem[1,2,2,3,4,5,6,7]
1055 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,0,0,0]
1056 ; SSE-NEXT: movdqa %xmm12, %xmm6
1057 ; SSE-NEXT: pandn %xmm3, %xmm6
1058 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm5[0,1,2,3,4,4,6,5]
1059 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,3]
1060 ; SSE-NEXT: pand %xmm12, %xmm3
1061 ; SSE-NEXT: por %xmm3, %xmm6
1062 ; SSE-NEXT: pand %xmm13, %xmm6
1063 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm8[0,1,2,3,4,5,5,7]
1064 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
1065 ; SSE-NEXT: pandn %xmm3, %xmm13
1066 ; SSE-NEXT: por %xmm6, %xmm13
1067 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
1068 ; SSE-NEXT: pand %xmm3, %xmm0
1069 ; SSE-NEXT: pandn %xmm13, %xmm3
1070 ; SSE-NEXT: por %xmm0, %xmm3
1071 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
1072 ; SSE-NEXT: # xmm0 = mem[2,2,3,3]
1073 ; SSE-NEXT: movdqa %xmm2, %xmm6
1074 ; SSE-NEXT: pandn %xmm0, %xmm6
1075 ; SSE-NEXT: movdqa %xmm1, %xmm13
1076 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm1[2,1,3,3,4,5,6,7]
1077 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
1078 ; SSE-NEXT: pand %xmm2, %xmm0
1079 ; SSE-NEXT: por %xmm6, %xmm0
1080 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
1081 ; SSE-NEXT: movdqa %xmm6, %xmm7
1082 ; SSE-NEXT: pandn %xmm0, %xmm7
1083 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1084 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,5,6,6,7]
1085 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm0[2,2,2,2]
1086 ; SSE-NEXT: movdqa %xmm12, %xmm0
1087 ; SSE-NEXT: pandn %xmm9, %xmm0
1088 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm15[0,2,2,3,4,5,6,7]
1089 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[0,1,1,3]
1090 ; SSE-NEXT: pand %xmm12, %xmm9
1091 ; SSE-NEXT: por %xmm9, %xmm0
1092 ; SSE-NEXT: pand %xmm6, %xmm0
1093 ; SSE-NEXT: por %xmm7, %xmm0
1094 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
1095 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm5[0,1,2,3,4,6,5,7]
1096 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,1,3,2]
1097 ; SSE-NEXT: movdqa %xmm11, %xmm9
1098 ; SSE-NEXT: pandn %xmm7, %xmm9
1099 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
1100 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm4[2,2,2,3,4,5,6,7]
1101 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,0,2,1]
1102 ; SSE-NEXT: pand %xmm11, %xmm7
1103 ; SSE-NEXT: por %xmm7, %xmm9
1104 ; SSE-NEXT: pand %xmm10, %xmm9
1105 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm8[2,2,2,2,4,5,6,7]
1106 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,0,0,0]
1107 ; SSE-NEXT: pandn %xmm7, %xmm10
1108 ; SSE-NEXT: por %xmm9, %xmm10
1109 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [255,255,255,0,0,0,0,255,255,255,0,0,0,0,255,255]
1110 ; SSE-NEXT: pand %xmm7, %xmm10
1111 ; SSE-NEXT: pandn %xmm0, %xmm7
1112 ; SSE-NEXT: por %xmm7, %xmm10
1113 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,1,2,2]
1114 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm15[1,1,2,3,4,5,6,7]
1115 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,0,2,1]
1116 ; SSE-NEXT: pand %xmm2, %xmm7
1117 ; SSE-NEXT: pandn %xmm0, %xmm2
1118 ; SSE-NEXT: por %xmm7, %xmm2
1119 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[1,1,2,2,4,5,6,7]
1120 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
1121 ; SSE-NEXT: pand %xmm11, %xmm0
1122 ; SSE-NEXT: pshufd $101, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
1123 ; SSE-NEXT: # xmm7 = mem[1,1,2,1]
1124 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,7,5,6,4]
1125 ; SSE-NEXT: pandn %xmm7, %xmm11
1126 ; SSE-NEXT: por %xmm0, %xmm11
1127 ; SSE-NEXT: pand %xmm6, %xmm11
1128 ; SSE-NEXT: pandn %xmm2, %xmm6
1129 ; SSE-NEXT: por %xmm11, %xmm6
1130 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm4[1,1,1,1,4,5,6,7]
1131 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
1132 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
1133 ; SSE-NEXT: pand %xmm1, %xmm0
1134 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm5[0,2,2,3,4,5,6,7]
1135 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
1136 ; SSE-NEXT: pandn %xmm2, %xmm1
1137 ; SSE-NEXT: por %xmm0, %xmm1
1138 ; SSE-NEXT: pand %xmm12, %xmm1
1139 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm8[1,1,1,1,4,5,6,7]
1140 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
1141 ; SSE-NEXT: pandn %xmm0, %xmm12
1142 ; SSE-NEXT: por %xmm1, %xmm12
1143 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
1144 ; SSE-NEXT: pand %xmm0, %xmm6
1145 ; SSE-NEXT: pandn %xmm12, %xmm0
1146 ; SSE-NEXT: por %xmm6, %xmm0
1147 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1148 ; SSE-NEXT: movdqa %xmm0, 16(%rax)
1149 ; SSE-NEXT: movdqa %xmm10, 32(%rax)
1150 ; SSE-NEXT: movdqa %xmm3, 64(%rax)
1151 ; SSE-NEXT: movdqa %xmm14, (%rax)
1152 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1153 ; SSE-NEXT: movaps %xmm0, 80(%rax)
1154 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1155 ; SSE-NEXT: movaps %xmm0, 48(%rax)
1156 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1157 ; SSE-NEXT: movaps %xmm0, 96(%rax)
1158 ; SSE-NEXT: addq $56, %rsp
1159 ; SSE-NEXT: retq
1161 ; AVX1-ONLY-LABEL: store_i8_stride7_vf16:
1162 ; AVX1-ONLY: # %bb.0:
1163 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1164 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
1165 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm4
1166 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm5
1167 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm6
1168 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm7
1169 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm0
1170 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm2
1171 ; AVX1-ONLY-NEXT: vmovdqa (%r10), %xmm1
1172 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3],xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
1173 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = zero,zero,zero,zero,zero,xmm9[6,7],zero,zero,zero,zero,zero,xmm9[8,9],zero,zero
1174 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3],xmm6[4],xmm7[4],xmm6[5],xmm7[5],xmm6[6],xmm7[6],xmm6[7],xmm7[7]
1175 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm8 = xmm10[4,5],zero,zero,zero,zero,zero,xmm10[6,7],zero,zero,zero,zero,zero,xmm10[8,9]
1176 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm8, %xmm3
1177 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm8 = zero,zero,xmm10[0,1],zero,zero,zero,zero,zero,xmm10[2,3],zero,zero,zero,zero,zero
1178 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm9[0,1],zero,zero,zero,zero,zero,xmm9[2,3],zero,zero,zero,zero,zero,xmm9[4,5]
1179 ; AVX1-ONLY-NEXT: vpor %xmm8, %xmm11, %xmm8
1180 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm8, %ymm3
1181 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3],xmm0[4],xmm2[4],xmm0[5],xmm2[5],xmm0[6],xmm2[6],xmm0[7],xmm2[7]
1182 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm8[u,u,4,5],zero,xmm8[u,u,u,u,6,7],zero,xmm8[u,u,u,u]
1183 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm12 = xmm1[u,u],zero,zero,xmm1[2,u,u,u,u],zero,zero,xmm1[3,u,u,u,u]
1184 ; AVX1-ONLY-NEXT: vpor %xmm12, %xmm11, %xmm11
1185 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm12 = xmm8[u,u,u,u,0,1,u,u,u,u,u,2,3,u,u,u]
1186 ; AVX1-ONLY-NEXT: vpalignr {{.*#+}} xmm12 = xmm12[4,5,6,7,8,9,10,11,12,13,14,15],xmm1[0,1,2,3]
1187 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm12 = xmm12[u,u,u,u,0,1,12,u,u,u,u,7,8,13,u,u]
1188 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm12, %ymm11
1189 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm12 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
1190 ; AVX1-ONLY-NEXT: vandnps %ymm11, %ymm12, %ymm11
1191 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm11, %ymm3
1192 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm5[u],zero,xmm5[7,u,u,u,u,u],zero,xmm5[8,u,u,u,u,u],zero
1193 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm12 = xmm4[u,7],zero,xmm4[u,u,u,u,u,8],zero,xmm4[u,u,u,u,u,9]
1194 ; AVX1-ONLY-NEXT: vpor %xmm11, %xmm12, %xmm11
1195 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm12 = xmm7[u,u,u],zero,xmm7[7,u,u,u,u,u],zero,xmm7[8,u,u,u,u]
1196 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm13 = xmm6[u,u,u,7],zero,xmm6[u,u,u,u,u,8],zero,xmm6[u,u,u,u]
1197 ; AVX1-ONLY-NEXT: vpor %xmm12, %xmm13, %xmm12
1198 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm13 = <u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255>
1199 ; AVX1-ONLY-NEXT: vpblendvb %xmm13, %xmm11, %xmm12, %xmm11
1200 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm10 = zero,zero,zero,zero,zero,xmm10[10,11],zero,zero,zero,zero,zero,xmm10[12,13],zero,zero
1201 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm9 = zero,zero,zero,xmm9[10,11],zero,zero,zero,zero,zero,xmm9[12,13],zero,zero,zero,zero
1202 ; AVX1-ONLY-NEXT: vpor %xmm10, %xmm9, %xmm9
1203 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm9, %ymm9
1204 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm10 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
1205 ; AVX1-ONLY-NEXT: vandps %ymm10, %ymm9, %ymm9
1206 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm2[u,u,u,u,u],zero,xmm2[7,u,u,u,u,u],zero,xmm2[8,u,u]
1207 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm12 = xmm0[u,u,u,u,u,7],zero,xmm0[u,u,u,u,u,8],zero,xmm0[u,u]
1208 ; AVX1-ONLY-NEXT: vpor %xmm11, %xmm12, %xmm11
1209 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = zero,xmm11[u,u,u,u,5,6],zero,xmm11[u,u,u,u,12,13],zero,xmm11[u]
1210 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm12 = xmm1[6,u,u,u,u],zero,zero,xmm1[7,u,u,u,u],zero,zero,xmm1[8,u]
1211 ; AVX1-ONLY-NEXT: vpor %xmm12, %xmm11, %xmm11
1212 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[8,9],zero,xmm8[u,u,u,u,10,11],zero,xmm8[u,u,u,u,12,13]
1213 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm12 = zero,zero,xmm1[4,u,u,u,u],zero,zero,xmm1[5,u,u,u,u],zero,zero
1214 ; AVX1-ONLY-NEXT: vpor %xmm12, %xmm8, %xmm8
1215 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm8, %ymm8
1216 ; AVX1-ONLY-NEXT: vandnps %ymm8, %ymm10, %ymm8
1217 ; AVX1-ONLY-NEXT: vorps %ymm8, %ymm9, %ymm8
1218 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm4[8],xmm5[8],xmm4[9],xmm5[9],xmm4[10],xmm5[10],xmm4[11],xmm5[11],xmm4[12],xmm5[12],xmm4[13],xmm5[13],xmm4[14],xmm5[14],xmm4[15],xmm5[15]
1219 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm10 = zero,zero,zero,zero,xmm9[8,9],zero,zero,zero,zero,zero,xmm9[10,11],zero,zero,zero
1220 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm7[8],xmm6[8],xmm7[9],xmm6[9],xmm7[10],xmm6[10],xmm7[11],xmm6[11],xmm7[12],xmm6[12],xmm7[13],xmm6[13],xmm7[14],xmm6[14],xmm7[15],xmm6[15]
1221 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm11[6],zero,zero,zero,zero,zero,xmm11[9,8],zero,zero,zero,zero,zero,xmm11[11,10],zero
1222 ; AVX1-ONLY-NEXT: vpor %xmm10, %xmm11, %xmm10
1223 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm6[8],xmm7[8],xmm6[9],xmm7[9],xmm6[10],xmm7[10],xmm6[11],xmm7[11],xmm6[12],xmm7[12],xmm6[13],xmm7[13],xmm6[14],xmm7[14],xmm6[15],xmm7[15]
1224 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = zero,xmm6[2,3],zero,zero,zero,zero,zero,xmm6[4,5],zero,zero,zero,zero,zero,xmm6[6]
1225 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
1226 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[2],zero,zero,zero,zero,zero,xmm4[5,4],zero,zero,zero,zero,zero,xmm4[7,6],zero
1227 ; AVX1-ONLY-NEXT: vpor %xmm7, %xmm4, %xmm4
1228 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm4, %ymm4
1229 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm0[8],xmm2[8],xmm0[9],xmm2[9],xmm0[10],xmm2[10],xmm0[11],xmm2[11],xmm0[12],xmm2[12],xmm0[13],xmm2[13],xmm0[14],xmm2[14],xmm0[15],xmm2[15]
1230 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = xmm5[u,6,7],zero,xmm5[u,u,u,u,8,9],zero,xmm5[u,u,u,u,10]
1231 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm10 = xmm1[u],zero,zero,xmm1[11,u,u,u,u],zero,zero,xmm1[12,u,u,u,u],zero
1232 ; AVX1-ONLY-NEXT: vpor %xmm7, %xmm10, %xmm7
1233 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[u,u,u,2,3],zero,xmm5[u,u,u,u,4,5],zero,xmm5[u,u,u]
1234 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm10 = xmm1[u,u,u],zero,zero,xmm1[9,u,u,u,u],zero,zero,xmm1[10,u,u,u]
1235 ; AVX1-ONLY-NEXT: vpor %xmm5, %xmm10, %xmm5
1236 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm5, %ymm5
1237 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm7 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
1238 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm7, %ymm5
1239 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm4, %ymm4
1240 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = zero,zero,zero,zero,xmm6[12,13],zero,zero,zero,zero,zero,xmm6[14,15],zero,zero,zero
1241 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = zero,zero,xmm9[12,13],zero,zero,zero,zero,zero,xmm9[14,15],zero,zero,zero,zero,zero
1242 ; AVX1-ONLY-NEXT: vpor %xmm5, %xmm6, %xmm5
1243 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm0[8],xmm2[9],xmm0[9],xmm2[10],xmm0[10],xmm2[11],xmm0[11],xmm2[12],xmm0[12],xmm2[13],xmm0[13],xmm2[14],xmm0[14],xmm2[15],xmm0[15]
1244 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[10],zero,xmm0[u,u,u,u,13,12],zero,xmm0[u,u,u,u,15,14],zero
1245 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = zero,xmm1[13,u,u,u,u],zero,zero,xmm1[14,u,u,u,u],zero,zero,xmm1[15]
1246 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm0, %xmm0
1247 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm1 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
1248 ; AVX1-ONLY-NEXT: vpblendvb %xmm1, %xmm5, %xmm0, %xmm0
1249 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, 96(%rax)
1250 ; AVX1-ONLY-NEXT: vmovaps %ymm4, 64(%rax)
1251 ; AVX1-ONLY-NEXT: vmovaps %ymm8, 32(%rax)
1252 ; AVX1-ONLY-NEXT: vmovaps %ymm3, (%rax)
1253 ; AVX1-ONLY-NEXT: vzeroupper
1254 ; AVX1-ONLY-NEXT: retq
1256 ; AVX2-SLOW-LABEL: store_i8_stride7_vf16:
1257 ; AVX2-SLOW: # %bb.0:
1258 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1259 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1260 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm0
1261 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm1
1262 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm5
1263 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm6
1264 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm3
1265 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm4
1266 ; AVX2-SLOW-NEXT: vmovdqa (%r10), %xmm2
1267 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm7
1268 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm5, %ymm8
1269 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm10
1270 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = zero,zero,zero,ymm7[5],zero,zero,zero,zero,zero,zero,ymm7[6],zero,zero,zero,zero,zero,zero,zero,ymm7[23],zero,zero,zero,zero,zero,zero,ymm7[24],zero,zero,zero,zero,zero,zero
1271 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm7[2,3,0,1]
1272 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = zero,zero,zero,zero,ymm11[5],zero,zero,zero,zero,zero,zero,ymm11[6],zero,zero,zero,zero,zero,ymm11[23],zero,zero,zero,zero,zero,zero,ymm11[24],zero,zero,zero,zero,zero,zero,ymm11[25]
1273 ; AVX2-SLOW-NEXT: vpor %ymm9, %ymm11, %ymm9
1274 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = zero,zero,zero,zero,zero,ymm8[5],zero,zero,zero,zero,zero,zero,ymm8[6],zero,zero,zero,zero,zero,zero,zero,ymm8[23],zero,zero,zero,zero,zero,zero,ymm8[24],zero,zero,zero,zero
1275 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm8[2,3,0,1]
1276 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm12 = zero,zero,zero,zero,zero,zero,ymm12[5],zero,zero,zero,zero,zero,zero,ymm12[6],zero,zero,zero,zero,zero,ymm12[23],zero,zero,zero,zero,zero,zero,ymm12[24],zero,zero,zero,zero,zero
1277 ; AVX2-SLOW-NEXT: vpor %ymm12, %ymm11, %ymm11
1278 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255>
1279 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm9, %ymm11, %ymm9
1280 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = ymm10[4],zero,zero,zero,zero,zero,zero,ymm10[5],zero,zero,zero,zero,zero,zero,ymm10[6],zero,zero,zero,zero,zero,zero,zero,ymm10[23],zero,zero,zero,zero,zero,zero,ymm10[24],zero,zero
1281 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm10[2,3,0,1]
1282 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm12 = zero,ymm12[4],zero,zero,zero,zero,zero,zero,ymm12[5],zero,zero,zero,zero,zero,zero,ymm12[6],zero,zero,zero,zero,zero,ymm12[23],zero,zero,zero,zero,zero,zero,ymm12[24],zero,zero,zero
1283 ; AVX2-SLOW-NEXT: vpor %ymm12, %ymm11, %ymm11
1284 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm12 = xmm2[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
1285 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,1,0]
1286 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = <255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u>
1287 ; AVX2-SLOW-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
1288 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm12 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
1289 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm9, %ymm11, %ymm9
1290 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm10[0,2,0,2]
1291 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,0,8,u,u,u,u,u,1,9,u,u,u,u,u,18,26,u,u,u,u,u,19,27,u,u,u,u,u]
1292 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm12 = xmm2[1,1,0,0,4,5,6,7]
1293 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[0,1,2,0]
1294 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,1,0]
1295 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u>
1296 ; AVX2-SLOW-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
1297 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm8[0,2,0,2]
1298 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm12 = zero,zero,ymm12[0,8],zero,zero,zero,zero,zero,ymm12[1,9],zero,zero,zero,zero,zero,ymm12[18,26],zero,zero,zero,zero,zero,ymm12[19,27],zero,zero,zero,zero,zero,ymm12[20,28]
1299 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm7[0,2,0,2]
1300 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm13 = ymm13[0,8],zero,zero,zero,zero,zero,ymm13[1,9],zero,zero,zero,zero,zero,ymm13[2,10],zero,zero,zero,zero,zero,ymm13[19,27],zero,zero,zero,zero,zero,ymm13[20,28],zero,zero
1301 ; AVX2-SLOW-NEXT: vpor %ymm12, %ymm13, %ymm12
1302 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
1303 ; AVX2-SLOW-NEXT: vpblendvb %ymm13, %ymm12, %ymm11, %ymm11
1304 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[1,3,1,3]
1305 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,1,9,u,u,u,u,u,2,10,u,u,u,u,u,19,27,u,u,u,u,u,20,28,u,u,u,u,u,21]
1306 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm12 = xmm2[0,1,2,3,4,5,5,6]
1307 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,3,3]
1308 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,1,0,1]
1309 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255>
1310 ; AVX2-SLOW-NEXT: vpblendvb %ymm13, %ymm10, %ymm12, %ymm10
1311 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[3,1,1,3]
1312 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[1],zero,zero,zero,zero,zero,ymm7[10,2],zero,zero,zero,zero,zero,ymm7[11,3],zero,zero,zero,zero,zero,ymm7[20,28],zero,zero,zero,zero,zero,ymm7[21,29],zero,zero,zero
1313 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[1,3,3,1]
1314 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = zero,ymm8[1,9],zero,zero,zero,zero,zero,ymm8[2,10],zero,zero,zero,zero,zero,ymm8[3,19],zero,zero,zero,zero,zero,ymm8[28,20],zero,zero,zero,zero,zero,ymm8[29,21],zero
1315 ; AVX2-SLOW-NEXT: vpor %ymm7, %ymm8, %ymm7
1316 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
1317 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm7, %ymm10, %ymm7
1318 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
1319 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm5 = zero,zero,zero,zero,xmm5[12,13],zero,zero,zero,zero,zero,xmm5[14,15],zero,zero,zero
1320 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
1321 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[12,13],zero,zero,zero,zero,zero,xmm0[14,15],zero,zero,zero,zero,zero
1322 ; AVX2-SLOW-NEXT: vpor %xmm5, %xmm0, %xmm0
1323 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
1324 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[10,u,u,u,u,u,13,12,u,u,u,u,u,15,14,u]
1325 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,7,7,7]
1326 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,1,3,2]
1327 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm3 = <255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0>
1328 ; AVX2-SLOW-NEXT: vpblendvb %xmm3, %xmm1, %xmm2, %xmm1
1329 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm2 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
1330 ; AVX2-SLOW-NEXT: vpblendvb %xmm2, %xmm0, %xmm1, %xmm0
1331 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, 96(%rax)
1332 ; AVX2-SLOW-NEXT: vmovdqa %ymm7, 64(%rax)
1333 ; AVX2-SLOW-NEXT: vmovdqa %ymm11, (%rax)
1334 ; AVX2-SLOW-NEXT: vmovdqa %ymm9, 32(%rax)
1335 ; AVX2-SLOW-NEXT: vzeroupper
1336 ; AVX2-SLOW-NEXT: retq
1338 ; AVX2-FAST-LABEL: store_i8_stride7_vf16:
1339 ; AVX2-FAST: # %bb.0:
1340 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1341 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
1342 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm0
1343 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm1
1344 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm5
1345 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm6
1346 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm3
1347 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm4
1348 ; AVX2-FAST-NEXT: vmovdqa (%r10), %xmm2
1349 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm8
1350 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm6, %ymm5, %ymm9
1351 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm10
1352 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm7 = xmm2[1,1,0,0,4,5,6,7]
1353 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [0,1,0,1,2,0,0,1]
1354 ; AVX2-FAST-NEXT: vpermd %ymm7, %ymm11, %ymm7
1355 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm10[0,2,0,2]
1356 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,0,8,u,u,u,u,u,1,9,u,u,u,u,u,18,26,u,u,u,u,u,19,27,u,u,u,u,u]
1357 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u>
1358 ; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm11, %ymm7, %ymm7
1359 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm9[0,2,0,2]
1360 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = zero,zero,ymm11[0,8],zero,zero,zero,zero,zero,ymm11[1,9],zero,zero,zero,zero,zero,ymm11[18,26],zero,zero,zero,zero,zero,ymm11[19,27],zero,zero,zero,zero,zero,ymm11[20,28]
1361 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm8[0,2,0,2]
1362 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[0,8],zero,zero,zero,zero,zero,ymm12[1,9],zero,zero,zero,zero,zero,ymm12[2,10],zero,zero,zero,zero,zero,ymm12[19,27],zero,zero,zero,zero,zero,ymm12[20,28],zero,zero
1363 ; AVX2-FAST-NEXT: vpor %ymm11, %ymm12, %ymm11
1364 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
1365 ; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm11, %ymm7, %ymm7
1366 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm11 = xmm2[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
1367 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,1,0]
1368 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm12 = [1,5,2,6,1,5,2,6]
1369 ; AVX2-FAST-NEXT: # ymm12 = mem[0,1,0,1]
1370 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm12, %ymm13
1371 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm13 = ymm13[0,4,u,u,u,u,u,1,5,u,u,u,u,u,2,6,u,u,u,u,u,19,23,u,u,u,u,u,24,28,u,u]
1372 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u>
1373 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm13, %ymm11, %ymm11
1374 ; AVX2-FAST-NEXT: vpermd %ymm9, %ymm12, %ymm13
1375 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm13 = zero,zero,zero,zero,zero,ymm13[1,5],zero,zero,zero,zero,zero,ymm13[2,6],zero,zero,zero,zero,zero,ymm13[19,23],zero,zero,zero,zero,zero,ymm13[24,28],zero,zero,zero,zero
1376 ; AVX2-FAST-NEXT: vpermd %ymm8, %ymm12, %ymm12
1377 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = zero,zero,zero,ymm12[1,5],zero,zero,zero,zero,zero,ymm12[2,6],zero,zero,zero,zero,zero,ymm12[19,23],zero,zero,zero,zero,zero,ymm12[24,28],zero,zero,zero,zero,zero,ymm12[25]
1378 ; AVX2-FAST-NEXT: vpor %ymm13, %ymm12, %ymm12
1379 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
1380 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm12, %ymm11, %ymm11
1381 ; AVX2-FAST-NEXT: vpshufhw {{.*#+}} xmm12 = xmm2[0,1,2,3,4,5,5,6]
1382 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm13 = [2,2,3,3,2,2,3,3]
1383 ; AVX2-FAST-NEXT: # ymm13 = mem[0,1,0,1]
1384 ; AVX2-FAST-NEXT: vpermd %ymm12, %ymm13, %ymm12
1385 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[1,3,1,3]
1386 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,1,9,u,u,u,u,u,2,10,u,u,u,u,u,19,27,u,u,u,u,u,20,28,u,u,u,u,u,21]
1387 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255>
1388 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm10, %ymm12, %ymm10
1389 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[3,1,1,3]
1390 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[1],zero,zero,zero,zero,zero,ymm8[10,2],zero,zero,zero,zero,zero,ymm8[11,3],zero,zero,zero,zero,zero,ymm8[20,28],zero,zero,zero,zero,zero,ymm8[21,29],zero,zero,zero
1391 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[1,3,3,1]
1392 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm9 = zero,ymm9[1,9],zero,zero,zero,zero,zero,ymm9[2,10],zero,zero,zero,zero,zero,ymm9[3,19],zero,zero,zero,zero,zero,ymm9[28,20],zero,zero,zero,zero,zero,ymm9[29,21],zero
1393 ; AVX2-FAST-NEXT: vpor %ymm8, %ymm9, %ymm8
1394 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
1395 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm8, %ymm10, %ymm8
1396 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
1397 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm5 = zero,zero,zero,zero,xmm5[12,13],zero,zero,zero,zero,zero,xmm5[14,15],zero,zero,zero
1398 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
1399 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[12,13],zero,zero,zero,zero,zero,xmm0[14,15],zero,zero,zero,zero,zero
1400 ; AVX2-FAST-NEXT: vpor %xmm5, %xmm0, %xmm0
1401 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
1402 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[10],zero,zero,zero,zero,zero,xmm1[13,12],zero,zero,zero,zero,zero,xmm1[15,14],zero
1403 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm2 = zero,xmm2[13,14,15,4,5],zero,zero,xmm2[14,15,14,15,12],zero,zero,xmm2[15]
1404 ; AVX2-FAST-NEXT: vpor %xmm2, %xmm1, %xmm1
1405 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
1406 ; AVX2-FAST-NEXT: vpblendvb %xmm2, %xmm0, %xmm1, %xmm0
1407 ; AVX2-FAST-NEXT: vmovdqa %xmm0, 96(%rax)
1408 ; AVX2-FAST-NEXT: vmovdqa %ymm8, 64(%rax)
1409 ; AVX2-FAST-NEXT: vmovdqa %ymm11, 32(%rax)
1410 ; AVX2-FAST-NEXT: vmovdqa %ymm7, (%rax)
1411 ; AVX2-FAST-NEXT: vzeroupper
1412 ; AVX2-FAST-NEXT: retq
1414 ; AVX2-FAST-PERLANE-LABEL: store_i8_stride7_vf16:
1415 ; AVX2-FAST-PERLANE: # %bb.0:
1416 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1417 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r10
1418 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm0
1419 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm1
1420 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm4
1421 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm5
1422 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm2
1423 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm3
1424 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm6
1425 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm7
1426 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm10
1427 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm8 = zero,zero,zero,ymm6[5],zero,zero,zero,zero,zero,zero,ymm6[6],zero,zero,zero,zero,zero,zero,zero,ymm6[23],zero,zero,zero,zero,zero,zero,ymm6[24],zero,zero,zero,zero,zero,zero
1428 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm6[2,3,0,1]
1429 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm9 = zero,zero,zero,zero,ymm9[5],zero,zero,zero,zero,zero,zero,ymm9[6],zero,zero,zero,zero,zero,ymm9[23],zero,zero,zero,zero,zero,zero,ymm9[24],zero,zero,zero,zero,zero,zero,ymm9[25]
1430 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm8, %ymm9, %ymm8
1431 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm9 = zero,zero,zero,zero,zero,ymm7[5],zero,zero,zero,zero,zero,zero,ymm7[6],zero,zero,zero,zero,zero,zero,zero,ymm7[23],zero,zero,zero,zero,zero,zero,ymm7[24],zero,zero,zero,zero
1432 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm7[2,3,0,1]
1433 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm11 = zero,zero,zero,zero,zero,zero,ymm11[5],zero,zero,zero,zero,zero,zero,ymm11[6],zero,zero,zero,zero,zero,ymm11[23],zero,zero,zero,zero,zero,zero,ymm11[24],zero,zero,zero,zero,zero
1434 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm11, %ymm9, %ymm9
1435 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255>
1436 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm8, %ymm9, %ymm9
1437 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm8 = ymm10[4],zero,zero,zero,zero,zero,zero,ymm10[5],zero,zero,zero,zero,zero,zero,ymm10[6],zero,zero,zero,zero,zero,zero,zero,ymm10[23],zero,zero,zero,zero,zero,zero,ymm10[24],zero,zero
1438 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm10[2,3,0,1]
1439 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm11 = zero,ymm11[4],zero,zero,zero,zero,zero,zero,ymm11[5],zero,zero,zero,zero,zero,zero,ymm11[6],zero,zero,zero,zero,zero,ymm11[23],zero,zero,zero,zero,zero,zero,ymm11[24],zero,zero,zero
1440 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm11, %ymm8, %ymm11
1441 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r10), %xmm8
1442 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm12 = xmm8[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
1443 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,1,0]
1444 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = <255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u>
1445 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
1446 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
1447 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm9, %ymm11, %ymm9
1448 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm11 = xmm8[2,3,2,3,0,1,0,1,8,9,10,11,2,3,2,3]
1449 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,1,0]
1450 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm10[0,2,0,2]
1451 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[u,u,u,u,0,8,u,u,u,u,u,1,9,u,u,u,u,u,18,26,u,u,u,u,u,19,27,u,u,u,u,u]
1452 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u>
1453 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm12, %ymm11, %ymm11
1454 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm7[0,2,0,2]
1455 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm12 = zero,zero,ymm12[0,8],zero,zero,zero,zero,zero,ymm12[1,9],zero,zero,zero,zero,zero,ymm12[18,26],zero,zero,zero,zero,zero,ymm12[19,27],zero,zero,zero,zero,zero,ymm12[20,28]
1456 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm6[0,2,0,2]
1457 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm13 = ymm13[0,8],zero,zero,zero,zero,zero,ymm13[1,9],zero,zero,zero,zero,zero,ymm13[2,10],zero,zero,zero,zero,zero,ymm13[19,27],zero,zero,zero,zero,zero,ymm13[20,28],zero,zero
1458 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm12, %ymm13, %ymm12
1459 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
1460 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm12, %ymm11, %ymm11
1461 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm12 = xmm8[8,9,10,11,8,9,10,11,10,11,12,13,10,11,12,13]
1462 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,1,0,1]
1463 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[1,3,1,3]
1464 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,1,9,u,u,u,u,u,2,10,u,u,u,u,u,19,27,u,u,u,u,u,20,28,u,u,u,u,u,21]
1465 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255>
1466 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm10, %ymm12, %ymm10
1467 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[3,1,1,3]
1468 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm6 = ymm6[1],zero,zero,zero,zero,zero,ymm6[10,2],zero,zero,zero,zero,zero,ymm6[11,3],zero,zero,zero,zero,zero,ymm6[20,28],zero,zero,zero,zero,zero,ymm6[21,29],zero,zero,zero
1469 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[1,3,3,1]
1470 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm7 = zero,ymm7[1,9],zero,zero,zero,zero,zero,ymm7[2,10],zero,zero,zero,zero,zero,ymm7[3,19],zero,zero,zero,zero,zero,ymm7[28,20],zero,zero,zero,zero,zero,ymm7[29,21],zero
1471 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm6, %ymm7, %ymm6
1472 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
1473 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm6, %ymm10, %ymm6
1474 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm4[8],xmm5[8],xmm4[9],xmm5[9],xmm4[10],xmm5[10],xmm4[11],xmm5[11],xmm4[12],xmm5[12],xmm4[13],xmm5[13],xmm4[14],xmm5[14],xmm4[15],xmm5[15]
1475 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm4 = zero,zero,zero,zero,xmm4[12,13],zero,zero,zero,zero,zero,xmm4[14,15],zero,zero,zero
1476 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
1477 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[12,13],zero,zero,zero,zero,zero,xmm0[14,15],zero,zero,zero,zero,zero
1478 ; AVX2-FAST-PERLANE-NEXT: vpor %xmm4, %xmm0, %xmm0
1479 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
1480 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[10],zero,zero,zero,zero,zero,xmm1[13,12],zero,zero,zero,zero,zero,xmm1[15,14],zero
1481 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm2 = zero,xmm8[13,14,15,4,5],zero,zero,xmm8[14,15,14,15,12],zero,zero,xmm8[15]
1482 ; AVX2-FAST-PERLANE-NEXT: vpor %xmm2, %xmm1, %xmm1
1483 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm2 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
1484 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %xmm2, %xmm0, %xmm1, %xmm0
1485 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, 96(%rax)
1486 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm6, 64(%rax)
1487 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm11, (%rax)
1488 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm9, 32(%rax)
1489 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
1490 ; AVX2-FAST-PERLANE-NEXT: retq
1492 ; AVX512F-SLOW-LABEL: store_i8_stride7_vf16:
1493 ; AVX512F-SLOW: # %bb.0:
1494 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1495 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1496 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %xmm4
1497 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %xmm5
1498 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %xmm6
1499 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %xmm7
1500 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %xmm0
1501 ; AVX512F-SLOW-NEXT: vmovdqa (%r10), %xmm1
1502 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm6, %ymm3
1503 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm2
1504 ; AVX512F-SLOW-NEXT: vinserti128 $1, (%r9), %ymm0, %ymm0
1505 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm1, %zmm0, %zmm0
1506 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm6[8],xmm7[8],xmm6[9],xmm7[9],xmm6[10],xmm7[10],xmm6[11],xmm7[11],xmm6[12],xmm7[12],xmm6[13],xmm7[13],xmm6[14],xmm7[14],xmm6[15],xmm7[15]
1507 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[u,u],zero,zero,xmm6[12,13,u,u,u],zero,zero,xmm6[14,15,u,u,u]
1508 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm4[8],xmm5[8],xmm4[9],xmm5[9],xmm4[10],xmm5[10],xmm4[11],xmm5[11],xmm4[12],xmm5[12],xmm4[13],xmm5[13],xmm4[14],xmm5[14],xmm4[15],xmm5[15]
1509 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[u,u,12,13],zero,zero,xmm4[u,u,u,14,15],zero,zero,xmm4[u,u,u]
1510 ; AVX512F-SLOW-NEXT: vpor %xmm6, %xmm4, %xmm4
1511 ; AVX512F-SLOW-NEXT: vextracti128 $1, %ymm0, %xmm5
1512 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm5[8],xmm0[8],xmm5[9],xmm0[9],xmm5[10],xmm0[10],xmm5[11],xmm0[11],xmm5[12],xmm0[12],xmm5[13],xmm0[13],xmm5[14],xmm0[14],xmm5[15],xmm0[15]
1513 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[10],zero,xmm5[u,u,u,u,13,12],zero,xmm5[u,u,u,u,15,14],zero
1514 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm6 = zero,xmm1[13,u,u,u,u],zero,zero,xmm1[14,u,u,u,u],zero,zero,xmm1[15]
1515 ; AVX512F-SLOW-NEXT: vpor %xmm6, %xmm5, %xmm5
1516 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm4, %xmm5
1517 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm2[3,1,1,3]
1518 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[1],zero,zero,ymm4[u,u,u,10,2],zero,zero,ymm4[u,u,u,11,3],zero,zero,ymm4[u,u,u,20,28],zero,zero,ymm4[u,u,u,21,29],zero,zero,ymm4[u]
1519 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm3[1,3,3,1]
1520 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = zero,ymm6[1,9,u,u,u],zero,zero,ymm6[2,10,u,u,u],zero,zero,ymm6[3,19,u,u,u],zero,zero,ymm6[28,20,u,u,u],zero,zero,ymm6[29,21,u]
1521 ; AVX512F-SLOW-NEXT: vpor %ymm4, %ymm6, %ymm4
1522 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm6 = xmm1[0,1,2,3,4,5,5,6]
1523 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
1524 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
1525 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm0[1,3,1,3]
1526 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,1,9],zero,ymm7[u,u,u,u,2,10],zero,ymm7[u,u,u,u,19,27],zero,ymm7[u,u,u,u,20,28],zero,ymm7[u,u,u,u,21]
1527 ; AVX512F-SLOW-NEXT: vpternlogq $244, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm7
1528 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm4, %ymm7
1529 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm5, %zmm7, %zmm4
1530 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm3[u,u,u,u,u,5],zero,ymm3[u,u,u,u,u,6],zero,ymm3[u,u,u,u,u],zero,ymm3[23,u,u,u,u,u],zero,ymm3[24,u,u,u,u]
1531 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm3[2,3,0,1]
1532 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,u,u],zero,ymm7[5,u,u,u,u,u],zero,ymm7[6,u,u,u,u,u,23],zero,ymm7[u,u,u,u,u,24],zero,ymm7[u,u,u,u]
1533 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255]
1534 ; AVX512F-SLOW-NEXT: vpternlogq $50, %ymm6, %ymm8, %ymm7
1535 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,2,0,2]
1536 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm3[0,8,u,u,u],zero,zero,ymm3[1,9,u,u,u],zero,zero,ymm3[18,26,u,u,u],zero,zero,ymm3[19,27,u,u,u],zero,zero,ymm3[20,28]
1537 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm3, %zmm3
1538 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm2[u,u,u,5],zero,ymm2[u,u,u,u,u,6],zero,ymm2[u,u,u,u,u],zero,ymm2[23,u,u,u,u,u],zero,ymm2[24,u,u,u,u,u],zero
1539 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm2[2,3,0,1]
1540 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[u,u,u],zero,ymm7[5,u,u,u,u,u],zero,ymm7[6,u,u,u,u,u,23],zero,ymm7[u,u,u,u,u,24],zero,ymm7[u,u,u,u,u,25]
1541 ; AVX512F-SLOW-NEXT: vpternlogq $200, %ymm6, %ymm8, %ymm7
1542 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,0,2]
1543 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[0,8],zero,zero,ymm2[u,u,u,1,9],zero,zero,ymm2[u,u,u,2,10],zero,zero,ymm2[u,u,u,19,27],zero,zero,ymm2[u,u,u,20,28],zero,zero
1544 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm2, %zmm2
1545 ; AVX512F-SLOW-NEXT: vporq %zmm3, %zmm2, %zmm2
1546 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm3 = xmm1[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
1547 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,0]
1548 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255]
1549 ; AVX512F-SLOW-NEXT: vpandn %ymm3, %ymm6, %ymm3
1550 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[1,1,0,0,4,5,6,7]
1551 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[0,1,2,0]
1552 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,0]
1553 ; AVX512F-SLOW-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm1
1554 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm1, %zmm1
1555 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm0[4],zero,ymm0[u,u,u,u,u,5],zero,ymm0[u,u,u,u,u,6],zero,ymm0[u,u,u,u,u],zero,ymm0[23,u,u,u,u,u],zero,ymm0[24,u,u]
1556 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm0[2,3,0,1]
1557 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = zero,ymm7[4,u,u,u,u,u],zero,ymm7[5,u,u,u,u,u],zero,ymm7[6,u,u,u,u,u,23],zero,ymm7[u,u,u,u,u,24],zero,ymm7[u,u]
1558 ; AVX512F-SLOW-NEXT: vpternlogq $200, %ymm3, %ymm6, %ymm7
1559 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,0,2]
1560 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,0,8],zero,ymm0[u,u,u,u,1,9],zero,ymm0[u,u,u,u,18,26],zero,ymm0[u,u,u,u,19,27],zero,ymm0[u,u,u,u]
1561 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm0, %zmm0
1562 ; AVX512F-SLOW-NEXT: vporq %zmm1, %zmm0, %zmm0
1563 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
1564 ; AVX512F-SLOW-NEXT: vmovdqa %xmm5, 96(%rax)
1565 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, (%rax)
1566 ; AVX512F-SLOW-NEXT: vmovdqa %ymm4, 64(%rax)
1567 ; AVX512F-SLOW-NEXT: vzeroupper
1568 ; AVX512F-SLOW-NEXT: retq
1570 ; AVX512F-FAST-LABEL: store_i8_stride7_vf16:
1571 ; AVX512F-FAST: # %bb.0:
1572 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1573 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
1574 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm2
1575 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %xmm3
1576 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm4
1577 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %xmm5
1578 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %xmm1
1579 ; AVX512F-FAST-NEXT: vmovdqa (%r10), %xmm0
1580 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm6
1581 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm7
1582 ; AVX512F-FAST-NEXT: vinserti128 $1, (%r9), %ymm1, %ymm1
1583 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm1
1584 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm4[8],xmm5[8],xmm4[9],xmm5[9],xmm4[10],xmm5[10],xmm4[11],xmm5[11],xmm4[12],xmm5[12],xmm4[13],xmm5[13],xmm4[14],xmm5[14],xmm4[15],xmm5[15]
1585 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[u,u],zero,zero,xmm4[12,13,u,u,u],zero,zero,xmm4[14,15,u,u,u]
1586 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm2[8],xmm3[8],xmm2[9],xmm3[9],xmm2[10],xmm3[10],xmm2[11],xmm3[11],xmm2[12],xmm3[12],xmm2[13],xmm3[13],xmm2[14],xmm3[14],xmm2[15],xmm3[15]
1587 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[u,u,12,13],zero,zero,xmm2[u,u,u,14,15],zero,zero,xmm2[u,u,u]
1588 ; AVX512F-FAST-NEXT: vpor %xmm4, %xmm2, %xmm2
1589 ; AVX512F-FAST-NEXT: vextracti128 $1, %ymm1, %xmm3
1590 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
1591 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[10],zero,xmm3[u,u,u,u,13,12],zero,xmm3[u,u,u,u,15,14],zero
1592 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm4 = zero,xmm0[13,u,u,u,u],zero,zero,xmm0[14,u,u,u,u],zero,zero,xmm0[15]
1593 ; AVX512F-FAST-NEXT: vpor %xmm4, %xmm3, %xmm3
1594 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm2, %xmm3
1595 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm7[3,1,1,3]
1596 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[1],zero,zero,ymm2[u,u,u,10,2],zero,zero,ymm2[u,u,u,11,3],zero,zero,ymm2[u,u,u,20,28],zero,zero,ymm2[u,u,u,21,29],zero,zero,ymm2[u]
1597 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm6[1,3,3,1]
1598 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm4 = zero,ymm4[1,9,u,u,u],zero,zero,ymm4[2,10,u,u,u],zero,zero,ymm4[3,19,u,u,u],zero,zero,ymm4[28,20,u,u,u],zero,zero,ymm4[29,21,u]
1599 ; AVX512F-FAST-NEXT: vpor %ymm2, %ymm4, %ymm2
1600 ; AVX512F-FAST-NEXT: vpshufhw {{.*#+}} xmm4 = xmm0[0,1,2,3,4,5,5,6]
1601 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [2,2,3,3,2,2,3,3]
1602 ; AVX512F-FAST-NEXT: # ymm5 = mem[0,1,0,1]
1603 ; AVX512F-FAST-NEXT: vpermd %ymm4, %ymm5, %ymm4
1604 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm1[1,3,1,3]
1605 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,u,1,9],zero,ymm5[u,u,u,u,2,10],zero,ymm5[u,u,u,u,19,27],zero,ymm5[u,u,u,u,20,28],zero,ymm5[u,u,u,u,21]
1606 ; AVX512F-FAST-NEXT: vpternlogq $244, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm4, %ymm5
1607 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm5
1608 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm3, %zmm5, %zmm2
1609 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm6[0,2,0,2]
1610 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,ymm4[0,8,u,u,u],zero,zero,ymm4[1,9,u,u,u],zero,zero,ymm4[18,26,u,u,u],zero,zero,ymm4[19,27,u,u,u],zero,zero,ymm4[20,28]
1611 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [1,5,2,6,1,5,2,6]
1612 ; AVX512F-FAST-NEXT: # ymm5 = mem[0,1,0,1]
1613 ; AVX512F-FAST-NEXT: vpermd %ymm6, %ymm5, %ymm6
1614 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm6 = ymm6[u,u,u],zero,zero,ymm6[1,5,u,u,u],zero,zero,ymm6[2,6,u,u,u],zero,zero,ymm6[19,23,u,u,u],zero,zero,ymm6[24,28,u,u,u],zero
1615 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm4, %zmm4
1616 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm7[0,2,0,2]
1617 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm6 = ymm6[0,8],zero,zero,ymm6[u,u,u,1,9],zero,zero,ymm6[u,u,u,2,10],zero,zero,ymm6[u,u,u,19,27],zero,zero,ymm6[u,u,u,20,28],zero,zero
1618 ; AVX512F-FAST-NEXT: vpermd %ymm7, %ymm5, %ymm7
1619 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,1,5],zero,zero,ymm7[u,u,u,2,6],zero,zero,ymm7[u,u,u,19,23],zero,zero,ymm7[u,u,u,24,28],zero,zero,ymm7[u,u,u,25]
1620 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm6, %zmm6
1621 ; AVX512F-FAST-NEXT: vporq %zmm4, %zmm6, %zmm4
1622 ; AVX512F-FAST-NEXT: vpshuflw {{.*#+}} xmm6 = xmm0[1,1,0,0,4,5,6,7]
1623 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <u,1,u,1,u,0,0,u>
1624 ; AVX512F-FAST-NEXT: vpermd %ymm6, %ymm7, %ymm6
1625 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
1626 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,0]
1627 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm6, %zmm0
1628 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm1[0,2,0,2]
1629 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm6 = ymm6[u,u,u,u,0,8],zero,ymm6[u,u,u,u,1,9],zero,ymm6[u,u,u,u,18,26],zero,ymm6[u,u,u,u,19,27],zero,ymm6[u,u,u,u]
1630 ; AVX512F-FAST-NEXT: vpermd %ymm1, %ymm5, %ymm1
1631 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[0,4],zero,ymm1[u,u,u,u,1,5],zero,ymm1[u,u,u,u,2,6],zero,ymm1[u,u,u,u,19,23],zero,ymm1[u,u,u,u,24,28],zero,ymm1[u]
1632 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm6, %zmm1
1633 ; AVX512F-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
1634 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm1
1635 ; AVX512F-FAST-NEXT: vmovdqa %xmm3, 96(%rax)
1636 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, (%rax)
1637 ; AVX512F-FAST-NEXT: vmovdqa %ymm2, 64(%rax)
1638 ; AVX512F-FAST-NEXT: vzeroupper
1639 ; AVX512F-FAST-NEXT: retq
1641 ; AVX512BW-SLOW-LABEL: store_i8_stride7_vf16:
1642 ; AVX512BW-SLOW: # %bb.0:
1643 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1644 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1645 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdi), %xmm2
1646 ; AVX512BW-SLOW-NEXT: vmovdqa (%rsi), %xmm3
1647 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdx), %xmm4
1648 ; AVX512BW-SLOW-NEXT: vmovdqa (%rcx), %xmm5
1649 ; AVX512BW-SLOW-NEXT: vmovdqa (%r8), %xmm6
1650 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm0
1651 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm1
1652 ; AVX512BW-SLOW-NEXT: vinserti128 $1, (%r9), %ymm6, %ymm6
1653 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, (%r10), %zmm6, %zmm6
1654 ; AVX512BW-SLOW-NEXT: vextracti128 $1, %ymm6, %xmm7
1655 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm7[8],xmm6[8],xmm7[9],xmm6[9],xmm7[10],xmm6[10],xmm7[11],xmm6[11],xmm7[12],xmm6[12],xmm7[13],xmm6[13],xmm7[14],xmm6[14],xmm7[15],xmm6[15]
1656 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[10,u,u,u,u,u,13,12,u,u,u,u,u,15,14,u]
1657 ; AVX512BW-SLOW-NEXT: vextracti64x4 $1, %zmm6, %ymm8
1658 ; AVX512BW-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm8[0,1,2,3,6,7,7,7]
1659 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,1,3,2]
1660 ; AVX512BW-SLOW-NEXT: movw $-32510, %cx # imm = 0x8102
1661 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
1662 ; AVX512BW-SLOW-NEXT: vmovdqu8 %xmm9, %xmm7 {%k1}
1663 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm2[8],xmm3[8],xmm2[9],xmm3[9],xmm2[10],xmm3[10],xmm2[11],xmm3[11],xmm2[12],xmm3[12],xmm2[13],xmm3[13],xmm2[14],xmm3[14],xmm2[15],xmm3[15]
1664 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm2 = zero,zero,xmm2[12,13],zero,zero,zero,zero,zero,xmm2[14,15],zero,zero,zero,zero,zero
1665 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm4[8],xmm5[8],xmm4[9],xmm5[9],xmm4[10],xmm5[10],xmm4[11],xmm5[11],xmm4[12],xmm5[12],xmm4[13],xmm5[13],xmm4[14],xmm5[14],xmm4[15],xmm5[15]
1666 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm3 = zero,zero,zero,zero,xmm3[12,13],zero,zero,zero,zero,zero,xmm3[14,15],zero,zero,zero
1667 ; AVX512BW-SLOW-NEXT: vpor %xmm2, %xmm3, %xmm2
1668 ; AVX512BW-SLOW-NEXT: movw $-7741, %cx # imm = 0xE1C3
1669 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
1670 ; AVX512BW-SLOW-NEXT: vmovdqu8 %xmm7, %xmm2 {%k1}
1671 ; AVX512BW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6]
1672 ; AVX512BW-SLOW-NEXT: # ymm3 = mem[0,1,0,1]
1673 ; AVX512BW-SLOW-NEXT: vpermw %ymm8, %ymm3, %ymm3
1674 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm6[1,3,1,3]
1675 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,1,9,u,u,u,u,u,2,10,u,u,u,u,u,19,27,u,u,u,u,u,20,28,u,u,u,u,u,21]
1676 ; AVX512BW-SLOW-NEXT: movl $67637280, %ecx # imm = 0x4081020
1677 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
1678 ; AVX512BW-SLOW-NEXT: vmovdqu8 %ymm3, %ymm4 {%k1}
1679 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm1[1,3,3,1]
1680 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = zero,ymm3[1,9],zero,zero,zero,zero,zero,ymm3[2,10],zero,zero,zero,zero,zero,ymm3[3,19],zero,zero,zero,zero,zero,ymm3[28,20],zero,zero,zero,zero,zero,ymm3[29,21],zero
1681 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm0[3,1,1,3]
1682 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[1],zero,zero,zero,zero,zero,ymm5[10,2],zero,zero,zero,zero,zero,ymm5[11,3],zero,zero,zero,zero,zero,ymm5[20,28],zero,zero,zero,zero,zero,ymm5[21,29],zero,zero,zero
1683 ; AVX512BW-SLOW-NEXT: vpor %ymm3, %ymm5, %ymm3
1684 ; AVX512BW-SLOW-NEXT: movl $-2029118408, %ecx # imm = 0x870E1C38
1685 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
1686 ; AVX512BW-SLOW-NEXT: vmovdqu8 %ymm4, %ymm3 {%k1}
1687 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm3, %zmm3
1688 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,50,50,50,52,50,50,50,52,51,51,51,51,50,50,50,52]
1689 ; AVX512BW-SLOW-NEXT: vpermi2w %zmm6, %zmm8, %zmm4
1690 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm6[4],zero,zero,zero,zero,zero,zero,ymm6[5],zero,zero,zero,zero,zero,zero,ymm6[6],zero,zero,zero,zero,zero,zero,zero,ymm6[23],zero,zero,zero,zero,zero,zero,ymm6[24],zero,zero
1691 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm6[2,3,0,1]
1692 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = zero,ymm7[4],zero,zero,zero,zero,zero,zero,ymm7[5],zero,zero,zero,zero,zero,zero,ymm7[6],zero,zero,zero,zero,zero,ymm7[23],zero,zero,zero,zero,zero,zero,ymm7[24],zero,zero,zero
1693 ; AVX512BW-SLOW-NEXT: vpor %ymm5, %ymm7, %ymm5
1694 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,2,0,2]
1695 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm6[u,u,u,u,0,8,u,u,u,u,u,1,9,u,u,u,u,u,18,26,u,u,u,u,u,19,27,u,u,u,u,u]
1696 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm6, %zmm5
1697 ; AVX512BW-SLOW-NEXT: movabsq $4647998506761461824, %rcx # imm = 0x4081020408102040
1698 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k1
1699 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm4, %zmm5 {%k1}
1700 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,zero,ymm1[5],zero,zero,zero,zero,zero,zero,ymm1[6],zero,zero,zero,zero,zero,zero,zero,ymm1[23],zero,zero,zero,zero,zero,zero,ymm1[24],zero,zero,zero,zero
1701 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm1[2,3,0,1]
1702 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,ymm6[5],zero,zero,zero,zero,zero,zero,ymm6[6],zero,zero,zero,zero,zero,ymm6[23],zero,zero,zero,zero,zero,zero,ymm6[24],zero,zero,zero,zero,zero
1703 ; AVX512BW-SLOW-NEXT: vpor %ymm4, %ymm6, %ymm4
1704 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,ymm0[5],zero,zero,zero,zero,zero,zero,ymm0[6],zero,zero,zero,zero,zero,zero,zero,ymm0[23],zero,zero,zero,zero,zero,zero,ymm0[24],zero,zero,zero,zero,zero,zero
1705 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm0[2,3,0,1]
1706 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = zero,zero,zero,zero,ymm7[5],zero,zero,zero,zero,zero,zero,ymm7[6],zero,zero,zero,zero,zero,ymm7[23],zero,zero,zero,zero,zero,zero,ymm7[24],zero,zero,zero,zero,zero,zero,ymm7[25]
1707 ; AVX512BW-SLOW-NEXT: vpor %ymm7, %ymm6, %ymm6
1708 ; AVX512BW-SLOW-NEXT: movl $202911840, %ecx # imm = 0xC183060
1709 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
1710 ; AVX512BW-SLOW-NEXT: vmovdqu8 %ymm4, %ymm6 {%k1}
1711 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,0,2]
1712 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,8],zero,zero,zero,zero,zero,ymm0[1,9],zero,zero,zero,zero,zero,ymm0[2,10],zero,zero,zero,zero,zero,ymm0[19,27],zero,zero,zero,zero,zero,ymm0[20,28],zero,zero
1713 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,0,2]
1714 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,ymm1[0,8],zero,zero,zero,zero,zero,ymm1[1,9],zero,zero,zero,zero,zero,ymm1[18,26],zero,zero,zero,zero,zero,ymm1[19,27],zero,zero,zero,zero,zero,ymm1[20,28]
1715 ; AVX512BW-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
1716 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm0, %zmm0
1717 ; AVX512BW-SLOW-NEXT: movabsq $8133997386832558192, %rcx # imm = 0x70E1C3870E1C3870
1718 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k1
1719 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm5, %zmm0 {%k1}
1720 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm2, 96(%rax)
1721 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm0, (%rax)
1722 ; AVX512BW-SLOW-NEXT: vmovdqa %ymm3, 64(%rax)
1723 ; AVX512BW-SLOW-NEXT: vzeroupper
1724 ; AVX512BW-SLOW-NEXT: retq
1726 ; AVX512BW-FAST-LABEL: store_i8_stride7_vf16:
1727 ; AVX512BW-FAST: # %bb.0:
1728 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1729 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
1730 ; AVX512BW-FAST-NEXT: vmovdqa (%rdi), %xmm0
1731 ; AVX512BW-FAST-NEXT: vmovdqa (%rsi), %xmm1
1732 ; AVX512BW-FAST-NEXT: vmovdqa (%rdx), %xmm2
1733 ; AVX512BW-FAST-NEXT: vmovdqa (%rcx), %xmm3
1734 ; AVX512BW-FAST-NEXT: vmovdqa (%r8), %xmm4
1735 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm5
1736 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm6
1737 ; AVX512BW-FAST-NEXT: vinserti128 $1, (%r9), %ymm4, %ymm4
1738 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, (%r10), %zmm4, %zmm4
1739 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
1740 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[12,13],zero,zero,zero,zero,zero,xmm0[14,15],zero,zero,zero,zero,zero
1741 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm3[8],xmm2[9],xmm3[9],xmm2[10],xmm3[10],xmm2[11],xmm3[11],xmm2[12],xmm3[12],xmm2[13],xmm3[13],xmm2[14],xmm3[14],xmm2[15],xmm3[15]
1742 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,xmm1[12,13],zero,zero,zero,zero,zero,xmm1[14,15],zero,zero,zero
1743 ; AVX512BW-FAST-NEXT: vpor %xmm0, %xmm1, %xmm0
1744 ; AVX512BW-FAST-NEXT: vextracti128 $1, %ymm4, %xmm1
1745 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm4[8],xmm1[9],xmm4[9],xmm1[10],xmm4[10],xmm1[11],xmm4[11],xmm1[12],xmm4[12],xmm1[13],xmm4[13],xmm1[14],xmm4[14],xmm1[15],xmm4[15]
1746 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[10],zero,zero,zero,zero,zero,xmm1[13,12],zero,zero,zero,zero,zero,xmm1[15,14],zero
1747 ; AVX512BW-FAST-NEXT: vextracti64x4 $1, %zmm4, %ymm2
1748 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm3 = zero,xmm2[13],zero,zero,zero,zero,zero,zero,xmm2[14],zero,zero,zero,zero,zero,zero,xmm2[15]
1749 ; AVX512BW-FAST-NEXT: vpor %xmm1, %xmm3, %xmm1
1750 ; AVX512BW-FAST-NEXT: movw $-7741, %cx # imm = 0xE1C3
1751 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
1752 ; AVX512BW-FAST-NEXT: vmovdqu8 %xmm1, %xmm0 {%k1}
1753 ; AVX512BW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6]
1754 ; AVX512BW-FAST-NEXT: # ymm1 = mem[0,1,0,1]
1755 ; AVX512BW-FAST-NEXT: vpermw %ymm2, %ymm1, %ymm1
1756 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm4[1,3,1,3]
1757 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,1,9,u,u,u,u,u,2,10,u,u,u,u,u,19,27,u,u,u,u,u,20,28,u,u,u,u,u,21]
1758 ; AVX512BW-FAST-NEXT: movl $67637280, %ecx # imm = 0x4081020
1759 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
1760 ; AVX512BW-FAST-NEXT: vmovdqu8 %ymm1, %ymm3 {%k1}
1761 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm6[1,3,3,1]
1762 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm1 = zero,ymm1[1,9],zero,zero,zero,zero,zero,ymm1[2,10],zero,zero,zero,zero,zero,ymm1[3,19],zero,zero,zero,zero,zero,ymm1[28,20],zero,zero,zero,zero,zero,ymm1[29,21],zero
1763 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm5[3,1,1,3]
1764 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[1],zero,zero,zero,zero,zero,ymm7[10,2],zero,zero,zero,zero,zero,ymm7[11,3],zero,zero,zero,zero,zero,ymm7[20,28],zero,zero,zero,zero,zero,ymm7[21,29],zero,zero,zero
1765 ; AVX512BW-FAST-NEXT: vpor %ymm1, %ymm7, %ymm1
1766 ; AVX512BW-FAST-NEXT: movl $-2029118408, %ecx # imm = 0x870E1C38
1767 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
1768 ; AVX512BW-FAST-NEXT: vmovdqu8 %ymm3, %ymm1 {%k1}
1769 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm1
1770 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,50,50,50,52,50,50,50,52,51,51,51,51,50,50,50,52]
1771 ; AVX512BW-FAST-NEXT: vpermi2w %zmm4, %zmm2, %zmm3
1772 ; AVX512BW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [1,5,2,6,1,5,2,6]
1773 ; AVX512BW-FAST-NEXT: # ymm2 = mem[0,1,0,1]
1774 ; AVX512BW-FAST-NEXT: vpermd %ymm4, %ymm2, %ymm7
1775 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,0,2]
1776 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm4, %zmm4
1777 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm4 = zmm4[u,u,u,u,0,8,u,u,u,u,u,1,9,u,u,u,u,u,18,26,u,u,u,u,u,19,27,u,u,u,u,u,32,36,u,u,u,u,u,33,37,u,u,u,u,u,34,38,u,u,u,u,u,51,55,u,u,u,u,u,56,60,u,u]
1778 ; AVX512BW-FAST-NEXT: movabsq $4647998506761461824, %rcx # imm = 0x4081020408102040
1779 ; AVX512BW-FAST-NEXT: kmovq %rcx, %k1
1780 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm3, %zmm4 {%k1}
1781 ; AVX512BW-FAST-NEXT: vpermd %ymm5, %ymm2, %ymm3
1782 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,0,2]
1783 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm5, %zmm3
1784 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[0,8],zero,zero,zero,zero,zero,zmm3[1,9],zero,zero,zero,zero,zero,zmm3[2,10],zero,zero,zero,zero,zero,zmm3[19,27],zero,zero,zero,zero,zero,zmm3[20,28],zero,zero,zero,zero,zero,zmm3[33,37],zero,zero,zero,zero,zero,zmm3[34,38],zero,zero,zero,zero,zero,zmm3[51,55],zero,zero,zero,zero,zero,zmm3[56,60],zero,zero,zero,zero,zero,zmm3[57]
1785 ; AVX512BW-FAST-NEXT: vpermd %ymm6, %ymm2, %ymm2
1786 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm6[0,2,0,2]
1787 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm5, %zmm2
1788 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm2 = zero,zero,zmm2[0,8],zero,zero,zero,zero,zero,zmm2[1,9],zero,zero,zero,zero,zero,zmm2[18,26],zero,zero,zero,zero,zero,zmm2[19,27],zero,zero,zero,zero,zero,zmm2[20,28],zero,zero,zero,zero,zero,zmm2[33,37],zero,zero,zero,zero,zero,zmm2[34,38],zero,zero,zero,zero,zero,zmm2[51,55],zero,zero,zero,zero,zero,zmm2[56,60],zero,zero,zero,zero
1789 ; AVX512BW-FAST-NEXT: vporq %zmm3, %zmm2, %zmm2
1790 ; AVX512BW-FAST-NEXT: movabsq $8133997386832558192, %rcx # imm = 0x70E1C3870E1C3870
1791 ; AVX512BW-FAST-NEXT: kmovq %rcx, %k1
1792 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm4, %zmm2 {%k1}
1793 ; AVX512BW-FAST-NEXT: vmovdqa %xmm0, 96(%rax)
1794 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm2, (%rax)
1795 ; AVX512BW-FAST-NEXT: vmovdqa %ymm1, 64(%rax)
1796 ; AVX512BW-FAST-NEXT: vzeroupper
1797 ; AVX512BW-FAST-NEXT: retq
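; The IR body below concatenates the seven <16 x i8> inputs into a single <112 x i8> vector and shuffles it into stride-7 interleaved order (v0[0], v1[0], ..., v6[0], v0[1], ...) before one wide store; the CHECK blocks above assert how each target lowers that shuffle.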
1798 %in.vec0 = load <16 x i8>, ptr %in.vecptr0, align 64
1799 %in.vec1 = load <16 x i8>, ptr %in.vecptr1, align 64
1800 %in.vec2 = load <16 x i8>, ptr %in.vecptr2, align 64
1801 %in.vec3 = load <16 x i8>, ptr %in.vecptr3, align 64
1802 %in.vec4 = load <16 x i8>, ptr %in.vecptr4, align 64
1803 %in.vec5 = load <16 x i8>, ptr %in.vecptr5, align 64
1804 %in.vec6 = load <16 x i8>, ptr %in.vecptr6, align 64
1805 %1 = shufflevector <16 x i8> %in.vec0, <16 x i8> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1806 %2 = shufflevector <16 x i8> %in.vec2, <16 x i8> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1807 %3 = shufflevector <16 x i8> %in.vec4, <16 x i8> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1808 %4 = shufflevector <32 x i8> %1, <32 x i8> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1809 %5 = shufflevector <16 x i8> %in.vec6, <16 x i8> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1810 %6 = shufflevector <32 x i8> %3, <32 x i8> %5, <48 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47>
1811 %7 = shufflevector <48 x i8> %6, <48 x i8> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1812 %8 = shufflevector <64 x i8> %4, <64 x i8> %7, <112 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111>
1813 %interleaved.vec = shufflevector <112 x i8> %8, <112 x i8> poison, <112 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 96, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 97, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 98, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 99, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 100, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 101, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 102, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 103, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 104, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 105, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 106, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 107, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 108, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 109, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 110, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95, i32 111>
1814 store <112 x i8> %interleaved.vec, ptr %out.vec, align 64
1815 ret void
1816 }
1818 define void @store_i8_stride7_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
1819 ; SSE-LABEL: store_i8_stride7_vf32:
1820 ; SSE: # %bb.0:
1821 ; SSE-NEXT: subq $360, %rsp # imm = 0x168
1822 ; SSE-NEXT: movdqa 16(%rdi), %xmm1
1823 ; SSE-NEXT: movdqa 16(%rsi), %xmm4
1824 ; SSE-NEXT: movdqa 16(%rdx), %xmm3
1825 ; SSE-NEXT: movdqa 16(%rcx), %xmm7
1826 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1827 ; SSE-NEXT: movdqa 16(%r8), %xmm6
1828 ; SSE-NEXT: movdqa 16(%r9), %xmm5
1829 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1830 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,6,6,6,6]
1831 ; SSE-NEXT: movdqa %xmm1, %xmm15
1832 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
1833 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
1834 ; SSE-NEXT: pand %xmm10, %xmm0
1835 ; SSE-NEXT: movdqa %xmm4, %xmm8
1836 ; SSE-NEXT: movdqa %xmm4, %xmm13
1837 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1838 ; SSE-NEXT: punpckhbw {{.*#+}} xmm8 = xmm8[8],xmm4[8],xmm8[9],xmm4[9],xmm8[10],xmm4[10],xmm8[11],xmm4[11],xmm8[12],xmm4[12],xmm8[13],xmm4[13],xmm8[14],xmm4[14],xmm8[15],xmm4[15]
1839 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm8[0,1,2,3,4,5,5,7]
1840 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1841 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
1842 ; SSE-NEXT: movdqa %xmm10, %xmm2
1843 ; SSE-NEXT: pandn %xmm1, %xmm2
1844 ; SSE-NEXT: por %xmm0, %xmm2
1845 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
1846 ; SSE-NEXT: movdqa %xmm1, %xmm0
1847 ; SSE-NEXT: movdqa %xmm1, %xmm11
1848 ; SSE-NEXT: pandn %xmm2, %xmm0
1849 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm3[0,1,2,3,6,6,6,6]
1850 ; SSE-NEXT: movdqa %xmm3, %xmm4
1851 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1852 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
1853 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255]
1854 ; SSE-NEXT: movdqa %xmm9, %xmm3
1855 ; SSE-NEXT: pandn %xmm1, %xmm3
1856 ; SSE-NEXT: movdqa %xmm7, %xmm2
1857 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm7[8],xmm2[9],xmm7[9],xmm2[10],xmm7[10],xmm2[11],xmm7[11],xmm2[12],xmm7[12],xmm2[13],xmm7[13],xmm2[14],xmm7[14],xmm2[15],xmm7[15]
1858 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[2,1,2,3]
1859 ; SSE-NEXT: movdqa %xmm2, %xmm7
1860 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1861 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[3,1,2,0,4,5,6,7]
1862 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,5,5,5]
1863 ; SSE-NEXT: pand %xmm9, %xmm1
1864 ; SSE-NEXT: por %xmm3, %xmm1
1865 ; SSE-NEXT: pand %xmm11, %xmm1
1866 ; SSE-NEXT: por %xmm0, %xmm1
1867 ; SSE-NEXT: movdqa {{.*#+}} xmm11 = [255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
1868 ; SSE-NEXT: pand %xmm11, %xmm1
1869 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm6[0,1,2,3,5,6,6,7]
1870 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,2]
1871 ; SSE-NEXT: movdqa %xmm11, %xmm3
1872 ; SSE-NEXT: pandn %xmm0, %xmm3
1873 ; SSE-NEXT: por %xmm1, %xmm3
1874 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
1875 ; SSE-NEXT: movdqa %xmm5, %xmm1
1876 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
1877 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
1878 ; SSE-NEXT: movdqa %xmm1, %xmm5
1879 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1880 ; SSE-NEXT: movdqa %xmm12, %xmm1
1881 ; SSE-NEXT: pandn %xmm0, %xmm1
1882 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1883 ; SSE-NEXT: pand %xmm12, %xmm3
1884 ; SSE-NEXT: por %xmm3, %xmm1
1885 ; SSE-NEXT: movdqa 16(%rax), %xmm14
1886 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm14[0,1,2,3,4,5,6,6]
1887 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,3]
1888 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
1889 ; SSE-NEXT: movdqa %xmm3, %xmm2
1890 ; SSE-NEXT: pandn %xmm0, %xmm2
1891 ; SSE-NEXT: pand %xmm3, %xmm1
1892 ; SSE-NEXT: por %xmm1, %xmm2
1893 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1894 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm7[0,1,2,3,6,5,7,7]
1895 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
1896 ; SSE-NEXT: movdqa %xmm10, %xmm1
1897 ; SSE-NEXT: pandn %xmm0, %xmm1
1898 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,7,7,7,7]
1899 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
1900 ; SSE-NEXT: pand %xmm10, %xmm0
1901 ; SSE-NEXT: por %xmm0, %xmm1
1902 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
1903 ; SSE-NEXT: movdqa %xmm2, %xmm0
1904 ; SSE-NEXT: pandn %xmm1, %xmm0
1905 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm8[0,1,2,3,4,6,6,7]
1906 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,3]
1907 ; SSE-NEXT: movdqa %xmm3, %xmm4
1908 ; SSE-NEXT: pandn %xmm1, %xmm3
1909 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1910 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm15[0,1,2,3,7,7,7,7]
1911 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
1912 ; SSE-NEXT: pand %xmm4, %xmm1
1913 ; SSE-NEXT: por %xmm1, %xmm3
1914 ; SSE-NEXT: pand %xmm2, %xmm3
1915 ; SSE-NEXT: por %xmm0, %xmm3
1916 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm6[0,1,2,3,7,7,7,7]
1917 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1918 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
1919 ; SSE-NEXT: movdqa %xmm9, %xmm1
1920 ; SSE-NEXT: pandn %xmm0, %xmm1
1921 ; SSE-NEXT: pand %xmm9, %xmm3
1922 ; SSE-NEXT: por %xmm3, %xmm1
1923 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm5[0,1,2,3,5,6,6,7]
1924 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm0[2,2,2,3]
1925 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255]
1926 ; SSE-NEXT: movdqa %xmm0, %xmm4
1927 ; SSE-NEXT: pandn %xmm3, %xmm4
1928 ; SSE-NEXT: pand %xmm0, %xmm1
1929 ; SSE-NEXT: por %xmm1, %xmm4
1930 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1931 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm14[0,1,2,3,6,7,7,7]
1932 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,2]
1933 ; SSE-NEXT: movdqa %xmm11, %xmm2
1934 ; SSE-NEXT: pandn %xmm1, %xmm2
1935 ; SSE-NEXT: pand %xmm11, %xmm4
1936 ; SSE-NEXT: por %xmm4, %xmm2
1937 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1938 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm13[2,1,2,3]
1939 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1940 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,0,3]
1941 ; SSE-NEXT: movdqa %xmm12, %xmm4
1942 ; SSE-NEXT: pandn %xmm1, %xmm4
1943 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm15[3,3,3,3,4,5,6,7]
1944 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
1945 ; SSE-NEXT: pand %xmm12, %xmm1
1946 ; SSE-NEXT: por %xmm1, %xmm4
1947 ; SSE-NEXT: pshufd $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1948 ; SSE-NEXT: # xmm1 = mem[2,1,2,3]
1949 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1950 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,3,0,3]
1951 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[3,3,3,3,4,5,6,7]
1952 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
1953 ; SSE-NEXT: movdqa {{.*#+}} xmm15 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255]
1954 ; SSE-NEXT: movdqa %xmm15, %xmm7
1955 ; SSE-NEXT: pandn %xmm1, %xmm7
1956 ; SSE-NEXT: pshuflw $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1957 ; SSE-NEXT: # xmm1 = mem[3,3,3,3,4,5,6,7]
1958 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
1959 ; SSE-NEXT: pand %xmm15, %xmm1
1960 ; SSE-NEXT: por %xmm1, %xmm7
1961 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255]
1962 ; SSE-NEXT: movdqa %xmm1, %xmm3
1963 ; SSE-NEXT: pandn %xmm7, %xmm3
1964 ; SSE-NEXT: pand %xmm1, %xmm4
1965 ; SSE-NEXT: por %xmm4, %xmm3
1966 ; SSE-NEXT: pshufd $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
1967 ; SSE-NEXT: # xmm4 = mem[2,1,2,3]
1968 ; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1969 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,3,2,0]
1970 ; SSE-NEXT: movdqa %xmm9, %xmm7
1971 ; SSE-NEXT: pandn %xmm4, %xmm7
1972 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm6[3,3,3,3,4,5,6,7]
1973 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,4,4,4]
1974 ; SSE-NEXT: pand %xmm9, %xmm4
1975 ; SSE-NEXT: por %xmm4, %xmm7
1976 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm14[3,3,3,3,4,5,6,7]
1977 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,4,4,4]
1978 ; SSE-NEXT: movdqa %xmm0, %xmm8
1979 ; SSE-NEXT: pandn %xmm4, %xmm8
1980 ; SSE-NEXT: pand %xmm0, %xmm7
1981 ; SSE-NEXT: por %xmm7, %xmm8
1982 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
1983 ; SSE-NEXT: movdqa %xmm5, %xmm2
1984 ; SSE-NEXT: pandn %xmm8, %xmm2
1985 ; SSE-NEXT: pand %xmm5, %xmm3
1986 ; SSE-NEXT: por %xmm3, %xmm2
1987 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1988 ; SSE-NEXT: movdqa (%rsi), %xmm2
1989 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm2[2,1,2,3]
1990 ; SSE-NEXT: movdqa %xmm2, %xmm6
1991 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1992 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1993 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[3,1,0,3]
1994 ; SSE-NEXT: movdqa %xmm12, %xmm3
1995 ; SSE-NEXT: pandn %xmm4, %xmm3
1996 ; SSE-NEXT: movdqa (%rdi), %xmm13
1997 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm13[3,3,3,3,4,5,6,7]
1998 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1999 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,4,4,4]
2000 ; SSE-NEXT: pand %xmm12, %xmm4
2001 ; SSE-NEXT: por %xmm4, %xmm3
2002 ; SSE-NEXT: movdqa (%rcx), %xmm2
2003 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[2,1,2,3]
2004 ; SSE-NEXT: movdqa %xmm2, %xmm14
2005 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2006 ; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2007 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,3,0,3]
2008 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[3,3,3,3,4,5,6,7]
2009 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,4,4,4]
2010 ; SSE-NEXT: movdqa %xmm15, %xmm7
2011 ; SSE-NEXT: pandn %xmm4, %xmm7
2012 ; SSE-NEXT: movdqa (%rdx), %xmm8
2013 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm8[3,3,3,3,4,5,6,7]
2014 ; SSE-NEXT: movdqa %xmm8, (%rsp) # 16-byte Spill
2015 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,4,4,4]
2016 ; SSE-NEXT: pand %xmm15, %xmm4
2017 ; SSE-NEXT: por %xmm4, %xmm7
2018 ; SSE-NEXT: pand %xmm1, %xmm3
2019 ; SSE-NEXT: pandn %xmm7, %xmm1
2020 ; SSE-NEXT: por %xmm3, %xmm1
2021 ; SSE-NEXT: movdqa (%r9), %xmm11
2022 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm11[2,1,2,3]
2023 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2024 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2025 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,3,2,0]
2026 ; SSE-NEXT: movdqa %xmm9, %xmm4
2027 ; SSE-NEXT: pandn %xmm3, %xmm4
2028 ; SSE-NEXT: movdqa (%r8), %xmm7
2029 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm7[3,3,3,3,4,5,6,7]
2030 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2031 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
2032 ; SSE-NEXT: pand %xmm9, %xmm3
2033 ; SSE-NEXT: por %xmm3, %xmm4
2034 ; SSE-NEXT: pand %xmm0, %xmm4
2035 ; SSE-NEXT: movdqa (%rax), %xmm10
2036 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm10[3,3,3,3,4,5,6,7]
2037 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2038 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
2039 ; SSE-NEXT: pandn %xmm3, %xmm0
2040 ; SSE-NEXT: por %xmm4, %xmm0
2041 ; SSE-NEXT: pand %xmm5, %xmm1
2042 ; SSE-NEXT: pandn %xmm0, %xmm5
2043 ; SSE-NEXT: por %xmm1, %xmm5
2044 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2045 ; SSE-NEXT: movdqa %xmm6, %xmm0
2046 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm6[8],xmm0[9],xmm6[9],xmm0[10],xmm6[10],xmm0[11],xmm6[11],xmm0[12],xmm6[12],xmm0[13],xmm6[13],xmm0[14],xmm6[14],xmm0[15],xmm6[15]
2047 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2048 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,5,7]
2049 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
2050 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
2051 ; SSE-NEXT: movdqa %xmm2, %xmm1
2052 ; SSE-NEXT: pandn %xmm0, %xmm1
2053 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm13[0,1,2,3,6,6,6,6]
2054 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
2055 ; SSE-NEXT: pand %xmm2, %xmm0
2056 ; SSE-NEXT: por %xmm0, %xmm1
2057 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
2058 ; SSE-NEXT: movdqa %xmm2, %xmm3
2059 ; SSE-NEXT: pandn %xmm1, %xmm3
2060 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm8[0,1,2,3,6,6,6,6]
2061 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
2062 ; SSE-NEXT: movdqa %xmm9, %xmm1
2063 ; SSE-NEXT: pandn %xmm0, %xmm1
2064 ; SSE-NEXT: movdqa %xmm14, %xmm0
2065 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm14[8],xmm0[9],xmm14[9],xmm0[10],xmm14[10],xmm0[11],xmm14[11],xmm0[12],xmm14[12],xmm0[13],xmm14[13],xmm0[14],xmm14[14],xmm0[15],xmm14[15]
2066 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2067 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
2068 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[3,1,2,0,4,5,6,7]
2069 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,5,5,5,5]
2070 ; SSE-NEXT: pand %xmm9, %xmm0
2071 ; SSE-NEXT: por %xmm1, %xmm0
2072 ; SSE-NEXT: pand %xmm2, %xmm0
2073 ; SSE-NEXT: por %xmm3, %xmm0
2074 ; SSE-NEXT: punpckhbw {{.*#+}} xmm11 = xmm11[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
2075 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2076 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm11[1,1,2,3]
2077 ; SSE-NEXT: movdqa %xmm12, %xmm3
2078 ; SSE-NEXT: pandn %xmm1, %xmm3
2079 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm7[0,1,2,3,5,6,6,7]
2080 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,2]
2081 ; SSE-NEXT: pand %xmm12, %xmm1
2082 ; SSE-NEXT: por %xmm3, %xmm1
2083 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm10[0,1,2,3,4,5,6,6]
2084 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,3]
2085 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
2086 ; SSE-NEXT: movdqa %xmm2, %xmm4
2087 ; SSE-NEXT: pandn %xmm3, %xmm4
2088 ; SSE-NEXT: pand %xmm2, %xmm1
2089 ; SSE-NEXT: por %xmm1, %xmm4
2090 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
2091 ; SSE-NEXT: pand %xmm1, %xmm0
2092 ; SSE-NEXT: pandn %xmm4, %xmm1
2093 ; SSE-NEXT: por %xmm0, %xmm1
2094 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2095 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
2096 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2097 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[2,2,3,3]
2098 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2099 ; SSE-NEXT: movdqa %xmm9, %xmm1
2100 ; SSE-NEXT: pandn %xmm0, %xmm1
2101 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2102 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm7[2,1,3,3,4,5,6,7]
2103 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
2104 ; SSE-NEXT: pand %xmm9, %xmm0
2105 ; SSE-NEXT: por %xmm1, %xmm0
2106 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
2107 ; SSE-NEXT: movdqa %xmm13, %xmm1
2108 ; SSE-NEXT: pandn %xmm0, %xmm1
2109 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
2110 ; SSE-NEXT: punpcklbw {{.*#+}} xmm6 = xmm6[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2111 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm6[0,1,2,3,5,6,6,7]
2112 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2113 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
2114 ; SSE-NEXT: movdqa %xmm15, %xmm3
2115 ; SSE-NEXT: pandn %xmm0, %xmm3
2116 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2117 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm4[0,2,2,3,4,5,6,7]
2118 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
2119 ; SSE-NEXT: pand %xmm15, %xmm0
2120 ; SSE-NEXT: por %xmm0, %xmm3
2121 ; SSE-NEXT: pand %xmm13, %xmm3
2122 ; SSE-NEXT: por %xmm1, %xmm3
2123 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,0,0,0,0,255,255,255,0,0,0,0,255,255]
2124 ; SSE-NEXT: pandn %xmm3, %xmm0
2125 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
2126 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2127 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm8[0,1,2,3,4,6,5,7]
2128 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2129 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,2]
2130 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
2131 ; SSE-NEXT: movdqa %xmm12, %xmm3
2132 ; SSE-NEXT: pandn %xmm1, %xmm3
2133 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2134 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm10[2,2,2,3,4,5,6,7]
2135 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,1]
2136 ; SSE-NEXT: pand %xmm12, %xmm1
2137 ; SSE-NEXT: por %xmm1, %xmm3
2138 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
2139 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm5[2,2,2,2,4,5,6,7]
2140 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
2141 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
2142 ; SSE-NEXT: movdqa %xmm2, %xmm14
2143 ; SSE-NEXT: pandn %xmm1, %xmm14
2144 ; SSE-NEXT: pand %xmm2, %xmm3
2145 ; SSE-NEXT: por %xmm3, %xmm14
2146 ; SSE-NEXT: pand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm14
2147 ; SSE-NEXT: por %xmm0, %xmm14
2148 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2149 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,1,2,2]
2150 ; SSE-NEXT: movdqa %xmm9, %xmm1
2151 ; SSE-NEXT: pandn %xmm0, %xmm1
2152 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm4[1,1,2,3,4,5,6,7]
2153 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
2154 ; SSE-NEXT: pand %xmm9, %xmm0
2155 ; SSE-NEXT: por %xmm1, %xmm0
2156 ; SSE-NEXT: movdqa %xmm13, %xmm1
2157 ; SSE-NEXT: pandn %xmm0, %xmm1
2158 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[1,1,2,1]
2159 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm0[0,1,2,3,7,5,6,4]
2160 ; SSE-NEXT: movdqa %xmm12, %xmm0
2161 ; SSE-NEXT: pandn %xmm3, %xmm0
2162 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm7[1,1,2,2,4,5,6,7]
2163 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,0,2,1]
2164 ; SSE-NEXT: pand %xmm12, %xmm3
2165 ; SSE-NEXT: movdqa %xmm12, %xmm11
2166 ; SSE-NEXT: por %xmm3, %xmm0
2167 ; SSE-NEXT: pand %xmm13, %xmm0
2168 ; SSE-NEXT: por %xmm1, %xmm0
2169 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm8[0,2,2,3,4,5,6,7]
2170 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
2171 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
2172 ; SSE-NEXT: movdqa %xmm13, %xmm3
2173 ; SSE-NEXT: pandn %xmm1, %xmm3
2174 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm10[1,1,1,1,4,5,6,7]
2175 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
2176 ; SSE-NEXT: pand %xmm13, %xmm1
2177 ; SSE-NEXT: por %xmm1, %xmm3
2178 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm5[1,1,1,1,4,5,6,7]
2179 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
2180 ; SSE-NEXT: movdqa %xmm15, %xmm4
2181 ; SSE-NEXT: pandn %xmm1, %xmm4
2182 ; SSE-NEXT: pand %xmm15, %xmm3
2183 ; SSE-NEXT: por %xmm3, %xmm4
2184 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
2185 ; SSE-NEXT: movdqa %xmm2, %xmm1
2186 ; SSE-NEXT: pandn %xmm4, %xmm1
2187 ; SSE-NEXT: pand %xmm2, %xmm0
2188 ; SSE-NEXT: por %xmm0, %xmm1
2189 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2190 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2191 ; SSE-NEXT: # xmm0 = mem[0,1,1,3]
2192 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
2193 ; SSE-NEXT: movdqa %xmm12, %xmm1
2194 ; SSE-NEXT: pandn %xmm0, %xmm1
2195 ; SSE-NEXT: movdqa (%rsp), %xmm8 # 16-byte Reload
2196 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm8[0,1,2,3,4,5,5,7]
2197 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,2]
2198 ; SSE-NEXT: pand %xmm12, %xmm0
2199 ; SSE-NEXT: por %xmm1, %xmm0
2200 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0]
2201 ; SSE-NEXT: movdqa %xmm6, %xmm3
2202 ; SSE-NEXT: pandn %xmm0, %xmm3
2203 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
2204 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm5[0,1,2,3,5,5,5,5]
2205 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
2206 ; SSE-NEXT: movdqa %xmm9, %xmm4
2207 ; SSE-NEXT: pandn %xmm0, %xmm4
2208 ; SSE-NEXT: pshuflw $233, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2209 ; SSE-NEXT: # xmm0 = mem[1,2,2,3,4,5,6,7]
2210 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
2211 ; SSE-NEXT: pand %xmm9, %xmm0
2212 ; SSE-NEXT: por %xmm4, %xmm0
2213 ; SSE-NEXT: pand %xmm6, %xmm0
2214 ; SSE-NEXT: por %xmm3, %xmm0
2215 ; SSE-NEXT: pshuflw $233, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
2216 ; SSE-NEXT: # xmm3 = mem[1,2,2,3,4,5,6,7]
2217 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,0,0,0]
2218 ; SSE-NEXT: movdqa %xmm15, %xmm4
2219 ; SSE-NEXT: pandn %xmm3, %xmm4
2220 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
2221 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm13[0,1,2,3,4,4,6,5]
2222 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,3]
2223 ; SSE-NEXT: pand %xmm15, %xmm3
2224 ; SSE-NEXT: por %xmm3, %xmm4
2225 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
2226 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm12[0,1,2,3,4,5,5,7]
2227 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
2228 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
2229 ; SSE-NEXT: movdqa %xmm1, %xmm10
2230 ; SSE-NEXT: pandn %xmm3, %xmm10
2231 ; SSE-NEXT: pand %xmm1, %xmm4
2232 ; SSE-NEXT: por %xmm4, %xmm10
2233 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
2234 ; SSE-NEXT: movdqa %xmm2, %xmm1
2235 ; SSE-NEXT: pandn %xmm10, %xmm1
2236 ; SSE-NEXT: pand %xmm2, %xmm0
2237 ; SSE-NEXT: por %xmm0, %xmm1
2238 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2239 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
2240 ; SSE-NEXT: punpcklbw {{.*#+}} xmm14 = xmm14[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2241 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[0,1,2,2]
2242 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2243 ; SSE-NEXT: movdqa %xmm9, %xmm3
2244 ; SSE-NEXT: pandn %xmm0, %xmm3
2245 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm5[1,1,2,3,4,5,6,7]
2246 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
2247 ; SSE-NEXT: pand %xmm9, %xmm0
2248 ; SSE-NEXT: por %xmm3, %xmm0
2249 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
2250 ; SSE-NEXT: movdqa %xmm5, %xmm3
2251 ; SSE-NEXT: pandn %xmm0, %xmm3
2252 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2253 ; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2254 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[1,1,2,1]
2255 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2256 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm0[0,1,2,3,7,5,6,4]
2257 ; SSE-NEXT: movdqa %xmm11, %xmm0
2258 ; SSE-NEXT: pandn %xmm4, %xmm0
2259 ; SSE-NEXT: movdqa %xmm8, %xmm1
2260 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm8[1,1,2,2,4,5,6,7]
2261 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,0,2,1]
2262 ; SSE-NEXT: pand %xmm11, %xmm4
2263 ; SSE-NEXT: por %xmm4, %xmm0
2264 ; SSE-NEXT: pand %xmm5, %xmm0
2265 ; SSE-NEXT: por %xmm3, %xmm0
2266 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
2267 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2268 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm8[0,2,2,3,4,5,6,7]
2269 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2270 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,1,1,3]
2271 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
2272 ; SSE-NEXT: movdqa %xmm2, %xmm4
2273 ; SSE-NEXT: pandn %xmm3, %xmm4
2274 ; SSE-NEXT: movdqa %xmm13, %xmm5
2275 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm13[1,1,1,1,4,5,6,7]
2276 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,0,0,0]
2277 ; SSE-NEXT: pand %xmm2, %xmm3
2278 ; SSE-NEXT: por %xmm3, %xmm4
2279 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm12[1,1,1,1,4,5,6,7]
2280 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,0,0,0]
2281 ; SSE-NEXT: movdqa %xmm15, %xmm10
2282 ; SSE-NEXT: pandn %xmm3, %xmm10
2283 ; SSE-NEXT: pand %xmm15, %xmm4
2284 ; SSE-NEXT: por %xmm4, %xmm10
2285 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
2286 ; SSE-NEXT: pand %xmm3, %xmm0
2287 ; SSE-NEXT: pandn %xmm10, %xmm3
2288 ; SSE-NEXT: por %xmm0, %xmm3
2289 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2290 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm7[0,0,2,1,4,5,6,7]
2291 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
2292 ; SSE-NEXT: movdqa %xmm2, %xmm3
2293 ; SSE-NEXT: pandn %xmm0, %xmm3
2294 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm1[0,0,0,0,4,5,6,7]
2295 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
2296 ; SSE-NEXT: pand %xmm2, %xmm0
2297 ; SSE-NEXT: por %xmm0, %xmm3
2298 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
2299 ; SSE-NEXT: movdqa %xmm0, %xmm4
2300 ; SSE-NEXT: pandn %xmm3, %xmm4
2301 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm14[0,2,1,3,4,5,6,7]
2302 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm3[0,1,1,0]
2303 ; SSE-NEXT: movdqa %xmm11, %xmm3
2304 ; SSE-NEXT: pandn %xmm10, %xmm3
2305 ; SSE-NEXT: pshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Folded Reload
2306 ; SSE-NEXT: # xmm10 = mem[0,0,2,1,4,5,6,7]
2307 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,0,2,1]
2308 ; SSE-NEXT: pand %xmm11, %xmm10
2309 ; SSE-NEXT: por %xmm10, %xmm3
2310 ; SSE-NEXT: pand %xmm0, %xmm3
2311 ; SSE-NEXT: por %xmm4, %xmm3
2312 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm8[0,1,1,3,4,5,6,7]
2313 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,0,2,1]
2314 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
2315 ; SSE-NEXT: movdqa %xmm13, %xmm10
2316 ; SSE-NEXT: pandn %xmm4, %xmm10
2317 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm5[0,0,0,0,4,5,6,7]
2318 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,0,0,0]
2319 ; SSE-NEXT: pand %xmm13, %xmm4
2320 ; SSE-NEXT: por %xmm4, %xmm10
2321 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm12[0,0,0,0,4,5,6,7]
2322 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,0,0,0]
2323 ; SSE-NEXT: movdqa %xmm9, %xmm2
2324 ; SSE-NEXT: pandn %xmm4, %xmm2
2325 ; SSE-NEXT: pand %xmm9, %xmm10
2326 ; SSE-NEXT: por %xmm10, %xmm2
2327 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
2328 ; SSE-NEXT: movdqa %xmm7, %xmm1
2329 ; SSE-NEXT: pandn %xmm2, %xmm1
2330 ; SSE-NEXT: pand %xmm7, %xmm3
2331 ; SSE-NEXT: por %xmm3, %xmm1
2332 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2333 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2334 ; SSE-NEXT: # xmm2 = mem[0,1,1,3]
2335 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
2336 ; SSE-NEXT: movdqa %xmm1, %xmm3
2337 ; SSE-NEXT: pandn %xmm2, %xmm3
2338 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2339 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm4[0,1,2,3,4,5,5,7]
2340 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,1,3,2]
2341 ; SSE-NEXT: pand %xmm1, %xmm2
2342 ; SSE-NEXT: por %xmm3, %xmm2
2343 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
2344 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm12[0,1,2,3,5,5,5,5]
2345 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,2,2,2]
2346 ; SSE-NEXT: movdqa %xmm9, %xmm5
2347 ; SSE-NEXT: pandn %xmm3, %xmm5
2348 ; SSE-NEXT: pshuflw $233, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
2349 ; SSE-NEXT: # xmm3 = mem[1,2,2,3,4,5,6,7]
2350 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,0,2,1]
2351 ; SSE-NEXT: pand %xmm9, %xmm3
2352 ; SSE-NEXT: por %xmm5, %xmm3
2353 ; SSE-NEXT: pand %xmm6, %xmm3
2354 ; SSE-NEXT: pandn %xmm2, %xmm6
2355 ; SSE-NEXT: por %xmm3, %xmm6
2356 ; SSE-NEXT: pshuflw $233, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2357 ; SSE-NEXT: # xmm2 = mem[1,2,2,3,4,5,6,7]
2358 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
2359 ; SSE-NEXT: movdqa %xmm15, %xmm3
2360 ; SSE-NEXT: pandn %xmm2, %xmm3
2361 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
2362 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm8[0,1,2,3,4,4,6,5]
2363 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,1,3,3]
2364 ; SSE-NEXT: pand %xmm15, %xmm2
2365 ; SSE-NEXT: por %xmm2, %xmm3
2366 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2367 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm10[0,1,2,3,4,5,5,7]
2368 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
2369 ; SSE-NEXT: movdqa %xmm13, %xmm5
2370 ; SSE-NEXT: pandn %xmm2, %xmm5
2371 ; SSE-NEXT: pand %xmm13, %xmm3
2372 ; SSE-NEXT: por %xmm3, %xmm5
2373 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
2374 ; SSE-NEXT: pand %xmm14, %xmm6
2375 ; SSE-NEXT: pandn %xmm5, %xmm14
2376 ; SSE-NEXT: por %xmm6, %xmm14
2377 ; SSE-NEXT: pshuflw $96, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2378 ; SSE-NEXT: # xmm1 = mem[0,0,2,1,4,5,6,7]
2379 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
2380 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
2381 ; SSE-NEXT: movdqa %xmm3, %xmm2
2382 ; SSE-NEXT: pandn %xmm1, %xmm2
2383 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm4[0,0,0,0,4,5,6,7]
2384 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
2385 ; SSE-NEXT: pand %xmm3, %xmm1
2386 ; SSE-NEXT: movdqa %xmm3, %xmm4
2387 ; SSE-NEXT: por %xmm1, %xmm2
2388 ; SSE-NEXT: pshuflw $216, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2389 ; SSE-NEXT: # xmm1 = mem[0,2,1,3,4,5,6,7]
2390 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,0]
2391 ; SSE-NEXT: movdqa %xmm11, %xmm3
2392 ; SSE-NEXT: pandn %xmm1, %xmm3
2393 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm12[0,0,2,1,4,5,6,7]
2394 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,1]
2395 ; SSE-NEXT: pand %xmm11, %xmm1
2396 ; SSE-NEXT: por %xmm1, %xmm3
2397 ; SSE-NEXT: pand %xmm0, %xmm3
2398 ; SSE-NEXT: pandn %xmm2, %xmm0
2399 ; SSE-NEXT: por %xmm3, %xmm0
2400 ; SSE-NEXT: pshuflw $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2401 ; SSE-NEXT: # xmm1 = mem[0,1,1,3,4,5,6,7]
2402 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,1]
2403 ; SSE-NEXT: movdqa %xmm13, %xmm2
2404 ; SSE-NEXT: pandn %xmm1, %xmm2
2405 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm8[0,0,0,0,4,5,6,7]
2406 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
2407 ; SSE-NEXT: pand %xmm13, %xmm1
2408 ; SSE-NEXT: por %xmm1, %xmm2
2409 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm10[0,0,0,0,4,5,6,7]
2410 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
2411 ; SSE-NEXT: movdqa %xmm9, %xmm3
2412 ; SSE-NEXT: pandn %xmm1, %xmm3
2413 ; SSE-NEXT: pand %xmm9, %xmm2
2414 ; SSE-NEXT: por %xmm2, %xmm3
2415 ; SSE-NEXT: pand %xmm7, %xmm0
2416 ; SSE-NEXT: pandn %xmm3, %xmm7
2417 ; SSE-NEXT: por %xmm0, %xmm7
2418 ; SSE-NEXT: movdqa (%rsp), %xmm5 # 16-byte Reload
2419 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm5[0,1,2,3,7,7,7,7]
2420 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
2421 ; SSE-NEXT: pand %xmm13, %xmm0
2422 ; SSE-NEXT: pshufhw $246, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2423 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,6,5,7,7]
2424 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
2425 ; SSE-NEXT: pandn %xmm1, %xmm13
2426 ; SSE-NEXT: por %xmm0, %xmm13
2427 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
2428 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm6[0,1,2,3,7,7,7,7]
2429 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
2430 ; SSE-NEXT: movdqa %xmm4, %xmm3
2431 ; SSE-NEXT: pand %xmm4, %xmm0
2432 ; SSE-NEXT: pshufhw $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2433 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,4,6,6,7]
2434 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,3]
2435 ; SSE-NEXT: pandn %xmm1, %xmm3
2436 ; SSE-NEXT: por %xmm0, %xmm3
2437 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
2438 ; SSE-NEXT: pand %xmm0, %xmm3
2439 ; SSE-NEXT: pandn %xmm13, %xmm0
2440 ; SSE-NEXT: por %xmm3, %xmm0
2441 ; SSE-NEXT: movdqa %xmm0, %xmm3
2442 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2443 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,7,7,7,7]
2444 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
2445 ; SSE-NEXT: movdqa %xmm9, %xmm1
2446 ; SSE-NEXT: pandn %xmm0, %xmm1
2447 ; SSE-NEXT: pshufhw $233, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
2448 ; SSE-NEXT: # xmm0 = mem[0,1,2,3,5,6,6,7]
2449 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
2450 ; SSE-NEXT: pand %xmm9, %xmm0
2451 ; SSE-NEXT: por %xmm1, %xmm0
2452 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
2453 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm13[0,1,2,3,6,7,7,7]
2454 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,2]
2455 ; SSE-NEXT: movdqa %xmm11, %xmm2
2456 ; SSE-NEXT: pandn %xmm1, %xmm2
2457 ; SSE-NEXT: pand %xmm11, %xmm0
2458 ; SSE-NEXT: por %xmm0, %xmm2
2459 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
2460 ; SSE-NEXT: pand %xmm0, %xmm3
2461 ; SSE-NEXT: pandn %xmm2, %xmm0
2462 ; SSE-NEXT: por %xmm3, %xmm0
2463 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2464 ; SSE-NEXT: # xmm1 = mem[2,2,3,3]
2465 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm5[2,1,3,3,4,5,6,7]
2466 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,2,1]
2467 ; SSE-NEXT: pand %xmm9, %xmm2
2468 ; SSE-NEXT: pandn %xmm1, %xmm9
2469 ; SSE-NEXT: por %xmm2, %xmm9
2470 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm6[0,2,2,3,4,5,6,7]
2471 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
2472 ; SSE-NEXT: pand %xmm15, %xmm1
2473 ; SSE-NEXT: pshufhw $233, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2474 ; SSE-NEXT: # xmm2 = mem[0,1,2,3,5,6,6,7]
2475 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,2,2,2]
2476 ; SSE-NEXT: pandn %xmm2, %xmm15
2477 ; SSE-NEXT: por %xmm1, %xmm15
2478 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
2479 ; SSE-NEXT: pand %xmm1, %xmm15
2480 ; SSE-NEXT: pandn %xmm9, %xmm1
2481 ; SSE-NEXT: por %xmm15, %xmm1
2482 ; SSE-NEXT: movdqa %xmm1, %xmm3
2483 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm4[2,2,2,3,4,5,6,7]
2484 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,1]
2485 ; SSE-NEXT: pand %xmm11, %xmm1
2486 ; SSE-NEXT: pshufhw $216, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2487 ; SSE-NEXT: # xmm2 = mem[0,1,2,3,4,6,5,7]
2488 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,1,3,2]
2489 ; SSE-NEXT: pandn %xmm2, %xmm11
2490 ; SSE-NEXT: por %xmm1, %xmm11
2491 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
2492 ; SSE-NEXT: pand %xmm4, %xmm11
2493 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm13[2,2,2,2,4,5,6,7]
2494 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
2495 ; SSE-NEXT: pandn %xmm1, %xmm4
2496 ; SSE-NEXT: por %xmm11, %xmm4
2497 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,0,0,0,0,255,255,255,0,0,0,0,255,255]
2498 ; SSE-NEXT: pand %xmm1, %xmm4
2499 ; SSE-NEXT: pandn %xmm3, %xmm1
2500 ; SSE-NEXT: por %xmm1, %xmm4
2501 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2502 ; SSE-NEXT: movdqa %xmm4, 32(%rax)
2503 ; SSE-NEXT: movdqa %xmm0, 96(%rax)
2504 ; SSE-NEXT: movdqa %xmm7, 112(%rax)
2505 ; SSE-NEXT: movdqa %xmm14, 176(%rax)
2506 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2507 ; SSE-NEXT: movaps %xmm0, (%rax)
2508 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2509 ; SSE-NEXT: movaps %xmm0, 16(%rax)
2510 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2511 ; SSE-NEXT: movaps %xmm0, 64(%rax)
2512 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2513 ; SSE-NEXT: movaps %xmm0, 128(%rax)
2514 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2515 ; SSE-NEXT: movaps %xmm0, 144(%rax)
2516 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2517 ; SSE-NEXT: movaps %xmm0, 80(%rax)
2518 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2519 ; SSE-NEXT: movaps %xmm0, 48(%rax)
2520 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2521 ; SSE-NEXT: movaps %xmm0, 160(%rax)
2522 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2523 ; SSE-NEXT: movaps %xmm0, 208(%rax)
2524 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2525 ; SSE-NEXT: movaps %xmm0, 192(%rax)
2526 ; SSE-NEXT: addq $360, %rsp # imm = 0x168
2527 ; SSE-NEXT: retq
2529 ; AVX1-ONLY-LABEL: store_i8_stride7_vf32:
2530 ; AVX1-ONLY: # %bb.0:
2531 ; AVX1-ONLY-NEXT: subq $216, %rsp
2532 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2533 ; AVX1-ONLY-NEXT: vmovdqa 16(%rax), %xmm14
2534 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm14[u,u,u],zero,zero,xmm14[9,u,u,u,u],zero,zero,xmm14[10,u,u,u]
2535 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm2
2536 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2537 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm3
2538 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2539 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
2540 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2541 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[u,u,u,2,3],zero,xmm1[u,u,u,u,4,5],zero,xmm1[u,u,u]
2542 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm1, %xmm0
2543 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm11 = <u,u,u,u,u,128,7,u,u,u,u,u,128,8,u,u>
2544 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm2, %xmm1
2545 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm12 = <u,u,u,u,u,7,128,u,u,u,u,u,8,128,u,u>
2546 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm3, %xmm3
2547 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm3, %xmm1
2548 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm8 = <128,u,u,u,u,5,6,128,u,u,u,u,12,13,128,u>
2549 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm1, %xmm1
2550 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm5 = <6,u,u,u,u,128,128,7,u,u,u,u,128,128,8,u>
2551 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm14, %xmm3
2552 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm1, %xmm1
2553 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm7
2554 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm1
2555 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2556 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm4 = <u,u,u,128,7,u,u,u,u,u,128,8,u,u,u,u>
2557 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm1, %xmm0
2558 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm2
2559 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2560 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm9 = <u,u,u,7,128,u,u,u,u,u,8,128,u,u,u,u>
2561 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm3
2562 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm3, %xmm0
2563 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
2564 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2565 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[u,2,3,u,u,u,u,u,4,5,u,u,u,u,u,6]
2566 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm15
2567 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm10
2568 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm1 = <u,128,7,u,u,u,u,u,128,8,u,u,u,u,u,128>
2569 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm10, %xmm0
2570 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm6
2571 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm6[u,7],zero,xmm6[u,u,u,u,u,8],zero,xmm6[u,u,u,u,u,9]
2572 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm2, %xmm0
2573 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm10[8],xmm6[8],xmm10[9],xmm6[9],xmm10[10],xmm6[10],xmm10[11],xmm6[11],xmm10[12],xmm6[12],xmm10[13],xmm6[13],xmm10[14],xmm6[14],xmm10[15],xmm6[15]
2574 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[2,u,u,u,u,u,5,4,u,u,u,u,u,7,6,u]
2575 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
2576 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0]
2577 ; AVX1-ONLY-NEXT: vandnps %ymm15, %ymm2, %ymm15
2578 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm0, %ymm0
2579 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm15, %ymm0
2580 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
2581 ; AVX1-ONLY-NEXT: vandnps %ymm7, %ymm2, %ymm7
2582 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm0, %ymm0
2583 ; AVX1-ONLY-NEXT: vorps %ymm7, %ymm0, %ymm0
2584 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2585 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm2
2586 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm2, %xmm0
2587 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, %xmm7
2588 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2589 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm3
2590 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2591 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm3, %xmm2
2592 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm2, %xmm0
2593 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm0, %xmm0
2594 ; AVX1-ONLY-NEXT: vmovdqa (%rax), %xmm8
2595 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2596 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm8, %xmm2
2597 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm0, %xmm0
2598 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm5 = <128,128,4,u,u,u,u,128,128,5,u,u,u,u,128,128>
2599 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm8, %xmm2
2600 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, %xmm8
2601 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm3[0],xmm7[0],xmm3[1],xmm7[1],xmm3[2],xmm7[2],xmm3[3],xmm7[3],xmm3[4],xmm7[4],xmm3[5],xmm7[5],xmm3[6],xmm7[6],xmm3[7],xmm7[7]
2602 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2603 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm3[8,9],zero,xmm3[u,u,u,u,10,11],zero,xmm3[u,u,u,u,12,13]
2604 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm5, %xmm2
2605 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm15
2606 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm3
2607 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2608 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm3, %xmm0
2609 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm13
2610 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm13, %xmm2
2611 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm2, %xmm0
2612 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm13[0],xmm3[0],xmm13[1],xmm3[1],xmm13[2],xmm3[2],xmm13[3],xmm3[3],xmm13[4],xmm3[4],xmm13[5],xmm3[5],xmm13[6],xmm3[6],xmm13[7],xmm3[7]
2613 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2614 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[u,u,u,u,u,10,11,u,u,u,u,u,12,13,u,u]
2615 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
2616 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm5
2617 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm5, %xmm2
2618 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm3
2619 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm3[u,7],zero,xmm3[u,u,u,u,u,8],zero,xmm3[u,u,u,u,u,9]
2620 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm11, %xmm2
2621 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3],xmm3[4],xmm5[4],xmm3[5],xmm5[5],xmm3[6],xmm5[6],xmm3[7],xmm5[7]
2622 ; AVX1-ONLY-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2623 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm12 = <u,u,u,10,11,u,u,u,u,u,12,13,u,u,u,u>
2624 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm9, %xmm11
2625 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm11, %ymm11
2626 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255]
2627 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm4, %ymm0
2628 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm11, %ymm11
2629 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm11, %ymm0
2630 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm11 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
2631 ; AVX1-ONLY-NEXT: vandnps %ymm15, %ymm11, %ymm15
2632 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm11, %ymm0
2633 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm15, %ymm0
2634 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2635 ; AVX1-ONLY-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2636 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm14, %xmm0
2637 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
2638 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2639 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
2640 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, (%rsp) # 16-byte Spill
2641 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm1[8,9],zero,xmm1[u,u,u,u,10,11],zero,xmm1[u,u,u,u,12,13]
2642 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm11, %xmm0
2643 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm14[u,u],zero,zero,xmm14[2,u,u,u,u],zero,zero,xmm14[3,u,u,u,u]
2644 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm15 = xmm1[u,u,4,5],zero,xmm1[u,u,u,u,6,7],zero,xmm1[u,u,u,u]
2645 ; AVX1-ONLY-NEXT: vpor %xmm11, %xmm15, %xmm11
2646 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm11, %ymm15
2647 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm6[0],xmm10[0],xmm6[1],xmm10[1],xmm6[2],xmm10[2],xmm6[3],xmm10[3],xmm6[4],xmm10[4],xmm6[5],xmm10[5],xmm6[6],xmm10[6],xmm6[7],xmm10[7]
2648 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm11, %xmm0
2649 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm12 = xmm11[u,u,u,u,u,6,7,u,u,u,u,u,8,9,u,u]
2650 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm12, %ymm12
2651 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2652 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2653 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
2654 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm9[u,u,u,u,u,10,11,u,u,u,u,u,12,13,u,u]
2655 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm14 = xmm9[4,5,u,u,u,u,u,6,7,u,u,u,u,u,8,9]
2656 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm14, %ymm0
2657 ; AVX1-ONLY-NEXT: vandnps %ymm12, %ymm4, %ymm12
2658 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm0, %ymm0
2659 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm12, %ymm0
2660 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0]
2661 ; AVX1-ONLY-NEXT: vandnps %ymm15, %ymm4, %ymm12
2662 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm0, %ymm0
2663 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm12, %ymm0
2664 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2665 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm6[8],xmm10[8],xmm6[9],xmm10[9],xmm6[10],xmm10[10],xmm6[11],xmm10[11],xmm6[12],xmm10[12],xmm6[13],xmm10[13],xmm6[14],xmm10[14],xmm6[15],xmm10[15]
2666 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[u,u,12,13,u,u,u,u,u,14,15,u,u,u,u,u]
2667 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm10 = <u,u,u,u,8,9,u,u,u,u,u,10,11,u,u,u>
2668 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm0, %xmm0
2669 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm0
2670 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm1[8],xmm2[8],xmm1[9],xmm2[9],xmm1[10],xmm2[10],xmm1[11],xmm2[11],xmm1[12],xmm2[12],xmm1[13],xmm2[13],xmm1[14],xmm2[14],xmm1[15],xmm2[15]
2671 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2672 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = xmm1[u,u,u,u,12,13,u,u,u,u,u,14,15,u,u,u]
2673 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm15 = <6,u,u,u,u,u,9,8,u,u,u,u,u,11,10,u>
2674 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm4, %xmm4
2675 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm6
2676 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
2677 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm4, %ymm0
2678 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm6, %ymm6
2679 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm6, %ymm0
2680 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
2681 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
2682 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm12 = xmm12[u,6,7,u,u,u,u,u,8,9,u,u,u,u,u,10]
2683 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[10,u,u,u,u,u,13,12,u,u,u,u,u,15,14,u]
2684 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm12, %ymm6
2685 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm12 = [255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255]
2686 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm12, %ymm0
2687 ; AVX1-ONLY-NEXT: vandnps %ymm6, %ymm12, %ymm6
2688 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm0, %ymm0
2689 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm6
2690 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[0],zero,xmm6[2,3,4,5,6,7],zero,xmm6[9,10,11,12,13,14],zero
2691 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
2692 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm12 = zero,xmm14[13],zero,zero,zero,zero,zero,zero,xmm14[14],zero,zero,zero,zero,zero,zero,xmm14[15]
2693 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm12, %xmm1
2694 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2695 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,2],zero,xmm0[4,5,6,7,8,9],zero,xmm0[11,12,13,14,15]
2696 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = zero,zero,zero,xmm14[11],zero,zero,zero,zero,zero,zero,xmm14[12],zero,zero,zero,zero,zero
2697 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm0, %xmm0
2698 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm12 = xmm3[8],xmm5[8],xmm3[9],xmm5[9],xmm3[10],xmm5[10],xmm3[11],xmm5[11],xmm3[12],xmm5[12],xmm3[13],xmm5[13],xmm3[14],xmm5[14],xmm3[15],xmm5[15]
2699 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm5[8],xmm3[8],xmm5[9],xmm3[9],xmm5[10],xmm3[10],xmm5[11],xmm3[11],xmm5[12],xmm3[12],xmm5[13],xmm3[13],xmm5[14],xmm3[14],xmm5[15],xmm3[15]
2700 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[2,u,u,u,u,u,5,4,u,u,u,u,u,7,6,u]
2701 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm12, %xmm2
2702 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
2703 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
2704 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm13[8],xmm3[9],xmm13[9],xmm3[10],xmm13[10],xmm3[11],xmm13[11],xmm3[12],xmm13[12],xmm3[13],xmm13[13],xmm3[14],xmm13[14],xmm3[15],xmm13[15]
2705 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm2, %xmm2
2706 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm13[8],xmm3[8],xmm13[9],xmm3[9],xmm13[10],xmm3[10],xmm13[11],xmm3[11],xmm13[12],xmm3[12],xmm13[13],xmm3[13],xmm13[14],xmm3[14],xmm13[15],xmm3[15]
2707 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm3[u,2,3,u,u,u,u,u,4,5,u,u,u,u,u,6]
2708 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm5, %ymm2
2709 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm5 = [0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
2710 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm5, %ymm1
2711 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm2, %ymm2
2712 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm2, %ymm1
2713 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
2714 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm8[u,u,u],zero,zero,xmm8[9,u,u,u,u],zero,zero,xmm8[10,u,u,u]
2715 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2716 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
2717 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm15[8],xmm10[8],xmm15[9],xmm10[9],xmm15[10],xmm10[10],xmm15[11],xmm10[11],xmm15[12],xmm10[12],xmm15[13],xmm10[13],xmm15[14],xmm10[14],xmm15[15],xmm10[15]
2718 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = xmm5[u,u,u,2,3],zero,xmm5[u,u,u,u,4,5],zero,xmm5[u,u,u]
2719 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm6, %xmm2
2720 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[u,6,7],zero,xmm5[u,u,u,u,8,9],zero,xmm5[u,u,u,u,10]
2721 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = xmm8[u],zero,zero,xmm8[11,u,u,u,u],zero,zero,xmm8[12,u,u,u,u],zero
2722 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm5, %xmm5
2723 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm2, %ymm2
2724 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm5 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
2725 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm1, %ymm1
2726 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm5, %ymm2
2727 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm1, %ymm6
2728 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm3[u,u,u,u,12,13,u,u,u,u,u,14,15,u,u,u]
2729 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm2 = <u,u,0,1,u,u,u,u,u,2,3,u,u,u,u,u>
2730 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm9, %xmm3
2731 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
2732 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm12[u,u,12,13,u,u,u,u,u,14,15,u,u,u,u,u]
2733 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm3 = <0,1,u,u,u,u,u,2,3,u,u,u,u,u,4,5>
2734 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm11, %xmm7
2735 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm5, %ymm5
2736 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm4, %ymm1
2737 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm5, %ymm4
2738 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm4, %ymm4
2739 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm1 = <u,u,u,u,0,1,u,u,u,u,u,2,3,u,u,u>
2740 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm5 # 16-byte Reload
2741 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm5, %xmm5
2742 ; AVX1-ONLY-NEXT: vpalignr {{.*#+}} xmm5 = xmm5[4,5,6,7,8,9,10,11,12,13,14,15],xmm14[0,1,2,3]
2743 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm10[8],xmm15[8],xmm10[9],xmm15[9],xmm10[10],xmm15[10],xmm10[11],xmm15[11],xmm10[12],xmm15[12],xmm10[13],xmm15[13],xmm10[14],xmm15[14],xmm10[15],xmm15[15]
2744 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[10],zero,xmm7[u,u,u,u,13,12],zero,xmm7[u,u,u,u,15,14],zero
2745 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm9 = zero,xmm8[13,u,u,u,u],zero,zero,xmm8[14,u,u,u,u],zero,zero,xmm8[15]
2746 ; AVX1-ONLY-NEXT: vpor %xmm7, %xmm9, %xmm7
2747 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm9 = <u,u,u,u,0,1,12,u,u,u,u,7,8,13,u,u>
2748 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm5, %xmm5
2749 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm7, %ymm5
2750 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm7 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
2751 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm4, %ymm4
2752 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm7, %ymm5
2753 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm4, %ymm5
2754 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2755 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm7[4,5,u,u,u,u,u,6,7,u,u,u,u,u,8,9]
2756 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm7, %xmm2
2757 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
2758 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2759 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm7[u,u,u,u,u,6,7,u,u,u,u,u,8,9,u,u]
2760 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm7, %xmm3
2761 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
2762 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0]
2763 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm4, %ymm2
2764 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm3, %ymm3
2765 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm3, %ymm2
2766 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm8[u,u],zero,zero,xmm8[2,u,u,u,u],zero,zero,xmm8[3,u,u,u,u]
2767 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2768 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm7[u,u,4,5],zero,xmm7[u,u,u,u,6,7],zero,xmm7[u,u,u,u]
2769 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm4, %xmm3
2770 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm7, %xmm1
2771 ; AVX1-ONLY-NEXT: vpalignr {{.*#+}} xmm1 = xmm1[4,5,6,7,8,9,10,11,12,13,14,15],xmm8[0,1,2,3]
2772 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm1, %xmm1
2773 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
2774 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
2775 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm2, %ymm2
2776 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm3, %ymm1
2777 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm2, %ymm1
2778 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
2779 ; AVX1-ONLY-NEXT: vmovaps %ymm1, (%rax)
2780 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
2781 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 128(%rax)
2782 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
2783 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 32(%rax)
2784 ; AVX1-ONLY-NEXT: vmovaps %ymm5, 96(%rax)
2785 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
2786 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 160(%rax)
2787 ; AVX1-ONLY-NEXT: vmovaps %ymm6, 64(%rax)
2788 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, 192(%rax)
2789 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2790 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 208(%rax)
2791 ; AVX1-ONLY-NEXT: addq $216, %rsp
2792 ; AVX1-ONLY-NEXT: vzeroupper
2793 ; AVX1-ONLY-NEXT: retq
2795 ; AVX2-SLOW-LABEL: store_i8_stride7_vf32:
2796 ; AVX2-SLOW: # %bb.0:
2797 ; AVX2-SLOW-NEXT: pushq %rax
2798 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2799 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm4
2800 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm6
2801 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm3
2802 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm5
2803 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm7
2804 ; AVX2-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2805 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm2
2806 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %ymm1
2807 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2808 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u]
2809 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm8 = ymm3[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
2810 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[0,2,3,3,4,6,7,7]
2811 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [255,0,0,0,0,255,0,255,0,0,0,0,255,0,255,0,255,0,0,0,0,255,0,255,0,0,0,0,255,0,255,0]
2812 ; AVX2-SLOW-NEXT: # ymm9 = mem[0,1,0,1]
2813 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2814 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
2815 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u,u,u]
2816 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm9 = ymm4[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
2817 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[2,2,3,3,6,6,7,7]
2818 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [0,0,255,0,255,0,0,0,0,255,0,255,0,0,0,0,0,0,255,0,255,0,0,0,0,255,0,255,0,0,0,0]
2819 ; AVX2-SLOW-NEXT: # ymm10 = mem[0,1,0,1]
2820 ; AVX2-SLOW-NEXT: vpblendvb %ymm10, %ymm9, %ymm8, %ymm8
2821 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
2822 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u>
2823 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2824 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm7[27],zero,ymm7[27,28,29,30],zero,ymm7[28],zero,ymm7[26,27,30,31],zero,ymm7[29]
2825 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
2826 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm2[27],zero,zero,zero,zero,ymm2[30],zero,ymm2[28],zero,zero,zero,zero,ymm2[31],zero
2827 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
2828 ; AVX2-SLOW-NEXT: vpor %ymm8, %ymm9, %ymm8
2829 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <255,0,0,u,255,255,255,255,0,0,u,255,255,255,255,0,0,u,255,255,255,255,0,0,u,255,255,255,255,0,0,u>
2830 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2831 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
2832 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
2833 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
2834 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm0, %ymm8, %ymm0
2835 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2836 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm10
2837 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm11
2838 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm11[8],xmm10[8],xmm11[9],xmm10[9],xmm11[10],xmm10[10],xmm11[11],xmm10[11],xmm11[12],xmm10[12],xmm11[13],xmm10[13],xmm11[14],xmm10[14],xmm11[15],xmm10[15]
2839 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
2840 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm0[0,1,0,1]
2841 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm14
2842 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm0
2843 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm0[8],xmm14[8],xmm0[9],xmm14[9],xmm0[10],xmm14[10],xmm0[11],xmm14[11],xmm0[12],xmm14[12],xmm0[13],xmm14[13],xmm0[14],xmm14[14],xmm0[15],xmm14[15]
2844 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
2845 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,0,1]
2846 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm12 = <0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u>
2847 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm8, %ymm9, %ymm8
2848 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm12
2849 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm13
2850 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
2851 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
2852 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,0,1]
2853 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %xmm15
2854 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm15[0,1,2,3,4,5,5,6]
2855 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
2856 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
2857 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255>
2858 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm9, %ymm7, %ymm1
2859 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm7 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
2860 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm8, %ymm1, %ymm1
2861 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2862 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm1 = xmm0[u],zero,xmm0[7],zero,xmm0[5,u,u,u],zero,xmm0[8],zero,xmm0[6,u,u,u],zero
2863 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm7 = xmm14[u,7],zero,xmm14[5],zero,xmm14[u,u,u,8],zero,xmm14[6],zero,xmm14[u,u,u,9]
2864 ; AVX2-SLOW-NEXT: vpor %xmm1, %xmm7, %xmm1
2865 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm7 = xmm11[u,u,u],zero,xmm11[7],zero,xmm11[5,u,u,u],zero,xmm11[8],zero,xmm11[6,u,u]
2866 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm10[u,u,u,7],zero,xmm10[5],zero,xmm10[u,u,u,8],zero,xmm10[6],zero,xmm10[u,u]
2867 ; AVX2-SLOW-NEXT: vpor %xmm7, %xmm9, %xmm7
2868 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
2869 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
2870 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255>
2871 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm1, %ymm7, %ymm1
2872 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm7 = zero,xmm12[4,u,u,u],zero,xmm12[7],zero,xmm12[5,u,u,u],zero,xmm12[8],zero,xmm12[6]
2873 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm13[4],zero,xmm13[u,u,u,7],zero,xmm13[5],zero,xmm13[u,u,u,8],zero,xmm13[6],zero
2874 ; AVX2-SLOW-NEXT: vpor %xmm7, %xmm9, %xmm7
2875 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm15[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
2876 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,1,0]
2877 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
2878 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = <255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u>
2879 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm7, %ymm9, %ymm7
2880 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
2881 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm1, %ymm7, %ymm9
2882 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3],xmm14[4],xmm0[4],xmm14[5],xmm0[5],xmm14[6],xmm0[6],xmm14[7],xmm0[7]
2883 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
2884 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
2885 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3],xmm10[4],xmm11[4],xmm10[5],xmm11[5],xmm10[6],xmm11[6],xmm10[7],xmm11[7]
2886 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
2887 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
2888 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm7 = <255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0>
2889 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm0, %ymm1, %ymm0
2890 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3],xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
2891 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
2892 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
2893 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm15[1,1,0,0,4,5,6,7]
2894 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,1,2,0]
2895 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,1,0]
2896 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u>
2897 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm1, %ymm7, %ymm1
2898 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm7 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
2899 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm0, %ymm1, %ymm10
2900 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,21,u,19,u,u,u,u,22,u,20,u,u]
2901 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm4[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
2902 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,1,1,4,4,5,5]
2903 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [0,0,0,255,0,255,0,0,0,0,255,0,255,0,0,0,0,0,0,255,0,255,0,0,0,0,255,0,255,0,0,0]
2904 ; AVX2-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
2905 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm1, %ymm0, %ymm0
2906 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
2907 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm5[18],zero,zero,zero,zero,ymm5[21],zero,ymm5[19],zero,zero,zero,zero,ymm5[22],zero,ymm5[20]
2908 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
2909 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,zero,zero,zero,ymm3[21],zero,ymm3[19],zero,zero,zero,zero,ymm3[22],zero,ymm3[20],zero
2910 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
2911 ; AVX2-SLOW-NEXT: vpor %ymm1, %ymm7, %ymm1
2912 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm7 = <255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u>
2913 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm1, %ymm0, %ymm0
2914 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[20],zero,ymm2[18],zero,zero,zero,zero,ymm2[21],zero,ymm2[19],zero,zero,zero,zero,ymm2[22]
2915 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
2916 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
2917 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,ymm12[18],zero,zero,zero,zero,ymm12[21],zero,ymm12[19],zero,zero,zero,zero,ymm12[22],zero
2918 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
2919 ; AVX2-SLOW-NEXT: vpor %ymm1, %ymm7, %ymm1
2920 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
2921 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm7 = ymm11[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
2922 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[0,1,1,3,4,5,5,7]
2923 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,2,3,2]
2924 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255>
2925 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm1, %ymm7, %ymm1
2926 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm7 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0]
2927 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm0, %ymm1, %ymm0
2928 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm4[23],zero,ymm4[27,20,21,26],zero,ymm4[24],zero,ymm4[26,27,26,27],zero,ymm4[25]
2929 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
2930 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero,zero,zero,ymm6[27],zero
2931 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
2932 ; AVX2-SLOW-NEXT: vpor %ymm1, %ymm7, %ymm1
2933 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm5[25],zero,ymm5[23],zero,zero,zero,zero,ymm5[26],zero,ymm5[24],zero,zero,zero,zero
2934 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
2935 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm3[25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero,zero,zero,ymm3[27]
2936 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
2937 ; AVX2-SLOW-NEXT: vpor %ymm7, %ymm8, %ymm7
2938 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = <u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0>
2939 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm1, %ymm7, %ymm1
2940 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm2[25],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero
2941 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
2942 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm12[25],zero,ymm12[23],zero,zero,zero,zero,ymm12[26],zero,ymm12[24],zero,zero,zero
2943 ; AVX2-SLOW-NEXT: vmovdqa %ymm12, %ymm13
2944 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
2945 ; AVX2-SLOW-NEXT: vpor %ymm7, %ymm8, %ymm7
2946 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
2947 ; AVX2-SLOW-NEXT: vmovdqa %ymm11, %ymm12
2948 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
2949 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = <0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u>
2950 ; AVX2-SLOW-NEXT: vpblendvb %ymm11, %ymm7, %ymm8, %ymm7
2951 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
2952 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm1, %ymm7, %ymm1
2953 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,ymm6[14],zero,zero,zero,zero,zero,zero,ymm6[15],zero,zero,zero,zero,zero,zero,ymm6[16],zero,zero,zero,zero,zero,zero,ymm6[17],zero,zero,zero,zero,zero,zero,ymm6[18]
2954 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,ymm4[14],zero,zero,zero,zero,zero,zero,ymm4[15],zero,zero,zero,zero,zero,zero,ymm4[16],zero,zero,zero,zero,zero,zero,ymm4[17],zero,zero,zero,zero,zero,zero,ymm4[18],zero
2955 ; AVX2-SLOW-NEXT: vpor %ymm6, %ymm4, %ymm4
2956 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,zero,zero,zero,ymm5[14],zero,zero,zero,zero,zero,zero,ymm5[15],zero,zero,zero,zero,zero,zero,ymm5[16],zero,zero,zero,zero,zero,zero,ymm5[17],zero,zero,zero,zero,zero
2957 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,zero,ymm3[14],zero,zero,zero,zero,zero,zero,ymm3[15],zero,zero,zero,zero,zero,zero,ymm3[16],zero,zero,zero,zero,zero,zero,ymm3[17],zero,zero,zero,zero,zero,zero
2958 ; AVX2-SLOW-NEXT: vpor %ymm5, %ymm3, %ymm3
2959 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255>
2960 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm4, %ymm3, %ymm3
2961 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = zero,ymm13[1,2,3,0,1,14],zero,ymm13[0,1,0,1,14,15],zero,ymm13[15,16,17,18,19,16],zero,ymm13[30,31,16,17,16,17],zero,ymm13[31,30,31]
2962 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[13],zero,zero,zero,zero,zero,zero,ymm2[14],zero,zero,zero,zero,zero,zero,ymm2[15],zero,zero,zero,zero,zero,zero,ymm2[16],zero,zero,zero,zero,zero,zero,ymm2[17],zero,zero,zero
2963 ; AVX2-SLOW-NEXT: vpor %ymm4, %ymm2, %ymm2
2964 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm12[12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
2965 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm5 = <255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u>
2966 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm2, %ymm4, %ymm2
2967 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
2968 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm3, %ymm2, %ymm2
2969 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2970 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, 96(%rax)
2971 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 160(%rax)
2972 ; AVX2-SLOW-NEXT: vmovdqa %ymm10, (%rax)
2973 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 128(%rax)
2974 ; AVX2-SLOW-NEXT: vmovdqa %ymm9, 32(%rax)
2975 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2976 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 64(%rax)
2977 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2978 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 192(%rax)
2979 ; AVX2-SLOW-NEXT: popq %rax
2980 ; AVX2-SLOW-NEXT: vzeroupper
2981 ; AVX2-SLOW-NEXT: retq
2983 ; AVX2-FAST-LABEL: store_i8_stride7_vf32:
2984 ; AVX2-FAST: # %bb.0:
2985 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2986 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm1
2987 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm3
2988 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm0
2989 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm2
2990 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm9
2991 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm10
2992 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm10[8],xmm9[8],xmm10[9],xmm9[9],xmm10[10],xmm9[10],xmm10[11],xmm9[11],xmm10[12],xmm9[12],xmm10[13],xmm9[13],xmm10[14],xmm9[14],xmm10[15],xmm9[15]
2993 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
2994 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
2995 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm13
2996 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm15
2997 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm15[8],xmm13[8],xmm15[9],xmm13[9],xmm15[10],xmm13[10],xmm15[11],xmm13[11],xmm15[12],xmm13[12],xmm15[13],xmm13[13],xmm15[14],xmm13[14],xmm15[15],xmm13[15]
2998 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
2999 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
3000 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = <0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u>
3001 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
3002 ; AVX2-FAST-NEXT: vmovdqa (%rax), %xmm11
3003 ; AVX2-FAST-NEXT: vpshufhw {{.*#+}} xmm5 = xmm11[0,1,2,3,4,5,5,6]
3004 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [2,2,3,3,2,2,3,3]
3005 ; AVX2-FAST-NEXT: # ymm6 = mem[0,1,0,1]
3006 ; AVX2-FAST-NEXT: vpermd %ymm5, %ymm6, %ymm5
3007 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm12
3008 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm14
3009 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm14[8],xmm12[8],xmm14[9],xmm12[9],xmm14[10],xmm12[10],xmm14[11],xmm12[11],xmm14[12],xmm12[12],xmm14[13],xmm12[13],xmm14[14],xmm12[14],xmm14[15],xmm12[15]
3010 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
3011 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
3012 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255>
3013 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm6, %ymm5, %ymm5
3014 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
3015 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
3016 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm5 = xmm15[u],zero,xmm15[7],zero,xmm15[5,u,u,u],zero,xmm15[8],zero,xmm15[6,u,u,u],zero
3017 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm6 = xmm13[u,7],zero,xmm13[5],zero,xmm13[u,u,u,8],zero,xmm13[6],zero,xmm13[u,u,u,9]
3018 ; AVX2-FAST-NEXT: vpor %xmm5, %xmm6, %xmm5
3019 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
3020 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm6 = xmm10[u,u,u],zero,xmm10[7],zero,xmm10[5,u,u,u],zero,xmm10[8],zero,xmm10[6,u,u]
3021 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm7 = xmm9[u,u,u,7],zero,xmm9[5],zero,xmm9[u,u,u,8],zero,xmm9[6],zero,xmm9[u,u]
3022 ; AVX2-FAST-NEXT: vpor %xmm6, %xmm7, %xmm6
3023 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
3024 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255>
3025 ; AVX2-FAST-NEXT: vpblendvb %ymm7, %ymm5, %ymm6, %ymm5
3026 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm6 = xmm11[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
3027 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,1,0]
3028 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm7 = zero,xmm12[4,u,u,u],zero,xmm12[7],zero,xmm12[5,u,u,u],zero,xmm12[8],zero,xmm12[6]
3029 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm8 = xmm14[4],zero,xmm14[u,u,u,7],zero,xmm14[5],zero,xmm14[u,u,u,8],zero,xmm14[6],zero
3030 ; AVX2-FAST-NEXT: vpor %xmm7, %xmm8, %xmm7
3031 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
3032 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = <255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u>
3033 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm7, %ymm6, %ymm7
3034 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm6
3035 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
3036 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm5, %ymm7, %ymm5
3037 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm8
3038 ; AVX2-FAST-NEXT: vmovdqa (%rax), %ymm7
3039 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm13[0],xmm15[0],xmm13[1],xmm15[1],xmm13[2],xmm15[2],xmm13[3],xmm15[3],xmm13[4],xmm15[4],xmm13[5],xmm15[5],xmm13[6],xmm15[6],xmm13[7],xmm15[7]
3040 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
3041 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,0,1]
3042 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3],xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
3043 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
3044 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,0,1]
3045 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0>
3046 ; AVX2-FAST-NEXT: vpblendvb %ymm10, %ymm13, %ymm9, %ymm9
3047 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm10 = xmm11[1,1,0,0,4,5,6,7]
3048 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [0,1,0,1,2,0,0,1]
3049 ; AVX2-FAST-NEXT: vpermd %ymm10, %ymm11, %ymm10
3050 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3],xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
3051 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm11 = xmm11[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
3052 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,1,0,1]
3053 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u>
3054 ; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm11, %ymm10, %ymm10
3055 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
3056 ; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm9, %ymm10, %ymm9
3057 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[18],zero,zero,zero,zero,ymm2[21],zero,ymm2[19],zero,zero,zero,zero,ymm2[22],zero,ymm2[20]
3058 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
3059 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,zero,zero,zero,ymm0[21],zero,ymm0[19],zero,zero,zero,zero,ymm0[22],zero,ymm0[20],zero
3060 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
3061 ; AVX2-FAST-NEXT: vpor %ymm10, %ymm11, %ymm10
3062 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm3[21],zero,ymm3[19],zero,zero,zero,zero,ymm3[22],zero,ymm3[20],zero,zero
3063 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
3064 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[21],zero,ymm1[19],zero,zero,zero,zero,ymm1[22],zero,ymm1[20],zero,zero,zero
3065 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3066 ; AVX2-FAST-NEXT: vpor %ymm11, %ymm12, %ymm11
3067 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u>
3068 ; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm10, %ymm11, %ymm10
3069 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm11 = ymm7[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
3070 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [4,5,4,5,5,7,4,5]
3071 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm12, %ymm11
3072 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm8[20],zero,ymm8[18],zero,zero,zero,zero,ymm8[21],zero,ymm8[19],zero,zero,zero,zero,ymm8[22]
3073 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3074 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm13 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,ymm6[18],zero,zero,zero,zero,ymm6[21],zero,ymm6[19],zero,zero,zero,zero,ymm6[22],zero
3075 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3076 ; AVX2-FAST-NEXT: vpor %ymm12, %ymm13, %ymm12
3077 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255>
3078 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm12, %ymm11, %ymm11
3079 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0]
3080 ; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm10, %ymm11, %ymm10
3081 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[17,18,19,30],zero,ymm0[28],zero,ymm0[28,29,30,31],zero,ymm0[29],zero,ymm0[31]
3082 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
3083 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm2[30],zero,ymm2[28],zero,zero,zero,zero,ymm2[31],zero,ymm2[29],zero
3084 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3085 ; AVX2-FAST-NEXT: vpor %ymm11, %ymm12, %ymm11
3086 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm3[30],zero,ymm3[28],zero,zero,zero,zero,ymm3[31],zero,ymm3[29],zero,zero,zero
3087 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3088 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm13 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero,zero
3089 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3090 ; AVX2-FAST-NEXT: vpor %ymm12, %ymm13, %ymm12
3091 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u>
3092 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
3093 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm6[27],zero,ymm6[27,28,29,30],zero,ymm6[28],zero,ymm6[26,27,30,31],zero,ymm6[29]
3094 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3095 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm13 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm8[27],zero,zero,zero,zero,ymm8[30],zero,ymm8[28],zero,zero,zero,zero,ymm8[31],zero
3096 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3097 ; AVX2-FAST-NEXT: vpor %ymm12, %ymm13, %ymm12
3098 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <255,0,0,u,255,255,255,255,0,0,u,255,255,255,255,0,0,u,255,255,255,255,0,0,u,255,255,255,255,0,0,u>
3099 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
3100 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
3101 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3102 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
3103 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
3104 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm1[23],zero,ymm1[27,20,21,26],zero,ymm1[24],zero,ymm1[26,27,26,27],zero,ymm1[25]
3105 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3106 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm13 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero,zero,zero,ymm3[27],zero
3107 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3108 ; AVX2-FAST-NEXT: vpor %ymm12, %ymm13, %ymm12
3109 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm13 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[25],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero
3110 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3111 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[25],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero,zero,zero,ymm0[27]
3112 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
3113 ; AVX2-FAST-NEXT: vpor %ymm13, %ymm14, %ymm13
3114 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0>
3115 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm12, %ymm13, %ymm12
3116 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm13 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[25],zero,ymm8[23],zero,zero,zero,zero,ymm8[26],zero,ymm8[24],zero,zero
3117 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3118 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm6[25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero,zero
3119 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
3120 ; AVX2-FAST-NEXT: vpor %ymm13, %ymm14, %ymm13
3121 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
3122 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
3123 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = <0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u>
3124 ; AVX2-FAST-NEXT: vpblendvb %ymm15, %ymm13, %ymm14, %ymm13
3125 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = [0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
3126 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm12, %ymm13, %ymm12
3127 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,ymm3[14],zero,zero,zero,zero,zero,zero,ymm3[15],zero,zero,zero,zero,zero,zero,ymm3[16],zero,zero,zero,zero,zero,zero,ymm3[17],zero,zero,zero,zero,zero,zero,ymm3[18]
3128 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,ymm1[14],zero,zero,zero,zero,zero,zero,ymm1[15],zero,zero,zero,zero,zero,zero,ymm1[16],zero,zero,zero,zero,zero,zero,ymm1[17],zero,zero,zero,zero,zero,zero,ymm1[18],zero
3129 ; AVX2-FAST-NEXT: vpor %ymm3, %ymm1, %ymm1
3130 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,zero,zero,zero,ymm2[14],zero,zero,zero,zero,zero,zero,ymm2[15],zero,zero,zero,zero,zero,zero,ymm2[16],zero,zero,zero,zero,zero,zero,ymm2[17],zero,zero,zero,zero,zero
3131 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[14],zero,zero,zero,zero,zero,zero,ymm0[15],zero,zero,zero,zero,zero,zero,ymm0[16],zero,zero,zero,zero,zero,zero,ymm0[17],zero,zero,zero,zero,zero,zero
3132 ; AVX2-FAST-NEXT: vpor %ymm2, %ymm0, %ymm0
3133 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255>
3134 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
3135 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = zero,ymm6[1,2,3,0,1,14],zero,ymm6[0,1,0,1,14,15],zero,ymm6[15,16,17,18,19,16],zero,ymm6[30,31,16,17,16,17],zero,ymm6[31,30,31]
3136 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm8[13],zero,zero,zero,zero,zero,zero,ymm8[14],zero,zero,zero,zero,zero,zero,ymm8[15],zero,zero,zero,zero,zero,zero,ymm8[16],zero,zero,zero,zero,zero,zero,ymm8[17],zero,zero,zero
3137 ; AVX2-FAST-NEXT: vpor %ymm1, %ymm2, %ymm1
3138 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm7[12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
3139 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u>
3140 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
3141 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
3142 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
3143 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3144 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 96(%rax)
3145 ; AVX2-FAST-NEXT: vmovdqa %ymm10, 128(%rax)
3146 ; AVX2-FAST-NEXT: vmovdqa %ymm12, 160(%rax)
3147 ; AVX2-FAST-NEXT: vmovdqa %ymm9, (%rax)
3148 ; AVX2-FAST-NEXT: vmovdqa %ymm11, 192(%rax)
3149 ; AVX2-FAST-NEXT: vmovdqa %ymm5, 32(%rax)
3150 ; AVX2-FAST-NEXT: vmovdqa %ymm4, 64(%rax)
3151 ; AVX2-FAST-NEXT: vzeroupper
3152 ; AVX2-FAST-NEXT: retq
3153 ;
3154 ; AVX2-FAST-PERLANE-LABEL: store_i8_stride7_vf32:
3155 ; AVX2-FAST-PERLANE: # %bb.0:
3156 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
3157 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm1
3158 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm3
3159 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm0
3160 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm2
3161 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm9
3162 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm10
3163 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm10[8],xmm9[8],xmm10[9],xmm9[9],xmm10[10],xmm9[10],xmm10[11],xmm9[11],xmm10[12],xmm9[12],xmm10[13],xmm9[13],xmm10[14],xmm9[14],xmm10[15],xmm9[15]
3164 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
3165 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
3166 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm13
3167 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm15
3168 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm15[8],xmm13[8],xmm15[9],xmm13[9],xmm15[10],xmm13[10],xmm15[11],xmm13[11],xmm15[12],xmm13[12],xmm15[13],xmm13[13],xmm15[14],xmm13[14],xmm15[15],xmm13[15]
3169 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
3170 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
3171 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = <0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u>
3172 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
3173 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %xmm11
3174 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm5 = xmm11[8,9,10,11,8,9,10,11,10,11,12,13,10,11,12,13]
3175 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
3176 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm12
3177 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm14
3178 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm14[8],xmm12[8],xmm14[9],xmm12[9],xmm14[10],xmm12[10],xmm14[11],xmm12[11],xmm14[12],xmm12[12],xmm14[13],xmm12[13],xmm14[14],xmm12[14],xmm14[15],xmm12[15]
3179 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
3180 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
3181 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255>
3182 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm6, %ymm5, %ymm5
3183 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
3184 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
3185 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm5 = xmm15[u],zero,xmm15[7],zero,xmm15[5,u,u,u],zero,xmm15[8],zero,xmm15[6,u,u,u],zero
3186 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm6 = xmm13[u,7],zero,xmm13[5],zero,xmm13[u,u,u,8],zero,xmm13[6],zero,xmm13[u,u,u,9]
3187 ; AVX2-FAST-PERLANE-NEXT: vpor %xmm5, %xmm6, %xmm5
3188 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
3189 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm6 = xmm10[u,u,u],zero,xmm10[7],zero,xmm10[5,u,u,u],zero,xmm10[8],zero,xmm10[6,u,u]
3190 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm7 = xmm9[u,u,u,7],zero,xmm9[5],zero,xmm9[u,u,u,8],zero,xmm9[6],zero,xmm9[u,u]
3191 ; AVX2-FAST-PERLANE-NEXT: vpor %xmm6, %xmm7, %xmm6
3192 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
3193 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255>
3194 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm5, %ymm6, %ymm5
3195 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm6 = xmm11[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
3196 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,1,0]
3197 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm7 = zero,xmm12[4,u,u,u],zero,xmm12[7],zero,xmm12[5,u,u,u],zero,xmm12[8],zero,xmm12[6]
3198 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm8 = xmm14[4],zero,xmm14[u,u,u,7],zero,xmm14[5],zero,xmm14[u,u,u,8],zero,xmm14[6],zero
3199 ; AVX2-FAST-PERLANE-NEXT: vpor %xmm7, %xmm8, %xmm7
3200 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
3201 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm8 = <255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u>
3202 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm7, %ymm6, %ymm7
3203 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm6
3204 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm8 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
3205 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm5, %ymm7, %ymm5
3206 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm8
3207 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %ymm7
3208 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm13[0],xmm15[0],xmm13[1],xmm15[1],xmm13[2],xmm15[2],xmm13[3],xmm15[3],xmm13[4],xmm15[4],xmm13[5],xmm15[5],xmm13[6],xmm15[6],xmm13[7],xmm15[7]
3209 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
3210 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,0,1]
3211 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3],xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
3212 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
3213 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,0,1]
3214 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = <255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0>
3215 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm10, %ymm13, %ymm9, %ymm9
3216 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm10 = xmm11[2,3,2,3,0,1,0,1,8,9,10,11,2,3,2,3]
3217 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,1,0]
3218 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3],xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
3219 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm11 = xmm11[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
3220 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,1,0,1]
3221 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u>
3222 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm11, %ymm10, %ymm10
3223 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
3224 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm9, %ymm10, %ymm9
3225 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm10 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[17,18,19,30],zero,ymm0[28],zero,ymm0[28,29,30,31],zero,ymm0[29],zero,ymm0[31]
3226 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
3227 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm11 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm2[30],zero,ymm2[28],zero,zero,zero,zero,ymm2[31],zero,ymm2[29],zero
3228 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
3229 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm10, %ymm11, %ymm10
3230 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm11 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm3[30],zero,ymm3[28],zero,zero,zero,zero,ymm3[31],zero,ymm3[29],zero,zero,zero
3231 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
3232 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm12 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero,zero
3233 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3234 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm11, %ymm12, %ymm11
3235 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = <255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u>
3236 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm10, %ymm11, %ymm10
3237 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm11 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm6[27],zero,ymm6[27,28,29,30],zero,ymm6[28],zero,ymm6[26,27,30,31],zero,ymm6[29]
3238 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
3239 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm12 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm8[27],zero,zero,zero,zero,ymm8[30],zero,ymm8[28],zero,zero,zero,zero,ymm8[31],zero
3240 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3241 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm11, %ymm12, %ymm11
3242 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = <255,0,0,u,255,255,255,255,0,0,u,255,255,255,255,0,0,u,255,255,255,255,0,0,u,255,255,255,255,0,0,u>
3243 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm10, %ymm11, %ymm10
3244 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm11 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
3245 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
3246 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
3247 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm10, %ymm11, %ymm10
3248 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm11 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm1[23],zero,ymm1[27,20,21,26],zero,ymm1[24],zero,ymm1[26,27,26,27],zero,ymm1[25]
3249 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
3250 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm12 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero,zero,zero,ymm3[27],zero
3251 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3252 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm11, %ymm12, %ymm11
3253 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm12 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[25],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero
3254 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3255 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm13 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[25],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero,zero,zero,ymm0[27]
3256 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3257 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm12, %ymm13, %ymm12
3258 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = <u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0>
3259 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
3260 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm12 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[25],zero,ymm8[23],zero,zero,zero,zero,ymm8[26],zero,ymm8[24],zero,zero
3261 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3262 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm13 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm6[25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero,zero
3263 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3264 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm12, %ymm13, %ymm12
3265 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm13 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
3266 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3267 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm14 = <0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u>
3268 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm14, %ymm12, %ymm13, %ymm12
3269 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = [0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
3270 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm11, %ymm12, %ymm11
3271 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm12 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[18],zero,zero,zero,zero,ymm2[21],zero,ymm2[19],zero,zero,zero,zero,ymm2[22],zero,ymm2[20]
3272 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3273 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm13 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,zero,zero,zero,ymm0[21],zero,ymm0[19],zero,zero,zero,zero,ymm0[22],zero,ymm0[20],zero
3274 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3275 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm12, %ymm13, %ymm12
3276 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm13 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm3[21],zero,ymm3[19],zero,zero,zero,zero,ymm3[22],zero,ymm3[20],zero,zero
3277 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3278 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm14 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[21],zero,ymm1[19],zero,zero,zero,zero,ymm1[22],zero,ymm1[20],zero,zero,zero
3279 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
3280 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm13, %ymm14, %ymm13
3281 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm14 = <255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u>
3282 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm14, %ymm12, %ymm13, %ymm12
3283 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm13 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm8[20],zero,ymm8[18],zero,zero,zero,zero,ymm8[21],zero,ymm8[19],zero,zero,zero,zero,ymm8[22]
3284 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
3285 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm14 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,ymm6[18],zero,zero,zero,zero,ymm6[21],zero,ymm6[19],zero,zero,zero,zero,ymm6[22],zero
3286 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
3287 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm13, %ymm14, %ymm13
3288 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm14 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,18,19,18,19,20,21,18,19,20,21,28,29,30,31]
3289 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,3,2]
3290 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255>
3291 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm13, %ymm14, %ymm13
3292 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm14 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0]
3293 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm14, %ymm12, %ymm13, %ymm12
3294 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,ymm3[14],zero,zero,zero,zero,zero,zero,ymm3[15],zero,zero,zero,zero,zero,zero,ymm3[16],zero,zero,zero,zero,zero,zero,ymm3[17],zero,zero,zero,zero,zero,zero,ymm3[18]
3295 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,ymm1[14],zero,zero,zero,zero,zero,zero,ymm1[15],zero,zero,zero,zero,zero,zero,ymm1[16],zero,zero,zero,zero,zero,zero,ymm1[17],zero,zero,zero,zero,zero,zero,ymm1[18],zero
3296 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm3, %ymm1, %ymm1
3297 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,zero,zero,zero,ymm2[14],zero,zero,zero,zero,zero,zero,ymm2[15],zero,zero,zero,zero,zero,zero,ymm2[16],zero,zero,zero,zero,zero,zero,ymm2[17],zero,zero,zero,zero,zero
3298 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[14],zero,zero,zero,zero,zero,zero,ymm0[15],zero,zero,zero,zero,zero,zero,ymm0[16],zero,zero,zero,zero,zero,zero,ymm0[17],zero,zero,zero,zero,zero,zero
3299 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm2, %ymm0, %ymm0
3300 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255>
3301 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
3302 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = zero,ymm6[1,2,3,0,1,14],zero,ymm6[0,1,0,1,14,15],zero,ymm6[15,16,17,18,19,16],zero,ymm6[30,31,16,17,16,17],zero,ymm6[31,30,31]
3303 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm8[13],zero,zero,zero,zero,zero,zero,ymm8[14],zero,zero,zero,zero,zero,zero,ymm8[15],zero,zero,zero,zero,zero,zero,ymm8[16],zero,zero,zero,zero,zero,zero,ymm8[17],zero,zero,zero
3304 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm1, %ymm2, %ymm1
3305 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm7[12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
3306 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = <255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u>
3307 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
3308 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
3309 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
3310 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
3311 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 96(%rax)
3312 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm12, 128(%rax)
3313 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm11, 160(%rax)
3314 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm9, (%rax)
3315 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm10, 192(%rax)
3316 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm5, 32(%rax)
3317 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, 64(%rax)
3318 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
3319 ; AVX2-FAST-PERLANE-NEXT: retq
3320 ;
3321 ; AVX512F-ONLY-SLOW-LABEL: store_i8_stride7_vf32:
3322 ; AVX512F-ONLY-SLOW: # %bb.0:
3323 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3324 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
3325 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %ymm2
3326 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %ymm1
3327 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %ymm11
3328 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %ymm3
3329 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm5
3330 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm6
3331 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r10), %ymm4
3332 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,u,u,u,u,26,27,24,25]
3333 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm8 = ymm4[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
3334 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,5,4,u,5,u,4,u,20,21,u,23,u,21,u,23>
3335 ; AVX512F-ONLY-SLOW-NEXT: vpermi2d %zmm7, %zmm8, %zmm9
3336 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm6[20],zero,ymm6[18],zero,zero,zero,zero,ymm6[21],zero,ymm6[19],zero,zero,zero,zero,ymm6[22]
3337 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm6[25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero
3338 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm7, %zmm7
3339 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm7 = zmm7[2,3,2,3,6,7,6,7]
3340 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,ymm5[18],zero,ymm5[20,21,20,21],zero,ymm5[19],zero,ymm5[19,20,21,22],zero
3341 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm5[23],zero,ymm5[23,24,25,26],zero,ymm5[24],zero,ymm5[30,31]
3342 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm8, %zmm8
3343 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm8 = zmm8[2,3,2,3,6,7,6,7]
3344 ; AVX512F-ONLY-SLOW-NEXT: vporq %zmm7, %zmm8, %zmm7
3345 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm7
3346 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25,u,23,u,u,u,u,26,u,24,u,u,u,u,27,u]
3347 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
3348 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm16 = [18374967954648269055,71777218572844800,18374967954648269055,71777218572844800]
3349 ; AVX512F-ONLY-SLOW-NEXT: # ymm16 = mem[0,1,2,3,0,1,2,3]
3350 ; AVX512F-ONLY-SLOW-NEXT: vpandq %ymm16, %ymm8, %ymm8
3351 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,ymm11[18,19,20,21],zero,ymm11[19],zero,ymm11[25,26,27,22],zero,ymm11[20],zero
3352 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
3353 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm9, %zmm8
3354 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm3[18],zero,zero,zero,zero,ymm3[21],zero,ymm3[19],zero,zero,zero,zero,ymm3[22],zero,ymm3[20]
3355 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero,ymm2[27],zero,ymm2[25]
3356 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm9, %zmm9
3357 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm9 = zmm9[2,3,2,3,6,7,6,7]
3358 ; AVX512F-ONLY-SLOW-NEXT: vporq %zmm9, %zmm8, %zmm9
3359 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm8 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
3360 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[0,0,1,1,4,4,5,5]
3361 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
3362 ; AVX512F-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm17 = [18374966859431673855,18446463693966278655,18374966859431673855,18446463693966278655]
3363 ; AVX512F-ONLY-SLOW-NEXT: # ymm17 = mem[0,1,2,3,0,1,2,3]
3364 ; AVX512F-ONLY-SLOW-NEXT: vpandq %ymm17, %ymm8, %ymm8
3365 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,ymm11[23],zero,ymm11[21,22,23,26],zero,ymm11[24],zero,ymm11[28,29,26,27]
3366 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm11, %ymm20
3367 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
3368 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm8, %zmm8
3369 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[21],zero,ymm1[19],zero,zero,zero,zero,ymm1[22],zero,ymm1[20],zero,zero
3370 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero,zero,zero
3371 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm10, %zmm10
3372 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm10 = zmm10[2,3,2,3,6,7,6,7]
3373 ; AVX512F-ONLY-SLOW-NEXT: vporq %zmm10, %zmm8, %zmm8
3374 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm8
3375 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm8
3376 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm11
3377 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm7 = xmm11[u],zero,xmm11[7],zero,xmm11[5,u,u,u],zero,xmm11[8],zero,xmm11[6,u,u,u],zero
3378 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm12
3379 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm12[u,7],zero,xmm12[5],zero,xmm12[u,u,u,8],zero,xmm12[6],zero,xmm12[u,u,u,9]
3380 ; AVX512F-ONLY-SLOW-NEXT: vpor %xmm7, %xmm9, %xmm7
3381 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
3382 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
3383 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm7, %zmm9, %zmm7
3384 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm10 = zmm7[0,1,0,1,4,5,4,5]
3385 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %xmm7
3386 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm13 = xmm7[u,u,u],zero,xmm7[7],zero,xmm7[5,u,u,u],zero,xmm7[8],zero,xmm7[6,u,u]
3387 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm9
3388 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm14 = xmm9[u,u,u,7],zero,xmm9[5],zero,xmm9[u,u,u,8],zero,xmm9[6],zero,xmm9[u,u]
3389 ; AVX512F-ONLY-SLOW-NEXT: vpor %xmm13, %xmm14, %xmm13
3390 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
3391 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
3392 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm13, %zmm14, %zmm13
3393 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm18 = zmm13[0,1,0,1,4,5,4,5]
3394 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm18
3395 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %xmm13
3396 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm10 = zero,xmm13[4,u,u,u],zero,xmm13[7],zero,xmm13[5,u,u,u],zero,xmm13[8],zero,xmm13[6]
3397 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %xmm14
3398 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm15 = xmm14[4],zero,xmm14[u,u,u,7],zero,xmm14[5],zero,xmm14[u,u,u,8],zero,xmm14[6],zero
3399 ; AVX512F-ONLY-SLOW-NEXT: vpor %xmm10, %xmm15, %xmm10
3400 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
3401 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm15 = xmm15[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
3402 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm10, %zmm15, %zmm10
3403 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm19 = zmm10[0,1,0,1,4,5,4,5]
3404 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r10), %xmm15
3405 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm10 = xmm15[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
3406 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm15[1,1,0,0,4,5,6,7]
3407 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,2,0]
3408 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm0, %zmm0
3409 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm10 = zmm0[0,0,1,0,4,4,5,4]
3410 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm19, %zmm10
3411 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm10
3412 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm1[14,u,u],zero,zero,zero,zero,ymm1[15,u,u],zero,zero,zero,zero,ymm1[16,u,u],zero,zero,zero,zero,ymm1[17,u,u],zero,zero,zero,zero,ymm1[18]
3413 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm1, %ymm19
3414 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[0,1,14],zero,ymm2[u,u,0,1,14,15],zero,ymm2[u,u,13,2,3,16],zero,ymm2[u,u,28,29,16,17],zero,ymm2[u,u,19,28,29,18],zero
3415 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm2, %ymm18
3416 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
3417 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm7[8],xmm9[8],xmm7[9],xmm9[9],xmm7[10],xmm9[10],xmm7[11],xmm9[11],xmm7[12],xmm9[12],xmm7[13],xmm9[13],xmm7[14],xmm9[14],xmm7[15],xmm9[15]
3418 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
3419 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
3420 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
3421 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u],zero,ymm3[14,u,u,u,u,u],zero,ymm3[15,u,u,u,u,u],zero,ymm3[16,u,u,u,u,u],zero,ymm3[17,u,u,u,u,u]
3422 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm20, %ymm2
3423 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm2[u,u,u,u,14],zero,ymm2[u,u,u,u,u,15],zero,ymm2[u,u,u,u,u,16],zero,ymm2[u,u,u,u,u,17],zero,ymm2[u,u,u,u,u]
3424 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm1, %ymm7, %ymm1
3425 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm11[8],xmm12[8],xmm11[9],xmm12[9],xmm11[10],xmm12[10],xmm11[11],xmm12[11],xmm11[12],xmm12[12],xmm11[13],xmm12[13],xmm11[14],xmm12[14],xmm11[15],xmm12[15]
3426 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
3427 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
3428 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm7, %zmm1
3429 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
3430 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = zero,ymm5[u,u,u,u,u,14],zero,ymm5[u,u,u,u,u,15],zero,ymm5[u,u,u,u,u,16],zero,ymm5[u,u,u,u,u,17],zero,ymm5[u,u,u]
3431 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm6[13,u,u,u,u,u],zero,ymm6[14,u,u,u,u,u],zero,ymm6[15,u,u,u,u,u],zero,ymm6[16,u,u,u,u,u],zero,ymm6[17,u,u,u]
3432 ; AVX512F-ONLY-SLOW-NEXT: vpor %ymm0, %ymm7, %ymm0
3433 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
3434 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
3435 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
3436 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm7, %zmm0
3437 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm15[0,1,2,3,4,5,5,6]
3438 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
3439 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
3440 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
3441 ; AVX512F-ONLY-SLOW-NEXT: vpandn %ymm7, %ymm9, %ymm7
3442 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = zero,ymm4[13,u,u,u,u],zero,zero,ymm4[14,u,u,u,u],zero,zero,ymm4[15,u,u,u,u],zero,zero,ymm4[16,u,u,u,u],zero,zero,ymm4[17,u,u]
3443 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm7, %zmm7
3444 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm7
3445 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm7
3446 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u]
3447 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
3448 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm5[27],zero,zero,zero,zero,ymm5[30],zero,ymm5[28],zero,zero,zero,zero,ymm5[31],zero,ymm5[29]
3449 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
3450 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $248, %ymm16, %ymm0, %ymm1
3451 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
3452 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
3453 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm0
3454 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u]
3455 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
3456 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm2[30],zero,ymm2[28],zero,zero,zero,zero,ymm2[31],zero,ymm2[29],zero,zero
3457 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
3458 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $248, %ymm17, %ymm1, %ymm2
3459 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm19, %ymm1
3460 ; AVX512F-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero
3461 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
3462 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %ymm18, %ymm3
3463 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
3464 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,2,3,3,6,6,7,7]
3465 ; AVX512F-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
3466 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm3
3467 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm3
3468 ; AVX512F-ONLY-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm3
3469 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa %ymm3, 192(%rax)
3470 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm10, (%rax)
3471 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm8, 128(%rax)
3472 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 64(%rax)
3473 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
3474 ; AVX512F-ONLY-SLOW-NEXT: retq
3475 ;
3476 ; AVX512F-FAST-LABEL: store_i8_stride7_vf32:
3477 ; AVX512F-FAST: # %bb.0:
3478 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3479 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
3480 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %ymm2
3481 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %ymm4
3482 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %ymm5
3483 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %ymm6
3484 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %ymm1
3485 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %ymm3
3486 ; AVX512F-FAST-NEXT: vmovdqa64 (%r10), %ymm17
3487 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %xmm8
3488 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm7 = xmm8[u],zero,xmm8[7],zero,xmm8[5,u,u,u],zero,xmm8[8],zero,xmm8[6,u,u,u],zero
3489 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm9
3490 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm10 = xmm9[u,7],zero,xmm9[5],zero,xmm9[u,u,u,8],zero,xmm9[6],zero,xmm9[u,u,u,9]
3491 ; AVX512F-FAST-NEXT: vpor %xmm7, %xmm10, %xmm7
3492 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3],xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
3493 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
3494 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm7, %zmm10, %zmm7
3495 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm7 = zmm7[0,1,0,1,4,5,4,5]
3496 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %xmm11
3497 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm10 = xmm11[u,u,u],zero,xmm11[7],zero,xmm11[5,u,u,u],zero,xmm11[8],zero,xmm11[6,u,u]
3498 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm12
3499 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm13 = xmm12[u,u,u,7],zero,xmm12[5],zero,xmm12[u,u,u,8],zero,xmm12[6],zero,xmm12[u,u]
3500 ; AVX512F-FAST-NEXT: vpor %xmm10, %xmm13, %xmm10
3501 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
3502 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
3503 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm10, %zmm13, %zmm10
3504 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm15 = zmm10[0,1,0,1,4,5,4,5]
3505 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm15
3506 ; AVX512F-FAST-NEXT: vmovdqa (%r10), %xmm10
3507 ; AVX512F-FAST-NEXT: vpshuflw {{.*#+}} xmm7 = xmm10[1,1,0,0,4,5,6,7]
3508 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = <u,1,u,1,u,0,0,u>
3509 ; AVX512F-FAST-NEXT: vpermd %ymm7, %ymm13, %ymm7
3510 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm13 = xmm10[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
3511 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,0,1,0]
3512 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm13, %zmm7, %zmm16
3513 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %xmm13
3514 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm7 = zero,xmm13[4,u,u,u],zero,xmm13[7],zero,xmm13[5,u,u,u],zero,xmm13[8],zero,xmm13[6]
3515 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %xmm14
3516 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm14[4],zero,xmm14[u,u,u,7],zero,xmm14[5],zero,xmm14[u,u,u,8],zero,xmm14[6],zero
3517 ; AVX512F-FAST-NEXT: vpor %xmm7, %xmm0, %xmm0
3518 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
3519 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
3520 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm7, %zmm0
3521 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm7 = zmm0[0,1,0,1,4,5,4,5]
3522 ; AVX512F-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm16, %zmm7
3523 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm7
3524 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm4[14,u,u],zero,zero,zero,zero,ymm4[15,u,u],zero,zero,zero,zero,ymm4[16,u,u],zero,zero,zero,zero,ymm4[17,u,u],zero,zero,zero,zero,ymm4[18]
3525 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm15 = ymm2[0,1,14],zero,ymm2[u,u,0,1,14,15],zero,ymm2[u,u,13,2,3,16],zero,ymm2[u,u,28,29,16,17],zero,ymm2[u,u,19,28,29,18],zero
3526 ; AVX512F-FAST-NEXT: vpor %ymm0, %ymm15, %ymm0
3527 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm11[8],xmm12[8],xmm11[9],xmm12[9],xmm11[10],xmm12[10],xmm11[11],xmm12[11],xmm11[12],xmm12[12],xmm11[13],xmm12[13],xmm11[14],xmm12[14],xmm11[15],xmm12[15]
3528 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm11 = xmm11[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
3529 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,1,0,1]
3530 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm11, %zmm0
3531 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm6[u,u,u,u],zero,ymm6[14,u,u,u,u,u],zero,ymm6[15,u,u,u,u,u],zero,ymm6[16,u,u,u,u,u],zero,ymm6[17,u,u,u,u,u]
3532 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm5[u,u,u,u,14],zero,ymm5[u,u,u,u,u,15],zero,ymm5[u,u,u,u,u,16],zero,ymm5[u,u,u,u,u,17],zero,ymm5[u,u,u,u,u]
3533 ; AVX512F-FAST-NEXT: vpor %ymm11, %ymm12, %ymm11
3534 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm8[8],xmm9[8],xmm8[9],xmm9[9],xmm8[10],xmm9[10],xmm8[11],xmm9[11],xmm8[12],xmm9[12],xmm8[13],xmm9[13],xmm8[14],xmm9[14],xmm8[15],xmm9[15]
3535 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
3536 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,1,0,1]
3537 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm8, %zmm9
3538 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm9
3539 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,ymm1[u,u,u,u,u,14],zero,ymm1[u,u,u,u,u,15],zero,ymm1[u,u,u,u,u,16],zero,ymm1[u,u,u,u,u,17],zero,ymm1[u,u,u]
3540 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm3[13,u,u,u,u,u],zero,ymm3[14,u,u,u,u,u],zero,ymm3[15,u,u,u,u,u],zero,ymm3[16,u,u,u,u,u],zero,ymm3[17,u,u,u]
3541 ; AVX512F-FAST-NEXT: vpor %ymm0, %ymm8, %ymm0
3542 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
3543 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
3544 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,1,0,1]
3545 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
3546 ; AVX512F-FAST-NEXT: vpshufhw {{.*#+}} xmm8 = xmm10[0,1,2,3,4,5,5,6]
3547 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [2,2,3,3,2,2,3,3]
3548 ; AVX512F-FAST-NEXT: # ymm10 = mem[0,1,0,1]
3549 ; AVX512F-FAST-NEXT: vpermd %ymm8, %ymm10, %ymm8
3550 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
3551 ; AVX512F-FAST-NEXT: vpandn %ymm8, %ymm10, %ymm8
3552 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm17, %ymm13
3553 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm10 = zero,ymm13[13,u,u,u,u],zero,zero,ymm13[14,u,u,u,u],zero,zero,ymm13[15,u,u,u,u],zero,zero,ymm13[16,u,u,u,u],zero,zero,ymm13[17,u,u]
3554 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm10, %zmm8, %zmm8
3555 ; AVX512F-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm8
3556 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm8
3557 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25,u,23,u,u,u,u,26,u,24,u,u,u,u,27,u]
3558 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
3559 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [18374967954648269055,71777218572844800,18374967954648269055,71777218572844800]
3560 ; AVX512F-FAST-NEXT: # ymm9 = mem[0,1,0,1]
3561 ; AVX512F-FAST-NEXT: vpand %ymm0, %ymm9, %ymm0
3562 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,ymm5[18,19,20,21],zero,ymm5[19],zero,ymm5[25,26,27,22],zero,ymm5[20],zero
3563 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
3564 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm10, %zmm0
3565 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm6[18],zero,zero,zero,zero,ymm6[21],zero,ymm6[19],zero,zero,zero,zero,ymm6[22],zero,ymm6[20]
3566 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero,ymm2[27],zero,ymm2[25]
3567 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm10, %zmm10
3568 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm10 = zmm10[2,3,2,3,6,7,6,7]
3569 ; AVX512F-FAST-NEXT: vporq %zmm10, %zmm0, %zmm0
3570 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm10 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm4[21],zero,ymm4[19],zero,zero,zero,zero,ymm4[22],zero,ymm4[20],zero,zero
3571 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm6[25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero,zero,zero
3572 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm10, %zmm10
3573 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm10 = zmm10[2,3,2,3,6,7,6,7]
3574 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21],zero,ymm2[19],zero,ymm2[21,20,21,22],zero,ymm2[20],zero,ymm2[22,23]
3575 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,ymm5[23],zero,ymm5[21,22,23,26],zero,ymm5[24],zero,ymm5[28,29,26,27]
3576 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm11
3577 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm11 = zmm11[2,3,2,3,6,7,6,7]
3578 ; AVX512F-FAST-NEXT: vporq %zmm10, %zmm11, %zmm10
3579 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm10
3580 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm3[20],zero,ymm3[18],zero,zero,zero,zero,ymm3[21],zero,ymm3[19],zero,zero,zero,zero,ymm3[22]
3581 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm3[25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero
3582 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm0, %zmm0
3583 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm0 = zmm0[2,3,2,3,6,7,6,7]
3584 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,ymm1[18],zero,ymm1[20,21,20,21],zero,ymm1[19],zero,ymm1[19,20,21,22],zero
3585 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm1[23],zero,ymm1[23,24,25,26],zero,ymm1[24],zero,ymm1[30,31]
3586 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm11
3587 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm11 = zmm11[2,3,2,3,6,7,6,7]
3588 ; AVX512F-FAST-NEXT: vporq %zmm0, %zmm11, %zmm0
3589 ; AVX512F-FAST-NEXT: vpshuflw {{.*#+}} ymm11 = ymm13[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
3590 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm12 = [5,5,4,0,5,5,4,0]
3591 ; AVX512F-FAST-NEXT: # ymm12 = mem[0,1,0,1]
3592 ; AVX512F-FAST-NEXT: vpermd %ymm11, %ymm12, %ymm11
3593 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm12 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
3594 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
3595 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm11
3596 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm11
3597 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm11
3598 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u]
3599 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
3600 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm5[30],zero,ymm5[28],zero,zero,zero,zero,ymm5[31],zero,ymm5[29],zero,zero
3601 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,3,2,3]
3602 ; AVX512F-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm5
3603 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm4[30],zero,ymm4[28],zero,zero,zero,zero,ymm4[31],zero,ymm4[29],zero,zero,zero
3604 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
3605 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,30],zero,ymm2[28],zero,ymm2[30,31,30,31],zero,ymm2[29],zero,ymm2[31,28,29]
3606 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
3607 ; AVX512F-FAST-NEXT: vpor %ymm0, %ymm2, %ymm0
3608 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm5, %ymm0
3609 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u]
3610 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
3611 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm1[27],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29]
3612 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
3613 ; AVX512F-FAST-NEXT: vpternlogq $248, %ymm9, %ymm2, %ymm1
3614 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
3615 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
3616 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm2
3617 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm2
3618 ; AVX512F-FAST-NEXT: vmovdqa %ymm2, 192(%rax)
3619 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm7, (%rax)
3620 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm11, 128(%rax)
3621 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm8, 64(%rax)
3622 ; AVX512F-FAST-NEXT: vzeroupper
3623 ; AVX512F-FAST-NEXT: retq
3624 ;
3625 ; AVX512DQ-SLOW-LABEL: store_i8_stride7_vf32:
3626 ; AVX512DQ-SLOW: # %bb.0:
3627 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3628 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
3629 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %ymm2
3630 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %ymm1
3631 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %ymm11
3632 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %ymm3
3633 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %ymm5
3634 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %ymm6
3635 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r10), %ymm4
3636 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,u,u,u,u,26,27,24,25]
3637 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm8 = ymm4[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
3638 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = <u,5,4,u,5,u,4,u,20,21,u,23,u,21,u,23>
3639 ; AVX512DQ-SLOW-NEXT: vpermi2d %zmm7, %zmm8, %zmm9
3640 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm6[20],zero,ymm6[18],zero,zero,zero,zero,ymm6[21],zero,ymm6[19],zero,zero,zero,zero,ymm6[22]
3641 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm6[25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero
3642 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm7, %zmm7
3643 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} zmm7 = zmm7[2,3,2,3,6,7,6,7]
3644 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,ymm5[18],zero,ymm5[20,21,20,21],zero,ymm5[19],zero,ymm5[19,20,21,22],zero
3645 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm5[23],zero,ymm5[23,24,25,26],zero,ymm5[24],zero,ymm5[30,31]
3646 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm8, %zmm8
3647 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} zmm8 = zmm8[2,3,2,3,6,7,6,7]
3648 ; AVX512DQ-SLOW-NEXT: vporq %zmm7, %zmm8, %zmm7
3649 ; AVX512DQ-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm7
3650 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25,u,23,u,u,u,u,26,u,24,u,u,u,u,27,u]
3651 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
3652 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm16 = [18374967954648269055,71777218572844800,18374967954648269055,71777218572844800]
3653 ; AVX512DQ-SLOW-NEXT: # ymm16 = mem[0,1,0,1]
3654 ; AVX512DQ-SLOW-NEXT: vpandq %ymm16, %ymm8, %ymm8
3655 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,ymm11[18,19,20,21],zero,ymm11[19],zero,ymm11[25,26,27,22],zero,ymm11[20],zero
3656 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
3657 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm9, %zmm8
3658 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm3[18],zero,zero,zero,zero,ymm3[21],zero,ymm3[19],zero,zero,zero,zero,ymm3[22],zero,ymm3[20]
3659 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero,ymm2[27],zero,ymm2[25]
3660 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm9, %zmm9
3661 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} zmm9 = zmm9[2,3,2,3,6,7,6,7]
3662 ; AVX512DQ-SLOW-NEXT: vporq %zmm9, %zmm8, %zmm9
3663 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} ymm8 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
3664 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[0,0,1,1,4,4,5,5]
3665 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
3666 ; AVX512DQ-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm17 = [18374966859431673855,18446463693966278655,18374966859431673855,18446463693966278655]
3667 ; AVX512DQ-SLOW-NEXT: # ymm17 = mem[0,1,0,1]
3668 ; AVX512DQ-SLOW-NEXT: vpandq %ymm17, %ymm8, %ymm8
3669 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,ymm11[23],zero,ymm11[21,22,23,26],zero,ymm11[24],zero,ymm11[28,29,26,27]
3670 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm11, %ymm20
3671 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
3672 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm8, %zmm8
3673 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[21],zero,ymm1[19],zero,zero,zero,zero,ymm1[22],zero,ymm1[20],zero,zero
3674 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero,zero,zero
3675 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm10, %zmm10
3676 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} zmm10 = zmm10[2,3,2,3,6,7,6,7]
3677 ; AVX512DQ-SLOW-NEXT: vporq %zmm10, %zmm8, %zmm8
3678 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm8
3679 ; AVX512DQ-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm8
3680 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %xmm11
3681 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm7 = xmm11[u],zero,xmm11[7],zero,xmm11[5,u,u,u],zero,xmm11[8],zero,xmm11[6,u,u,u],zero
3682 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %xmm12
3683 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm12[u,7],zero,xmm12[5],zero,xmm12[u,u,u,8],zero,xmm12[6],zero,xmm12[u,u,u,9]
3684 ; AVX512DQ-SLOW-NEXT: vpor %xmm7, %xmm9, %xmm7
3685 ; AVX512DQ-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
3686 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
3687 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, %xmm7, %zmm9, %zmm7
3688 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} zmm10 = zmm7[0,1,0,1,4,5,4,5]
3689 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %xmm7
3690 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm13 = xmm7[u,u,u],zero,xmm7[7],zero,xmm7[5,u,u,u],zero,xmm7[8],zero,xmm7[6,u,u]
3691 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm9
3692 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm14 = xmm9[u,u,u,7],zero,xmm9[5],zero,xmm9[u,u,u,8],zero,xmm9[6],zero,xmm9[u,u]
3693 ; AVX512DQ-SLOW-NEXT: vpor %xmm13, %xmm14, %xmm13
3694 ; AVX512DQ-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
3695 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
3696 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, %xmm13, %zmm14, %zmm13
3697 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} zmm18 = zmm13[0,1,0,1,4,5,4,5]
3698 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm18
3699 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %xmm13
3700 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm10 = zero,xmm13[4,u,u,u],zero,xmm13[7],zero,xmm13[5,u,u,u],zero,xmm13[8],zero,xmm13[6]
3701 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %xmm14
3702 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm15 = xmm14[4],zero,xmm14[u,u,u,7],zero,xmm14[5],zero,xmm14[u,u,u,8],zero,xmm14[6],zero
3703 ; AVX512DQ-SLOW-NEXT: vpor %xmm10, %xmm15, %xmm10
3704 ; AVX512DQ-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
3705 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm15 = xmm15[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
3706 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $2, %xmm10, %zmm15, %zmm10
3707 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} zmm19 = zmm10[0,1,0,1,4,5,4,5]
3708 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r10), %xmm15
3709 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm10 = xmm15[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
3710 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm15[1,1,0,0,4,5,6,7]
3711 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,2,0]
3712 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm0, %zmm0
3713 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} zmm10 = zmm0[0,0,1,0,4,4,5,4]
3714 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm19, %zmm10
3715 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm10
3716 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm1[14,u,u],zero,zero,zero,zero,ymm1[15,u,u],zero,zero,zero,zero,ymm1[16,u,u],zero,zero,zero,zero,ymm1[17,u,u],zero,zero,zero,zero,ymm1[18]
3717 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm1, %ymm19
3718 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[0,1,14],zero,ymm2[u,u,0,1,14,15],zero,ymm2[u,u,13,2,3,16],zero,ymm2[u,u,28,29,16,17],zero,ymm2[u,u,19,28,29,18],zero
3719 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm2, %ymm18
3720 ; AVX512DQ-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
3721 ; AVX512DQ-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm7[8],xmm9[8],xmm7[9],xmm9[9],xmm7[10],xmm9[10],xmm7[11],xmm9[11],xmm7[12],xmm9[12],xmm7[13],xmm9[13],xmm7[14],xmm9[14],xmm7[15],xmm9[15]
3722 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
3723 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
3724 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
3725 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u],zero,ymm3[14,u,u,u,u,u],zero,ymm3[15,u,u,u,u,u],zero,ymm3[16,u,u,u,u,u],zero,ymm3[17,u,u,u,u,u]
3726 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm20, %ymm2
3727 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm2[u,u,u,u,14],zero,ymm2[u,u,u,u,u,15],zero,ymm2[u,u,u,u,u,16],zero,ymm2[u,u,u,u,u,17],zero,ymm2[u,u,u,u,u]
3728 ; AVX512DQ-SLOW-NEXT: vpor %ymm1, %ymm7, %ymm1
3729 ; AVX512DQ-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm11[8],xmm12[8],xmm11[9],xmm12[9],xmm11[10],xmm12[10],xmm11[11],xmm12[11],xmm11[12],xmm12[12],xmm11[13],xmm12[13],xmm11[14],xmm12[14],xmm11[15],xmm12[15]
3730 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
3731 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
3732 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm7, %zmm1
3733 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
3734 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = zero,ymm5[u,u,u,u,u,14],zero,ymm5[u,u,u,u,u,15],zero,ymm5[u,u,u,u,u,16],zero,ymm5[u,u,u,u,u,17],zero,ymm5[u,u,u]
3735 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm6[13,u,u,u,u,u],zero,ymm6[14,u,u,u,u,u],zero,ymm6[15,u,u,u,u,u],zero,ymm6[16,u,u,u,u,u],zero,ymm6[17,u,u,u]
3736 ; AVX512DQ-SLOW-NEXT: vpor %ymm0, %ymm7, %ymm0
3737 ; AVX512DQ-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
3738 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
3739 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
3740 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm7, %zmm0
3741 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm15[0,1,2,3,4,5,5,6]
3742 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
3743 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
3744 ; AVX512DQ-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
3745 ; AVX512DQ-SLOW-NEXT: vpandn %ymm7, %ymm9, %ymm7
3746 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = zero,ymm4[13,u,u,u,u],zero,zero,ymm4[14,u,u,u,u],zero,zero,ymm4[15,u,u,u,u],zero,zero,ymm4[16,u,u,u,u],zero,zero,ymm4[17,u,u]
3747 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm7, %zmm7
3748 ; AVX512DQ-SLOW-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm7
3749 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm7
3750 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u]
3751 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
3752 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm5[27],zero,zero,zero,zero,ymm5[30],zero,ymm5[28],zero,zero,zero,zero,ymm5[31],zero,ymm5[29]
3753 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
3754 ; AVX512DQ-SLOW-NEXT: vpternlogq $248, %ymm16, %ymm0, %ymm1
3755 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
3756 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
3757 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm0
3758 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u]
3759 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
3760 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm2[30],zero,ymm2[28],zero,zero,zero,zero,ymm2[31],zero,ymm2[29],zero,zero
3761 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
3762 ; AVX512DQ-SLOW-NEXT: vpternlogq $248, %ymm17, %ymm1, %ymm2
3763 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm19, %ymm1
3764 ; AVX512DQ-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero
3765 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
3766 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %ymm18, %ymm3
3767 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
3768 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,2,3,3,6,6,7,7]
3769 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
3770 ; AVX512DQ-SLOW-NEXT: vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm3
3771 ; AVX512DQ-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm3
3772 ; AVX512DQ-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm3
3773 ; AVX512DQ-SLOW-NEXT: vmovdqa %ymm3, 192(%rax)
3774 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, (%rax)
3775 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm8, 128(%rax)
3776 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm7, 64(%rax)
3777 ; AVX512DQ-SLOW-NEXT: vzeroupper
3778 ; AVX512DQ-SLOW-NEXT: retq
3779 ;
3780 ; AVX512BW-SLOW-LABEL: store_i8_stride7_vf32:
3781 ; AVX512BW-SLOW: # %bb.0:
3782 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3783 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
3784 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdi), %ymm4
3785 ; AVX512BW-SLOW-NEXT: vmovdqa (%rsi), %ymm2
3786 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdx), %ymm1
3787 ; AVX512BW-SLOW-NEXT: vmovdqa (%rcx), %ymm3
3788 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm1[0,1,0,1,14],zero,ymm1[14,15,0,1,14,15],zero,ymm1[13,14,15,16,17,16],zero,ymm1[30,31,30,31,16,17],zero,ymm1[31,28,29,30,31]
3789 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,zero,zero,zero,ymm3[14],zero,zero,zero,zero,zero,zero,ymm3[15],zero,zero,zero,zero,zero,zero,ymm3[16],zero,zero,zero,zero,zero,zero,ymm3[17],zero,zero,zero,zero,zero
3790 ; AVX512BW-SLOW-NEXT: vpor %ymm0, %ymm5, %ymm0
3791 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdi), %xmm8
3792 ; AVX512BW-SLOW-NEXT: vmovdqa (%rsi), %xmm10
3793 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm10[8],xmm8[8],xmm10[9],xmm8[9],xmm10[10],xmm8[10],xmm10[11],xmm8[11],xmm10[12],xmm8[12],xmm10[13],xmm8[13],xmm10[14],xmm8[14],xmm10[15],xmm8[15]
3794 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
3795 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
3796 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm5, %zmm5
3797 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm4[0,1,14],zero,ymm4[12,13,0,1,14,15],zero,ymm4[3,12,13,2,3,16],zero,ymm4[30,31,28,29,16,17],zero,ymm4[31,18,19,28,29,18],zero
3798 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,ymm2[14],zero,zero,zero,zero,zero,zero,ymm2[15],zero,zero,zero,zero,zero,zero,ymm2[16],zero,zero,zero,zero,zero,zero,ymm2[17],zero,zero,zero,zero,zero,zero,ymm2[18]
3799 ; AVX512BW-SLOW-NEXT: vpor %ymm0, %ymm6, %ymm0
3800 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdx), %xmm12
3801 ; AVX512BW-SLOW-NEXT: vmovdqa (%rcx), %xmm14
3802 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm14[8],xmm12[8],xmm14[9],xmm12[9],xmm14[10],xmm12[10],xmm14[11],xmm12[11],xmm14[12],xmm12[12],xmm14[13],xmm12[13],xmm14[14],xmm12[14],xmm14[15],xmm12[15]
3803 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
3804 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
3805 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm6, %zmm0
3806 ; AVX512BW-SLOW-NEXT: movabsq $435749858791416001, %rcx # imm = 0x60C1830183060C1
3807 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k1
3808 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm5, %zmm0 {%k1}
3809 ; AVX512BW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = mem[0,1,2,3,0,1,2,3]
3810 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm5[13],zero,zero,zero,zero,zero,zero,ymm5[14],zero,zero,zero,zero,zero,zero,ymm5[15],zero,zero,zero,zero,zero,zero,ymm5[16],zero,zero,zero,zero,zero,zero,ymm5[17],zero,zero,zero
3811 ; AVX512BW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = mem[0,1,2,3,0,1,2,3]
3812 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = zero,zero,zero,zero,zero,zero,ymm6[14],zero,zero,zero,zero,zero,zero,ymm6[15],zero,zero,zero,zero,zero,zero,ymm6[16],zero,zero,zero,zero,zero,zero,ymm6[17],zero,zero,zero,zero
3813 ; AVX512BW-SLOW-NEXT: vpor %ymm7, %ymm9, %ymm7
3814 ; AVX512BW-SLOW-NEXT: vmovdqa (%r9), %xmm11
3815 ; AVX512BW-SLOW-NEXT: vmovdqa (%r8), %xmm13
3816 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm13[8],xmm11[8],xmm13[9],xmm11[9],xmm13[10],xmm11[10],xmm13[11],xmm11[11],xmm13[12],xmm11[12],xmm13[13],xmm11[13],xmm13[14],xmm11[14],xmm13[15],xmm11[15]
3817 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
3818 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,1,0,1]
3819 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm9, %zmm9
3820 ; AVX512BW-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = mem[0,1,2,3,0,1,2,3]
3821 ; AVX512BW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6]
3822 ; AVX512BW-SLOW-NEXT: # ymm15 = mem[0,1,0,1]
3823 ; AVX512BW-SLOW-NEXT: vpermw %ymm7, %ymm15, %ymm15
3824 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm16 = ymm7[12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
3825 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm15, %zmm15
3826 ; AVX512BW-SLOW-NEXT: movabsq $2323999253380730912, %rcx # imm = 0x2040810204081020
3827 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k1
3828 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm15, %zmm9 {%k1}
3829 ; AVX512BW-SLOW-NEXT: movabsq $4066998693416279096, %rcx # imm = 0x3870E1C3870E1C38
3830 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k1
3831 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm9, %zmm0 {%k1}
3832 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm9
3833 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} zmm9 = zmm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,zmm9[18,19,20,21],zero,zmm9[19],zero,zmm9[25,26,27,22],zero,zmm9[20],zero,zmm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,57],zero,zmm9[55],zero,zero,zero,zero,zmm9[58],zero,zmm9[56],zero,zero,zero,zero,zmm9[59],zero
3834 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} zmm9 = zmm9[2,3,2,3,6,7,6,7]
3835 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm15
3836 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} zmm15 = zmm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm15[18],zero,zero,zero,zero,zmm15[21],zero,zmm15[19],zero,zero,zero,zero,zmm15[22],zero,zmm15[20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm15[55],zero,zero,zero,zero,zmm15[58],zero,zmm15[56],zero,zero,zero,zero,zmm15[59],zero,zmm15[57]
3837 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} zmm15 = zmm15[2,3,2,3,6,7,6,7]
3838 ; AVX512BW-SLOW-NEXT: vporq %zmm9, %zmm15, %zmm9
3839 ; AVX512BW-SLOW-NEXT: vpshuflw {{.*#+}} ymm15 = ymm4[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
3840 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm15[0,0,1,1,4,4,5,5]
3841 ; AVX512BW-SLOW-NEXT: movl $676341840, %ecx # imm = 0x28502850
3842 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
3843 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm15 {%k1} = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,21,u,19,u,u,u,u,22,u,20,u,u]
3844 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,3,2,3]
3845 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm16 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,ymm1[23],zero,ymm1[21,22,23,26],zero,ymm1[24],zero,ymm1[28,29,26,27]
3846 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm16 = ymm16[2,3,2,3]
3847 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm17 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero,zero,zero
3848 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm17 = ymm17[2,3,2,3]
3849 ; AVX512BW-SLOW-NEXT: vporq %ymm16, %ymm17, %ymm16
3850 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm15, %zmm15
3851 ; AVX512BW-SLOW-NEXT: movabsq $-9005497107459067808, %rcx # imm = 0x83060C180C183060
3852 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k2
3853 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm15, %zmm9 {%k2}
3854 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm15 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,27,29,28,27,28,29,29,28,27,29,28,27,28,29,29,28]
3855 ; AVX512BW-SLOW-NEXT: vpermw %zmm7, %zmm15, %zmm15
3856 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} zmm16 = zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,zmm6[18],zero,zmm6[20,21,20,21],zero,zmm6[19],zero,zmm6[19,20,21,22],zero,zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57,56,57],zero,zmm6[55],zero,zmm6[55,56,57,58],zero,zmm6[56],zero,zmm6[62,63]
3857 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} zmm16 = zmm16[2,3,2,3,6,7,6,7]
3858 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} zmm17 = zmm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm5[20],zero,zmm5[18],zero,zero,zero,zero,zmm5[21],zero,zmm5[19],zero,zero,zero,zero,zmm5[22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm5[57],zero,zmm5[55],zero,zero,zero,zero,zmm5[58],zero,zmm5[56],zero,zero
3859 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} zmm17 = zmm17[2,3,2,3,6,7,6,7]
3860 ; AVX512BW-SLOW-NEXT: vporq %zmm16, %zmm17, %zmm16
3861 ; AVX512BW-SLOW-NEXT: movabsq $1161999626690365456, %rcx # imm = 0x1020408102040810
3862 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k2
3863 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm15, %zmm16 {%k2}
3864 ; AVX512BW-SLOW-NEXT: movabsq $2033499346708139548, %rcx # imm = 0x1C3870E1C3870E1C
3865 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k2
3866 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm16, %zmm9 {%k2}
3867 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm15 = xmm14[u,u,u],zero,xmm14[7],zero,xmm14[5,u,u,u],zero,xmm14[8],zero,xmm14[6,u,u]
3868 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm16 = xmm12[u,u,u,7],zero,xmm12[5],zero,xmm12[u,u,u,8],zero,xmm12[6],zero,xmm12[u,u]
3869 ; AVX512BW-SLOW-NEXT: vporq %xmm15, %xmm16, %xmm15
3870 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm12[0],xmm14[0],xmm12[1],xmm14[1],xmm12[2],xmm14[2],xmm12[3],xmm14[3],xmm12[4],xmm14[4],xmm12[5],xmm14[5],xmm12[6],xmm14[6],xmm12[7],xmm14[7]
3871 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm12 = xmm12[4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
3872 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm15, %zmm12, %zmm12
3873 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} zmm12 = zmm12[0,1,0,1,4,5,4,5]
3874 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm14 = xmm10[u],zero,xmm10[7],zero,xmm10[5,u,u,u],zero,xmm10[8],zero,xmm10[6,u,u,u],zero
3875 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm15 = xmm8[u,7],zero,xmm8[5],zero,xmm8[u,u,u,8],zero,xmm8[6],zero,xmm8[u,u,u,9]
3876 ; AVX512BW-SLOW-NEXT: vpor %xmm14, %xmm15, %xmm14
3877 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm8[0],xmm10[0],xmm8[1],xmm10[1],xmm8[2],xmm10[2],xmm8[3],xmm10[3],xmm8[4],xmm10[4],xmm8[5],xmm10[5],xmm8[6],xmm10[6],xmm8[7],xmm10[7]
3878 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
3879 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm14, %zmm8, %zmm8
3880 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} zmm8 = zmm8[0,1,0,1,4,5,4,5]
3881 ; AVX512BW-SLOW-NEXT: movabsq $871499720017774092, %rcx # imm = 0xC183060C183060C
3882 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k2
3883 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm12, %zmm8 {%k2}
3884 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm10 = zero,xmm11[4,u,u,u],zero,xmm11[7],zero,xmm11[5,u,u,u],zero,xmm11[8],zero,xmm11[6]
3885 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm12 = xmm13[4],zero,xmm13[u,u,u,7],zero,xmm13[5],zero,xmm13[u,u,u,8],zero,xmm13[6],zero
3886 ; AVX512BW-SLOW-NEXT: vpor %xmm10, %xmm12, %xmm10
3887 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3],xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
3888 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} xmm11 = xmm11[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
3889 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm10, %zmm11, %zmm10
3890 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} zmm10 = zmm10[0,1,0,1,4,5,4,5]
3891 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm11 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20]
3892 ; AVX512BW-SLOW-NEXT: vpermw %zmm7, %zmm11, %zmm11
3893 ; AVX512BW-SLOW-NEXT: movabsq $4647998506761461824, %rcx # imm = 0x4081020408102040
3894 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k2
3895 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm11, %zmm10 {%k2}
3896 ; AVX512BW-SLOW-NEXT: movabsq $8133997386832558192, %rcx # imm = 0x70E1C3870E1C3870
3897 ; AVX512BW-SLOW-NEXT: kmovq %rcx, %k2
3898 ; AVX512BW-SLOW-NEXT: vmovdqu8 %zmm10, %zmm8 {%k2}
3899 ; AVX512BW-SLOW-NEXT: vpshufhw {{.*#+}} ymm4 = ymm4[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
3900 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[2,2,3,3,6,6,7,7]
3901 ; AVX512BW-SLOW-NEXT: movl $338170920, %ecx # imm = 0x14281428
3902 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k2
3903 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm4 {%k2} = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u,u,u]
3904 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm4[2,3,2,3]
3905 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u]
3906 ; AVX512BW-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm1[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
3907 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,3,3,4,6,7,7]
3908 ; AVX512BW-SLOW-NEXT: vmovdqu8 %ymm1, %ymm3 {%k1}
3909 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm3[2,3,2,3]
3910 ; AVX512BW-SLOW-NEXT: movl $101455920, %ecx # imm = 0x60C1830
3911 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
3912 ; AVX512BW-SLOW-NEXT: vmovdqu8 %ymm2, %ymm1 {%k1}
3913 ; AVX512BW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [14,13,14,15,15,14,14,15,14,13,14,15,15,14,14,15]
3914 ; AVX512BW-SLOW-NEXT: # ymm2 = mem[0,1,0,1]
3915 ; AVX512BW-SLOW-NEXT: vpermw %ymm7, %ymm2, %ymm2
3916 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm5[27],zero,zero,zero,zero,ymm5[30],zero,ymm5[28],zero,zero,zero,zero,ymm5[31],zero
3917 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
3918 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm6[27],zero,zero,zero,zero,ymm6[30],zero,ymm6[28],zero,zero,zero,zero,ymm6[31],zero,ymm6[29]
3919 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
3920 ; AVX512BW-SLOW-NEXT: vpor %ymm3, %ymm4, %ymm3
3921 ; AVX512BW-SLOW-NEXT: movl $-2130574328, %ecx # imm = 0x81020408
3922 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
3923 ; AVX512BW-SLOW-NEXT: vmovdqu8 %ymm2, %ymm3 {%k1}
3924 ; AVX512BW-SLOW-NEXT: movl $-507279602, %ecx # imm = 0xE1C3870E
3925 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
3926 ; AVX512BW-SLOW-NEXT: vmovdqu8 %ymm3, %ymm1 {%k1}
3927 ; AVX512BW-SLOW-NEXT: vmovdqa %ymm1, 192(%rax)
3928 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm8, (%rax)
3929 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm9, 128(%rax)
3930 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm0, 64(%rax)
3931 ; AVX512BW-SLOW-NEXT: vzeroupper
3932 ; AVX512BW-SLOW-NEXT: retq
3933 ;
3934 ; AVX512BW-FAST-LABEL: store_i8_stride7_vf32:
3935 ; AVX512BW-FAST: # %bb.0:
3936 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3937 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
3938 ; AVX512BW-FAST-NEXT: vmovdqa (%rdi), %ymm4
3939 ; AVX512BW-FAST-NEXT: vmovdqa (%rsi), %ymm3
3940 ; AVX512BW-FAST-NEXT: vmovdqa (%rdx), %ymm1
3941 ; AVX512BW-FAST-NEXT: vmovdqa (%rcx), %ymm2
3942 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm1[0,1,0,1,14],zero,ymm1[14,15,0,1,14,15],zero,ymm1[13,14,15,16,17,16],zero,ymm1[30,31,30,31,16,17],zero,ymm1[31,28,29,30,31]
3943 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,zero,zero,zero,ymm2[14],zero,zero,zero,zero,zero,zero,ymm2[15],zero,zero,zero,zero,zero,zero,ymm2[16],zero,zero,zero,zero,zero,zero,ymm2[17],zero,zero,zero,zero,zero
3944 ; AVX512BW-FAST-NEXT: vpor %ymm0, %ymm5, %ymm0
3945 ; AVX512BW-FAST-NEXT: vmovdqa (%rdi), %xmm8
3946 ; AVX512BW-FAST-NEXT: vmovdqa (%rsi), %xmm9
3947 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm9[8],xmm8[8],xmm9[9],xmm8[9],xmm9[10],xmm8[10],xmm9[11],xmm8[11],xmm9[12],xmm8[12],xmm9[13],xmm8[13],xmm9[14],xmm8[14],xmm9[15],xmm8[15]
3948 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
3949 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
3950 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm5, %zmm5
3951 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm4[0,1,14],zero,ymm4[12,13,0,1,14,15],zero,ymm4[3,12,13,2,3,16],zero,ymm4[30,31,28,29,16,17],zero,ymm4[31,18,19,28,29,18],zero
3952 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,ymm3[14],zero,zero,zero,zero,zero,zero,ymm3[15],zero,zero,zero,zero,zero,zero,ymm3[16],zero,zero,zero,zero,zero,zero,ymm3[17],zero,zero,zero,zero,zero,zero,ymm3[18]
3953 ; AVX512BW-FAST-NEXT: vpor %ymm0, %ymm6, %ymm0
3954 ; AVX512BW-FAST-NEXT: vmovdqa (%rdx), %xmm12
3955 ; AVX512BW-FAST-NEXT: vmovdqa (%rcx), %xmm14
3956 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm14[8],xmm12[8],xmm14[9],xmm12[9],xmm14[10],xmm12[10],xmm14[11],xmm12[11],xmm14[12],xmm12[12],xmm14[13],xmm12[13],xmm14[14],xmm12[14],xmm14[15],xmm12[15]
3957 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
3958 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
3959 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm6, %zmm0
3960 ; AVX512BW-FAST-NEXT: movabsq $435749858791416001, %rcx # imm = 0x60C1830183060C1
3961 ; AVX512BW-FAST-NEXT: kmovq %rcx, %k1
3962 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm5, %zmm0 {%k1}
3963 ; AVX512BW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = mem[0,1,2,3,0,1,2,3]
3964 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm5[13],zero,zero,zero,zero,zero,zero,ymm5[14],zero,zero,zero,zero,zero,zero,ymm5[15],zero,zero,zero,zero,zero,zero,ymm5[16],zero,zero,zero,zero,zero,zero,ymm5[17],zero,zero,zero
3965 ; AVX512BW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = mem[0,1,2,3,0,1,2,3]
3966 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm10 = zero,zero,zero,zero,zero,zero,ymm6[14],zero,zero,zero,zero,zero,zero,ymm6[15],zero,zero,zero,zero,zero,zero,ymm6[16],zero,zero,zero,zero,zero,zero,ymm6[17],zero,zero,zero,zero
3967 ; AVX512BW-FAST-NEXT: vpor %ymm7, %ymm10, %ymm7
3968 ; AVX512BW-FAST-NEXT: vmovdqa (%r9), %xmm11
3969 ; AVX512BW-FAST-NEXT: vmovdqa (%r8), %xmm13
3970 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm13[8],xmm11[8],xmm13[9],xmm11[9],xmm13[10],xmm11[10],xmm13[11],xmm11[11],xmm13[12],xmm11[12],xmm13[13],xmm11[13],xmm13[14],xmm11[14],xmm13[15],xmm11[15]
3971 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
3972 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,1,0,1]
3973 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm10, %zmm10
3974 ; AVX512BW-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = mem[0,1,2,3,0,1,2,3]
3975 ; AVX512BW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6]
3976 ; AVX512BW-FAST-NEXT: # ymm15 = mem[0,1,0,1]
3977 ; AVX512BW-FAST-NEXT: vpermw %ymm7, %ymm15, %ymm15
3978 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm16 = ymm7[12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
3979 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm16, %zmm15, %zmm15
3980 ; AVX512BW-FAST-NEXT: movabsq $2323999253380730912, %rcx # imm = 0x2040810204081020
3981 ; AVX512BW-FAST-NEXT: kmovq %rcx, %k1
3982 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm15, %zmm10 {%k1}
3983 ; AVX512BW-FAST-NEXT: movabsq $4066998693416279096, %rcx # imm = 0x3870E1C3870E1C38
3984 ; AVX512BW-FAST-NEXT: kmovq %rcx, %k1
3985 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm10, %zmm0 {%k1}
3986 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm4, %zmm10
3987 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm10 = zmm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21],zero,zmm10[19],zero,zmm10[21,20,21,22],zero,zmm10[20],zero,zmm10[22,23,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57],zero,zmm10[55],zero,zmm10[53,54,55,58],zero,zmm10[56],zero,zmm10[60,61,58,59]
3988 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm10 = zmm10[2,3,2,3,6,7,6,7]
3989 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm15
3990 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm15 = zmm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm15[21],zero,zmm15[19],zero,zero,zero,zero,zmm15[22],zero,zmm15[20],zero,zero,zmm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zmm15[57],zero,zmm15[55],zero,zero,zero,zero,zmm15[58],zero,zmm15[56],zero,zero,zero,zero
3991 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm15 = zmm15[2,3,2,3,6,7,6,7]
3992 ; AVX512BW-FAST-NEXT: vporq %zmm10, %zmm15, %zmm15
3993 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm1, %zmm10
3994 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm10 = zmm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,zmm10[18,19,20,21],zero,zmm10[19],zero,zmm10[25,26,27,22],zero,zmm10[20],zero,zmm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,57],zero,zmm10[55],zero,zero,zero,zero,zmm10[58],zero,zmm10[56],zero,zero,zero,zero,zmm10[59],zero
3995 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm10 = zmm10[2,3,2,3,6,7,6,7]
3996 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm2, %zmm16
3997 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm16 = zmm16[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm16[18],zero,zero,zero,zero,zmm16[21],zero,zmm16[19],zero,zero,zero,zero,zmm16[22],zero,zmm16[20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm16[55],zero,zero,zero,zero,zmm16[58],zero,zmm16[56],zero,zero,zero,zero,zmm16[59],zero,zmm16[57]
3998 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm16 = zmm16[2,3,2,3,6,7,6,7]
3999 ; AVX512BW-FAST-NEXT: vporq %zmm10, %zmm16, %zmm10
4000 ; AVX512BW-FAST-NEXT: movabsq $-9005497107459067808, %rcx # imm = 0x83060C180C183060
4001 ; AVX512BW-FAST-NEXT: kmovq %rcx, %k1
4002 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm15, %zmm10 {%k1}
4003 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm15 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,27,29,28,27,28,29,29,28,27,29,28,27,28,29,29,28]
4004 ; AVX512BW-FAST-NEXT: vpermw %zmm7, %zmm15, %zmm15
4005 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm16 = zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,zmm6[18],zero,zmm6[20,21,20,21],zero,zmm6[19],zero,zmm6[19,20,21,22],zero,zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57,56,57],zero,zmm6[55],zero,zmm6[55,56,57,58],zero,zmm6[56],zero,zmm6[62,63]
4006 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm16 = zmm16[2,3,2,3,6,7,6,7]
4007 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm17 = zmm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm5[20],zero,zmm5[18],zero,zero,zero,zero,zmm5[21],zero,zmm5[19],zero,zero,zero,zero,zmm5[22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm5[57],zero,zmm5[55],zero,zero,zero,zero,zmm5[58],zero,zmm5[56],zero,zero
4008 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm17 = zmm17[2,3,2,3,6,7,6,7]
4009 ; AVX512BW-FAST-NEXT: vporq %zmm16, %zmm17, %zmm16
4010 ; AVX512BW-FAST-NEXT: movabsq $1161999626690365456, %rcx # imm = 0x1020408102040810
4011 ; AVX512BW-FAST-NEXT: kmovq %rcx, %k1
4012 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm15, %zmm16 {%k1}
4013 ; AVX512BW-FAST-NEXT: movabsq $2033499346708139548, %rcx # imm = 0x1C3870E1C3870E1C
4014 ; AVX512BW-FAST-NEXT: kmovq %rcx, %k1
4015 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm16, %zmm10 {%k1}
4016 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm15 = xmm14[u,u,u],zero,xmm14[7],zero,xmm14[5,u,u,u],zero,xmm14[8],zero,xmm14[6,u,u]
4017 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm16 = xmm12[u,u,u,7],zero,xmm12[5],zero,xmm12[u,u,u,8],zero,xmm12[6],zero,xmm12[u,u]
4018 ; AVX512BW-FAST-NEXT: vporq %xmm15, %xmm16, %xmm15
4019 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm12[0],xmm14[0],xmm12[1],xmm14[1],xmm12[2],xmm14[2],xmm12[3],xmm14[3],xmm12[4],xmm14[4],xmm12[5],xmm14[5],xmm12[6],xmm14[6],xmm12[7],xmm14[7]
4020 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm12 = xmm12[4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
4021 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm15, %zmm12, %zmm12
4022 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm12 = zmm12[0,1,0,1,4,5,4,5]
4023 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm14 = xmm9[u],zero,xmm9[7],zero,xmm9[5,u,u,u],zero,xmm9[8],zero,xmm9[6,u,u,u],zero
4024 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm15 = xmm8[u,7],zero,xmm8[5],zero,xmm8[u,u,u,8],zero,xmm8[6],zero,xmm8[u,u,u,9]
4025 ; AVX512BW-FAST-NEXT: vpor %xmm14, %xmm15, %xmm14
4026 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3],xmm8[4],xmm9[4],xmm8[5],xmm9[5],xmm8[6],xmm9[6],xmm8[7],xmm9[7]
4027 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
4028 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm14, %zmm8, %zmm8
4029 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm8 = zmm8[0,1,0,1,4,5,4,5]
4030 ; AVX512BW-FAST-NEXT: movabsq $871499720017774092, %rcx # imm = 0xC183060C183060C
4031 ; AVX512BW-FAST-NEXT: kmovq %rcx, %k1
4032 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm12, %zmm8 {%k1}
4033 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm9 = zero,xmm11[4,u,u,u],zero,xmm11[7],zero,xmm11[5,u,u,u],zero,xmm11[8],zero,xmm11[6]
4034 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm12 = xmm13[4],zero,xmm13[u,u,u,7],zero,xmm13[5],zero,xmm13[u,u,u,8],zero,xmm13[6],zero
4035 ; AVX512BW-FAST-NEXT: vpor %xmm9, %xmm12, %xmm9
4036 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3],xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
4037 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm11 = xmm11[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
4038 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm9, %zmm11, %zmm9
4039 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm9 = zmm9[0,1,0,1,4,5,4,5]
4040 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm11 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20]
4041 ; AVX512BW-FAST-NEXT: vpermw %zmm7, %zmm11, %zmm11
4042 ; AVX512BW-FAST-NEXT: movabsq $4647998506761461824, %rcx # imm = 0x4081020408102040
4043 ; AVX512BW-FAST-NEXT: kmovq %rcx, %k1
4044 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm11, %zmm9 {%k1}
4045 ; AVX512BW-FAST-NEXT: movabsq $8133997386832558192, %rcx # imm = 0x70E1C3870E1C3870
4046 ; AVX512BW-FAST-NEXT: kmovq %rcx, %k1
4047 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm9, %zmm8 {%k1}
4048 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,30],zero,ymm4[28],zero,ymm4[30,31,30,31],zero,ymm4[29],zero,ymm4[31,28,29]
4049 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
4050 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm3[30],zero,ymm3[28],zero,zero,zero,zero,ymm3[31],zero,ymm3[29],zero,zero,zero
4051 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
4052 ; AVX512BW-FAST-NEXT: vpor %ymm4, %ymm3, %ymm3
4053 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm2[30],zero,ymm2[28],zero,zero,zero,zero,ymm2[31],zero,ymm2[29],zero
4054 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
4055 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero
4056 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
4057 ; AVX512BW-FAST-NEXT: vpor %ymm2, %ymm1, %ymm1
4058 ; AVX512BW-FAST-NEXT: movl $101455920, %ecx # imm = 0x60C1830
4059 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
4060 ; AVX512BW-FAST-NEXT: vmovdqu8 %ymm3, %ymm1 {%k1}
4061 ; AVX512BW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [14,13,14,15,15,14,14,15,14,13,14,15,15,14,14,15]
4062 ; AVX512BW-FAST-NEXT: # ymm2 = mem[0,1,0,1]
4063 ; AVX512BW-FAST-NEXT: vpermw %ymm7, %ymm2, %ymm2
4064 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm5[27],zero,zero,zero,zero,ymm5[30],zero,ymm5[28],zero,zero,zero,zero,ymm5[31],zero
4065 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
4066 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm6[27],zero,zero,zero,zero,ymm6[30],zero,ymm6[28],zero,zero,zero,zero,ymm6[31],zero,ymm6[29]
4067 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
4068 ; AVX512BW-FAST-NEXT: vpor %ymm3, %ymm4, %ymm3
4069 ; AVX512BW-FAST-NEXT: movl $-2130574328, %ecx # imm = 0x81020408
4070 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
4071 ; AVX512BW-FAST-NEXT: vmovdqu8 %ymm2, %ymm3 {%k1}
4072 ; AVX512BW-FAST-NEXT: movl $-507279602, %ecx # imm = 0xE1C3870E
4073 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
4074 ; AVX512BW-FAST-NEXT: vmovdqu8 %ymm3, %ymm1 {%k1}
4075 ; AVX512BW-FAST-NEXT: vmovdqa %ymm1, 192(%rax)
4076 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm8, (%rax)
4077 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm10, 128(%rax)
4078 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm0, 64(%rax)
4079 ; AVX512BW-FAST-NEXT: vzeroupper
4080 ; AVX512BW-FAST-NEXT: retq
4081 %in.vec0 = load <32 x i8>, ptr %in.vecptr0, align 64
4082 %in.vec1 = load <32 x i8>, ptr %in.vecptr1, align 64
4083 %in.vec2 = load <32 x i8>, ptr %in.vecptr2, align 64
4084 %in.vec3 = load <32 x i8>, ptr %in.vecptr3, align 64
4085 %in.vec4 = load <32 x i8>, ptr %in.vecptr4, align 64
4086 %in.vec5 = load <32 x i8>, ptr %in.vecptr5, align 64
4087 %in.vec6 = load <32 x i8>, ptr %in.vecptr6, align 64
4088 %1 = shufflevector <32 x i8> %in.vec0, <32 x i8> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
4089 %2 = shufflevector <32 x i8> %in.vec2, <32 x i8> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
4090 %3 = shufflevector <32 x i8> %in.vec4, <32 x i8> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
4091 %4 = shufflevector <64 x i8> %1, <64 x i8> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
4092 %5 = shufflevector <32 x i8> %in.vec6, <32 x i8> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
4093 %6 = shufflevector <64 x i8> %3, <64 x i8> %5, <96 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95>
4094 %7 = shufflevector <96 x i8> %6, <96 x i8> poison, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
4095 %8 = shufflevector <128 x i8> %4, <128 x i8> %7, <224 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223>
4096 %interleaved.vec = shufflevector <224 x i8> %8, <224 x i8> poison, <224 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 192, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 193, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 194, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 195, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 196, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 197, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 198, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 199, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 200, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 201, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 202, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 203, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 204, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 205, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 206, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 207, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 208, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 209, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 210, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 211, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 212, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 213, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 214, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 215, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 216, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 217, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 218, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 219, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 220, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 221, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 222, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191, i32 223>
4097 store <224 x i8> %interleaved.vec, ptr %out.vec, align 64
4098 ret void
4099 }
4101 define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %out.vec) nounwind {
4102 ; SSE-LABEL: store_i8_stride7_vf64:
4103 ; SSE: # %bb.0:
4104 ; SSE-NEXT: subq $648, %rsp # imm = 0x288
4105 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4106 ; SSE-NEXT: movdqa 48(%rdi), %xmm14
4107 ; SSE-NEXT: movdqa 48(%rsi), %xmm2
4108 ; SSE-NEXT: movdqa 48(%rdx), %xmm3
4109 ; SSE-NEXT: movdqa 48(%rcx), %xmm10
4110 ; SSE-NEXT: movdqa 48(%r8), %xmm5
4111 ; SSE-NEXT: movdqa 48(%r9), %xmm8
4112 ; SSE-NEXT: movdqa 48(%rax), %xmm13
4113 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm14[3,3,3,3,4,5,6,7]
4114 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
4115 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
4116 ; SSE-NEXT: pand %xmm6, %xmm0
4117 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[2,1,2,3]
4118 ; SSE-NEXT: movdqa %xmm2, %xmm11
4119 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4120 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4121 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,0,3]
4122 ; SSE-NEXT: movdqa %xmm6, %xmm2
4123 ; SSE-NEXT: pandn %xmm1, %xmm2
4124 ; SSE-NEXT: por %xmm0, %xmm2
4125 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255]
4126 ; SSE-NEXT: pand %xmm12, %xmm2
4127 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm3[3,3,3,3,4,5,6,7]
4128 ; SSE-NEXT: movdqa %xmm3, %xmm6
4129 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
4130 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255]
4131 ; SSE-NEXT: pand %xmm1, %xmm0
4132 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm10[2,1,2,3]
4133 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4134 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4135 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,3,0,3]
4136 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[3,3,3,3,4,5,6,7]
4137 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
4138 ; SSE-NEXT: movdqa %xmm1, %xmm4
4139 ; SSE-NEXT: pandn %xmm3, %xmm4
4140 ; SSE-NEXT: por %xmm0, %xmm4
4141 ; SSE-NEXT: movdqa %xmm12, %xmm0
4142 ; SSE-NEXT: pandn %xmm4, %xmm0
4143 ; SSE-NEXT: por %xmm2, %xmm0
4144 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
4145 ; SSE-NEXT: pand %xmm1, %xmm0
4146 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm5[3,3,3,3,4,5,6,7]
4147 ; SSE-NEXT: movdqa %xmm5, %xmm9
4148 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,2,2]
4149 ; SSE-NEXT: movdqa %xmm1, %xmm3
4150 ; SSE-NEXT: movdqa %xmm1, %xmm5
4151 ; SSE-NEXT: pandn %xmm2, %xmm3
4152 ; SSE-NEXT: por %xmm0, %xmm3
4153 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255]
4154 ; SSE-NEXT: pand %xmm7, %xmm3
4155 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm8[2,1,2,3]
4156 ; SSE-NEXT: movdqa %xmm8, %xmm1
4157 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4158 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4159 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,3,2,0]
4160 ; SSE-NEXT: movdqa %xmm7, %xmm4
4161 ; SSE-NEXT: pandn %xmm0, %xmm4
4162 ; SSE-NEXT: por %xmm3, %xmm4
4163 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255]
4164 ; SSE-NEXT: pand %xmm2, %xmm4
4165 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[3,3,3,3,4,5,6,7]
4166 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
4167 ; SSE-NEXT: movdqa %xmm2, %xmm3
4168 ; SSE-NEXT: pandn %xmm0, %xmm3
4169 ; SSE-NEXT: por %xmm4, %xmm3
4170 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4171 ; SSE-NEXT: movdqa %xmm11, %xmm0
4172 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm11[8],xmm0[9],xmm11[9],xmm0[10],xmm11[10],xmm0[11],xmm11[11],xmm0[12],xmm11[12],xmm0[13],xmm11[13],xmm0[14],xmm11[14],xmm0[15],xmm11[15]
4173 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm0[0,1,2,3,4,5,5,7]
4174 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
4175 ; SSE-NEXT: movdqa %xmm5, %xmm4
4176 ; SSE-NEXT: pandn %xmm3, %xmm5
4177 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm14[0,1,2,3,6,6,6,6]
4178 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,2,2,2]
4179 ; SSE-NEXT: pand %xmm4, %xmm3
4180 ; SSE-NEXT: por %xmm3, %xmm5
4181 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
4182 ; SSE-NEXT: movdqa %xmm4, %xmm3
4183 ; SSE-NEXT: pandn %xmm5, %xmm3
4184 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm6[0,1,2,3,6,6,6,6]
4185 ; SSE-NEXT: movdqa %xmm6, %xmm15
4186 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[2,2,2,2]
4187 ; SSE-NEXT: movdqa %xmm7, %xmm6
4188 ; SSE-NEXT: pandn %xmm5, %xmm6
4189 ; SSE-NEXT: movdqa %xmm10, %xmm5
4190 ; SSE-NEXT: punpckhbw {{.*#+}} xmm5 = xmm5[8],xmm10[8],xmm5[9],xmm10[9],xmm5[10],xmm10[10],xmm5[11],xmm10[11],xmm5[12],xmm10[12],xmm5[13],xmm10[13],xmm5[14],xmm10[14],xmm5[15],xmm10[15]
4191 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm5[2,1,2,3]
4192 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm8[3,1,2,0,4,5,6,7]
4193 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,5,5,5,5]
4194 ; SSE-NEXT: pand %xmm7, %xmm8
4195 ; SSE-NEXT: por %xmm6, %xmm8
4196 ; SSE-NEXT: pand %xmm4, %xmm8
4197 ; SSE-NEXT: por %xmm3, %xmm8
4198 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm9[0,1,2,3,5,6,6,7]
4199 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,2]
4200 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
4201 ; SSE-NEXT: movdqa %xmm4, %xmm6
4202 ; SSE-NEXT: pandn %xmm3, %xmm6
4203 ; SSE-NEXT: pand %xmm4, %xmm8
4204 ; SSE-NEXT: por %xmm8, %xmm6
4205 ; SSE-NEXT: movdqa %xmm1, %xmm3
4206 ; SSE-NEXT: punpckhbw {{.*#+}} xmm3 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
4207 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm3[1,1,2,3]
4208 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
4209 ; SSE-NEXT: movdqa %xmm4, %xmm11
4210 ; SSE-NEXT: pandn %xmm8, %xmm11
4211 ; SSE-NEXT: pand %xmm4, %xmm6
4212 ; SSE-NEXT: por %xmm6, %xmm11
4213 ; SSE-NEXT: movdqa %xmm13, %xmm10
4214 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm13[0,1,2,3,4,5,6,6]
4215 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,1,3,3]
4216 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
4217 ; SSE-NEXT: movdqa %xmm1, %xmm8
4218 ; SSE-NEXT: pandn %xmm6, %xmm8
4219 ; SSE-NEXT: pand %xmm1, %xmm11
4220 ; SSE-NEXT: por %xmm11, %xmm8
4221 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4222 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,1,1,3]
4223 ; SSE-NEXT: movdqa %xmm4, %xmm8
4224 ; SSE-NEXT: pandn %xmm6, %xmm8
4225 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm15[0,1,2,3,4,5,5,7]
4226 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4227 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,1,3,2]
4228 ; SSE-NEXT: pand %xmm4, %xmm6
4229 ; SSE-NEXT: por %xmm8, %xmm6
4230 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0]
4231 ; SSE-NEXT: movdqa %xmm1, %xmm8
4232 ; SSE-NEXT: pandn %xmm6, %xmm8
4233 ; SSE-NEXT: movdqa %xmm14, %xmm13
4234 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4235 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm14[0,1,2,3,5,5,5,5]
4236 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,2,2,2]
4237 ; SSE-NEXT: movdqa %xmm7, %xmm11
4238 ; SSE-NEXT: pandn %xmm6, %xmm11
4239 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm0[1,2,2,3,4,5,6,7]
4240 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
4241 ; SSE-NEXT: pand %xmm7, %xmm6
4242 ; SSE-NEXT: por %xmm11, %xmm6
4243 ; SSE-NEXT: pand %xmm1, %xmm6
4244 ; SSE-NEXT: por %xmm8, %xmm6
4245 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm9[0,1,2,3,4,4,6,5]
4246 ; SSE-NEXT: movdqa %xmm9, %xmm1
4247 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4248 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[2,1,3,3]
4249 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
4250 ; SSE-NEXT: movdqa %xmm4, %xmm11
4251 ; SSE-NEXT: pandn %xmm8, %xmm11
4252 ; SSE-NEXT: pand %xmm4, %xmm6
4253 ; SSE-NEXT: por %xmm6, %xmm11
4254 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm3[1,2,2,3,4,5,6,7]
4255 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,0,0]
4256 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255]
4257 ; SSE-NEXT: movdqa %xmm9, %xmm8
4258 ; SSE-NEXT: pandn %xmm6, %xmm8
4259 ; SSE-NEXT: pand %xmm9, %xmm11
4260 ; SSE-NEXT: movdqa %xmm9, %xmm14
4261 ; SSE-NEXT: por %xmm11, %xmm8
4262 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm10[0,1,2,3,4,5,5,7]
4263 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4264 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
4265 ; SSE-NEXT: movdqa {{.*#+}} xmm11 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
4266 ; SSE-NEXT: movdqa %xmm11, %xmm9
4267 ; SSE-NEXT: pandn %xmm6, %xmm9
4268 ; SSE-NEXT: pand %xmm11, %xmm8
4269 ; SSE-NEXT: por %xmm8, %xmm9
4270 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4271 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,6,5,7,7]
4272 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
4273 ; SSE-NEXT: movdqa %xmm11, %xmm6
4274 ; SSE-NEXT: pandn %xmm5, %xmm6
4275 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm15[0,1,2,3,7,7,7,7]
4276 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[2,2,2,2]
4277 ; SSE-NEXT: pand %xmm11, %xmm5
4278 ; SSE-NEXT: por %xmm5, %xmm6
4279 ; SSE-NEXT: movdqa {{.*#+}} xmm8 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
4280 ; SSE-NEXT: movdqa %xmm8, %xmm5
4281 ; SSE-NEXT: pandn %xmm6, %xmm5
4282 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
4283 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,3]
4284 ; SSE-NEXT: movdqa %xmm4, %xmm6
4285 ; SSE-NEXT: pandn %xmm0, %xmm6
4286 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm13[0,1,2,3,7,7,7,7]
4287 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
4288 ; SSE-NEXT: pand %xmm4, %xmm0
4289 ; SSE-NEXT: por %xmm0, %xmm6
4290 ; SSE-NEXT: pand %xmm8, %xmm6
4291 ; SSE-NEXT: por %xmm5, %xmm6
4292 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,7,7,7,7]
4293 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
4294 ; SSE-NEXT: movdqa %xmm7, %xmm5
4295 ; SSE-NEXT: pandn %xmm0, %xmm5
4296 ; SSE-NEXT: pand %xmm7, %xmm6
4297 ; SSE-NEXT: por %xmm6, %xmm5
4298 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm3[0,1,2,3,5,6,6,7]
4299 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
4300 ; SSE-NEXT: movdqa %xmm2, %xmm3
4301 ; SSE-NEXT: pandn %xmm0, %xmm3
4302 ; SSE-NEXT: pand %xmm2, %xmm5
4303 ; SSE-NEXT: por %xmm5, %xmm3
4304 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm10[0,1,2,3,6,7,7,7]
4305 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,2]
4306 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
4307 ; SSE-NEXT: movdqa %xmm5, %xmm1
4308 ; SSE-NEXT: pandn %xmm0, %xmm1
4309 ; SSE-NEXT: pand %xmm5, %xmm3
4310 ; SSE-NEXT: por %xmm3, %xmm1
4311 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4312 ; SSE-NEXT: movdqa (%rsi), %xmm0
4313 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4314 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
4315 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4316 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,1,0,3]
4317 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
4318 ; SSE-NEXT: movdqa %xmm4, %xmm3
4319 ; SSE-NEXT: pandn %xmm0, %xmm3
4320 ; SSE-NEXT: movdqa (%rdi), %xmm0
4321 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4322 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[3,3,3,3,4,5,6,7]
4323 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
4324 ; SSE-NEXT: pand %xmm4, %xmm0
4325 ; SSE-NEXT: movdqa %xmm4, %xmm11
4326 ; SSE-NEXT: por %xmm0, %xmm3
4327 ; SSE-NEXT: movdqa (%rcx), %xmm0
4328 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4329 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
4330 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4331 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,3,0,3]
4332 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[3,3,3,3,4,5,6,7]
4333 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
4334 ; SSE-NEXT: movdqa %xmm14, %xmm5
4335 ; SSE-NEXT: pandn %xmm0, %xmm5
4336 ; SSE-NEXT: movdqa (%rdx), %xmm0
4337 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4338 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[3,3,3,3,4,5,6,7]
4339 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
4340 ; SSE-NEXT: pand %xmm14, %xmm0
4341 ; SSE-NEXT: movdqa %xmm14, %xmm9
4342 ; SSE-NEXT: por %xmm0, %xmm5
4343 ; SSE-NEXT: movdqa %xmm12, %xmm0
4344 ; SSE-NEXT: pandn %xmm5, %xmm0
4345 ; SSE-NEXT: pand %xmm12, %xmm3
4346 ; SSE-NEXT: por %xmm3, %xmm0
4347 ; SSE-NEXT: movdqa (%r9), %xmm15
4348 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm15[2,1,2,3]
4349 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4350 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4351 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,3,2,0]
4352 ; SSE-NEXT: movdqa %xmm7, %xmm6
4353 ; SSE-NEXT: pandn %xmm3, %xmm6
4354 ; SSE-NEXT: movdqa (%r8), %xmm1
4355 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm1[3,3,3,3,4,5,6,7]
4356 ; SSE-NEXT: movdqa %xmm1, %xmm8
4357 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4358 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
4359 ; SSE-NEXT: pand %xmm7, %xmm3
4360 ; SSE-NEXT: por %xmm3, %xmm6
4361 ; SSE-NEXT: movdqa (%rax), %xmm4
4362 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm4[3,3,3,3,4,5,6,7]
4363 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4364 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
4365 ; SSE-NEXT: movdqa %xmm2, %xmm14
4366 ; SSE-NEXT: pandn %xmm3, %xmm14
4367 ; SSE-NEXT: pand %xmm2, %xmm6
4368 ; SSE-NEXT: por %xmm6, %xmm14
4369 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
4370 ; SSE-NEXT: movdqa %xmm10, %xmm1
4371 ; SSE-NEXT: pandn %xmm14, %xmm1
4372 ; SSE-NEXT: pand %xmm10, %xmm0
4373 ; SSE-NEXT: por %xmm0, %xmm1
4374 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4375 ; SSE-NEXT: movdqa 16(%rsi), %xmm0
4376 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4377 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
4378 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4379 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,1,0,3]
4380 ; SSE-NEXT: movdqa %xmm11, %xmm3
4381 ; SSE-NEXT: pandn %xmm0, %xmm3
4382 ; SSE-NEXT: movdqa 16(%rdi), %xmm0
4383 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4384 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[3,3,3,3,4,5,6,7]
4385 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
4386 ; SSE-NEXT: pand %xmm11, %xmm0
4387 ; SSE-NEXT: por %xmm0, %xmm3
4388 ; SSE-NEXT: movdqa 16(%rcx), %xmm0
4389 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4390 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
4391 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4392 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,3,0,3]
4393 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[3,3,3,3,4,5,6,7]
4394 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
4395 ; SSE-NEXT: movdqa %xmm9, %xmm6
4396 ; SSE-NEXT: pandn %xmm0, %xmm6
4397 ; SSE-NEXT: movdqa 16(%rdx), %xmm0
4398 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4399 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[3,3,3,3,4,5,6,7]
4400 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
4401 ; SSE-NEXT: pand %xmm9, %xmm0
4402 ; SSE-NEXT: por %xmm0, %xmm6
4403 ; SSE-NEXT: movdqa %xmm12, %xmm0
4404 ; SSE-NEXT: pandn %xmm6, %xmm0
4405 ; SSE-NEXT: pand %xmm12, %xmm3
4406 ; SSE-NEXT: por %xmm3, %xmm0
4407 ; SSE-NEXT: movdqa 16(%r9), %xmm1
4408 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4409 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm1[2,1,2,3]
4410 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4411 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,3,2,0]
4412 ; SSE-NEXT: movdqa %xmm7, %xmm6
4413 ; SSE-NEXT: pandn %xmm3, %xmm6
4414 ; SSE-NEXT: movdqa 16(%r8), %xmm1
4415 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4416 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm1[3,3,3,3,4,5,6,7]
4417 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
4418 ; SSE-NEXT: pand %xmm7, %xmm3
4419 ; SSE-NEXT: por %xmm3, %xmm6
4420 ; SSE-NEXT: movdqa 16(%rax), %xmm1
4421 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4422 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm1[3,3,3,3,4,5,6,7]
4423 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
4424 ; SSE-NEXT: movdqa %xmm2, %xmm14
4425 ; SSE-NEXT: pandn %xmm3, %xmm14
4426 ; SSE-NEXT: pand %xmm2, %xmm6
4427 ; SSE-NEXT: por %xmm6, %xmm14
4428 ; SSE-NEXT: movdqa %xmm10, %xmm1
4429 ; SSE-NEXT: pandn %xmm14, %xmm1
4430 ; SSE-NEXT: pand %xmm10, %xmm0
4431 ; SSE-NEXT: por %xmm0, %xmm1
4432 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4433 ; SSE-NEXT: movdqa 32(%rsi), %xmm0
4434 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4435 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
4436 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4437 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm0[3,1,0,3]
4438 ; SSE-NEXT: movdqa %xmm11, %xmm0
4439 ; SSE-NEXT: pandn %xmm3, %xmm0
4440 ; SSE-NEXT: movdqa 32(%rdi), %xmm1
4441 ; SSE-NEXT: movdqa %xmm1, (%rsp) # 16-byte Spill
4442 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm1[3,3,3,3,4,5,6,7]
4443 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
4444 ; SSE-NEXT: pand %xmm11, %xmm3
4445 ; SSE-NEXT: por %xmm3, %xmm0
4446 ; SSE-NEXT: movdqa 32(%rcx), %xmm1
4447 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4448 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm1[2,1,2,3]
4449 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4450 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,3,0,3]
4451 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[3,3,3,3,4,5,6,7]
4452 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
4453 ; SSE-NEXT: movdqa %xmm9, %xmm5
4454 ; SSE-NEXT: movdqa %xmm9, %xmm6
4455 ; SSE-NEXT: pandn %xmm3, %xmm6
4456 ; SSE-NEXT: movdqa 32(%rdx), %xmm9
4457 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm9[3,3,3,3,4,5,6,7]
4458 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4459 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
4460 ; SSE-NEXT: pand %xmm5, %xmm3
4461 ; SSE-NEXT: por %xmm3, %xmm6
4462 ; SSE-NEXT: pand %xmm12, %xmm0
4463 ; SSE-NEXT: pandn %xmm6, %xmm12
4464 ; SSE-NEXT: por %xmm0, %xmm12
4465 ; SSE-NEXT: movdqa 32(%r9), %xmm0
4466 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4467 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,2,3]
4468 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4469 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,3,2,0]
4470 ; SSE-NEXT: movdqa %xmm7, %xmm3
4471 ; SSE-NEXT: pandn %xmm0, %xmm3
4472 ; SSE-NEXT: movdqa 32(%r8), %xmm11
4473 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[3,3,3,3,4,5,6,7]
4474 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4475 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
4476 ; SSE-NEXT: pand %xmm7, %xmm0
4477 ; SSE-NEXT: por %xmm0, %xmm3
4478 ; SSE-NEXT: pand %xmm2, %xmm3
4479 ; SSE-NEXT: movdqa 32(%rax), %xmm13
4480 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[3,3,3,3,4,5,6,7]
4481 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4482 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
4483 ; SSE-NEXT: pandn %xmm0, %xmm2
4484 ; SSE-NEXT: por %xmm3, %xmm2
4485 ; SSE-NEXT: pand %xmm10, %xmm12
4486 ; SSE-NEXT: pandn %xmm2, %xmm10
4487 ; SSE-NEXT: por %xmm12, %xmm10
4488 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4489 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4490 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4491 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4492 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,5,7]
4493 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
4494 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
4495 ; SSE-NEXT: movdqa %xmm12, %xmm1
4496 ; SSE-NEXT: pandn %xmm0, %xmm1
4497 ; SSE-NEXT: pshufhw $170, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
4498 ; SSE-NEXT: # xmm0 = mem[0,1,2,3,6,6,6,6]
4499 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
4500 ; SSE-NEXT: pand %xmm12, %xmm0
4501 ; SSE-NEXT: por %xmm0, %xmm1
4502 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
4503 ; SSE-NEXT: movdqa %xmm14, %xmm0
4504 ; SSE-NEXT: pandn %xmm1, %xmm0
4505 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
4506 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm6[0,1,2,3,6,6,6,6]
4507 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
4508 ; SSE-NEXT: movdqa %xmm7, %xmm2
4509 ; SSE-NEXT: pandn %xmm1, %xmm2
4510 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
4511 ; SSE-NEXT: movdqa %xmm5, %xmm1
4512 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
4513 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4514 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,2,3]
4515 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[3,1,2,0,4,5,6,7]
4516 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,5,5,5]
4517 ; SSE-NEXT: pand %xmm7, %xmm1
4518 ; SSE-NEXT: por %xmm2, %xmm1
4519 ; SSE-NEXT: pand %xmm14, %xmm1
4520 ; SSE-NEXT: por %xmm0, %xmm1
4521 ; SSE-NEXT: punpckhbw {{.*#+}} xmm15 = xmm15[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4522 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4523 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm15[1,1,2,3]
4524 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
4525 ; SSE-NEXT: movdqa %xmm0, %xmm3
4526 ; SSE-NEXT: pandn %xmm2, %xmm3
4527 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm8[0,1,2,3,5,6,6,7]
4528 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,1,3,2]
4529 ; SSE-NEXT: pand %xmm0, %xmm2
4530 ; SSE-NEXT: por %xmm3, %xmm2
4531 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm4[0,1,2,3,4,5,6,6]
4532 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,3]
4533 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
4534 ; SSE-NEXT: movdqa %xmm4, %xmm15
4535 ; SSE-NEXT: pandn %xmm3, %xmm15
4536 ; SSE-NEXT: pand %xmm4, %xmm2
4537 ; SSE-NEXT: por %xmm2, %xmm15
4538 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
4539 ; SSE-NEXT: movdqa %xmm10, %xmm0
4540 ; SSE-NEXT: pandn %xmm15, %xmm0
4541 ; SSE-NEXT: pand %xmm10, %xmm1
4542 ; SSE-NEXT: por %xmm1, %xmm0
4543 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4544 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4545 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4546 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4547 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,5,5,7]
4548 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
4549 ; SSE-NEXT: movdqa %xmm12, %xmm8
4550 ; SSE-NEXT: movdqa %xmm12, %xmm2
4551 ; SSE-NEXT: pandn %xmm1, %xmm2
4552 ; SSE-NEXT: pshufhw $170, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
4553 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,6,6,6,6]
4554 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
4555 ; SSE-NEXT: pand %xmm12, %xmm1
4556 ; SSE-NEXT: por %xmm1, %xmm2
4557 ; SSE-NEXT: movdqa %xmm14, %xmm12
4558 ; SSE-NEXT: movdqa %xmm14, %xmm3
4559 ; SSE-NEXT: pandn %xmm2, %xmm3
4560 ; SSE-NEXT: pshufhw $170, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
4561 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,6,6,6,6]
4562 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
4563 ; SSE-NEXT: movdqa %xmm7, %xmm2
4564 ; SSE-NEXT: pandn %xmm1, %xmm2
4565 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4566 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4567 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4568 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,1,2,3]
4569 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[3,1,2,0,4,5,6,7]
4570 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,5,5,5]
4571 ; SSE-NEXT: pand %xmm7, %xmm1
4572 ; SSE-NEXT: por %xmm2, %xmm1
4573 ; SSE-NEXT: pand %xmm14, %xmm1
4574 ; SSE-NEXT: por %xmm3, %xmm1
4575 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4576 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4577 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4578 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,2,3]
4579 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
4580 ; SSE-NEXT: movdqa %xmm14, %xmm3
4581 ; SSE-NEXT: pandn %xmm2, %xmm3
4582 ; SSE-NEXT: pshufhw $233, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
4583 ; SSE-NEXT: # xmm2 = mem[0,1,2,3,5,6,6,7]
4584 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,1,3,2]
4585 ; SSE-NEXT: pand %xmm14, %xmm2
4586 ; SSE-NEXT: por %xmm3, %xmm2
4587 ; SSE-NEXT: pshufhw $164, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
4588 ; SSE-NEXT: # xmm3 = mem[0,1,2,3,4,5,6,6]
4589 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,3]
4590 ; SSE-NEXT: movdqa %xmm4, %xmm15
4591 ; SSE-NEXT: pandn %xmm3, %xmm15
4592 ; SSE-NEXT: pand %xmm4, %xmm2
4593 ; SSE-NEXT: por %xmm2, %xmm15
4594 ; SSE-NEXT: movdqa %xmm10, %xmm0
4595 ; SSE-NEXT: pandn %xmm15, %xmm0
4596 ; SSE-NEXT: pand %xmm10, %xmm1
4597 ; SSE-NEXT: por %xmm1, %xmm0
4598 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4599 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4600 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4601 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4602 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,5,5,7]
4603 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
4604 ; SSE-NEXT: movdqa %xmm8, %xmm2
4605 ; SSE-NEXT: pandn %xmm1, %xmm2
4606 ; SSE-NEXT: pshufhw $170, (%rsp), %xmm1 # 16-byte Folded Reload
4607 ; SSE-NEXT: # xmm1 = mem[0,1,2,3,6,6,6,6]
4608 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
4609 ; SSE-NEXT: pand %xmm8, %xmm1
4610 ; SSE-NEXT: por %xmm1, %xmm2
4611 ; SSE-NEXT: movdqa %xmm12, %xmm3
4612 ; SSE-NEXT: pandn %xmm2, %xmm3
4613 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm9[0,1,2,3,6,6,6,6]
4614 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
4615 ; SSE-NEXT: movdqa %xmm7, %xmm2
4616 ; SSE-NEXT: pandn %xmm1, %xmm2
4617 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4618 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4619 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4620 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,1,2,3]
4621 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[3,1,2,0,4,5,6,7]
4622 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,5,5,5]
4623 ; SSE-NEXT: pand %xmm7, %xmm1
4624 ; SSE-NEXT: por %xmm2, %xmm1
4625 ; SSE-NEXT: pand %xmm12, %xmm1
4626 ; SSE-NEXT: por %xmm3, %xmm1
4627 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4628 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4629 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4630 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,2,3]
4631 ; SSE-NEXT: movdqa %xmm14, %xmm3
4632 ; SSE-NEXT: pandn %xmm2, %xmm3
4633 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm11[0,1,2,3,5,6,6,7]
4634 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,1,3,2]
4635 ; SSE-NEXT: pand %xmm14, %xmm2
4636 ; SSE-NEXT: por %xmm3, %xmm2
4637 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm13[0,1,2,3,4,5,6,6]
4638 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,3]
4639 ; SSE-NEXT: movdqa %xmm4, %xmm15
4640 ; SSE-NEXT: pandn %xmm3, %xmm15
4641 ; SSE-NEXT: pand %xmm4, %xmm2
4642 ; SSE-NEXT: por %xmm2, %xmm15
4643 ; SSE-NEXT: pand %xmm10, %xmm1
4644 ; SSE-NEXT: pandn %xmm15, %xmm10
4645 ; SSE-NEXT: por %xmm1, %xmm10
4646 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4647 ; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4648 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm5[0,0,2,1,4,5,6,7]
4649 ; SSE-NEXT: movdqa %xmm5, %xmm14
4650 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
4651 ; SSE-NEXT: movdqa %xmm4, %xmm2
4652 ; SSE-NEXT: pandn %xmm1, %xmm2
4653 ; SSE-NEXT: movdqa %xmm6, %xmm8
4654 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm6[0,0,0,0,4,5,6,7]
4655 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,0,0]
4656 ; SSE-NEXT: pand %xmm4, %xmm1
4657 ; SSE-NEXT: por %xmm1, %xmm2
4658 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
4659 ; SSE-NEXT: movdqa %xmm0, %xmm15
4660 ; SSE-NEXT: pandn %xmm2, %xmm15
4661 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4662 ; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4663 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm4[0,2,1,3,4,5,6,7]
4664 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,1,1,0]
4665 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
4666 ; SSE-NEXT: movdqa %xmm9, %xmm1
4667 ; SSE-NEXT: pandn %xmm2, %xmm1
4668 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4669 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm3[0,0,2,1,4,5,6,7]
4670 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,2,1]
4671 ; SSE-NEXT: pand %xmm9, %xmm2
4672 ; SSE-NEXT: movdqa %xmm9, %xmm6
4673 ; SSE-NEXT: por %xmm2, %xmm1
4674 ; SSE-NEXT: pand %xmm0, %xmm1
4675 ; SSE-NEXT: por %xmm15, %xmm1
4676 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
4677 ; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4678 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm5[0,1,1,3,4,5,6,7]
4679 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,2,1]
4680 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
4681 ; SSE-NEXT: movdqa %xmm0, %xmm15
4682 ; SSE-NEXT: pandn %xmm2, %xmm15
4683 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
4684 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm13[0,0,0,0,4,5,6,7]
4685 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
4686 ; SSE-NEXT: pand %xmm0, %xmm2
4687 ; SSE-NEXT: por %xmm2, %xmm15
4688 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
4689 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm11[0,0,0,0,4,5,6,7]
4690 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
4691 ; SSE-NEXT: movdqa %xmm7, %xmm10
4692 ; SSE-NEXT: pandn %xmm2, %xmm10
4693 ; SSE-NEXT: pand %xmm7, %xmm15
4694 ; SSE-NEXT: por %xmm15, %xmm10
4695 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
4696 ; SSE-NEXT: movdqa %xmm2, %xmm0
4697 ; SSE-NEXT: pandn %xmm10, %xmm0
4698 ; SSE-NEXT: pand %xmm2, %xmm1
4699 ; SSE-NEXT: por %xmm1, %xmm0
4700 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4701 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[0,1,2,2]
4702 ; SSE-NEXT: movdqa %xmm7, %xmm2
4703 ; SSE-NEXT: pandn %xmm1, %xmm2
4704 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm3[1,1,2,3,4,5,6,7]
4705 ; SSE-NEXT: movdqa %xmm3, %xmm9
4706 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm1[0,0,2,1]
4707 ; SSE-NEXT: pand %xmm7, %xmm10
4708 ; SSE-NEXT: por %xmm2, %xmm10
4709 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
4710 ; SSE-NEXT: movdqa %xmm0, %xmm15
4711 ; SSE-NEXT: movdqa %xmm0, %xmm12
4712 ; SSE-NEXT: pandn %xmm10, %xmm15
4713 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm14[1,1,2,1]
4714 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm2[0,1,2,3,7,5,6,4]
4715 ; SSE-NEXT: movdqa %xmm6, %xmm0
4716 ; SSE-NEXT: movdqa %xmm6, %xmm2
4717 ; SSE-NEXT: pandn %xmm10, %xmm2
4718 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm8[1,1,2,2,4,5,6,7]
4719 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,0,2,1]
4720 ; SSE-NEXT: pand %xmm0, %xmm10
4721 ; SSE-NEXT: por %xmm10, %xmm2
4722 ; SSE-NEXT: pand %xmm12, %xmm2
4723 ; SSE-NEXT: por %xmm15, %xmm2
4724 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm5[0,2,2,3,4,5,6,7]
4725 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,1,1,3]
4726 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
4727 ; SSE-NEXT: movdqa %xmm0, %xmm15
4728 ; SSE-NEXT: pandn %xmm10, %xmm15
4729 ; SSE-NEXT: movdqa %xmm13, %xmm3
4730 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm13[1,1,1,1,4,5,6,7]
4731 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,0,0,0]
4732 ; SSE-NEXT: pand %xmm0, %xmm10
4733 ; SSE-NEXT: por %xmm10, %xmm15
4734 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm11[1,1,1,1,4,5,6,7]
4735 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,0,0,0]
4736 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255]
4737 ; SSE-NEXT: movdqa %xmm13, %xmm0
4738 ; SSE-NEXT: pandn %xmm10, %xmm0
4739 ; SSE-NEXT: pand %xmm13, %xmm15
4740 ; SSE-NEXT: por %xmm15, %xmm0
4741 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
4742 ; SSE-NEXT: movdqa %xmm10, %xmm1
4743 ; SSE-NEXT: pandn %xmm0, %xmm1
4744 ; SSE-NEXT: pand %xmm10, %xmm2
4745 ; SSE-NEXT: por %xmm2, %xmm1
4746 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4747 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[2,2,3,3]
4748 ; SSE-NEXT: movdqa %xmm7, %xmm2
4749 ; SSE-NEXT: pandn %xmm0, %xmm2
4750 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm8[2,1,3,3,4,5,6,7]
4751 ; SSE-NEXT: movdqa %xmm8, %xmm10
4752 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
4753 ; SSE-NEXT: pand %xmm7, %xmm0
4754 ; SSE-NEXT: por %xmm2, %xmm0
4755 ; SSE-NEXT: movdqa %xmm12, %xmm2
4756 ; SSE-NEXT: pandn %xmm0, %xmm2
4757 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,5,6,6,7]
4758 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
4759 ; SSE-NEXT: movdqa %xmm13, %xmm15
4760 ; SSE-NEXT: movdqa %xmm13, %xmm8
4761 ; SSE-NEXT: pandn %xmm0, %xmm8
4762 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[0,2,2,3,4,5,6,7]
4763 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
4764 ; SSE-NEXT: pand %xmm13, %xmm0
4765 ; SSE-NEXT: por %xmm0, %xmm8
4766 ; SSE-NEXT: pand %xmm12, %xmm8
4767 ; SSE-NEXT: por %xmm2, %xmm8
4768 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,0,0,0,0,255,255,255,0,0,0,0,255,255]
4769 ; SSE-NEXT: movdqa %xmm1, %xmm0
4770 ; SSE-NEXT: movdqa %xmm1, %xmm6
4771 ; SSE-NEXT: pandn %xmm8, %xmm0
4772 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm5[0,1,2,3,4,6,5,7]
4773 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,1,3,2]
4774 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
4775 ; SSE-NEXT: movdqa %xmm1, %xmm5
4776 ; SSE-NEXT: pandn %xmm2, %xmm5
4777 ; SSE-NEXT: movdqa %xmm3, %xmm4
4778 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm3[2,2,2,3,4,5,6,7]
4779 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,2,1]
4780 ; SSE-NEXT: pand %xmm1, %xmm2
4781 ; SSE-NEXT: por %xmm2, %xmm5
4782 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm11[2,2,2,2,4,5,6,7]
4783 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
4784 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
4785 ; SSE-NEXT: movdqa %xmm1, %xmm8
4786 ; SSE-NEXT: pandn %xmm2, %xmm8
4787 ; SSE-NEXT: pand %xmm1, %xmm5
4788 ; SSE-NEXT: por %xmm5, %xmm8
4789 ; SSE-NEXT: pand %xmm6, %xmm8
4790 ; SSE-NEXT: por %xmm0, %xmm8
4791 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4792 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
4793 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,1,1,3]
4794 ; SSE-NEXT: movdqa %xmm1, %xmm2
4795 ; SSE-NEXT: pandn %xmm0, %xmm2
4796 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm10[0,1,2,3,4,5,5,7]
4797 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,2]
4798 ; SSE-NEXT: pand %xmm1, %xmm0
4799 ; SSE-NEXT: por %xmm2, %xmm0
4800 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0]
4801 ; SSE-NEXT: movdqa %xmm1, %xmm5
4802 ; SSE-NEXT: pandn %xmm0, %xmm5
4803 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm9[0,1,2,3,5,5,5,5]
4804 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
4805 ; SSE-NEXT: movdqa %xmm7, %xmm8
4806 ; SSE-NEXT: pandn %xmm0, %xmm8
4807 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
4808 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm14[1,2,2,3,4,5,6,7]
4809 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm0[0,0,2,1]
4810 ; SSE-NEXT: pand %xmm7, %xmm2
4811 ; SSE-NEXT: por %xmm8, %xmm2
4812 ; SSE-NEXT: pand %xmm1, %xmm2
4813 ; SSE-NEXT: por %xmm5, %xmm2
4814 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
4815 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[1,2,2,3,4,5,6,7]
4816 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
4817 ; SSE-NEXT: movdqa %xmm15, %xmm5
4818 ; SSE-NEXT: pandn %xmm0, %xmm5
4819 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm3[0,1,2,3,4,4,6,5]
4820 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,3]
4821 ; SSE-NEXT: pand %xmm15, %xmm0
4822 ; SSE-NEXT: por %xmm0, %xmm5
4823 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm11[0,1,2,3,4,5,5,7]
4824 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
4825 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
4826 ; SSE-NEXT: movdqa %xmm12, %xmm8
4827 ; SSE-NEXT: pandn %xmm0, %xmm8
4828 ; SSE-NEXT: pand %xmm12, %xmm5
4829 ; SSE-NEXT: por %xmm5, %xmm8
4830 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
4831 ; SSE-NEXT: movdqa %xmm3, %xmm0
4832 ; SSE-NEXT: pandn %xmm8, %xmm0
4833 ; SSE-NEXT: pand %xmm3, %xmm2
4834 ; SSE-NEXT: por %xmm2, %xmm0
4835 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4836 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm6[0,1,2,3,6,5,7,7]
4837 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
4838 ; SSE-NEXT: movdqa %xmm12, %xmm2
4839 ; SSE-NEXT: pandn %xmm0, %xmm2
4840 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm10[0,1,2,3,7,7,7,7]
4841 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
4842 ; SSE-NEXT: pand %xmm12, %xmm0
4843 ; SSE-NEXT: por %xmm0, %xmm2
4844 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
4845 ; SSE-NEXT: movdqa %xmm5, %xmm0
4846 ; SSE-NEXT: pandn %xmm2, %xmm0
4847 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm14[0,1,2,3,4,6,6,7]
4848 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm2[2,1,3,3]
4849 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
4850 ; SSE-NEXT: movdqa %xmm3, %xmm2
4851 ; SSE-NEXT: pandn %xmm6, %xmm2
4852 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm9[0,1,2,3,7,7,7,7]
4853 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,2,2,2]
4854 ; SSE-NEXT: pand %xmm3, %xmm6
4855 ; SSE-NEXT: movdqa %xmm3, %xmm10
4856 ; SSE-NEXT: por %xmm6, %xmm2
4857 ; SSE-NEXT: pand %xmm5, %xmm2
4858 ; SSE-NEXT: por %xmm0, %xmm2
4859 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,7,7,7,7]
4860 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
4861 ; SSE-NEXT: movdqa %xmm7, %xmm6
4862 ; SSE-NEXT: pandn %xmm0, %xmm6
4863 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm13[0,1,2,3,5,6,6,7]
4864 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
4865 ; SSE-NEXT: pand %xmm7, %xmm0
4866 ; SSE-NEXT: por %xmm6, %xmm0
4867 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm11[0,1,2,3,6,7,7,7]
4868 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,1,3,2]
4869 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
4870 ; SSE-NEXT: movdqa %xmm1, %xmm8
4871 ; SSE-NEXT: pandn %xmm6, %xmm8
4872 ; SSE-NEXT: pand %xmm1, %xmm0
4873 ; SSE-NEXT: movdqa %xmm1, %xmm4
4874 ; SSE-NEXT: por %xmm0, %xmm8
4875 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
4876 ; SSE-NEXT: movdqa %xmm1, %xmm0
4877 ; SSE-NEXT: pandn %xmm8, %xmm0
4878 ; SSE-NEXT: pand %xmm1, %xmm2
4879 ; SSE-NEXT: por %xmm2, %xmm0
4880 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4881 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
4882 ; SSE-NEXT: punpcklbw {{.*#+}} xmm9 = xmm9[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4883 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[0,0,2,1,4,5,6,7]
4884 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
4885 ; SSE-NEXT: movdqa %xmm3, %xmm2
4886 ; SSE-NEXT: pandn %xmm0, %xmm2
4887 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
4888 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[0,0,0,0,4,5,6,7]
4889 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
4890 ; SSE-NEXT: pand %xmm3, %xmm0
4891 ; SSE-NEXT: por %xmm0, %xmm2
4892 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
4893 ; SSE-NEXT: movdqa %xmm3, %xmm6
4894 ; SSE-NEXT: pandn %xmm2, %xmm6
4895 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
4896 ; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4897 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm5[0,2,1,3,4,5,6,7]
4898 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm0[0,1,1,0]
4899 ; SSE-NEXT: movdqa %xmm4, %xmm0
4900 ; SSE-NEXT: pandn %xmm2, %xmm0
4901 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
4902 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[0,0,2,1,4,5,6,7]
4903 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,2,1]
4904 ; SSE-NEXT: pand %xmm4, %xmm2
4905 ; SSE-NEXT: por %xmm2, %xmm0
4906 ; SSE-NEXT: pand %xmm3, %xmm0
4907 ; SSE-NEXT: por %xmm6, %xmm0
4908 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4909 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4910 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm3[0,1,1,3,4,5,6,7]
4911 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,2,1]
4912 ; SSE-NEXT: movdqa %xmm12, %xmm6
4913 ; SSE-NEXT: pandn %xmm2, %xmm6
4914 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
4915 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm13[0,0,0,0,4,5,6,7]
4916 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
4917 ; SSE-NEXT: pand %xmm12, %xmm2
4918 ; SSE-NEXT: por %xmm2, %xmm6
4919 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
4920 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm14[0,0,0,0,4,5,6,7]
4921 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
4922 ; SSE-NEXT: movdqa %xmm7, %xmm8
4923 ; SSE-NEXT: pandn %xmm2, %xmm8
4924 ; SSE-NEXT: pand %xmm7, %xmm6
4925 ; SSE-NEXT: por %xmm6, %xmm8
4926 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
4927 ; SSE-NEXT: movdqa %xmm2, %xmm6
4928 ; SSE-NEXT: pandn %xmm8, %xmm6
4929 ; SSE-NEXT: pand %xmm2, %xmm0
4930 ; SSE-NEXT: por %xmm0, %xmm6
4931 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4932 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[0,1,2,2]
4933 ; SSE-NEXT: movdqa %xmm7, %xmm2
4934 ; SSE-NEXT: pandn %xmm0, %xmm2
4935 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm1[1,1,2,3,4,5,6,7]
4936 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
4937 ; SSE-NEXT: pand %xmm7, %xmm0
4938 ; SSE-NEXT: por %xmm2, %xmm0
4939 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
4940 ; SSE-NEXT: movdqa %xmm12, %xmm2
4941 ; SSE-NEXT: pandn %xmm0, %xmm2
4942 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[1,1,2,1]
4943 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm0[0,1,2,3,7,5,6,4]
4944 ; SSE-NEXT: movdqa %xmm4, %xmm0
4945 ; SSE-NEXT: pandn %xmm6, %xmm0
4946 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm11[1,1,2,2,4,5,6,7]
4947 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
4948 ; SSE-NEXT: pand %xmm4, %xmm6
4949 ; SSE-NEXT: por %xmm6, %xmm0
4950 ; SSE-NEXT: pand %xmm12, %xmm0
4951 ; SSE-NEXT: por %xmm2, %xmm0
4952 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm3[0,2,2,3,4,5,6,7]
4953 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
4954 ; SSE-NEXT: movdqa %xmm10, %xmm6
4955 ; SSE-NEXT: pandn %xmm2, %xmm6
4956 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm13[1,1,1,1,4,5,6,7]
4957 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
4958 ; SSE-NEXT: pand %xmm10, %xmm2
4959 ; SSE-NEXT: por %xmm2, %xmm6
4960 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm14[1,1,1,1,4,5,6,7]
4961 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
4962 ; SSE-NEXT: movdqa %xmm15, %xmm8
4963 ; SSE-NEXT: pandn %xmm2, %xmm8
4964 ; SSE-NEXT: pand %xmm15, %xmm6
4965 ; SSE-NEXT: movdqa %xmm15, %xmm10
4966 ; SSE-NEXT: por %xmm6, %xmm8
4967 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
4968 ; SSE-NEXT: movdqa %xmm6, %xmm2
4969 ; SSE-NEXT: pandn %xmm8, %xmm2
4970 ; SSE-NEXT: pand %xmm6, %xmm0
4971 ; SSE-NEXT: por %xmm0, %xmm2
4972 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4973 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[2,2,3,3]
4974 ; SSE-NEXT: movdqa %xmm7, %xmm2
4975 ; SSE-NEXT: pandn %xmm0, %xmm2
4976 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[2,1,3,3,4,5,6,7]
4977 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
4978 ; SSE-NEXT: pand %xmm7, %xmm0
4979 ; SSE-NEXT: por %xmm2, %xmm0
4980 ; SSE-NEXT: movdqa %xmm12, %xmm2
4981 ; SSE-NEXT: pandn %xmm0, %xmm2
4982 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm5[0,1,2,3,5,6,6,7]
4983 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
4984 ; SSE-NEXT: movdqa %xmm15, %xmm6
4985 ; SSE-NEXT: pandn %xmm0, %xmm6
4986 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm1[0,2,2,3,4,5,6,7]
4987 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
4988 ; SSE-NEXT: pand %xmm15, %xmm0
4989 ; SSE-NEXT: por %xmm0, %xmm6
4990 ; SSE-NEXT: pand %xmm12, %xmm6
4991 ; SSE-NEXT: por %xmm2, %xmm6
4992 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [255,255,255,0,0,0,0,255,255,255,0,0,0,0,255,255]
4993 ; SSE-NEXT: movdqa %xmm9, %xmm0
4994 ; SSE-NEXT: pandn %xmm6, %xmm0
4995 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,4,6,5,7]
4996 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,1,3,2]
4997 ; SSE-NEXT: movdqa %xmm4, %xmm12
4998 ; SSE-NEXT: movdqa %xmm4, %xmm6
4999 ; SSE-NEXT: pandn %xmm2, %xmm6
5000 ; SSE-NEXT: movdqa %xmm13, %xmm5
5001 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm13[2,2,2,3,4,5,6,7]
5002 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,2,1]
5003 ; SSE-NEXT: pand %xmm4, %xmm2
5004 ; SSE-NEXT: por %xmm2, %xmm6
5005 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm14[2,2,2,2,4,5,6,7]
5006 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm2[0,0,0,0]
5007 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
5008 ; SSE-NEXT: movdqa %xmm2, %xmm4
5009 ; SSE-NEXT: pandn %xmm8, %xmm4
5010 ; SSE-NEXT: pand %xmm2, %xmm6
5011 ; SSE-NEXT: por %xmm6, %xmm4
5012 ; SSE-NEXT: pand %xmm9, %xmm4
5013 ; SSE-NEXT: por %xmm0, %xmm4
5014 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5015 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5016 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm15[0,1,1,3]
5017 ; SSE-NEXT: movdqa %xmm2, %xmm6
5018 ; SSE-NEXT: pandn %xmm0, %xmm6
5019 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm11[0,1,2,3,4,5,5,7]
5020 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,2]
5021 ; SSE-NEXT: pand %xmm2, %xmm0
5022 ; SSE-NEXT: por %xmm6, %xmm0
5023 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0]
5024 ; SSE-NEXT: movdqa %xmm2, %xmm6
5025 ; SSE-NEXT: pandn %xmm0, %xmm6
5026 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,5,5,5,5]
5027 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
5028 ; SSE-NEXT: movdqa %xmm7, %xmm8
5029 ; SSE-NEXT: pandn %xmm0, %xmm8
5030 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
5031 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[1,2,2,3,4,5,6,7]
5032 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
5033 ; SSE-NEXT: pand %xmm7, %xmm0
5034 ; SSE-NEXT: por %xmm8, %xmm0
5035 ; SSE-NEXT: pand %xmm2, %xmm0
5036 ; SSE-NEXT: por %xmm6, %xmm0
5037 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
5038 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm13[1,2,2,3,4,5,6,7]
5039 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,0,0]
5040 ; SSE-NEXT: movdqa %xmm10, %xmm8
5041 ; SSE-NEXT: pandn %xmm6, %xmm8
5042 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm5[0,1,2,3,4,4,6,5]
5043 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,1,3,3]
5044 ; SSE-NEXT: pand %xmm10, %xmm6
5045 ; SSE-NEXT: por %xmm6, %xmm8
5046 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm14[0,1,2,3,4,5,5,7]
5047 ; SSE-NEXT: movdqa %xmm14, %xmm2
5048 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
5049 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
5050 ; SSE-NEXT: movdqa %xmm3, %xmm10
5051 ; SSE-NEXT: pandn %xmm6, %xmm10
5052 ; SSE-NEXT: pand %xmm3, %xmm8
5053 ; SSE-NEXT: movdqa %xmm3, %xmm6
5054 ; SSE-NEXT: por %xmm8, %xmm10
5055 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
5056 ; SSE-NEXT: movdqa %xmm3, %xmm4
5057 ; SSE-NEXT: pandn %xmm10, %xmm4
5058 ; SSE-NEXT: pand %xmm3, %xmm0
5059 ; SSE-NEXT: por %xmm0, %xmm4
5060 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5061 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm15[0,1,2,3,6,5,7,7]
5062 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
5063 ; SSE-NEXT: movdqa %xmm6, %xmm3
5064 ; SSE-NEXT: pandn %xmm0, %xmm6
5065 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm11[0,1,2,3,7,7,7,7]
5066 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
5067 ; SSE-NEXT: pand %xmm3, %xmm0
5068 ; SSE-NEXT: movdqa %xmm3, %xmm14
5069 ; SSE-NEXT: por %xmm0, %xmm6
5070 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
5071 ; SSE-NEXT: movdqa %xmm3, %xmm8
5072 ; SSE-NEXT: pandn %xmm6, %xmm8
5073 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm9[0,1,2,3,4,6,6,7]
5074 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm0[2,1,3,3]
5075 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
5076 ; SSE-NEXT: movdqa %xmm4, %xmm0
5077 ; SSE-NEXT: pandn %xmm6, %xmm0
5078 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm1[0,1,2,3,7,7,7,7]
5079 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,2,2,2]
5080 ; SSE-NEXT: pand %xmm4, %xmm6
5081 ; SSE-NEXT: movdqa %xmm4, %xmm11
5082 ; SSE-NEXT: por %xmm6, %xmm0
5083 ; SSE-NEXT: pand %xmm3, %xmm0
5084 ; SSE-NEXT: por %xmm8, %xmm0
5085 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm5[0,1,2,3,7,7,7,7]
5086 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,2,2,2]
5087 ; SSE-NEXT: movdqa %xmm7, %xmm8
5088 ; SSE-NEXT: pandn %xmm6, %xmm8
5089 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm13[0,1,2,3,5,6,6,7]
5090 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,2,2,3]
5091 ; SSE-NEXT: pand %xmm7, %xmm6
5092 ; SSE-NEXT: por %xmm8, %xmm6
5093 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm2[0,1,2,3,6,7,7,7]
5094 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[2,1,3,2]
5095 ; SSE-NEXT: movdqa %xmm12, %xmm10
5096 ; SSE-NEXT: pandn %xmm8, %xmm10
5097 ; SSE-NEXT: pand %xmm12, %xmm6
5098 ; SSE-NEXT: movdqa %xmm12, %xmm4
5099 ; SSE-NEXT: por %xmm6, %xmm10
5100 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
5101 ; SSE-NEXT: movdqa %xmm1, %xmm2
5102 ; SSE-NEXT: pandn %xmm10, %xmm2
5103 ; SSE-NEXT: pand %xmm1, %xmm0
5104 ; SSE-NEXT: por %xmm0, %xmm2
5105 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5106 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
5107 ; SSE-NEXT: punpcklbw {{.*#+}} xmm9 = xmm9[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5108 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[0,0,2,1,4,5,6,7]
5109 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
5110 ; SSE-NEXT: movdqa %xmm11, %xmm1
5111 ; SSE-NEXT: movdqa %xmm11, %xmm8
5112 ; SSE-NEXT: pandn %xmm0, %xmm8
5113 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
5114 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[0,0,0,0,4,5,6,7]
5115 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
5116 ; SSE-NEXT: pand %xmm1, %xmm0
5117 ; SSE-NEXT: por %xmm0, %xmm8
5118 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
5119 ; SSE-NEXT: movdqa %xmm3, %xmm10
5120 ; SSE-NEXT: pandn %xmm8, %xmm10
5121 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
5122 ; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5123 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm5[0,2,1,3,4,5,6,7]
5124 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm0[0,1,1,0]
5125 ; SSE-NEXT: movdqa %xmm12, %xmm0
5126 ; SSE-NEXT: pandn %xmm8, %xmm0
5127 ; SSE-NEXT: movdqa (%rsp), %xmm13 # 16-byte Reload
5128 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm13[0,0,2,1,4,5,6,7]
5129 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,0,2,1]
5130 ; SSE-NEXT: pand %xmm12, %xmm8
5131 ; SSE-NEXT: por %xmm8, %xmm0
5132 ; SSE-NEXT: pand %xmm3, %xmm0
5133 ; SSE-NEXT: por %xmm10, %xmm0
5134 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5135 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5136 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm3[0,1,1,3,4,5,6,7]
5137 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,0,2,1]
5138 ; SSE-NEXT: movdqa %xmm14, %xmm10
5139 ; SSE-NEXT: pandn %xmm8, %xmm10
5140 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5141 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm1[0,0,0,0,4,5,6,7]
5142 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,0,0,0]
5143 ; SSE-NEXT: pand %xmm14, %xmm8
5144 ; SSE-NEXT: por %xmm8, %xmm10
5145 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
5146 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm6[0,0,0,0,4,5,6,7]
5147 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,0,0,0]
5148 ; SSE-NEXT: movdqa %xmm7, %xmm15
5149 ; SSE-NEXT: pandn %xmm8, %xmm15
5150 ; SSE-NEXT: pand %xmm7, %xmm10
5151 ; SSE-NEXT: por %xmm10, %xmm15
5152 ; SSE-NEXT: movdqa {{.*#+}} xmm8 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
5153 ; SSE-NEXT: movdqa %xmm8, %xmm10
5154 ; SSE-NEXT: pandn %xmm15, %xmm10
5155 ; SSE-NEXT: pand %xmm8, %xmm0
5156 ; SSE-NEXT: por %xmm0, %xmm10
5157 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5158 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[0,1,2,2]
5159 ; SSE-NEXT: movdqa %xmm7, %xmm8
5160 ; SSE-NEXT: pandn %xmm0, %xmm8
5161 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[1,1,2,3,4,5,6,7]
5162 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
5163 ; SSE-NEXT: pand %xmm7, %xmm0
5164 ; SSE-NEXT: por %xmm8, %xmm0
5165 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
5166 ; SSE-NEXT: movdqa %xmm12, %xmm8
5167 ; SSE-NEXT: pandn %xmm0, %xmm8
5168 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[1,1,2,1]
5169 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm0[0,1,2,3,7,5,6,4]
5170 ; SSE-NEXT: movdqa %xmm4, %xmm14
5171 ; SSE-NEXT: movdqa %xmm4, %xmm0
5172 ; SSE-NEXT: pandn %xmm10, %xmm0
5173 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm11[1,1,2,2,4,5,6,7]
5174 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,0,2,1]
5175 ; SSE-NEXT: pand %xmm4, %xmm10
5176 ; SSE-NEXT: por %xmm10, %xmm0
5177 ; SSE-NEXT: pand %xmm12, %xmm0
5178 ; SSE-NEXT: movdqa %xmm12, %xmm4
5179 ; SSE-NEXT: por %xmm8, %xmm0
5180 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm3[0,2,2,3,4,5,6,7]
5181 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,1,1,3]
5182 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
5183 ; SSE-NEXT: movdqa %xmm2, %xmm10
5184 ; SSE-NEXT: pandn %xmm8, %xmm10
5185 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm1[1,1,1,1,4,5,6,7]
5186 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,0,0,0]
5187 ; SSE-NEXT: pand %xmm2, %xmm8
5188 ; SSE-NEXT: por %xmm8, %xmm10
5189 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm6[1,1,1,1,4,5,6,7]
5190 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,0,0,0]
5191 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255]
5192 ; SSE-NEXT: movdqa %xmm2, %xmm15
5193 ; SSE-NEXT: pandn %xmm8, %xmm15
5194 ; SSE-NEXT: pand %xmm2, %xmm10
5195 ; SSE-NEXT: movdqa %xmm2, %xmm12
5196 ; SSE-NEXT: por %xmm10, %xmm15
5197 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
5198 ; SSE-NEXT: movdqa %xmm2, %xmm8
5199 ; SSE-NEXT: pandn %xmm15, %xmm8
5200 ; SSE-NEXT: pand %xmm2, %xmm0
5201 ; SSE-NEXT: por %xmm0, %xmm8
5202 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5203 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm9[2,2,3,3]
5204 ; SSE-NEXT: movdqa %xmm7, %xmm10
5205 ; SSE-NEXT: pandn %xmm0, %xmm10
5206 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[2,1,3,3,4,5,6,7]
5207 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
5208 ; SSE-NEXT: pand %xmm7, %xmm0
5209 ; SSE-NEXT: por %xmm10, %xmm0
5210 ; SSE-NEXT: movdqa %xmm4, %xmm10
5211 ; SSE-NEXT: pandn %xmm0, %xmm10
5212 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm5[0,1,2,3,5,6,6,7]
5213 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
5214 ; SSE-NEXT: movdqa %xmm12, %xmm15
5215 ; SSE-NEXT: pandn %xmm0, %xmm15
5216 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[0,2,2,3,4,5,6,7]
5217 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
5218 ; SSE-NEXT: pand %xmm12, %xmm0
5219 ; SSE-NEXT: por %xmm0, %xmm15
5220 ; SSE-NEXT: pand %xmm4, %xmm15
5221 ; SSE-NEXT: por %xmm10, %xmm15
5222 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,0,0,0,0,255,255,255,0,0,0,0,255,255]
5223 ; SSE-NEXT: movdqa %xmm2, %xmm10
5224 ; SSE-NEXT: pandn %xmm15, %xmm10
5225 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm3[0,1,2,3,4,6,5,7]
5226 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,2]
5227 ; SSE-NEXT: movdqa %xmm14, %xmm15
5228 ; SSE-NEXT: pandn %xmm0, %xmm15
5229 ; SSE-NEXT: movdqa %xmm1, %xmm8
5230 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm1[2,2,2,3,4,5,6,7]
5231 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
5232 ; SSE-NEXT: pand %xmm14, %xmm0
5233 ; SSE-NEXT: movdqa %xmm14, %xmm4
5234 ; SSE-NEXT: por %xmm0, %xmm15
5235 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm6[2,2,2,2,4,5,6,7]
5236 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
5237 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
5238 ; SSE-NEXT: movdqa %xmm1, %xmm9
5239 ; SSE-NEXT: pandn %xmm0, %xmm9
5240 ; SSE-NEXT: pand %xmm1, %xmm15
5241 ; SSE-NEXT: por %xmm15, %xmm9
5242 ; SSE-NEXT: pand %xmm2, %xmm9
5243 ; SSE-NEXT: por %xmm10, %xmm9
5244 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5245 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,1,1,3]
5246 ; SSE-NEXT: movdqa %xmm1, %xmm10
5247 ; SSE-NEXT: pandn %xmm0, %xmm10
5248 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm11[0,1,2,3,4,5,5,7]
5249 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,2]
5250 ; SSE-NEXT: pand %xmm1, %xmm0
5251 ; SSE-NEXT: por %xmm10, %xmm0
5252 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm13[0,1,2,3,5,5,5,5]
5253 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[2,2,2,2]
5254 ; SSE-NEXT: movdqa %xmm7, %xmm15
5255 ; SSE-NEXT: pandn %xmm10, %xmm15
5256 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5257 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm3[1,2,2,3,4,5,6,7]
5258 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,0,2,1]
5259 ; SSE-NEXT: pand %xmm7, %xmm10
5260 ; SSE-NEXT: por %xmm15, %xmm10
5261 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0]
5262 ; SSE-NEXT: pand %xmm1, %xmm10
5263 ; SSE-NEXT: pandn %xmm0, %xmm1
5264 ; SSE-NEXT: por %xmm10, %xmm1
5265 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
5266 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm5[1,2,2,3,4,5,6,7]
5267 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
5268 ; SSE-NEXT: movdqa %xmm12, %xmm10
5269 ; SSE-NEXT: pandn %xmm0, %xmm10
5270 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm8[0,1,2,3,4,4,6,5]
5271 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,3]
5272 ; SSE-NEXT: pand %xmm12, %xmm0
5273 ; SSE-NEXT: por %xmm0, %xmm10
5274 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm6[0,1,2,3,4,5,5,7]
5275 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
5276 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
5277 ; SSE-NEXT: movdqa %xmm14, %xmm15
5278 ; SSE-NEXT: pandn %xmm0, %xmm15
5279 ; SSE-NEXT: pand %xmm14, %xmm10
5280 ; SSE-NEXT: por %xmm10, %xmm15
5281 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
5282 ; SSE-NEXT: pand %xmm0, %xmm1
5283 ; SSE-NEXT: pandn %xmm15, %xmm0
5284 ; SSE-NEXT: por %xmm1, %xmm0
5285 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5286 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm2[0,1,2,3,6,5,7,7]
5287 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
5288 ; SSE-NEXT: movdqa %xmm14, %xmm10
5289 ; SSE-NEXT: pandn %xmm0, %xmm10
5290 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm11[0,1,2,3,7,7,7,7]
5291 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
5292 ; SSE-NEXT: pand %xmm14, %xmm0
5293 ; SSE-NEXT: por %xmm0, %xmm10
5294 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm3[0,1,2,3,4,6,6,7]
5295 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,3]
5296 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
5297 ; SSE-NEXT: movdqa %xmm1, %xmm15
5298 ; SSE-NEXT: pandn %xmm0, %xmm15
5299 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm13[0,1,2,3,7,7,7,7]
5300 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
5301 ; SSE-NEXT: pand %xmm1, %xmm0
5302 ; SSE-NEXT: movdqa %xmm1, %xmm2
5303 ; SSE-NEXT: por %xmm0, %xmm15
5304 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
5305 ; SSE-NEXT: pand %xmm0, %xmm15
5306 ; SSE-NEXT: pandn %xmm10, %xmm0
5307 ; SSE-NEXT: por %xmm15, %xmm0
5308 ; SSE-NEXT: movdqa %xmm0, %xmm1
5309 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm8[0,1,2,3,7,7,7,7]
5310 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,2]
5311 ; SSE-NEXT: movdqa %xmm7, %xmm10
5312 ; SSE-NEXT: pandn %xmm0, %xmm10
5313 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm5[0,1,2,3,5,6,6,7]
5314 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
5315 ; SSE-NEXT: pand %xmm7, %xmm0
5316 ; SSE-NEXT: por %xmm10, %xmm0
5317 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm6[0,1,2,3,6,7,7,7]
5318 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[2,1,3,2]
5319 ; SSE-NEXT: movdqa %xmm4, %xmm14
5320 ; SSE-NEXT: movdqa %xmm4, %xmm15
5321 ; SSE-NEXT: pandn %xmm10, %xmm15
5322 ; SSE-NEXT: pand %xmm4, %xmm0
5323 ; SSE-NEXT: por %xmm0, %xmm15
5324 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
5325 ; SSE-NEXT: pand %xmm0, %xmm1
5326 ; SSE-NEXT: pandn %xmm15, %xmm0
5327 ; SSE-NEXT: por %xmm1, %xmm0
5328 ; SSE-NEXT: movdqa %xmm0, (%rsp) # 16-byte Spill
5329 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
5330 ; SSE-NEXT: punpcklbw {{.*#+}} xmm12 = xmm12[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5331 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm12[0,0,2,1,4,5,6,7]
5332 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
5333 ; SSE-NEXT: movdqa %xmm2, %xmm10
5334 ; SSE-NEXT: pandn %xmm0, %xmm10
5335 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
5336 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[0,0,0,0,4,5,6,7]
5337 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
5338 ; SSE-NEXT: pand %xmm2, %xmm0
5339 ; SSE-NEXT: movdqa %xmm2, %xmm6
5340 ; SSE-NEXT: por %xmm0, %xmm10
5341 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
5342 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5343 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm8[0,2,1,3,4,5,6,7]
5344 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,0]
5345 ; SSE-NEXT: movdqa %xmm4, %xmm15
5346 ; SSE-NEXT: pandn %xmm0, %xmm15
5347 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
5348 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[0,0,2,1,4,5,6,7]
5349 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
5350 ; SSE-NEXT: pand %xmm4, %xmm0
5351 ; SSE-NEXT: por %xmm0, %xmm15
5352 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
5353 ; SSE-NEXT: pand %xmm0, %xmm15
5354 ; SSE-NEXT: pandn %xmm10, %xmm0
5355 ; SSE-NEXT: por %xmm15, %xmm0
5356 ; SSE-NEXT: movdqa %xmm0, %xmm3
5357 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5358 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[0,0,0,0,4,5,6,7]
5359 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
5360 ; SSE-NEXT: movdqa {{.*#+}} xmm15 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
5361 ; SSE-NEXT: pand %xmm15, %xmm0
5362 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
5363 ; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5364 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm5[0,1,1,3,4,5,6,7]
5365 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,0,2,1]
5366 ; SSE-NEXT: pandn %xmm10, %xmm15
5367 ; SSE-NEXT: por %xmm0, %xmm15
5368 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5369 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm1[0,0,0,0,4,5,6,7]
5370 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
5371 ; SSE-NEXT: movdqa %xmm7, %xmm10
5372 ; SSE-NEXT: pandn %xmm0, %xmm10
5373 ; SSE-NEXT: pand %xmm7, %xmm15
5374 ; SSE-NEXT: por %xmm15, %xmm10
5375 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
5376 ; SSE-NEXT: pand %xmm0, %xmm3
5377 ; SSE-NEXT: pandn %xmm10, %xmm0
5378 ; SSE-NEXT: por %xmm3, %xmm0
5379 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5380 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm8[0,1,2,2]
5381 ; SSE-NEXT: movdqa %xmm7, %xmm3
5382 ; SSE-NEXT: pandn %xmm0, %xmm3
5383 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[1,1,2,3,4,5,6,7]
5384 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
5385 ; SSE-NEXT: pand %xmm7, %xmm0
5386 ; SSE-NEXT: por %xmm3, %xmm0
5387 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
5388 ; SSE-NEXT: movdqa %xmm4, %xmm10
5389 ; SSE-NEXT: pandn %xmm0, %xmm10
5390 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm12[1,1,2,1]
5391 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,7,5,6,4]
5392 ; SSE-NEXT: movdqa %xmm14, %xmm3
5393 ; SSE-NEXT: pandn %xmm0, %xmm3
5394 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[1,1,2,2,4,5,6,7]
5395 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
5396 ; SSE-NEXT: pand %xmm14, %xmm0
5397 ; SSE-NEXT: por %xmm0, %xmm3
5398 ; SSE-NEXT: pand %xmm4, %xmm3
5399 ; SSE-NEXT: por %xmm10, %xmm3
5400 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[1,1,1,1,4,5,6,7]
5401 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
5402 ; SSE-NEXT: movdqa %xmm6, %xmm4
5403 ; SSE-NEXT: pand %xmm6, %xmm0
5404 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm5[0,2,2,3,4,5,6,7]
5405 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,1,1,3]
5406 ; SSE-NEXT: pandn %xmm10, %xmm4
5407 ; SSE-NEXT: por %xmm0, %xmm4
5408 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm1[1,1,1,1,4,5,6,7]
5409 ; SSE-NEXT: movdqa %xmm1, %xmm15
5410 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
5411 ; SSE-NEXT: movdqa {{.*#+}} xmm6 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255]
5412 ; SSE-NEXT: movdqa %xmm6, %xmm10
5413 ; SSE-NEXT: pandn %xmm0, %xmm10
5414 ; SSE-NEXT: pand %xmm6, %xmm4
5415 ; SSE-NEXT: por %xmm4, %xmm10
5416 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
5417 ; SSE-NEXT: pand %xmm1, %xmm3
5418 ; SSE-NEXT: pandn %xmm10, %xmm1
5419 ; SSE-NEXT: por %xmm3, %xmm1
5420 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm12[2,2,3,3]
5421 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm11[2,1,3,3,4,5,6,7]
5422 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,0,2,1]
5423 ; SSE-NEXT: pand %xmm7, %xmm3
5424 ; SSE-NEXT: pandn %xmm0, %xmm7
5425 ; SSE-NEXT: por %xmm3, %xmm7
5426 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[0,2,2,3,4,5,6,7]
5427 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
5428 ; SSE-NEXT: pand %xmm6, %xmm0
5429 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm8[0,1,2,3,5,6,6,7]
5430 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,2,2,2]
5431 ; SSE-NEXT: pandn %xmm3, %xmm6
5432 ; SSE-NEXT: por %xmm0, %xmm6
5433 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
5434 ; SSE-NEXT: pand %xmm0, %xmm6
5435 ; SSE-NEXT: pandn %xmm7, %xmm0
5436 ; SSE-NEXT: por %xmm6, %xmm0
5437 ; SSE-NEXT: movdqa %xmm0, %xmm4
5438 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[2,2,2,3,4,5,6,7]
5439 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
5440 ; SSE-NEXT: pand %xmm14, %xmm0
5441 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm5[0,1,2,3,4,6,5,7]
5442 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,2]
5443 ; SSE-NEXT: pandn %xmm3, %xmm14
5444 ; SSE-NEXT: por %xmm0, %xmm14
5445 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
5446 ; SSE-NEXT: pand %xmm3, %xmm14
5447 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm15[2,2,2,2,4,5,6,7]
5448 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
5449 ; SSE-NEXT: pandn %xmm0, %xmm3
5450 ; SSE-NEXT: por %xmm14, %xmm3
5451 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,0,0,0,0,255,255,255,0,0,0,0,255,255]
5452 ; SSE-NEXT: pand %xmm0, %xmm3
5453 ; SSE-NEXT: pandn %xmm4, %xmm0
5454 ; SSE-NEXT: por %xmm0, %xmm3
5455 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
5456 ; SSE-NEXT: movdqa %xmm3, 368(%rax)
5457 ; SSE-NEXT: movdqa %xmm1, 352(%rax)
5458 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5459 ; SSE-NEXT: movaps %xmm0, 336(%rax)
5460 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
5461 ; SSE-NEXT: movaps %xmm0, 320(%rax)
5462 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5463 ; SSE-NEXT: movaps %xmm0, 288(%rax)
5464 ; SSE-NEXT: movdqa %xmm9, 256(%rax)
5465 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5466 ; SSE-NEXT: movaps %xmm0, 240(%rax)
5467 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5468 ; SSE-NEXT: movaps %xmm0, 224(%rax)
5469 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5470 ; SSE-NEXT: movaps %xmm0, 208(%rax)
5471 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5472 ; SSE-NEXT: movaps %xmm0, 176(%rax)
5473 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5474 ; SSE-NEXT: movaps %xmm0, 144(%rax)
5475 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5476 ; SSE-NEXT: movaps %xmm0, 128(%rax)
5477 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5478 ; SSE-NEXT: movaps %xmm0, 112(%rax)
5479 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5480 ; SSE-NEXT: movaps %xmm0, 96(%rax)
5481 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5482 ; SSE-NEXT: movaps %xmm0, 64(%rax)
5483 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5484 ; SSE-NEXT: movaps %xmm0, 32(%rax)
5485 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5486 ; SSE-NEXT: movaps %xmm0, 16(%rax)
5487 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5488 ; SSE-NEXT: movaps %xmm0, (%rax)
5489 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5490 ; SSE-NEXT: movaps %xmm0, 304(%rax)
5491 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5492 ; SSE-NEXT: movaps %xmm0, 192(%rax)
5493 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5494 ; SSE-NEXT: movaps %xmm0, 80(%rax)
5495 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5496 ; SSE-NEXT: movaps %xmm0, 272(%rax)
5497 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5498 ; SSE-NEXT: movaps %xmm0, 160(%rax)
5499 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5500 ; SSE-NEXT: movaps %xmm0, 48(%rax)
5501 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5502 ; SSE-NEXT: movaps %xmm0, 432(%rax)
5503 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5504 ; SSE-NEXT: movaps %xmm0, 400(%rax)
5505 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5506 ; SSE-NEXT: movaps %xmm0, 416(%rax)
5507 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5508 ; SSE-NEXT: movaps %xmm0, 384(%rax)
5509 ; SSE-NEXT: addq $648, %rsp # imm = 0x288
5510 ; SSE-NEXT: retq
5511 ;
5512 ; AVX1-ONLY-LABEL: store_i8_stride7_vf64:
5513 ; AVX1-ONLY: # %bb.0:
5514 ; AVX1-ONLY-NEXT: subq $616, %rsp # imm = 0x268
5515 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
5516 ; AVX1-ONLY-NEXT: vmovdqa 16(%rax), %xmm6
5517 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = zero,xmm6[13,u,u,u,u],zero,zero,xmm6[14,u,u,u,u],zero,zero,xmm6[15]
5518 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm3
5519 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm4
5520 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
5521 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[10],zero,xmm1[u,u,u,u,13,12],zero,xmm1[u,u,u,u,15,14],zero
5522 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm1, %xmm1
5523 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm6[u],zero,zero,xmm6[11,u,u,u,u],zero,zero,xmm6[12,u,u,u,u],zero
5524 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm3[8],xmm4[8],xmm3[9],xmm4[9],xmm3[10],xmm4[10],xmm3[11],xmm4[11],xmm3[12],xmm4[12],xmm3[13],xmm4[13],xmm3[14],xmm4[14],xmm3[15],xmm4[15]
5525 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, %xmm8
5526 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5527 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, %xmm10
5528 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5529 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[u,6,7],zero,xmm0[u,u,u,u,8,9],zero,xmm0[u,u,u,u,10]
5530 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm3, %xmm2
5531 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
5532 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm7
5533 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm11
5534 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm11[8],xmm7[8],xmm11[9],xmm7[9],xmm11[10],xmm7[10],xmm11[11],xmm7[11],xmm11[12],xmm7[12],xmm11[13],xmm7[13],xmm11[14],xmm7[14],xmm11[15],xmm7[15]
5535 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm2[u,u,12,13,u,u,u,u,u,14,15,u,u,u,u,u]
5536 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[u,u,u,u,8,9,u,u,u,u,u,10,11,u,u,u]
5537 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm3
5538 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm12
5539 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm13
5540 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm12[8],xmm13[8],xmm12[9],xmm13[9],xmm12[10],xmm13[10],xmm12[11],xmm13[11],xmm12[12],xmm13[12],xmm12[13],xmm13[13],xmm12[14],xmm13[14],xmm12[15],xmm13[15]
5541 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[6,u,u,u,u,u,9,8,u,u,u,u,u,11,10,u]
5542 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
5543 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5544 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5545 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm2[u,u,u,u,12,13,u,u,u,u,u,14,15,u,u,u]
5546 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
5547 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm5 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
5548 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm5, %ymm3
5549 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm4, %ymm4
5550 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm4, %ymm3
5551 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
5552 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm4, %ymm1
5553 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm3, %ymm3
5554 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm3, %ymm1
5555 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5556 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[u,u,u,2,3],zero,xmm0[u,u,u,u,4,5],zero,xmm0[u,u,u]
5557 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5558 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm6[u,u,u],zero,zero,xmm6[9,u,u,u,u],zero,zero,xmm6[10,u,u,u]
5559 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm0, %xmm0
5560 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm3 = <u,u,u,u,u,128,7,u,u,u,u,u,128,8,u,u>
5561 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm8, %xmm1
5562 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, %xmm8
5563 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm4 = <u,u,u,u,u,7,128,u,u,u,u,u,8,128,u,u>
5564 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm10, %xmm3
5565 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, %xmm10
5566 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm3, %xmm1
5567 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm5 = <128,u,u,u,u,5,6,128,u,u,u,u,12,13,128,u>
5568 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm1, %xmm1
5569 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm4 = <6,u,u,u,u,128,128,7,u,u,u,u,128,128,8,u>
5570 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm6, %xmm3
5571 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, %xmm6
5572 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm1, %xmm1
5573 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
5574 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm3 = <u,u,u,128,7,u,u,u,u,u,128,8,u,u,u,u>
5575 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm12, %xmm1
5576 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, %xmm12
5577 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm9 = <u,u,u,7,128,u,u,u,u,u,8,128,u,u,u,u>
5578 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm13, %xmm3
5579 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm3, %xmm1
5580 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[u,2,3,u,u,u,u,u,4,5,u,u,u,u,u,6]
5581 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
5582 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm14 = <u,128,7,u,u,u,u,u,128,8,u,u,u,u,u,128>
5583 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5584 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm7, %xmm2
5585 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm4 = <u,7,128,u,u,u,u,u,8,128,u,u,u,u,u,9>
5586 ; AVX1-ONLY-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5587 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm11, %xmm3
5588 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, %xmm13
5589 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm3, %xmm2
5590 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm7[8],xmm11[8],xmm7[9],xmm11[9],xmm7[10],xmm11[10],xmm7[11],xmm11[11],xmm7[12],xmm11[12],xmm7[13],xmm11[13],xmm7[14],xmm11[14],xmm7[15],xmm11[15]
5591 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm15 = <2,u,u,u,u,u,5,4,u,u,u,u,u,7,6,u>
5592 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm3, %xmm3
5593 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
5594 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0]
5595 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm3, %ymm1
5596 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm2, %ymm2
5597 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm2, %ymm1
5598 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
5599 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm2, %ymm0
5600 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm1, %ymm1
5601 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
5602 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5603 ; AVX1-ONLY-NEXT: vmovdqa 32(%r8), %xmm3
5604 ; AVX1-ONLY-NEXT: vmovdqa 32(%r9), %xmm4
5605 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm4, %xmm0
5606 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm3, %xmm1
5607 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm1, %xmm0
5608 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm0, %xmm0
5609 ; AVX1-ONLY-NEXT: vmovdqa 32(%rax), %xmm2
5610 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm2, %xmm1
5611 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm0, %xmm0
5612 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,xmm2[4,u,u,u,u],zero,zero,xmm2[5,u,u,u,u],zero,zero
5613 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, %xmm8
5614 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
5615 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, %xmm11
5616 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5617 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, %xmm10
5618 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5619 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5620 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9],zero,xmm2[u,u,u,u,10,11],zero,xmm2[u,u,u,u,12,13]
5621 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm2, %xmm1
5622 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm4
5623 ; AVX1-ONLY-NEXT: vmovdqa 32(%rcx), %xmm0
5624 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdx), %xmm2
5625 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm0, %xmm1
5626 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm3
5627 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm3, %xmm1
5628 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3],xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
5629 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5630 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[u,u,u,u,u,10,11,u,u,u,u,u,12,13,u,u]
5631 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm5
5632 ; AVX1-ONLY-NEXT: vmovdqa 32(%rsi), %xmm1
5633 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm3
5634 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm1, %xmm6
5635 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm3, %xmm7
5636 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm7, %xmm6
5637 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
5638 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5639 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm12 = <u,u,u,10,11,u,u,u,u,u,12,13,u,u,u,u>
5640 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm7, %xmm7
5641 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, %xmm9
5642 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm7, %ymm6
5643 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm7 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255]
5644 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm7, %ymm5
5645 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm6, %ymm6
5646 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm6, %ymm5
5647 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm6 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
5648 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm6, %ymm4
5649 ; AVX1-ONLY-NEXT: vandps %ymm6, %ymm5, %ymm5
5650 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm5, %ymm4
5651 ; AVX1-ONLY-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5652 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5653 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm8[u],zero,zero,xmm8[11,u,u,u,u],zero,zero,xmm8[12,u,u,u,u],zero
5654 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm10[8],xmm11[8],xmm10[9],xmm11[9],xmm10[10],xmm11[10],xmm10[11],xmm11[11],xmm10[12],xmm11[12],xmm10[13],xmm11[13],xmm10[14],xmm11[14],xmm10[15],xmm11[15]
5655 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = xmm5[u,6,7],zero,xmm5[u,u,u,u,8,9],zero,xmm5[u,u,u,u,10]
5656 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm6, %xmm4
5657 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[u,u,u,2,3],zero,xmm5[u,u,u,u,4,5],zero,xmm5[u,u,u]
5658 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = xmm8[u,u,u],zero,zero,xmm8[9,u,u,u,u],zero,zero,xmm8[10,u,u,u]
5659 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm5, %xmm5
5660 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
5661 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
5662 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5663 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm3[8],xmm1[9],xmm3[9],xmm1[10],xmm3[10],xmm1[11],xmm3[11],xmm1[12],xmm3[12],xmm1[13],xmm3[13],xmm1[14],xmm3[14],xmm1[15],xmm3[15]
5664 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm1, %xmm1
5665 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm5[u,u,u,u,8,9,u,u,u,u,u,10,11,u,u,u]
5666 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
5667 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm0[8],xmm2[8],xmm0[9],xmm2[9],xmm0[10],xmm2[10],xmm0[11],xmm2[11],xmm0[12],xmm2[12],xmm0[13],xmm2[13],xmm0[14],xmm2[14],xmm0[15],xmm2[15]
5668 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm2[8],xmm0[8],xmm2[9],xmm0[9],xmm2[10],xmm0[10],xmm2[11],xmm0[11],xmm2[12],xmm0[12],xmm2[13],xmm0[13],xmm2[14],xmm0[14],xmm2[15],xmm0[15]
5669 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5670 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm0 = <6,u,u,u,u,u,9,8,u,u,u,u,u,11,10,u>
5671 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm3, %xmm0
5672 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[u,2,3,u,u,u,u,u,4,5,u,u,u,u,u,6]
5673 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
5674 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
5675 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm2, %ymm1
5676 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm0, %ymm0
5677 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
5678 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
5679 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm2, %ymm1
5680 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm0, %ymm0
5681 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
5682 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5683 ; AVX1-ONLY-NEXT: vmovdqa 48(%rax), %xmm12
5684 ; AVX1-ONLY-NEXT: vmovdqa 48(%r8), %xmm2
5685 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5686 ; AVX1-ONLY-NEXT: vmovdqa 48(%r9), %xmm1
5687 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5688 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm5 = <128,128,4,u,u,u,u,128,128,5,u,u,u,u,128,128>
5689 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm12, %xmm0
5690 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
5691 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5692 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm14 = <8,9,128,u,u,u,u,10,11,128,u,u,u,u,12,13>
5693 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm2, %xmm1
5694 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm1, %xmm0
5695 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm12[u,u],zero,zero,xmm12[2,u,u,u,u],zero,zero,xmm12[3,u,u,u,u]
5696 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm2[u,u,4,5],zero,xmm2[u,u,u,u,6,7],zero,xmm2[u,u,u,u]
5697 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm3, %xmm1
5698 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
5699 ; AVX1-ONLY-NEXT: vmovdqa 48(%rsi), %xmm1
5700 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5701 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm10
5702 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm10[0],xmm1[0],xmm10[1],xmm1[1],xmm10[2],xmm1[2],xmm10[3],xmm1[3],xmm10[4],xmm1[4],xmm10[5],xmm1[5],xmm10[6],xmm1[6],xmm10[7],xmm1[7]
5703 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5704 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm1
5705 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm2[u,u,u,u,u,6,7,u,u,u,u,u,8,9,u,u]
5706 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm1
5707 ; AVX1-ONLY-NEXT: vmovdqa 48(%rcx), %xmm2
5708 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, (%rsp) # 16-byte Spill
5709 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdx), %xmm13
5710 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm13[0],xmm2[0],xmm13[1],xmm2[1],xmm13[2],xmm2[2],xmm13[3],xmm2[3],xmm13[4],xmm2[4],xmm13[5],xmm2[5],xmm13[6],xmm2[6],xmm13[7],xmm2[7]
5711 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5712 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm9 = <u,u,u,u,u,10,11,u,u,u,u,u,12,13,u,u>
5713 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm4, %xmm3
5714 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm4[4,5,u,u,u,u,u,6,7,u,u,u,u,u,8,9]
5715 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm11, %ymm3
5716 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm7, %ymm1
5717 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm3, %ymm3
5718 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm3, %ymm1
5719 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0]
5720 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm2, %ymm0
5721 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm1, %ymm1
5722 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm0
5723 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5724 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm6
5725 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm6[u,u,u,u,u],zero,xmm6[7,u,u,u,u,u],zero,xmm6[8,u,u]
5726 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm8
5727 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm8[u,u,u,u,u,7],zero,xmm8[u,u,u,u,u,8],zero,xmm8[u,u]
5728 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm1, %xmm0
5729 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = zero,xmm0[u,u,u,u,5,6],zero,xmm0[u,u,u,u,12,13],zero,xmm0[u]
5730 ; AVX1-ONLY-NEXT: vmovdqa (%rax), %xmm7
5731 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm7[6,u,u,u,u],zero,zero,xmm7[7,u,u,u,u],zero,zero,xmm7[8,u]
5732 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm0, %xmm0
5733 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm7, %xmm1
5734 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3],xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
5735 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5736 ; AVX1-ONLY-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5737 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5738 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm4, %xmm15
5739 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm15, %xmm1
5740 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm15
5741 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm5
5742 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm3
5743 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm5[u,u,u],zero,xmm5[7,u,u,u,u,u],zero,xmm5[8,u,u,u,u]
5744 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm14 = xmm3[u,u,u,7],zero,xmm3[u,u,u,u,u,8],zero,xmm3[u,u,u,u]
5745 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm14, %xmm0
5746 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3],xmm3[4],xmm5[4],xmm3[5],xmm5[5],xmm3[6],xmm5[6],xmm3[7],xmm5[7]
5747 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5748 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm1, %xmm14
5749 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm14, %ymm4
5750 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm14
5751 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm2
5752 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm14[u],zero,xmm14[7,u,u,u,u,u],zero,xmm14[8,u,u,u,u,u],zero
5753 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm2[u,7],zero,xmm2[u,u,u,u,u,8],zero,xmm2[u,u,u,u,u,9]
5754 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm11, %xmm1
5755 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm14[0],xmm2[1],xmm14[1],xmm2[2],xmm14[2],xmm2[3],xmm14[3],xmm2[4],xmm14[4],xmm2[5],xmm14[5],xmm2[6],xmm14[6],xmm2[7],xmm14[7]
5756 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5757 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm0[u,u,u,10,11,u,u,u,u,u,12,13,u,u,u,u]
5758 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm11, %ymm1
5759 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm9 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255]
5760 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm9, %ymm4
5761 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm9, %ymm1
5762 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm1, %ymm1
5763 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm0 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
5764 ; AVX1-ONLY-NEXT: vandnps %ymm15, %ymm0, %ymm4
5765 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm1, %ymm1
5766 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm1, %ymm1
5767 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5768 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5769 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm7[u],zero,zero,xmm7[11,u,u,u,u],zero,zero,xmm7[12,u,u,u,u],zero
5770 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm8[8],xmm6[8],xmm8[9],xmm6[9],xmm8[10],xmm6[10],xmm8[11],xmm6[11],xmm8[12],xmm6[12],xmm8[13],xmm6[13],xmm8[14],xmm6[14],xmm8[15],xmm6[15]
5771 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm4[u,6,7],zero,xmm4[u,u,u,u,8,9],zero,xmm4[u,u,u,u,10]
5772 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm11, %xmm1
5773 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[u,u,u,2,3],zero,xmm4[u,u,u,u,4,5],zero,xmm4[u,u,u]
5774 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm7[u,u,u],zero,zero,xmm7[9,u,u,u,u],zero,zero,xmm7[10,u,u,u]
5775 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm11, %xmm4
5776 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm4, %ymm1
5777 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm2[8],xmm14[8],xmm2[9],xmm14[9],xmm2[10],xmm14[10],xmm2[11],xmm14[11],xmm2[12],xmm14[12],xmm2[13],xmm14[13],xmm2[14],xmm14[14],xmm2[15],xmm14[15]
5778 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5779 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm14[8],xmm2[8],xmm14[9],xmm2[9],xmm14[10],xmm2[10],xmm14[11],xmm2[11],xmm14[12],xmm2[12],xmm14[13],xmm2[13],xmm14[14],xmm2[14],xmm14[15],xmm2[15]
5780 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm6 = <2,u,u,u,u,u,5,4,u,u,u,u,u,7,6,u>
5781 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm0, %xmm0
5782 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm14 = <u,u,u,u,8,9,u,u,u,u,u,10,11,u,u,u>
5783 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm4, %xmm4
5784 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm0
5785 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm5[8],xmm3[8],xmm5[9],xmm3[9],xmm5[10],xmm3[10],xmm5[11],xmm3[11],xmm5[12],xmm3[12],xmm5[13],xmm3[13],xmm5[14],xmm3[14],xmm5[15],xmm3[15]
5786 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm5[8],xmm3[9],xmm5[9],xmm3[10],xmm5[10],xmm3[11],xmm5[11],xmm3[12],xmm5[12],xmm3[13],xmm5[13],xmm3[14],xmm5[14],xmm3[15],xmm5[15]
5787 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5788 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm7 = <6,u,u,u,u,u,9,8,u,u,u,u,u,11,10,u>
5789 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm4, %xmm2
5790 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm5 = <u,2,3,u,u,u,u,u,4,5,u,u,u,u,u,6>
5791 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm3, %xmm3
5792 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
5793 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm3 = [0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255]
5794 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm3, %ymm0
5795 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm2, %ymm2
5796 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm2, %ymm0
5797 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
5798 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm2, %ymm1
5799 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm0, %ymm0
5800 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
5801 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5802 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5803 ; AVX1-ONLY-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm1 # 16-byte Folded Reload
5804 ; AVX1-ONLY-NEXT: # xmm1 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
5805 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5806 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5807 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm3[4,u,u,u,u],zero,zero,xmm3[5,u,u,u,u],zero,zero
5808 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm1[8,9],zero,xmm1[u,u,u,u,10,11],zero,xmm1[u,u,u,u,12,13]
5809 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm2, %xmm0
5810 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm3[u,u],zero,zero,xmm3[2,u,u,u,u],zero,zero,xmm3[3,u,u,u,u]
5811 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm1[u,u,4,5],zero,xmm1[u,u,u,u,6,7],zero,xmm1[u,u,u,u]
5812 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm3, %xmm2
5813 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
5814 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5815 ; AVX1-ONLY-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
5816 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3],xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
5817 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5818 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm1[u,u,u,10,11,u,u,u,u,u,12,13,u,u,u,u]
5819 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm1[u,u,u,u,u,6,7,u,u,u,u,u,8,9,u,u]
5820 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
5821 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5822 ; AVX1-ONLY-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
5823 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3],xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
5824 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5825 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm1[u,u,u,u,u,10,11,u,u,u,u,u,12,13,u,u]
5826 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm2 = <4,5,u,u,u,u,u,6,7,u,u,u,u,u,8,9>
5827 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm1, %xmm11
5828 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm11, %ymm4
5829 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm9, %ymm3
5830 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm9, %ymm4
5831 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm4, %ymm3
5832 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm1 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0]
5833 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm1, %ymm0
5834 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm3, %ymm3
5835 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm3, %ymm0
5836 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5837 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm9 # 16-byte Reload
5838 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm9[u,u,u],zero,xmm9[7,u,u,u,u,u],zero,xmm9[8,u,u,u,u]
5839 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm13[u,u,u,7],zero,xmm13[u,u,u,u,u,8],zero,xmm13[u,u,u,u]
5840 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm3, %xmm3
5841 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm13[8],xmm9[8],xmm13[9],xmm9[9],xmm13[10],xmm9[10],xmm13[11],xmm9[11],xmm13[12],xmm9[12],xmm13[13],xmm9[13],xmm13[14],xmm9[14],xmm13[15],xmm9[15]
5842 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm0, %xmm4
5843 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
5844 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5845 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm1[u],zero,xmm1[7,u,u,u,u,u],zero,xmm1[8,u,u,u,u,u],zero
5846 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = xmm10[u,7],zero,xmm10[u,u,u,u,u,8],zero,xmm10[u,u,u,u,u,9]
5847 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm11, %xmm4
5848 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm1[8],xmm10[8],xmm1[9],xmm10[9],xmm1[10],xmm10[10],xmm1[11],xmm10[11],xmm1[12],xmm10[12],xmm1[13],xmm10[13],xmm1[14],xmm10[14],xmm1[15],xmm10[15]
5849 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, %xmm8
5850 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm11, %xmm11
5851 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm4, %ymm4
5852 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm1 = [255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0]
5853 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm1, %ymm3
5854 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm4, %ymm4
5855 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm4, %ymm3
5856 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm4
5857 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[0,1,2],zero,xmm4[u,u,6,7,8,9],zero,xmm4[u,u,13,14,15]
5858 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
5859 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = zero,zero,zero,xmm6[9,u,u],zero,zero,zero,zero,xmm6[10,u,u],zero,zero,zero
5860 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm11, %xmm4
5861 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[0,1,2,3],zero,xmm4[u,6,7,8,9,10],zero,xmm4[u,13,14,15]
5862 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
5863 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = zero,zero,zero,zero,xmm5[9,u],zero,zero,zero,zero,zero,xmm5[10,u],zero,zero,zero
5864 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm11, %xmm4
5865 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[0,1,2,3,4],zero,xmm4[6,7,8,9,10,11],zero,xmm4[13,14,15]
5866 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = zero,zero,zero,zero,zero,xmm12[9],zero,zero,zero,zero,zero,zero,xmm12[10],zero,zero,zero
5867 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm11, %xmm1
5868 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5869 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[u,1,2,3,4],zero,xmm3[u,u,8,9,10,11],zero,xmm3[u,u,15]
5870 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm6[u],zero,zero,zero,zero,xmm6[7,u,u],zero,zero,zero,zero,xmm6[8,u,u],zero
5871 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm3, %xmm3
5872 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[u,1,2,3,4,5],zero,xmm3[u,8,9,10,11,12],zero,xmm3[u,15]
5873 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm5[u],zero,zero,zero,zero,zero,xmm5[7,u],zero,zero,zero,zero,zero,xmm5[8,u],zero
5874 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm3, %xmm3
5875 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = zero,xmm3[1,2,3,4,5,6],zero,xmm3[8,9,10,11,12,13],zero,xmm3[15]
5876 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm12[6],zero,zero,zero,zero,zero,zero,xmm12[7],zero,zero,zero,zero,zero,zero,xmm12[8],zero
5877 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm3, %xmm1
5878 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5879 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm10[8],xmm8[8],xmm10[9],xmm8[9],xmm10[10],xmm8[10],xmm10[11],xmm8[11],xmm10[12],xmm8[12],xmm10[13],xmm8[13],xmm10[14],xmm8[14],xmm10[15],xmm8[15]
5880 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm3, %xmm4
5881 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm10 = <u,u,12,13,u,u,u,u,u,14,15,u,u,u,u,u>
5882 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm3, %xmm3
5883 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
5884 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm9[8],xmm13[8],xmm9[9],xmm13[9],xmm9[10],xmm13[10],xmm9[11],xmm13[11],xmm9[12],xmm13[12],xmm9[13],xmm13[13],xmm9[14],xmm13[14],xmm9[15],xmm13[15]
5885 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm4, %xmm4
5886 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm7 = <u,u,u,u,12,13,u,u,u,u,u,14,15,u,u,u>
5887 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm0, %xmm0
5888 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm0
5889 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm1 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
5890 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm1, %ymm3
5891 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm0, %ymm0
5892 ; AVX1-ONLY-NEXT: vmovaps %ymm1, %ymm8
5893 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm0, %ymm0
5894 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[0],zero,xmm0[u,u,4,5,6,7],zero,xmm0[u,u,11,12,13,14],zero
5895 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = zero,xmm6[11,u,u],zero,zero,zero,zero,xmm6[12,u,u],zero,zero,zero,zero,xmm6[13]
5896 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm3, %xmm3
5897 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[0,1],zero,xmm3[u,4,5,6,7,8],zero,xmm3[u,11,12,13,14,15]
5898 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = zero,zero,xmm5[11,u],zero,zero,zero,zero,zero,xmm5[12,u],zero,zero,zero,zero,zero
5899 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm3, %xmm3
5900 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[0,1,2],zero,xmm3[4,5,6,7,8,9],zero,xmm3[11,12,13,14,15]
5901 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = zero,zero,zero,xmm12[11],zero,zero,zero,zero,zero,zero,xmm12[12],zero,zero,zero,zero,zero
5902 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm3, %xmm1
5903 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5904 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
5905 ; AVX1-ONLY-NEXT: vpalignr {{.*#+}} xmm0 = xmm6[14,15],xmm0[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
5906 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = zero,xmm0[u,4,5,6,7,0],zero,xmm0[u,11,12,13,14,1],zero,xmm0[u]
5907 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm5[13,u],zero,zero,zero,zero,zero,xmm5[14,u],zero,zero,zero,zero,zero,xmm5[15,u]
5908 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm0, %xmm0
5909 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0],zero,xmm0[2,3,4,5,6,7],zero,xmm0[9,10,11,12,13,14],zero
5910 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = zero,xmm12[13],zero,zero,zero,zero,zero,zero,xmm12[14],zero,zero,zero,zero,zero,zero,xmm12[15]
5911 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm0, %xmm0
5912 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5913 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm15 = <u,u,0,1,u,u,u,u,u,2,3,u,u,u,u,u>
5914 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5915 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm0, %xmm0
5916 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5917 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm1, %xmm3
5918 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, %xmm9
5919 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
5920 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm3 = <0,1,u,u,u,u,u,2,3,u,u,u,u,u,4,5>
5921 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5922 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm1, %xmm4
5923 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5924 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm1, %xmm7
5925 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm7, %ymm4
5926 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm8, %ymm0
5927 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm8, %ymm4
5928 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm4, %ymm0
5929 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5930 ; AVX1-ONLY-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm4 # 16-byte Folded Reload
5931 ; AVX1-ONLY-NEXT: # xmm4 = xmm1[8],mem[8],xmm1[9],mem[9],xmm1[10],mem[10],xmm1[11],mem[11],xmm1[12],mem[12],xmm1[13],mem[13],xmm1[14],mem[14],xmm1[15],mem[15]
5932 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[10],zero,xmm4[u,u,u,u,13,12],zero,xmm4[u,u,u,u,15,14],zero
5933 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
5934 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = zero,xmm14[13,u,u,u,u],zero,zero,xmm14[14,u,u,u,u],zero,zero,xmm14[15]
5935 ; AVX1-ONLY-NEXT: vpor %xmm7, %xmm4, %xmm7
5936 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm4 = <u,u,u,u,0,1,u,u,u,u,u,2,3,u,u,u>
5937 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5938 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm1, %xmm10
5939 ; AVX1-ONLY-NEXT: vpalignr {{.*#+}} xmm5 = xmm10[4,5,6,7,8,9,10,11,12,13,14,15],xmm12[0,1,2,3]
5940 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm11 = <u,u,u,u,0,1,12,u,u,u,u,7,8,13,u,u>
5941 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm5, %xmm5
5942 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm7, %ymm5
5943 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm7 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
5944 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm0, %ymm0
5945 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm7, %ymm5
5946 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm0, %ymm0
5947 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5948 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
5949 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm8, %xmm5
5950 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm8, %xmm10
5951 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm10, %ymm5
5952 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
5953 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm6 = <u,u,u,u,u,6,7,u,u,u,u,u,8,9,u,u>
5954 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm8, %xmm10
5955 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm8, %xmm12
5956 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm10
5957 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm12 = [255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0]
5958 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm12, %ymm5
5959 ; AVX1-ONLY-NEXT: vandps %ymm12, %ymm10, %ymm10
5960 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm10, %ymm5
5961 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm0 = <u,u,128,128,2,u,u,u,u,128,128,3,u,u,u,u>
5962 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm14, %xmm10
5963 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
5964 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm1 = <u,u,4,5,128,u,u,u,u,6,7,128,u,u,u,u>
5965 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm8, %xmm13
5966 ; AVX1-ONLY-NEXT: vpor %xmm10, %xmm13, %xmm10
5967 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm8, %xmm13
5968 ; AVX1-ONLY-NEXT: vpalignr {{.*#+}} xmm13 = xmm13[4,5,6,7,8,9,10,11,12,13,14,15],xmm14[0,1,2,3]
5969 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm13, %xmm13
5970 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm13, %ymm10
5971 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm13 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
5972 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm13, %ymm5
5973 ; AVX1-ONLY-NEXT: vandnps %ymm10, %ymm13, %ymm10
5974 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm10, %ymm10
5975 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
5976 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm8, %xmm5
5977 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm8, %xmm14
5978 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm14, %ymm5
5979 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5980 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm2, %xmm14
5981 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm2, %xmm8
5982 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm8, %ymm8
5983 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm12, %ymm5
5984 ; AVX1-ONLY-NEXT: vandps %ymm12, %ymm8, %ymm8
5985 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm8, %ymm5
5986 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
5987 ; AVX1-ONLY-NEXT: vpshufb %xmm0, %xmm6, %xmm8
5988 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5989 ; AVX1-ONLY-NEXT: vpshufb %xmm1, %xmm0, %xmm12
5990 ; AVX1-ONLY-NEXT: vpor %xmm8, %xmm12, %xmm8
5991 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm0, %xmm12
5992 ; AVX1-ONLY-NEXT: vpalignr {{.*#+}} xmm12 = xmm12[4,5,6,7,8,9,10,11,12,13,14,15],xmm6[0,1,2,3]
5993 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm12, %xmm12
5994 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm12, %ymm8
5995 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm13, %ymm5
5996 ; AVX1-ONLY-NEXT: vandnps %ymm8, %ymm13, %ymm8
5997 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm8, %ymm5
5998 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5999 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm0, %xmm8
6000 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6001 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm0, %xmm9
6002 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm9, %ymm8
6003 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6004 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm2
6005 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6006 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[u,u,12,13,u,u,u,u,u,14,15,u,u,u,u,u]
6007 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
6008 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm0 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255]
6009 ; AVX1-ONLY-NEXT: vandnps %ymm8, %ymm0, %ymm3
6010 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm2, %ymm2
6011 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm2, %ymm2
6012 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = zero,xmm6[13,u,u,u,u],zero,zero,xmm6[14,u,u,u,u],zero,zero,xmm6[15]
6013 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6014 ; AVX1-ONLY-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm6 # 16-byte Folded Reload
6015 ; AVX1-ONLY-NEXT: # xmm6 = xmm0[8],mem[8],xmm0[9],mem[9],xmm0[10],mem[10],xmm0[11],mem[11],xmm0[12],mem[12],xmm0[13],mem[13],xmm0[14],mem[14],xmm0[15],mem[15]
6016 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[10],zero,xmm6[u,u,u,u,13,12],zero,xmm6[u,u,u,u,15,14],zero
6017 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm6, %xmm3
6018 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6019 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm0, %xmm1
6020 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6021 ; AVX1-ONLY-NEXT: vpalignr {{.*#+}} xmm1 = xmm1[4,5,6,7,8,9,10,11,12,13,14,15],xmm0[0,1,2,3]
6022 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm1, %xmm1
6023 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm1
6024 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm2, %ymm2
6025 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm7, %ymm1
6026 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm2, %ymm1
6027 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
6028 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6029 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 128(%rax)
6030 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 96(%rax)
6031 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6032 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
6033 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6034 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
6035 ; AVX1-ONLY-NEXT: vmovaps %ymm5, (%rax)
6036 ; AVX1-ONLY-NEXT: vmovaps %ymm10, 224(%rax)
6037 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6038 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 352(%rax)
6039 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6040 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
6041 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6042 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
6043 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6044 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
6045 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6046 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 160(%rax)
6047 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6048 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 192(%rax)
6049 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6050 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 432(%rax)
6051 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6052 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 416(%rax)
6053 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6054 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 384(%rax)
6055 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6056 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 400(%rax)
6057 ; AVX1-ONLY-NEXT: addq $616, %rsp # imm = 0x268
6058 ; AVX1-ONLY-NEXT: vzeroupper
6059 ; AVX1-ONLY-NEXT: retq
6060 ;
6061 ; AVX2-SLOW-LABEL: store_i8_stride7_vf64:
6062 ; AVX2-SLOW: # %bb.0:
6063 ; AVX2-SLOW-NEXT: subq $824, %rsp # imm = 0x338
6064 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6065 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %ymm0
6066 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6067 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %ymm1
6068 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6069 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %ymm6
6070 ; AVX2-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6071 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %ymm7
6072 ; AVX2-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6073 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %ymm5
6074 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6075 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %ymm4
6076 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6077 ; AVX2-SLOW-NEXT: vmovdqa 32(%rax), %ymm3
6078 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6079 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm0[23],zero,ymm0[27,20,21,26],zero,ymm0[24],zero,ymm0[26,27,26,27],zero,ymm0[25]
6080 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
6081 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,ymm1[23],zero,zero,zero,zero,ymm1[26],zero,ymm1[24],zero,zero,zero,zero,ymm1[27],zero
6082 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6083 ; AVX2-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
6084 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm7[25],zero,ymm7[23],zero,zero,zero,zero,ymm7[26],zero,ymm7[24],zero,zero,zero,zero
6085 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6086 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero,zero,zero,ymm6[27]
6087 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
6088 ; AVX2-SLOW-NEXT: vpor %ymm1, %ymm2, %ymm1
6089 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0>
6090 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6091 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,24,25,26,27,24,25,30,31]
6092 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6093 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255>
6094 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6095 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,25,24,23,u,u,u,u,u,u,u,u,u]
6096 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
6097 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255>
6098 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6099 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
6100 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6101 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
6102 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6103 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6104 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm0
6105 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6106 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[27],zero,ymm0[27,28,29,30],zero,ymm0[28],zero,ymm0[26,27,30,31],zero,ymm0[29]
6107 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
6108 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm1
6109 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6110 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm1[27],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero
6111 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6112 ; AVX2-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
6113 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %ymm1
6114 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6115 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
6116 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6117 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0>
6118 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6119 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm1
6120 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6121 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm1[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
6122 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,3,3,4,6,7,7]
6123 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm2
6124 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6125 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[11,u,u,u,u,14,u,12,u,u,u,u,15,u,13,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u]
6126 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [255,0,0,0,0,255,0,255,0,0,0,0,255,0,255,0,255,0,0,0,0,255,0,255,0,0,0,0,255,0,255,0]
6127 ; AVX2-SLOW-NEXT: # ymm3 = mem[0,1,0,1]
6128 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6129 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm2, %ymm1, %ymm1
6130 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm2
6131 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6132 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
6133 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,2,3,3,6,6,7,7]
6134 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm3
6135 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6136 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,14,u,12,u,u,u,u,15,u,13,u,u,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u,u,u]
6137 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [0,0,255,0,255,0,0,0,0,255,0,255,0,0,0,0,0,0,255,0,255,0,0,0,0,255,0,255,0,0,0,0]
6138 ; AVX2-SLOW-NEXT: # ymm4 = mem[0,1,0,1]
6139 ; AVX2-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6140 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm2, %ymm3, %ymm2
6141 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6142 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
6143 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u>
6144 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
6145 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
6146 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
6147 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6148 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %xmm13
6149 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm2
6150 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm13[0],xmm2[1],xmm13[1],xmm2[2],xmm13[2],xmm2[3],xmm13[3],xmm2[4],xmm13[4],xmm2[5],xmm13[5],xmm2[6],xmm13[6],xmm2[7],xmm13[7]
6151 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, %xmm14
6152 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6153 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm1 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
6154 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm0, %xmm0
6155 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
6156 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %xmm11
6157 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %xmm9
6158 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm9[0],xmm11[0],xmm9[1],xmm11[1],xmm9[2],xmm11[2],xmm9[3],xmm11[3],xmm9[4],xmm11[4],xmm9[5],xmm11[5],xmm9[6],xmm11[6],xmm9[7],xmm11[7]
6159 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm3 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
6160 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm2, %xmm2
6161 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6162 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm5 = <255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0>
6163 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm0, %ymm2, %ymm10
6164 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm0
6165 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6166 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm6
6167 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3],xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
6168 ; AVX2-SLOW-NEXT: vmovdqa %xmm6, (%rsp) # 16-byte Spill
6169 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm0, %xmm0
6170 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm1
6171 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6172 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm7
6173 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm7[0],xmm1[0],xmm7[1],xmm1[1],xmm7[2],xmm1[2],xmm7[3],xmm1[3],xmm7[4],xmm1[4],xmm7[5],xmm1[5],xmm7[6],xmm1[6],xmm7[7],xmm1[7]
6174 ; AVX2-SLOW-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6175 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm1, %xmm1
6176 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
6177 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6178 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm0, %ymm1, %ymm4
6179 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %xmm0
6180 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6181 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %xmm2
6182 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3],xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
6183 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, %xmm8
6184 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm0 = <u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u>
6185 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm1, %xmm1
6186 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6187 ; AVX2-SLOW-NEXT: vmovdqa 32(%rax), %xmm2
6188 ; AVX2-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6189 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm2[1,1,0,0,4,5,6,7]
6190 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,2,0]
6191 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,0]
6192 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u>
6193 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm1, %ymm3, %ymm1
6194 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm3
6195 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6196 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm5
6197 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6198 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3],xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
6199 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm12, %xmm0
6200 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm0[0,1,0,1]
6201 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %xmm0
6202 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6203 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm0[1,1,0,0,4,5,6,7]
6204 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[0,1,2,0]
6205 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,1,0]
6206 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm12, %ymm15, %ymm2
6207 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm12 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
6208 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm10, %ymm1, %ymm0
6209 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6210 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm4, %ymm2, %ymm0
6211 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6212 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm1 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
6213 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm13, %xmm2
6214 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
6215 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm14, %xmm12
6216 ; AVX2-SLOW-NEXT: vpor %xmm2, %xmm12, %xmm2
6217 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm15 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
6218 ; AVX2-SLOW-NEXT: vpshufb %xmm15, %xmm11, %xmm12
6219 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm0 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
6220 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm9, %xmm14
6221 ; AVX2-SLOW-NEXT: vpor %xmm12, %xmm14, %xmm12
6222 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6223 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,1,0,1]
6224 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255>
6225 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm2, %ymm12, %ymm2
6226 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6227 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6228 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm2, %xmm1
6229 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm6, %xmm2
6230 ; AVX2-SLOW-NEXT: vpor %xmm1, %xmm2, %xmm1
6231 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
6232 ; AVX2-SLOW-NEXT: vpshufb %xmm15, %xmm6, %xmm2
6233 ; AVX2-SLOW-NEXT: vpshufb %xmm0, %xmm7, %xmm0
6234 ; AVX2-SLOW-NEXT: vpor %xmm2, %xmm0, %xmm0
6235 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6236 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
6237 ; AVX2-SLOW-NEXT: vpblendvb %ymm14, %ymm1, %ymm0, %ymm12
6238 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm1 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
6239 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
6240 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm10, %xmm2
6241 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
6242 ; AVX2-SLOW-NEXT: vmovdqa %xmm8, %xmm3
6243 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm8, %xmm14
6244 ; AVX2-SLOW-NEXT: vpor %xmm2, %xmm14, %xmm2
6245 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6246 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm14 = [4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
6247 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
6248 ; AVX2-SLOW-NEXT: vpshufb %xmm14, %xmm5, %xmm15
6249 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,1,0]
6250 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u>
6251 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm2, %ymm15, %ymm2
6252 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
6253 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm8, %xmm1
6254 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
6255 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm15, %xmm4
6256 ; AVX2-SLOW-NEXT: vpor %xmm1, %xmm4, %xmm1
6257 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6258 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
6259 ; AVX2-SLOW-NEXT: vpshufb %xmm14, %xmm7, %xmm4
6260 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,1,0]
6261 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm4, %ymm0
6262 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm1 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
6263 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
6264 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6265 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm12, %ymm0, %ymm0
6266 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6267 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
6268 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm1 # 16-byte Folded Reload
6269 ; AVX2-SLOW-NEXT: # xmm1 = xmm13[8],mem[8],xmm13[9],mem[9],xmm13[10],mem[10],xmm13[11],mem[11],xmm13[12],mem[12],xmm13[13],mem[13],xmm13[14],mem[14],xmm13[15],mem[15]
6270 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm2 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
6271 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm0, %xmm0
6272 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
6273 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm9 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
6274 ; AVX2-SLOW-NEXT: vpshufb %xmm9, %xmm1, %xmm1
6275 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6276 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = <0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u>
6277 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm0
6278 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm1 # 16-byte Folded Reload
6279 ; AVX2-SLOW-NEXT: # xmm1 = xmm6[8],mem[8],xmm6[9],mem[9],xmm6[10],mem[10],xmm6[11],mem[11],xmm6[12],mem[12],xmm6[13],mem[13],xmm6[14],mem[14],xmm6[15],mem[15]
6280 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm1, %xmm1
6281 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6282 ; AVX2-SLOW-NEXT: vpunpckhbw (%rsp), %xmm2, %xmm2 # 16-byte Folded Reload
6283 ; AVX2-SLOW-NEXT: # xmm2 = xmm2[8],mem[8],xmm2[9],mem[9],xmm2[10],mem[10],xmm2[11],mem[11],xmm2[12],mem[12],xmm2[13],mem[13],xmm2[14],mem[14],xmm2[15],mem[15]
6284 ; AVX2-SLOW-NEXT: vpshufb %xmm9, %xmm2, %xmm2
6285 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6286 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6287 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm1, %ymm2, %ymm1
6288 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm10[8],xmm3[9],xmm10[9],xmm3[10],xmm10[10],xmm3[11],xmm10[11],xmm3[12],xmm10[12],xmm3[13],xmm10[13],xmm3[14],xmm10[14],xmm3[15],xmm10[15]
6289 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm3 = <u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10>
6290 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm2, %xmm2
6291 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6292 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm5[0,1,2,3,4,5,5,6]
6293 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
6294 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
6295 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255>
6296 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm2, %ymm4, %ymm2
6297 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm15[8],xmm8[8],xmm15[9],xmm8[9],xmm15[10],xmm8[10],xmm15[11],xmm8[11],xmm15[12],xmm8[12],xmm15[13],xmm8[13],xmm15[14],xmm8[14],xmm15[15],xmm8[15]
6298 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm3
6299 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,0,1]
6300 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm7[0,1,2,3,4,5,5,6]
6301 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
6302 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
6303 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
6304 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
6305 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm2, %ymm0
6306 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6307 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm1, %ymm3, %ymm0
6308 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6309 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22]
6310 ; AVX2-SLOW-NEXT: # ymm3 = mem[0,1,0,1]
6311 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6312 ; AVX2-SLOW-NEXT: vpshufb %ymm3, %ymm0, %ymm2
6313 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
6314 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128]
6315 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6316 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm0, %ymm5
6317 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,3,2,3]
6318 ; AVX2-SLOW-NEXT: vpor %ymm2, %ymm5, %ymm2
6319 ; AVX2-SLOW-NEXT: vpshuflw $150, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
6320 ; AVX2-SLOW-NEXT: # ymm5 = mem[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
6321 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,1,1,3,4,5,5,7]
6322 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,3,2]
6323 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255>
6324 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm2, %ymm5, %ymm0
6325 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6326 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
6327 ; AVX2-SLOW-NEXT: vpshufb %ymm3, %ymm14, %ymm3
6328 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
6329 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
6330 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm13, %ymm4
6331 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
6332 ; AVX2-SLOW-NEXT: vpor %ymm3, %ymm4, %ymm3
6333 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
6334 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm4 = ymm8[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
6335 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,1,1,3,4,5,5,7]
6336 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,3,2]
6337 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm3, %ymm4, %ymm0
6338 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6339 ; AVX2-SLOW-NEXT: vpshuflw $233, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
6340 ; AVX2-SLOW-NEXT: # ymm4 = mem[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
6341 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,0,1,1,4,4,5,5]
6342 ; AVX2-SLOW-NEXT: vpbroadcastd {{.*#+}} ymm5 = [5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6]
6343 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
6344 ; AVX2-SLOW-NEXT: vpshufb %ymm5, %ymm11, %ymm6
6345 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [0,0,0,255,0,255,0,0,0,0,255,0,255,0,0,0,0,0,0,255,0,255,0,0,0,0,255,0,255,0,0,0]
6346 ; AVX2-SLOW-NEXT: # ymm7 = mem[0,1,0,1]
6347 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm4, %ymm6, %ymm4
6348 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6349 ; AVX2-SLOW-NEXT: vpshufb %ymm5, %ymm1, %ymm5
6350 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6351 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm6 = ymm0[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
6352 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[0,0,1,1,4,4,5,5]
6353 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm6, %ymm5, %ymm5
6354 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20]
6355 ; AVX2-SLOW-NEXT: # ymm6 = mem[0,1,0,1]
6356 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
6357 ; AVX2-SLOW-NEXT: vpshufb %ymm6, %ymm10, %ymm7
6358 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
6359 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128]
6360 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
6361 ; AVX2-SLOW-NEXT: vpshufb %ymm3, %ymm2, %ymm9
6362 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
6363 ; AVX2-SLOW-NEXT: vpor %ymm7, %ymm9, %ymm7
6364 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
6365 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u>
6366 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm7, %ymm4, %ymm4
6367 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
6368 ; AVX2-SLOW-NEXT: vpshufb %ymm6, %ymm15, %ymm6
6369 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,3,2,3]
6370 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
6371 ; AVX2-SLOW-NEXT: vpshufb %ymm3, %ymm12, %ymm7
6372 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
6373 ; AVX2-SLOW-NEXT: vpor %ymm6, %ymm7, %ymm6
6374 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,3,2,3]
6375 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm6, %ymm5, %ymm5
6376 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0]
6377 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6378 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm4, %ymm3, %ymm3
6379 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6380 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6381 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm5, %ymm3, %ymm3
6382 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6383 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm0[23],zero,ymm0[27,20,21,26],zero,ymm0[24],zero,ymm0[26,27,26,27],zero,ymm0[25]
6384 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
6385 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, %ymm0
6386 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,ymm1[23],zero,zero,zero,zero,ymm1[26],zero,ymm1[24],zero,zero,zero,zero,ymm1[27],zero
6387 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,3,2,3]
6388 ; AVX2-SLOW-NEXT: vpor %ymm4, %ymm5, %ymm4
6389 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm15[25],zero,ymm15[23],zero,zero,zero,zero,ymm15[26],zero,ymm15[24],zero,zero,zero,zero
6390 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,3,2,3]
6391 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[25],zero,ymm12[23],zero,zero,zero,zero,ymm12[26],zero,ymm12[24],zero,zero,zero,zero,ymm12[27]
6392 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,3,2,3]
6393 ; AVX2-SLOW-NEXT: vpor %ymm5, %ymm6, %ymm5
6394 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = <u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0>
6395 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm4, %ymm5, %ymm4
6396 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm5 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm14[25],zero,ymm14[23],zero,zero,zero,zero,ymm14[26],zero,ymm14[24],zero,zero
6397 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,3,2,3]
6398 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm13[25],zero,ymm13[23],zero,zero,zero,zero,ymm13[26],zero,ymm13[24],zero,zero,zero
6399 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,3,2,3]
6400 ; AVX2-SLOW-NEXT: vpor %ymm5, %ymm6, %ymm5
6401 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
6402 ; AVX2-SLOW-NEXT: vmovdqa %ymm8, %ymm14
6403 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,3,2,3]
6404 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm7 = <0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u>
6405 ; AVX2-SLOW-NEXT: vpblendvb %ymm7, %ymm5, %ymm6, %ymm5
6406 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = [0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
6407 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm1
6408 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6409 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
6410 ; AVX2-SLOW-NEXT: vpshufb %ymm6, %ymm11, %ymm5
6411 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm7 = [0,128,14,128,128,128,128,1,128,15,128,128,128,128,2,128,16,128,30,128,128,128,128,17,128,31,128,128,128,128,18,128]
6412 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6413 ; AVX2-SLOW-NEXT: vpshufb %ymm7, %ymm4, %ymm8
6414 ; AVX2-SLOW-NEXT: vpor %ymm5, %ymm8, %ymm5
6415 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
6416 ; AVX2-SLOW-NEXT: vpshufb %ymm8, %ymm10, %ymm9
6417 ; AVX2-SLOW-NEXT: vmovdqa %ymm10, %ymm3
6418 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = [128,128,0,128,14,128,128,128,128,1,128,15,128,128,128,128,128,128,16,128,30,128,128,128,128,17,128,31,128,128,128,128]
6419 ; AVX2-SLOW-NEXT: vpshufb %ymm10, %ymm2, %ymm11
6420 ; AVX2-SLOW-NEXT: vpor %ymm9, %ymm11, %ymm9
6421 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255>
6422 ; AVX2-SLOW-NEXT: vpblendvb %ymm11, %ymm5, %ymm9, %ymm5
6423 ; AVX2-SLOW-NEXT: vpshufb %ymm6, %ymm0, %ymm6
6424 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6425 ; AVX2-SLOW-NEXT: vpshufb %ymm7, %ymm0, %ymm7
6426 ; AVX2-SLOW-NEXT: vpor %ymm6, %ymm7, %ymm6
6427 ; AVX2-SLOW-NEXT: vpshufb %ymm8, %ymm15, %ymm7
6428 ; AVX2-SLOW-NEXT: vpshufb %ymm10, %ymm12, %ymm8
6429 ; AVX2-SLOW-NEXT: vpor %ymm7, %ymm8, %ymm7
6430 ; AVX2-SLOW-NEXT: vpblendvb %ymm11, %ymm6, %ymm7, %ymm6
6431 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm7 = [128,1,2,3,0,128,14,128,0,1,0,1,128,15,128,15,128,17,18,19,16,128,30,128,16,17,16,17,128,31,128,31]
6432 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6433 ; AVX2-SLOW-NEXT: vpshufb %ymm7, %ymm0, %ymm8
6434 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128]
6435 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
6436 ; AVX2-SLOW-NEXT: vpshufb %ymm9, %ymm15, %ymm10
6437 ; AVX2-SLOW-NEXT: vpor %ymm8, %ymm10, %ymm8
6438 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = [12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
6439 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6440 ; AVX2-SLOW-NEXT: vpshufb %ymm10, %ymm1, %ymm11
6441 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm12 = <255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u>
6442 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm8, %ymm11, %ymm8
6443 ; AVX2-SLOW-NEXT: vpshufb %ymm7, %ymm13, %ymm7
6444 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
6445 ; AVX2-SLOW-NEXT: vpshufb %ymm9, %ymm11, %ymm9
6446 ; AVX2-SLOW-NEXT: vpor %ymm7, %ymm9, %ymm7
6447 ; AVX2-SLOW-NEXT: vpshufb %ymm10, %ymm14, %ymm9
6448 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm7, %ymm9, %ymm7
6449 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
6450 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm5, %ymm8, %ymm5
6451 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm6, %ymm7, %ymm6
6452 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm3[11,u,u,u,u,14,u,12,u,u,u,u,15,u,13,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u]
6453 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm8 = ymm2[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
6454 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[0,2,3,3,4,6,7,7]
6455 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
6456 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm7, %ymm8, %ymm7
6457 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
6458 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm2[u,u,u,14,u,12,u,u,u,u,15,u,13,u,u,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u,u,u]
6459 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm9 = ymm4[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
6460 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[2,2,3,3,6,6,7,7]
6461 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
6462 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm9, %ymm8, %ymm8
6463 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
6464 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
6465 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = <255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u>
6466 ; AVX2-SLOW-NEXT: vpblendvb %ymm2, %ymm7, %ymm8, %ymm7
6467 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,26,27,30,31,30,31,28,29,28,29,28,29,28,29]
6468 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,2]
6469 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u>
6470 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm7, %ymm8, %ymm7
6471 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29,28,27,u,u,u,31,30,u,u,u,u,u,u,u,u]
6472 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,2]
6473 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u>
6474 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm7, %ymm8, %ymm7
6475 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
6476 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
6477 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
6478 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm7, %ymm8, %ymm7
6479 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6480 ; AVX2-SLOW-NEXT: vmovdqa %ymm6, 96(%rax)
6481 ; AVX2-SLOW-NEXT: vmovdqa %ymm5, 320(%rax)
6482 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6483 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 160(%rax)
6484 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6485 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 128(%rax)
6486 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6487 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 64(%rax)
6488 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6489 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
6490 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6491 ; AVX2-SLOW-NEXT: vmovaps %ymm0, (%rax)
6492 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6493 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 224(%rax)
6494 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6495 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 352(%rax)
6496 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6497 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 288(%rax)
6498 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6499 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 256(%rax)
6500 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6501 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 192(%rax)
6502 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6503 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 384(%rax)
6504 ; AVX2-SLOW-NEXT: vmovdqa %ymm7, 416(%rax)
6505 ; AVX2-SLOW-NEXT: addq $824, %rsp # imm = 0x338
6506 ; AVX2-SLOW-NEXT: vzeroupper
6507 ; AVX2-SLOW-NEXT: retq
6508 ;
6509 ; AVX2-FAST-LABEL: store_i8_stride7_vf64:
6510 ; AVX2-FAST: # %bb.0:
6511 ; AVX2-FAST-NEXT: subq $648, %rsp # imm = 0x288
6512 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6513 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %ymm1
6514 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %ymm7
6515 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %ymm6
6516 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %ymm2
6517 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %ymm4
6518 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %ymm5
6519 ; AVX2-FAST-NEXT: vmovdqa 32(%rax), %ymm3
6520 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm1[23],zero,ymm1[27,20,21,26],zero,ymm1[24],zero,ymm1[26,27,26,27],zero,ymm1[25]
6521 ; AVX2-FAST-NEXT: vmovdqa %ymm1, %ymm8
6522 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6523 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
6524 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,ymm7[23],zero,zero,zero,zero,ymm7[26],zero,ymm7[24],zero,zero,zero,zero,ymm7[27],zero
6525 ; AVX2-FAST-NEXT: vmovdqa %ymm7, %ymm9
6526 ; AVX2-FAST-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6527 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6528 ; AVX2-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
6529 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[25],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero
6530 ; AVX2-FAST-NEXT: vmovdqa %ymm2, %ymm7
6531 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6532 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6533 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero,zero,zero,ymm6[27]
6534 ; AVX2-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6535 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
6536 ; AVX2-FAST-NEXT: vpor %ymm1, %ymm2, %ymm1
6537 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0>
6538 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6539 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,24,25,26,27,24,25,30,31]
6540 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6541 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6542 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255>
6543 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6544 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6545 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,25,24,23,u,u,u,u,u,u,u,u,u]
6546 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
6547 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255>
6548 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6549 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
6550 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6551 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6552 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
6553 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6554 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6555 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[17,18,19,30],zero,ymm6[28],zero,ymm6[28,29,30,31],zero,ymm6[29],zero,ymm6[31]
6556 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
6557 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm7[30],zero,ymm7[28],zero,zero,zero,zero,ymm7[31],zero,ymm7[29],zero
6558 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6559 ; AVX2-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
6560 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm9[30],zero,ymm9[28],zero,zero,zero,zero,ymm9[31],zero,ymm9[29],zero,zero,zero
6561 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6562 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[30],zero,ymm8[28],zero,zero,zero,zero,ymm8[31],zero,ymm8[29],zero,zero,zero,zero
6563 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
6564 ; AVX2-FAST-NEXT: vpor %ymm1, %ymm2, %ymm1
6565 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u>
6566 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6567 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,26,27,30,31,30,31,28,29,28,29,28,29,28,29]
6568 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,2]
6569 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u>
6570 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6571 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29,28,27,u,u,u,31,30,u,u,u,u,u,u,u,u]
6572 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
6573 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u>
6574 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6575 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
6576 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6577 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
6578 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6579 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6580 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %xmm1
6581 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm0
6582 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6583 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
6584 ; AVX2-FAST-NEXT: vmovdqa %xmm1, %xmm14
6585 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6586 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
6587 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
6588 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
6589 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %xmm10
6590 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %xmm7
6591 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm7[0],xmm10[0],xmm7[1],xmm10[1],xmm7[2],xmm10[2],xmm7[3],xmm10[3],xmm7[4],xmm10[4],xmm7[5],xmm10[5],xmm7[6],xmm10[6],xmm7[7],xmm10[7]
6592 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
6593 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm2, %xmm2
6594 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6595 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0>
6596 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm0, %ymm2, %ymm0
6597 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6598 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm11
6599 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm12
6600 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
6601 ; AVX2-FAST-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6602 ; AVX2-FAST-NEXT: vmovdqa %xmm11, (%rsp) # 16-byte Spill
6603 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
6604 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm2
6605 ; AVX2-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6606 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm1
6607 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6608 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
6609 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm1, %xmm1
6610 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
6611 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6612 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm6
6613 ; AVX2-FAST-NEXT: vmovdqa 32(%rax), %xmm0
6614 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6615 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[1,1,0,0,4,5,6,7]
6616 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,0,1,2,0,0,1]
6617 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm4
6618 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %xmm8
6619 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %xmm13
6620 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm13[0],xmm8[0],xmm13[1],xmm8[1],xmm13[2],xmm8[2],xmm13[3],xmm8[3],xmm13[4],xmm8[4],xmm13[5],xmm8[5],xmm13[6],xmm8[6],xmm13[7],xmm8[7]
6621 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = <u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u>
6622 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm5, %xmm5
6623 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
6624 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u>
6625 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm5, %ymm4, %ymm0
6626 ; AVX2-FAST-NEXT: vmovdqa (%rax), %xmm4
6627 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6628 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[1,1,0,0,4,5,6,7]
6629 ; AVX2-FAST-NEXT: vpermd %ymm4, %ymm3, %ymm3
6630 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm9
6631 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm5
6632 ; AVX2-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6633 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm5[0],xmm9[0],xmm5[1],xmm9[1],xmm5[2],xmm9[2],xmm5[3],xmm9[3],xmm5[4],xmm9[4],xmm5[5],xmm9[5],xmm5[6],xmm9[6],xmm5[7],xmm9[7]
6634 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm15, %xmm2
6635 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6636 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm2, %ymm3, %ymm1
6637 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
6638 ; AVX2-FAST-NEXT: vpblendvb %ymm2, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6639 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6640 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm6, %ymm1, %ymm0
6641 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6642 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
6643 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm14, %xmm0
6644 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
6645 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
6646 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm3, %xmm3
6647 ; AVX2-FAST-NEXT: vpor %xmm0, %xmm3, %xmm0
6648 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
6649 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm10, %xmm6
6650 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm15 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
6651 ; AVX2-FAST-NEXT: vpshufb %xmm15, %xmm7, %xmm14
6652 ; AVX2-FAST-NEXT: vpor %xmm6, %xmm14, %xmm6
6653 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
6654 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
6655 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255>
6656 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm0, %ymm6, %ymm0
6657 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6658 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm11, %xmm1
6659 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm12, %xmm2
6660 ; AVX2-FAST-NEXT: vpor %xmm1, %xmm2, %xmm1
6661 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
6662 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm5, %xmm2
6663 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
6664 ; AVX2-FAST-NEXT: vpshufb %xmm15, %xmm4, %xmm3
6665 ; AVX2-FAST-NEXT: vpor %xmm2, %xmm3, %xmm2
6666 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6667 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6668 ; AVX2-FAST-NEXT: vpblendvb %ymm14, %ymm1, %ymm2, %ymm1
6669 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
6670 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm8, %xmm3
6671 ; AVX2-FAST-NEXT: vmovdqa %xmm8, %xmm11
6672 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm6 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
6673 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm13, %xmm14
6674 ; AVX2-FAST-NEXT: vpor %xmm3, %xmm14, %xmm3
6675 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,0,1]
6676 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm14 = [4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
6677 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
6678 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm8, %xmm15
6679 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,1,0]
6680 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u>
6681 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm3, %ymm15, %ymm3
6682 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm9, %xmm2
6683 ; AVX2-FAST-NEXT: vmovdqa %xmm9, %xmm12
6684 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6685 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm9, %xmm6
6686 ; AVX2-FAST-NEXT: vpor %xmm2, %xmm6, %xmm2
6687 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6688 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
6689 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm15, %xmm6
6690 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,1,0]
6691 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm2, %ymm6, %ymm0
6692 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
6693 ; AVX2-FAST-NEXT: vpblendvb %ymm2, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
6694 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6695 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
6696 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6697 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm10[8],xmm7[8],xmm10[9],xmm7[9],xmm10[10],xmm7[10],xmm10[11],xmm7[11],xmm10[12],xmm7[12],xmm10[13],xmm7[13],xmm10[14],xmm7[14],xmm10[15],xmm7[15]
6698 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6699 ; AVX2-FAST-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
6700 ; AVX2-FAST-NEXT: # xmm1 = xmm1[8],mem[8],xmm1[9],mem[9],xmm1[10],mem[10],xmm1[11],mem[11],xmm1[12],mem[12],xmm1[13],mem[13],xmm1[14],mem[14],xmm1[15],mem[15]
6701 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
6702 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm0, %xmm0
6703 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
6704 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
6705 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm1, %xmm1
6706 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6707 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = <0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u>
6708 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm0, %ymm1, %ymm0
6709 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
6710 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm1
6711 ; AVX2-FAST-NEXT: vmovdqa (%rsp), %xmm2 # 16-byte Reload
6712 ; AVX2-FAST-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
6713 ; AVX2-FAST-NEXT: # xmm2 = xmm2[8],mem[8],xmm2[9],mem[9],xmm2[10],mem[10],xmm2[11],mem[11],xmm2[12],mem[12],xmm2[13],mem[13],xmm2[14],mem[14],xmm2[15],mem[15]
6714 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm2, %xmm2
6715 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
6716 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6717 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm1, %ymm2, %ymm1
6718 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm13[8],xmm11[8],xmm13[9],xmm11[9],xmm13[10],xmm11[10],xmm13[11],xmm11[11],xmm13[12],xmm11[12],xmm13[13],xmm11[13],xmm13[14],xmm11[14],xmm13[15],xmm11[15]
6719 ; AVX2-FAST-NEXT: vpshufhw {{.*#+}} xmm3 = xmm8[0,1,2,3,4,5,5,6]
6720 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [2,2,3,3,2,2,3,3]
6721 ; AVX2-FAST-NEXT: # ymm6 = mem[0,1,0,1]
6722 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm6, %ymm3
6723 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm7 = <u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10>
6724 ; AVX2-FAST-NEXT: vpshufb %xmm7, %xmm2, %xmm2
6725 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
6726 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255>
6727 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm2, %ymm3, %ymm2
6728 ; AVX2-FAST-NEXT: vpshufhw {{.*#+}} xmm3 = xmm15[0,1,2,3,4,5,5,6]
6729 ; AVX2-FAST-NEXT: vpermd %ymm3, %ymm6, %ymm3
6730 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm9[8],xmm12[8],xmm9[9],xmm12[9],xmm9[10],xmm12[10],xmm9[11],xmm12[11],xmm9[12],xmm12[12],xmm9[13],xmm12[13],xmm9[14],xmm12[14],xmm9[15],xmm12[15]
6731 ; AVX2-FAST-NEXT: vpshufb %xmm7, %xmm4, %xmm4
6732 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
6733 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm4, %ymm3, %ymm3
6734 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
6735 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm0, %ymm2, %ymm0
6736 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6737 ; AVX2-FAST-NEXT: vpblendvb %ymm4, %ymm1, %ymm3, %ymm0
6738 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6739 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm1
6740 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm1[17,18,19,30],zero,ymm1[28],zero,ymm1[28,29,30,31],zero,ymm1[29],zero,ymm1[31]
6741 ; AVX2-FAST-NEXT: vmovdqa %ymm1, %ymm4
6742 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
6743 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm2
6744 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm2[30],zero,ymm2[28],zero,zero,zero,zero,ymm2[31],zero,ymm2[29],zero
6745 ; AVX2-FAST-NEXT: vmovdqa %ymm2, %ymm10
6746 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6747 ; AVX2-FAST-NEXT: vpor %ymm0, %ymm1, %ymm2
6748 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm5
6749 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm5[30],zero,ymm5[28],zero,zero,zero,zero,ymm5[31],zero,ymm5[29],zero,zero,zero
6750 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm0[2,3,2,3]
6751 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm0
6752 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm0[30],zero,ymm0[28],zero,zero,zero,zero,ymm0[31],zero,ymm0[29],zero,zero,zero,zero
6753 ; AVX2-FAST-NEXT: vmovdqa %ymm0, %ymm12
6754 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,3,2,3]
6755 ; AVX2-FAST-NEXT: vpor %ymm3, %ymm6, %ymm3
6756 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u>
6757 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm6
6758 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm0
6759 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[27],zero,ymm0[27,28,29,30],zero,ymm0[28],zero,ymm0[26,27,30,31],zero,ymm0[29]
6760 ; AVX2-FAST-NEXT: vmovdqa %ymm0, %ymm1
6761 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6762 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
6763 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm0
6764 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm0[27],zero,zero,zero,zero,ymm0[30],zero,ymm0[28],zero,zero,zero,zero,ymm0[31],zero
6765 ; AVX2-FAST-NEXT: vmovdqa %ymm0, %ymm3
6766 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6767 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
6768 ; AVX2-FAST-NEXT: vpor %ymm2, %ymm7, %ymm7
6769 ; AVX2-FAST-NEXT: vmovdqa (%rax), %ymm0
6770 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
6771 ; AVX2-FAST-NEXT: vmovdqa %ymm0, %ymm11
6772 ; AVX2-FAST-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
6773 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
6774 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0>
6775 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm7, %ymm8, %ymm7
6776 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
6777 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm6, %ymm7, %ymm0
6778 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6779 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[23],zero,ymm12[27,20,21,26],zero,ymm12[24],zero,ymm12[26,27,26,27],zero,ymm12[25]
6780 ; AVX2-FAST-NEXT: vmovdqa %ymm12, %ymm13
6781 ; AVX2-FAST-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6782 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,3,2,3]
6783 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,ymm5[23],zero,zero,zero,zero,ymm5[26],zero,ymm5[24],zero,zero,zero,zero,ymm5[27],zero
6784 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
6785 ; AVX2-FAST-NEXT: vpor %ymm6, %ymm7, %ymm6
6786 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm10[25],zero,ymm10[23],zero,zero,zero,zero,ymm10[26],zero,ymm10[24],zero,zero,zero,zero
6787 ; AVX2-FAST-NEXT: vmovdqa %ymm10, %ymm14
6788 ; AVX2-FAST-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6789 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
6790 ; AVX2-FAST-NEXT: vmovdqa %ymm4, %ymm2
6791 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6792 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm4[25],zero,ymm4[23],zero,zero,zero,zero,ymm4[26],zero,ymm4[24],zero,zero,zero,zero,ymm4[27]
6793 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
6794 ; AVX2-FAST-NEXT: vpor %ymm7, %ymm8, %ymm7
6795 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0>
6796 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm6, %ymm7, %ymm6
6797 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm3[25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero
6798 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
6799 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[25],zero,ymm1[23],zero,zero,zero,zero,ymm1[26],zero,ymm1[24],zero,zero,zero
6800 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
6801 ; AVX2-FAST-NEXT: vpor %ymm7, %ymm8, %ymm7
6802 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
6803 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
6804 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = <0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u>
6805 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm7, %ymm8, %ymm7
6806 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
6807 ; AVX2-FAST-NEXT: vpblendvb %ymm8, %ymm6, %ymm7, %ymm0
6808 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6809 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20]
6810 ; AVX2-FAST-NEXT: # ymm8 = mem[0,1,0,1]
6811 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6812 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm7
6813 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
6814 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128]
6815 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
6816 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm6, %ymm10
6817 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
6818 ; AVX2-FAST-NEXT: vpor %ymm7, %ymm10, %ymm7
6819 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,128,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,128]
6820 ; AVX2-FAST-NEXT: # ymm10 = mem[0,1,0,1]
6821 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6822 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm4, %ymm11
6823 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
6824 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,128,128]
6825 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6826 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm1, %ymm15
6827 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,3,2,3]
6828 ; AVX2-FAST-NEXT: vpor %ymm11, %ymm15, %ymm11
6829 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = <255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u>
6830 ; AVX2-FAST-NEXT: vpblendvb %ymm15, %ymm7, %ymm11, %ymm0
6831 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6832 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm14, %ymm8
6833 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
6834 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm9
6835 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
6836 ; AVX2-FAST-NEXT: vpor %ymm8, %ymm9, %ymm8
6837 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm5, %ymm9
6838 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
6839 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm13, %ymm10
6840 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
6841 ; AVX2-FAST-NEXT: vpor %ymm9, %ymm10, %ymm9
6842 ; AVX2-FAST-NEXT: vpblendvb %ymm15, %ymm8, %ymm9, %ymm8
6843 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22]
6844 ; AVX2-FAST-NEXT: # ymm9 = mem[0,1,0,1]
6845 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
6846 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm14, %ymm10
6847 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
6848 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128]
6849 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6850 ; AVX2-FAST-NEXT: vpshufb %ymm11, %ymm3, %ymm12
6851 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
6852 ; AVX2-FAST-NEXT: vpor %ymm10, %ymm12, %ymm10
6853 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
6854 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm12 = ymm13[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
6855 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [4,5,4,5,5,7,4,5]
6856 ; AVX2-FAST-NEXT: vpermd %ymm12, %ymm15, %ymm12
6857 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255>
6858 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm10, %ymm12, %ymm10
6859 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
6860 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm9
6861 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
6862 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
6863 ; AVX2-FAST-NEXT: vpshufb %ymm11, %ymm2, %ymm11
6864 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
6865 ; AVX2-FAST-NEXT: vpor %ymm9, %ymm11, %ymm9
6866 ; AVX2-FAST-NEXT: vmovdqu (%rsp), %ymm7 # 32-byte Reload
6867 ; AVX2-FAST-NEXT: vpshuflw {{.*#+}} ymm11 = ymm7[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
6868 ; AVX2-FAST-NEXT: vpermd %ymm11, %ymm15, %ymm11
6869 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm9, %ymm11, %ymm0
6870 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0]
6871 ; AVX2-FAST-NEXT: vpblendvb %ymm9, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm10 # 32-byte Folded Reload
6872 ; AVX2-FAST-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6873 ; AVX2-FAST-NEXT: vpblendvb %ymm9, %ymm8, %ymm0, %ymm8
6874 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
6875 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm4, %ymm9
6876 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [0,128,14,128,128,128,128,1,128,15,128,128,128,128,2,128,16,128,30,128,128,128,128,17,128,31,128,128,128,128,18,128]
6877 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm1, %ymm11
6878 ; AVX2-FAST-NEXT: vpor %ymm9, %ymm11, %ymm9
6879 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
6880 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6881 ; AVX2-FAST-NEXT: vpshufb %ymm11, %ymm1, %ymm12
6882 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [128,128,0,128,14,128,128,128,128,1,128,15,128,128,128,128,128,128,16,128,30,128,128,128,128,17,128,31,128,128,128,128]
6883 ; AVX2-FAST-NEXT: vpshufb %ymm15, %ymm6, %ymm6
6884 ; AVX2-FAST-NEXT: vpor %ymm6, %ymm12, %ymm6
6885 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255>
6886 ; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm9, %ymm6, %ymm9
6887 ; AVX2-FAST-NEXT: vpshufb %ymm0, %ymm5, %ymm0
6888 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6889 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm1, %ymm1
6890 ; AVX2-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
6891 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6892 ; AVX2-FAST-NEXT: vpshufb %ymm11, %ymm1, %ymm1
6893 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6894 ; AVX2-FAST-NEXT: vpshufb %ymm15, %ymm4, %ymm4
6895 ; AVX2-FAST-NEXT: vpor %ymm1, %ymm4, %ymm1
6896 ; AVX2-FAST-NEXT: vpblendvb %ymm12, %ymm0, %ymm1, %ymm0
6897 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [128,1,2,3,0,128,14,128,0,1,0,1,128,15,128,15,128,17,18,19,16,128,30,128,16,17,16,17,128,31,128,31]
6898 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm3, %ymm4
6899 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128]
6900 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm14, %ymm6
6901 ; AVX2-FAST-NEXT: vpor %ymm4, %ymm6, %ymm4
6902 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
6903 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm13, %ymm10
6904 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = <255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u>
6905 ; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm4, %ymm10, %ymm4
6906 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm2, %ymm1
6907 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
6908 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm3
6909 ; AVX2-FAST-NEXT: vpor %ymm1, %ymm3, %ymm1
6910 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm7, %ymm2
6911 ; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm1, %ymm2, %ymm1
6912 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
6913 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm9, %ymm4, %ymm3
6914 ; AVX2-FAST-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6915 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6916 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 96(%rax)
6917 ; AVX2-FAST-NEXT: vmovdqa %ymm3, 320(%rax)
6918 ; AVX2-FAST-NEXT: vmovdqa %ymm8, 128(%rax)
6919 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6920 ; AVX2-FAST-NEXT: vmovaps %ymm0, 352(%rax)
6921 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6922 ; AVX2-FAST-NEXT: vmovaps %ymm0, 160(%rax)
6923 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6924 ; AVX2-FAST-NEXT: vmovaps %ymm0, 192(%rax)
6925 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6926 ; AVX2-FAST-NEXT: vmovaps %ymm0, 64(%rax)
6927 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6928 ; AVX2-FAST-NEXT: vmovaps %ymm0, 32(%rax)
6929 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6930 ; AVX2-FAST-NEXT: vmovaps %ymm0, (%rax)
6931 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6932 ; AVX2-FAST-NEXT: vmovaps %ymm0, 224(%rax)
6933 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6934 ; AVX2-FAST-NEXT: vmovaps %ymm0, 288(%rax)
6935 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6936 ; AVX2-FAST-NEXT: vmovaps %ymm0, 256(%rax)
6937 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6938 ; AVX2-FAST-NEXT: vmovaps %ymm0, 416(%rax)
6939 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6940 ; AVX2-FAST-NEXT: vmovaps %ymm0, 384(%rax)
6941 ; AVX2-FAST-NEXT: addq $648, %rsp # imm = 0x288
6942 ; AVX2-FAST-NEXT: vzeroupper
6943 ; AVX2-FAST-NEXT: retq
6944 ;
6945 ; AVX2-FAST-PERLANE-LABEL: store_i8_stride7_vf64:
6946 ; AVX2-FAST-PERLANE: # %bb.0:
6947 ; AVX2-FAST-PERLANE-NEXT: subq $648, %rsp # imm = 0x288
6948 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
6949 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %ymm1
6950 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %ymm7
6951 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %ymm6
6952 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %ymm2
6953 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %ymm4
6954 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %ymm5
6955 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rax), %ymm3
6956 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm1[23],zero,ymm1[27,20,21,26],zero,ymm1[24],zero,ymm1[26,27,26,27],zero,ymm1[25]
6957 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, %ymm8
6958 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6959 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
6960 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,ymm7[23],zero,zero,zero,zero,ymm7[26],zero,ymm7[24],zero,zero,zero,zero,ymm7[27],zero
6961 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm7, %ymm9
6962 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6963 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6964 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm0, %ymm1, %ymm0
6965 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[25],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero
6966 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, %ymm7
6967 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6968 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6969 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero,zero,zero,ymm6[27]
6970 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6971 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
6972 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm1, %ymm2, %ymm1
6973 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0>
6974 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6975 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,24,25,26,27,24,25,30,31]
6976 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6977 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6978 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255>
6979 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6980 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6981 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,25,24,23,u,u,u,u,u,u,u,u,u]
6982 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
6983 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255>
6984 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6985 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
6986 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6987 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6988 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = [0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255]
6989 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
6990 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6991 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[17,18,19,30],zero,ymm6[28],zero,ymm6[28,29,30,31],zero,ymm6[29],zero,ymm6[31]
6992 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
6993 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm7[30],zero,ymm7[28],zero,zero,zero,zero,ymm7[31],zero,ymm7[29],zero
6994 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6995 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm0, %ymm1, %ymm0
6996 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm9[30],zero,ymm9[28],zero,zero,zero,zero,ymm9[31],zero,ymm9[29],zero,zero,zero
6997 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
6998 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[30],zero,ymm8[28],zero,zero,zero,zero,ymm8[31],zero,ymm8[29],zero,zero,zero,zero
6999 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
7000 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm1, %ymm2, %ymm1
7001 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u>
7002 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
7003 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,26,27,30,31,30,31,28,29,28,29,28,29,28,29]
7004 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,2]
7005 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u,255,255,255,255,0,u,u>
7006 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
7007 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29,28,27,u,u,u,31,30,u,u,u,u,u,u,u,u]
7008 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,2]
7009 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u,255,255,255,255,255,0,u>
7010 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
7011 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
7012 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
7013 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0]
7014 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
7015 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7016 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %xmm1
7017 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm2
7018 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
7019 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, %xmm7
7020 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7021 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, %xmm15
7022 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7023 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm1 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
7024 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm0, %xmm0
7025 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
7026 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %xmm9
7027 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %xmm5
7028 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm5[0],xmm9[0],xmm5[1],xmm9[1],xmm5[2],xmm9[2],xmm5[3],xmm9[3],xmm5[4],xmm9[4],xmm5[5],xmm9[5],xmm5[6],xmm9[6],xmm5[7],xmm9[7]
7029 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm5, %xmm11
7030 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
7031 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm2, %xmm2
7032 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
7033 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = <255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0>
7034 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm0, %ymm2, %ymm0
7035 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7036 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm0
7037 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7038 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm4
7039 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3],xmm4[4],xmm0[4],xmm4[5],xmm0[5],xmm4[6],xmm0[6],xmm4[7],xmm0[7]
7040 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7041 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm0, %xmm0
7042 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm1
7043 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7044 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm6
7045 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm6[0],xmm1[0],xmm6[1],xmm1[1],xmm6[2],xmm1[2],xmm6[3],xmm1[3],xmm6[4],xmm1[4],xmm6[5],xmm1[5],xmm6[6],xmm1[6],xmm6[7],xmm1[7]
7046 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm6, %xmm10
7047 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7048 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm1, %xmm1
7049 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
7050 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
7051 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm0, %ymm1, %ymm3
7052 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rax), %xmm0
7053 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7054 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm6 = [2,3,2,3,0,1,0,1,8,9,10,11,2,3,2,3]
7055 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm0, %xmm1
7056 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,1,0]
7057 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %xmm0
7058 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
7059 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %xmm14
7060 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3],xmm14[4],xmm0[4],xmm14[5],xmm0[5],xmm14[6],xmm0[6],xmm14[7],xmm0[7]
7061 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm0 = <u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u>
7062 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm5, %xmm5
7063 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
7064 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = <u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u>
7065 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm5, %ymm1, %ymm1
7066 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm5
7067 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7068 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm8
7069 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7070 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm8[0],xmm5[0],xmm8[1],xmm5[1],xmm8[2],xmm5[2],xmm8[3],xmm5[3],xmm8[4],xmm5[4],xmm8[5],xmm5[5],xmm8[6],xmm5[6],xmm8[7],xmm5[7]
7071 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm12, %xmm12
7072 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %xmm13
7073 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm13, %xmm6
7074 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,1,0]
7075 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,1,0,1]
7076 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm12, %ymm6, %ymm2
7077 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255]
7078 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm0 # 32-byte Folded Reload
7079 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7080 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm3, %ymm2, %ymm0
7081 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7082 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm1 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
7083 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm15, %xmm2
7084 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
7085 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm7, %xmm6
7086 ; AVX2-FAST-PERLANE-NEXT: vpor %xmm2, %xmm6, %xmm2
7087 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm6 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
7088 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm9, %xmm12
7089 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm0 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
7090 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm11, %xmm7
7091 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm11, %xmm15
7092 ; AVX2-FAST-PERLANE-NEXT: vpor %xmm12, %xmm15, %xmm12
7093 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
7094 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,1,0,1]
7095 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = <u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255>
7096 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm2, %ymm12, %ymm2
7097 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7098 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
7099 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm12, %xmm1
7100 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm2
7101 ; AVX2-FAST-PERLANE-NEXT: vpor %xmm1, %xmm2, %xmm1
7102 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7103 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm5, %xmm2
7104 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm0, %xmm10, %xmm0
7105 ; AVX2-FAST-PERLANE-NEXT: vpor %xmm2, %xmm0, %xmm0
7106 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
7107 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
7108 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm1, %ymm0, %ymm11
7109 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm1 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
7110 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm8 # 16-byte Reload
7111 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm8, %xmm2
7112 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
7113 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm14, %xmm6
7114 ; AVX2-FAST-PERLANE-NEXT: vpor %xmm2, %xmm6, %xmm2
7115 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
7116 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm6 = [4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
7117 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7118 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm4, %xmm15
7119 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,1,0]
7120 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u>
7121 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm2, %ymm15, %ymm2
7122 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
7123 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm10, %xmm1
7124 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
7125 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm15, %xmm3
7126 ; AVX2-FAST-PERLANE-NEXT: vpor %xmm1, %xmm3, %xmm1
7127 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
7128 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm13, %xmm3
7129 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,1,0]
7130 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm1, %ymm3, %ymm0
7131 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm1 = [0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255]
7132 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
7133 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7134 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm11, %ymm0, %ymm0
7135 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7136 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm9[8],xmm7[8],xmm9[9],xmm7[9],xmm9[10],xmm7[10],xmm9[11],xmm7[11],xmm9[12],xmm7[12],xmm9[13],xmm7[13],xmm9[14],xmm7[14],xmm9[15],xmm7[15]
7137 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7138 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
7139 ; AVX2-FAST-PERLANE-NEXT: # xmm1 = xmm1[8],mem[8],xmm1[9],mem[9],xmm1[10],mem[10],xmm1[11],mem[11],xmm1[12],mem[12],xmm1[13],mem[13],xmm1[14],mem[14],xmm1[15],mem[15]
7140 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm2 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
7141 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
7142 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
7143 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
7144 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm1, %xmm1
7145 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
7146 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = <0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u>
7147 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm0, %ymm1, %ymm0
7148 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm1 # 16-byte Folded Reload
7149 ; AVX2-FAST-PERLANE-NEXT: # xmm1 = xmm5[8],mem[8],xmm5[9],mem[9],xmm5[10],mem[10],xmm5[11],mem[11],xmm5[12],mem[12],xmm5[13],mem[13],xmm5[14],mem[14],xmm5[15],mem[15]
7150 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm1, %xmm1
7151 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm12, %xmm2 # 16-byte Folded Reload
7152 ; AVX2-FAST-PERLANE-NEXT: # xmm2 = xmm12[8],mem[8],xmm12[9],mem[9],xmm12[10],mem[10],xmm12[11],mem[11],xmm12[12],mem[12],xmm12[13],mem[13],xmm12[14],mem[14],xmm12[15],mem[15]
7153 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm2, %xmm2
7154 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
7155 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
7156 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm1, %ymm2, %ymm1
7157 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm14[8],xmm8[8],xmm14[9],xmm8[9],xmm14[10],xmm8[10],xmm14[11],xmm8[11],xmm14[12],xmm8[12],xmm14[13],xmm8[13],xmm14[14],xmm8[14],xmm14[15],xmm8[15]
7158 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = [8,9,10,11,8,9,10,11,10,11,12,13,10,11,12,13]
7159 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm4
7160 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
7161 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm6 = <u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10>
7162 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm2, %xmm2
7163 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,1,0,1]
7164 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255>
7165 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm2, %ymm4, %ymm2
7166 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm13, %xmm3
7167 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm15[8],xmm10[8],xmm15[9],xmm10[9],xmm15[10],xmm10[10],xmm15[11],xmm10[11],xmm15[12],xmm10[12],xmm15[13],xmm10[13],xmm15[14],xmm10[14],xmm15[15],xmm10[15]
7168 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm4, %xmm4
7169 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,1,0,1]
7170 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
7171 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm7, %ymm4, %ymm3, %ymm3
7172 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0]
7173 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm0, %ymm2, %ymm0
7174 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7175 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm4, %ymm1, %ymm3, %ymm0
7176 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7177 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm1
7178 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm1[17,18,19,30],zero,ymm1[28],zero,ymm1[28,29,30,31],zero,ymm1[29],zero,ymm1[31]
7179 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, %ymm4
7180 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
7181 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm2
7182 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm2[30],zero,ymm2[28],zero,zero,zero,zero,ymm2[31],zero,ymm2[29],zero
7183 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, %ymm10
7184 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
7185 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm0, %ymm1, %ymm2
7186 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm1
7187 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero
7188 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, %ymm12
7189 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm0[2,3,2,3]
7190 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm5
7191 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm5[30],zero,ymm5[28],zero,zero,zero,zero,ymm5[31],zero,ymm5[29],zero,zero,zero,zero
7192 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,3,2,3]
7193 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm3, %ymm6, %ymm3
7194 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u>
7195 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm6
7196 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm0
7197 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[27],zero,ymm0[27,28,29,30],zero,ymm0[28],zero,ymm0[26,27,30,31],zero,ymm0[29]
7198 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, %ymm1
7199 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
7200 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
7201 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm0
7202 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm7 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm0[27],zero,zero,zero,zero,ymm0[30],zero,ymm0[28],zero,zero,zero,zero,ymm0[31],zero
7203 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, %ymm3
7204 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7205 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
7206 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm2, %ymm7, %ymm7
7207 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %ymm0
7208 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm8 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
7209 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, %ymm11
7210 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7211 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
7212 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = <u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0>
7213 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm7, %ymm8, %ymm7
7214 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm8 = [255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0]
7215 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm6, %ymm7, %ymm0
7216 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7217 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm5[23],zero,ymm5[27,20,21,26],zero,ymm5[24],zero,ymm5[26,27,26,27],zero,ymm5[25]
7218 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,3,2,3]
7219 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm7 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,ymm12[23],zero,zero,zero,zero,ymm12[26],zero,ymm12[24],zero,zero,zero,zero,ymm12[27],zero
7220 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm12, %ymm14
7221 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7222 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
7223 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm6, %ymm7, %ymm6
7224 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm7 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm10[25],zero,ymm10[23],zero,zero,zero,zero,ymm10[26],zero,ymm10[24],zero,zero,zero,zero
7225 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm10, %ymm13
7226 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7227 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
7228 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, %ymm2
7229 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7230 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm8 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm4[25],zero,ymm4[23],zero,zero,zero,zero,ymm4[26],zero,ymm4[24],zero,zero,zero,zero,ymm4[27]
7231 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
7232 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm7, %ymm8, %ymm7
7233 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0>
7234 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm6, %ymm7, %ymm6
7235 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm7 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm3[25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero
7236 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
7237 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm8 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[25],zero,ymm1[23],zero,zero,zero,zero,ymm1[26],zero,ymm1[24],zero,zero,zero
7238 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
7239 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm7, %ymm8, %ymm7
7240 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm8 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
7241 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
7242 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = <0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u>
7243 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm7, %ymm8, %ymm7
7244 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm8 = [0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255]
7245 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm8, %ymm6, %ymm7, %ymm0
7246 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7247 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20]
7248 ; AVX2-FAST-PERLANE-NEXT: # ymm8 = mem[0,1,0,1]
7249 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7250 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm8, %ymm0, %ymm7
7251 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,3,2,3]
7252 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128]
7253 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
7254 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm6, %ymm10
7255 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
7256 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm7, %ymm10, %ymm7
7257 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,128,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,128]
7258 ; AVX2-FAST-PERLANE-NEXT: # ymm10 = mem[0,1,0,1]
7259 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7260 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm4, %ymm11
7261 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
7262 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,128,128]
7263 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7264 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm1, %ymm15
7265 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,3,2,3]
7266 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm11, %ymm15, %ymm11
7267 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = <255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u,u,0,0,255,255,u,u>
7268 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm7, %ymm11, %ymm0
7269 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7270 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm8, %ymm13, %ymm8
7271 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
7272 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm2, %ymm9
7273 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
7274 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm8, %ymm9, %ymm8
7275 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm14, %ymm9
7276 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
7277 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm5, %ymm10
7278 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
7279 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm9, %ymm10, %ymm9
7280 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm8, %ymm9, %ymm8
7281 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22]
7282 ; AVX2-FAST-PERLANE-NEXT: # ymm9 = mem[0,1,0,1]
7283 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
7284 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm14, %ymm10
7285 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
7286 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128]
7287 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
7288 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm3, %ymm12
7289 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
7290 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm10, %ymm12, %ymm10
7291 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm12 = [20,21,18,19,18,19,20,21,18,19,20,21,28,29,30,31,20,21,18,19,18,19,20,21,18,19,20,21,28,29,30,31]
7292 ; AVX2-FAST-PERLANE-NEXT: # ymm12 = mem[0,1,0,1]
7293 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
7294 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm13, %ymm15
7295 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,3,2]
7296 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255>
7297 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm10, %ymm15, %ymm10
7298 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
7299 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm15, %ymm9
7300 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
7301 ; AVX2-FAST-PERLANE-NEXT: vmovdqu (%rsp), %ymm2 # 32-byte Reload
7302 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm2, %ymm11
7303 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
7304 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm9, %ymm11, %ymm9
7305 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
7306 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm7, %ymm11
7307 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,3,2]
7308 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm9, %ymm11, %ymm0
7309 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = [255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0]
7310 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm10 # 32-byte Folded Reload
7311 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7312 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm9, %ymm8, %ymm0, %ymm8
7313 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
7314 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm4, %ymm9
7315 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = [0,128,14,128,128,128,128,1,128,15,128,128,128,128,2,128,16,128,30,128,128,128,128,17,128,31,128,128,128,128,18,128]
7316 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm1, %ymm11
7317 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm9, %ymm11, %ymm9
7318 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
7319 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7320 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm1, %ymm12
7321 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = [128,128,0,128,14,128,128,128,128,1,128,15,128,128,128,128,128,128,16,128,30,128,128,128,128,17,128,31,128,128,128,128]
7322 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm6, %ymm6
7323 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm6, %ymm12, %ymm6
7324 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = <u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255,0,0,u,u,u,255,255>
7325 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm9, %ymm6, %ymm9
7326 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7327 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm0, %ymm1, %ymm0
7328 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm5, %ymm1
7329 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm0, %ymm1, %ymm0
7330 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7331 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm1, %ymm1
7332 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
7333 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm5, %ymm4
7334 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm1, %ymm4, %ymm1
7335 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm12, %ymm0, %ymm1, %ymm0
7336 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm1 = [128,1,2,3,0,128,14,128,0,1,0,1,128,15,128,15,128,17,18,19,16,128,30,128,16,17,16,17,128,31,128,31]
7337 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm3, %ymm4
7338 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = [13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128]
7339 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm5, %ymm14, %ymm6
7340 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm4, %ymm6, %ymm4
7341 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
7342 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm13, %ymm10
7343 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = <255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u,u,u,255,255,0,u,u>
7344 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm4, %ymm10, %ymm4
7345 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm2, %ymm1
7346 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm5, %ymm15, %ymm3
7347 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm1, %ymm3, %ymm1
7348 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm7, %ymm2
7349 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm1, %ymm2, %ymm1
7350 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm2 = [0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255,255,255,0,0,0,255,255]
7351 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm9, %ymm4, %ymm3
7352 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
7353 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
7354 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 96(%rax)
7355 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm3, 320(%rax)
7356 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm8, 128(%rax)
7357 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7358 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 352(%rax)
7359 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7360 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 160(%rax)
7361 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7362 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 192(%rax)
7363 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7364 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 64(%rax)
7365 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7366 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 32(%rax)
7367 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7368 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, (%rax)
7369 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7370 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 224(%rax)
7371 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7372 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 288(%rax)
7373 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7374 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 256(%rax)
7375 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7376 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 416(%rax)
7377 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7378 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 384(%rax)
7379 ; AVX2-FAST-PERLANE-NEXT: addq $648, %rsp # imm = 0x288
7380 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
7381 ; AVX2-FAST-PERLANE-NEXT: retq
7382 ;
7383 ; AVX512F-SLOW-LABEL: store_i8_stride7_vf64:
7384 ; AVX512F-SLOW: # %bb.0:
7385 ; AVX512F-SLOW-NEXT: subq $1416, %rsp # imm = 0x588
7386 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %ymm1
7387 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm1[14],zero,zero,zero,zero,zero,zero,ymm1[15],zero,zero,zero,zero,zero,zero,ymm1[16],zero,zero,zero,zero,zero,zero,ymm1[17],zero,zero,zero,zero,zero,zero,ymm1[18]
7388 ; AVX512F-SLOW-NEXT: vmovdqa %ymm1, %ymm12
7389 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7390 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %ymm2
7391 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[0,1,14],zero,ymm2[12,13,0,1,14,15],zero,ymm2[3,12,13,2,3,16],zero,ymm2[30,31,28,29,16,17],zero,ymm2[31,18,19,28,29,18],zero
7392 ; AVX512F-SLOW-NEXT: vmovdqa %ymm2, %ymm9
7393 ; AVX512F-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7394 ; AVX512F-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
7395 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7396 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %ymm7
7397 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm1 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
7398 ; AVX512F-SLOW-NEXT: vpshufb %ymm1, %ymm7, %ymm0
7399 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm1, %ymm27
7400 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %ymm8
7401 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm2 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
7402 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm8, %ymm1
7403 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm2, %ymm28
7404 ; AVX512F-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm0
7405 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7406 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %ymm0
7407 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7408 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,ymm0[14],zero,zero,zero,zero,zero,zero,ymm0[15],zero,zero,zero,zero,zero,zero,ymm0[16],zero,zero,zero,zero,zero,zero,ymm0[17],zero,zero,zero,zero
7409 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %ymm2
7410 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0,13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0]
7411 ; AVX512F-SLOW-NEXT: # ymm3 = mem[0,1,0,1]
7412 ; AVX512F-SLOW-NEXT: vpshufb %ymm3, %ymm2, %ymm1
7413 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm3, %ymm17
7414 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm2, %ymm16
7415 ; AVX512F-SLOW-NEXT: vporq %ymm0, %ymm1, %ymm23
7416 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r9), %ymm10
7417 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r8), %ymm11
7418 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm11[27],zero,zero,zero,zero,ymm11[30],zero,ymm11[28],zero,zero,zero,zero,ymm11[31],zero,ymm11[29]
7419 ; AVX512F-SLOW-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7420 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm10[25],zero,ymm10[23],zero,zero,zero,zero,ymm10[26],zero,ymm10[24],zero,zero
7421 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
7422 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7423 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rcx), %ymm5
7424 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdx), %ymm6
7425 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm6[30],zero,ymm6[28],zero,zero,zero,zero,ymm6[31],zero,ymm6[29],zero,zero
7426 ; AVX512F-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7427 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [128,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128,128,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128]
7428 ; AVX512F-SLOW-NEXT: # ymm2 = mem[0,1,0,1]
7429 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm5, %ymm1
7430 ; AVX512F-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7431 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
7432 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7433 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rsi), %ymm1
7434 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero
7435 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm1, %ymm21
7436 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdi), %ymm4
7437 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25]
7438 ; AVX512F-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
7439 ; AVX512F-SLOW-NEXT: vpshufb %ymm1, %ymm4, %ymm3
7440 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm4, %ymm20
7441 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
7442 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7443 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
7444 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rax), %ymm4
7445 ; AVX512F-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7446 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
7447 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
7448 ; AVX512F-SLOW-NEXT: # ymm0 = mem[0,1,0,1]
7449 ; AVX512F-SLOW-NEXT: vpshufb %ymm0, %ymm4, %ymm4
7450 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
7451 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7452 ; AVX512F-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7453 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm7, %ymm2
7454 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm7[18],zero,zero,zero,zero,ymm7[21],zero,ymm7[19],zero,zero,zero,zero,ymm7[22],zero,ymm7[20]
7455 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
7456 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7457 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,ymm8[23],zero,ymm8[21,22,23,26],zero,ymm8[24],zero,ymm8[28,29,26,27]
7458 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,ymm8[18,19,20,21],zero,ymm8[19],zero,ymm8[25,26,27,22],zero,ymm8[20],zero
7459 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm8, %ymm18
7460 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
7461 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7462 ; AVX512F-SLOW-NEXT: vpshufb %ymm1, %ymm9, %ymm1
7463 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm12[21],zero,ymm12[19],zero,zero,zero,zero,ymm12[22],zero,ymm12[20],zero,zero
7464 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
7465 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7466 ; AVX512F-SLOW-NEXT: vmovdqa (%rax), %ymm1
7467 ; AVX512F-SLOW-NEXT: vpshufb %ymm0, %ymm1, %ymm0
7468 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm25 = <u,5,4,u,5,u,4,u,20,21,u,23,u,21,u,23>
7469 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm1[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
7470 ; AVX512F-SLOW-NEXT: vpermi2d %zmm0, %zmm2, %zmm25
7471 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdx), %xmm3
7472 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rcx), %xmm15
7473 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm2 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
7474 ; AVX512F-SLOW-NEXT: vpshufb %xmm2, %xmm15, %xmm0
7475 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm2, %xmm19
7476 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
7477 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm3, %xmm2
7478 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm4, %xmm29
7479 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm3, %xmm30
7480 ; AVX512F-SLOW-NEXT: vpor %xmm0, %xmm2, %xmm0
7481 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7482 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdi), %xmm8
7483 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rsi), %xmm0
7484 ; AVX512F-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7485 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm7 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
7486 ; AVX512F-SLOW-NEXT: vpshufb %xmm7, %xmm0, %xmm0
7487 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
7488 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm8, %xmm2
7489 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm8, %xmm22
7490 ; AVX512F-SLOW-NEXT: vpor %xmm0, %xmm2, %xmm0
7491 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7492 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm8 = <0,u,0,u,2,3,u,1,u,18,u,19,18,u,19,u>
7493 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rax), %xmm2
7494 ; AVX512F-SLOW-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7495 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm2[0,1,2,3,4,5,5,6]
7496 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
7497 ; AVX512F-SLOW-NEXT: vpermi2d %zmm0, %zmm2, %zmm8
7498 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7499 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r9), %xmm0
7500 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r8), %xmm13
7501 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm12 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
7502 ; AVX512F-SLOW-NEXT: vpshufb %xmm12, %xmm0, %xmm8
7503 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm0, %xmm26
7504 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm14 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
7505 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm13, %xmm9
7506 ; AVX512F-SLOW-NEXT: vporq %xmm8, %xmm9, %xmm24
7507 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm27, %ymm0
7508 ; AVX512F-SLOW-NEXT: vpshufb %ymm0, %ymm5, %ymm8
7509 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm28, %ymm0
7510 ; AVX512F-SLOW-NEXT: vpshufb %ymm0, %ymm6, %ymm9
7511 ; AVX512F-SLOW-NEXT: vpor %ymm8, %ymm9, %ymm0
7512 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7513 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm21, %ymm3
7514 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = zero,zero,zero,ymm3[14],zero,zero,zero,zero,zero,zero,ymm3[15],zero,zero,zero,zero,zero,zero,ymm3[16],zero,zero,zero,zero,zero,zero,ymm3[17],zero,zero,zero,zero,zero,zero,ymm3[18]
7515 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm20, %ymm0
7516 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = ymm0[0,1,14],zero,ymm0[12,13,0,1,14,15],zero,ymm0[3,12,13,2,3,16],zero,ymm0[30,31,28,29,16,17],zero,ymm0[31,18,19,28,29,18],zero
7517 ; AVX512F-SLOW-NEXT: vpor %ymm8, %ymm9, %ymm5
7518 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7519 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm8 = zero,zero,zero,zero,zero,zero,ymm11[14],zero,zero,zero,zero,zero,zero,ymm11[15],zero,zero,zero,zero,zero,zero,ymm11[16],zero,zero,zero,zero,zero,zero,ymm11[17],zero,zero,zero,zero
7520 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm17, %ymm6
7521 ; AVX512F-SLOW-NEXT: vpshufb %ymm6, %ymm10, %ymm9
7522 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm10, %ymm28
7523 ; AVX512F-SLOW-NEXT: vpor %ymm8, %ymm9, %ymm5
7524 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7525 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %xmm6
7526 ; AVX512F-SLOW-NEXT: vpshufb %xmm7, %xmm6, %xmm5
7527 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm6, %xmm20
7528 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %xmm9
7529 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm9, %xmm4
7530 ; AVX512F-SLOW-NEXT: vporq %xmm5, %xmm4, %xmm21
7531 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %xmm2
7532 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm19, %xmm4
7533 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm2, %xmm4
7534 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %xmm10
7535 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm29, %xmm5
7536 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm10, %xmm7
7537 ; AVX512F-SLOW-NEXT: vporq %xmm4, %xmm7, %xmm19
7538 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %xmm5
7539 ; AVX512F-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7540 ; AVX512F-SLOW-NEXT: vpshufb %xmm12, %xmm5, %xmm4
7541 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %xmm7
7542 ; AVX512F-SLOW-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7543 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm7, %xmm6
7544 ; AVX512F-SLOW-NEXT: vpor %xmm4, %xmm6, %xmm4
7545 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7546 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm23, %zmm0, %zmm4
7547 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm7[8],xmm5[8],xmm7[9],xmm5[9],xmm7[10],xmm5[10],xmm7[11],xmm5[11],xmm7[12],xmm5[12],xmm7[13],xmm5[13],xmm7[14],xmm5[14],xmm7[15],xmm5[15]
7548 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm5 = <u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10>
7549 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm6, %xmm6
7550 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm5, %xmm27
7551 ; AVX512F-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm4 = zmm6[0,1,0,1],zmm4[4,5,6,7]
7552 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7553 ; AVX512F-SLOW-NEXT: vmovdqa (%rax), %xmm12
7554 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm12[0,1,2,3,4,5,5,6]
7555 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
7556 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,1,0,1]
7557 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
7558 ; AVX512F-SLOW-NEXT: vpandn %ymm4, %ymm11, %ymm4
7559 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = zero,ymm1[13],zero,zero,zero,zero,zero,zero,ymm1[14],zero,zero,zero,zero,zero,zero,ymm1[15],zero,zero,zero,zero,zero,zero,ymm1[16],zero,zero,zero,zero,zero,zero,ymm1[17],zero,zero
7560 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm4, %zmm23
7561 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm18, %ymm4
7562 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm4[30],zero,ymm4[28],zero,zero,zero,zero,ymm4[31],zero,ymm4[29],zero,zero
7563 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm4, %ymm18
7564 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm5 = [13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14]
7565 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm16, %ymm14
7566 ; AVX512F-SLOW-NEXT: vpshufb %ymm5, %ymm14, %ymm4
7567 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm5, %ymm29
7568 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
7569 ; AVX512F-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
7570 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm8[27],zero,zero,zero,zero,ymm8[30],zero,ymm8[28],zero,zero,zero,zero,ymm8[31],zero,ymm8[29]
7571 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
7572 ; AVX512F-SLOW-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm4, %ymm11
7573 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm26, %xmm7
7574 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm13[0],xmm7[0],xmm13[1],xmm7[1],xmm13[2],xmm7[2],xmm13[3],xmm7[3],xmm13[4],xmm7[4],xmm13[5],xmm7[5],xmm13[6],xmm7[6],xmm13[7],xmm7[7]
7575 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
7576 ; AVX512F-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm4 = zmm11[0,1,2,3],zmm4[0,1,0,1]
7577 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7578 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
7579 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7580 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm0[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
7581 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm17
7582 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[2,2,3,3,6,6,7,7]
7583 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm4 = [9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10]
7584 ; AVX512F-SLOW-NEXT: vmovdqa %ymm3, %ymm6
7585 ; AVX512F-SLOW-NEXT: vpshufb %ymm4, %ymm3, %ymm11
7586 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm11, %zmm26
7587 ; AVX512F-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7588 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero
7589 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7590 ; AVX512F-SLOW-NEXT: vpshufb %ymm4, %ymm1, %ymm1
7591 ; AVX512F-SLOW-NEXT: vpshuflw $233, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
7592 ; AVX512F-SLOW-NEXT: # ymm4 = mem[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
7593 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,0,1,1,4,4,5,5]
7594 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm4, %zmm0
7595 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7596 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm30, %xmm0
7597 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm0[0],xmm15[0],xmm0[1],xmm15[1],xmm0[2],xmm15[2],xmm0[3],xmm15[3],xmm0[4],xmm15[4],xmm0[5],xmm15[5],xmm0[6],xmm15[6],xmm0[7],xmm15[7]
7598 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm1, %xmm16
7599 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm15[8],xmm0[8],xmm15[9],xmm0[9],xmm15[10],xmm0[10],xmm15[11],xmm0[11],xmm15[12],xmm0[12],xmm15[13],xmm0[13],xmm15[14],xmm0[14],xmm15[15],xmm0[15]
7600 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm2[8],xmm10[8],xmm2[9],xmm10[9],xmm2[10],xmm10[10],xmm2[11],xmm10[11],xmm2[12],xmm10[12],xmm2[13],xmm10[13],xmm2[14],xmm10[14],xmm2[15],xmm10[15]
7601 ; AVX512F-SLOW-NEXT: vmovdqa %xmm2, %xmm11
7602 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm15 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
7603 ; AVX512F-SLOW-NEXT: vpshufb %xmm15, %xmm4, %xmm0
7604 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7605 ; AVX512F-SLOW-NEXT: vpshufb %xmm15, %xmm1, %xmm1
7606 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7607 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm1, %zmm0, %zmm30
7608 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm22, %xmm0
7609 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7610 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
7611 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
7612 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm20, %xmm5
7613 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm5[8],xmm9[8],xmm5[9],xmm9[9],xmm5[10],xmm9[10],xmm5[11],xmm9[11],xmm5[12],xmm9[12],xmm5[13],xmm9[13],xmm5[14],xmm9[14],xmm5[15],xmm9[15]
7614 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
7615 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm3, %xmm0
7616 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7617 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm1, %xmm1
7618 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
7619 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm1, %zmm0, %zmm22
7620 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm13[8],xmm7[8],xmm13[9],xmm7[9],xmm13[10],xmm7[10],xmm13[11],xmm7[11],xmm13[12],xmm7[12],xmm13[13],xmm7[13],xmm13[14],xmm7[14],xmm13[15],xmm7[15]
7621 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm27, %xmm1
7622 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm0, %xmm0
7623 ; AVX512F-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm24 = zmm24[0,1,0,1],zmm0[0,1,0,1]
7624 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm2 = [11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12]
7625 ; AVX512F-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7626 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm0, %ymm4
7627 ; AVX512F-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7628 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm0, %ymm13
7629 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[18],zero,zero,zero,zero,ymm0[21],zero,ymm0[19],zero,zero,zero,zero,ymm0[22],zero,ymm0[20]
7630 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm31
7631 ; AVX512F-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7632 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm7 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,ymm0[23],zero,ymm0[21,22,23,26],zero,ymm0[24],zero,ymm0[28,29,26,27]
7633 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,ymm0[18,19,20,21],zero,ymm0[19],zero,ymm0[25,26,27,22],zero,ymm0[20],zero
7634 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm20
7635 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm6[21],zero,ymm6[19],zero,zero,zero,zero,ymm6[22],zero,ymm6[20],zero,zero
7636 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm28, %ymm1
7637 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm29, %ymm0
7638 ; AVX512F-SLOW-NEXT: vpshufb %ymm0, %ymm1, %ymm3
7639 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm14[25],zero,ymm14[23],zero,zero,zero,zero,ymm14[26],zero,ymm14[24],zero,zero
7640 ; AVX512F-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7641 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22]
7642 ; AVX512F-SLOW-NEXT: # ymm2 = mem[0,1,0,1]
7643 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm14, %ymm0
7644 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7645 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm1, %ymm0
7646 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7647 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [24,25,24,25,128,23,128,23,24,25,26,128,24,128,30,31,24,25,24,25,128,23,128,23,24,25,26,128,24,128,30,31]
7648 ; AVX512F-SLOW-NEXT: # ymm2 = mem[0,1,0,1]
7649 ; AVX512F-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7650 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm0, %ymm1
7651 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm8, %ymm2
7652 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm2, %ymm29
7653 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128,20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128]
7654 ; AVX512F-SLOW-NEXT: # ymm2 = mem[0,1,0,1]
7655 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm8, %ymm8
7656 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm8, %ymm27
7657 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm0, %ymm0
7658 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7659 ; AVX512F-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7660 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = zero,ymm0[13],zero,zero,zero,zero,zero,zero,ymm0[14],zero,zero,zero,zero,zero,zero,ymm0[15],zero,zero,zero,zero,zero,zero,ymm0[16],zero,zero,zero,zero,zero,zero,ymm0[17],zero,zero
7661 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm14 = ymm0[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
7662 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm14[0,1,1,3,4,5,5,7]
7663 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,3,2]
7664 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm28 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
7665 ; AVX512F-SLOW-NEXT: vpandnq %ymm14, %ymm28, %ymm14
7666 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm2, %zmm2
7667 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm9[0],xmm5[0],xmm9[1],xmm5[1],xmm9[2],xmm5[2],xmm9[3],xmm5[3],xmm9[4],xmm5[4],xmm9[5],xmm5[5],xmm9[6],xmm5[6],xmm9[7],xmm5[7]
7668 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm14 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
7669 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm15, %xmm15
7670 ; AVX512F-SLOW-NEXT: vpshufb %xmm14, %xmm9, %xmm9
7671 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm21, %zmm9, %zmm14
7672 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3],xmm10[4],xmm11[4],xmm10[5],xmm11[5],xmm10[6],xmm11[6],xmm10[7],xmm11[7]
7673 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm10 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
7674 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm16, %xmm0
7675 ; AVX512F-SLOW-NEXT: vpshufb %xmm10, %xmm0, %xmm8
7676 ; AVX512F-SLOW-NEXT: vpshufb %xmm10, %xmm9, %xmm9
7677 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm19, %zmm9, %zmm9
7678 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm4[2,3,2,3]
7679 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm18[2,3,2,3]
7680 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm19 = ymm3[2,3,2,3]
7681 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm1[2,3,2,3]
7682 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
7683 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm7[2,3,2,3]
7684 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm6[2,3,2,3]
7685 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm17, %ymm1
7686 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
7687 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm21 = ymm1[0,0,1,1,4,4,5,5]
7688 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7689 ; AVX512F-SLOW-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
7690 ; AVX512F-SLOW-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3],xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
7691 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
7692 ; AVX512F-SLOW-NEXT: vshufi64x2 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 64-byte Folded Reload
7693 ; AVX512F-SLOW-NEXT: # zmm1 = zmm1[0,1,0,1],mem[0,1,0,1]
7694 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm3 = xmm12[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
7695 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm12[1,1,0,0,4,5,6,7]
7696 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[0,1,2,0]
7697 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm6, %zmm3
7698 ; AVX512F-SLOW-NEXT: vpandq {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm19, %ymm6
7699 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm0, %zmm0
7700 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Folded Reload
7701 ; AVX512F-SLOW-NEXT: # zmm6 = mem[2,3,2,3,6,7,6,7]
7702 ; AVX512F-SLOW-NEXT: vporq %zmm6, %zmm0, %zmm0
7703 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [18374966859431673855,18446463693966278655,18374966859431673855,18446463693966278655]
7704 ; AVX512F-SLOW-NEXT: # ymm6 = mem[0,1,0,1]
7705 ; AVX512F-SLOW-NEXT: vpand %ymm6, %ymm13, %ymm7
7706 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm5, %zmm5
7707 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Folded Reload
7708 ; AVX512F-SLOW-NEXT: # zmm7 = mem[2,3,2,3,6,7,6,7]
7709 ; AVX512F-SLOW-NEXT: vporq %zmm7, %zmm5, %zmm5
7710 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Folded Reload
7711 ; AVX512F-SLOW-NEXT: # zmm7 = mem[2,3,2,3,6,7,6,7]
7712 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm12 = zmm26[2,3,2,3,6,7,6,7]
7713 ; AVX512F-SLOW-NEXT: vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm12
7714 ; AVX512F-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm12
7715 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255]
7716 ; AVX512F-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm5, %zmm12
7717 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
7718 ; AVX512F-SLOW-NEXT: # zmm0 = mem[2,3,2,3,6,7,6,7]
7719 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Folded Reload
7720 ; AVX512F-SLOW-NEXT: # zmm7 = mem[2,3,2,3,6,7,6,7]
7721 ; AVX512F-SLOW-NEXT: vporq %zmm0, %zmm7, %zmm0
7722 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Folded Reload
7723 ; AVX512F-SLOW-NEXT: # zmm7 = mem[2,3,2,3,6,7,6,7]
7724 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Folded Reload
7725 ; AVX512F-SLOW-NEXT: # zmm13 = mem[2,3,2,3,6,7,6,7]
7726 ; AVX512F-SLOW-NEXT: vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm13
7727 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm5, %zmm13
7728 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm0 = zmm30[0,1,0,1,4,5,4,5]
7729 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm7 = zmm22[0,1,0,1,4,5,4,5]
7730 ; AVX512F-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm5, %zmm7
7731 ; AVX512F-SLOW-NEXT: vpternlogq $248, %ymm6, %ymm10, %ymm11
7732 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm21[2,3,2,3]
7733 ; AVX512F-SLOW-NEXT: vpternlogq $236, %ymm6, %ymm4, %ymm0
7734 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm8[0,1,0,1]
7735 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm11, %zmm4
7736 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
7737 ; AVX512F-SLOW-NEXT: # ymm5 = mem[2,3,2,3]
7738 ; AVX512F-SLOW-NEXT: vpshufhw $190, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
7739 ; AVX512F-SLOW-NEXT: # ymm6 = mem[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
7740 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[2,2,3,3,6,6,7,7]
7741 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,3,2,3]
7742 ; AVX512F-SLOW-NEXT: vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm5, %ymm6
7743 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm15[0,1,0,1]
7744 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm6, %zmm5
7745 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm6 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
7746 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm4, %zmm6, %zmm5
7747 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm31[2,3,2,3]
7748 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm20[2,3,2,3]
7749 ; AVX512F-SLOW-NEXT: vpor %ymm4, %ymm8, %ymm4
7750 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7751 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm8, %zmm4
7752 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
7753 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
7754 ; AVX512F-SLOW-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
7755 ; AVX512F-SLOW-NEXT: # ymm8 = mem[0,1,0,1]
7756 ; AVX512F-SLOW-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Folded Reload
7757 ; AVX512F-SLOW-NEXT: # ymm10 = mem[0,1,0,1]
7758 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
7759 ; AVX512F-SLOW-NEXT: # ymm11 = mem[2,3,2,3]
7760 ; AVX512F-SLOW-NEXT: vpshuflw $5, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
7761 ; AVX512F-SLOW-NEXT: # xmm15 = mem[1,1,0,0,4,5,6,7]
7762 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[0,1,2,0]
7763 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Folded Reload
7764 ; AVX512F-SLOW-NEXT: # ymm17 = mem[2,3,2,3]
7765 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm18 = ymm29[2,3,2,3]
7766 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %ymm19 # 32-byte Folded Reload
7767 ; AVX512F-SLOW-NEXT: # ymm19 = mem[2,3,2,3]
7768 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm20 = ymm27[2,3,2,3]
7769 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %ymm21 # 32-byte Folded Reload
7770 ; AVX512F-SLOW-NEXT: # ymm21 = mem[2,3,2,3]
7771 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %ymm22 # 32-byte Folded Reload
7772 ; AVX512F-SLOW-NEXT: # ymm22 = mem[2,3,2,3]
7773 ; AVX512F-SLOW-NEXT: vpternlogq $226, %zmm4, %zmm6, %zmm0
7774 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm4 # 32-byte Folded Reload
7775 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm6 # 32-byte Folded Reload
7776 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm6
7777 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
7778 ; AVX512F-SLOW-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm23
7779 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm23
7780 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm15[0,0,1,0]
7781 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm11, %zmm4
7782 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7783 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm4
7784 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm4
7785 ; AVX512F-SLOW-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Folded Reload
7786 ; AVX512F-SLOW-NEXT: # zmm5 = mem[2,3,2,3,6,7,6,7]
7787 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm12, %zmm5
7788 ; AVX512F-SLOW-NEXT: vporq %ymm17, %ymm18, %ymm6
7789 ; AVX512F-SLOW-NEXT: vporq %ymm19, %ymm20, %ymm8
7790 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm0, %zmm6
7791 ; AVX512F-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm6 = zmm8[0,1,2,3],zmm6[4,5,6,7]
7792 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm25
7793 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm25
7794 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
7795 ; AVX512F-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm24
7796 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm24
7797 ; AVX512F-SLOW-NEXT: vporq %ymm21, %ymm22, %ymm6
7798 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm0, %zmm6
7799 ; AVX512F-SLOW-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
7800 ; AVX512F-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm6 = zmm7[0,1,2,3],zmm6[4,5,6,7]
7801 ; AVX512F-SLOW-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm2
7802 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
7803 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm0 = zmm14[0,1,0,1,4,5,4,5]
7804 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm6 = zmm9[0,1,0,1,4,5,4,5]
7805 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm6
7806 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm0 = zmm3[0,0,1,0,4,4,5,4]
7807 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm0
7808 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm0
7809 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
7810 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, (%rax)
7811 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm2, 320(%rax)
7812 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm24, 256(%rax)
7813 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm25, 128(%rax)
7814 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm5, 384(%rax)
7815 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm4, 192(%rax)
7816 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm23, 64(%rax)
7817 ; AVX512F-SLOW-NEXT: addq $1416, %rsp # imm = 0x588
7818 ; AVX512F-SLOW-NEXT: vzeroupper
7819 ; AVX512F-SLOW-NEXT: retq
7820 ;
7821 ; AVX512F-ONLY-FAST-LABEL: store_i8_stride7_vf64:
7822 ; AVX512F-ONLY-FAST: # %bb.0:
7823 ; AVX512F-ONLY-FAST-NEXT: subq $1496, %rsp # imm = 0x5D8
7824 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
7825 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rcx), %ymm7
7826 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdx), %ymm15
7827 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm15[30],zero,ymm15[28],zero,zero,zero,zero,ymm15[31],zero,ymm15[29],zero,zero
7828 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm15, %ymm17
7829 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm7[25],zero,ymm7[23],zero,zero,zero,zero,ymm7[26],zero,ymm7[24],zero,zero,zero,zero
7830 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
7831 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7832 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rsi), %ymm15
7833 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdi), %ymm3
7834 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm15[30],zero,ymm15[28],zero,zero,zero,zero,ymm15[31],zero,ymm15[29],zero,zero,zero
7835 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7836 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero,zero,zero,ymm3[27],zero,ymm3[25]
7837 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7838 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
7839 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7840 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r9), %ymm4
7841 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r8), %ymm1
7842 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm1[27],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29]
7843 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm1, %ymm19
7844 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm4[25],zero,ymm4[23],zero,zero,zero,zero,ymm4[26],zero,ymm4[24],zero,zero
7845 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7846 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
7847 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7848 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rax), %ymm1
7849 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7850 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
7851 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
7852 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
7853 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7854 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %ymm1
7855 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm1[14],zero,zero,zero,zero,zero,zero,ymm1[15],zero,zero,zero,zero,zero,zero,ymm1[16],zero,zero,zero,zero,zero,zero,ymm1[17],zero,zero,zero,zero,zero,zero,ymm1[18]
7856 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm1, %ymm26
7857 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %ymm1
7858 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7859 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[0,1,14],zero,ymm1[12,13,0,1,14,15],zero,ymm1[3,12,13,2,3,16],zero,ymm1[30,31,28,29,16,17],zero,ymm1[31,18,19,28,29,18],zero
7860 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
7861 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7862 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %ymm1
7863 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
7864 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm1, %ymm0
7865 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm1, %ymm30
7866 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %ymm10
7867 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
7868 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm2, %ymm10, %ymm1
7869 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm2, %ymm25
7870 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
7871 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7872 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %ymm1
7873 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,ymm1[14],zero,zero,zero,zero,zero,zero,ymm1[15],zero,zero,zero,zero,zero,zero,ymm1[16],zero,zero,zero,zero,zero,zero,ymm1[17],zero,zero,zero,zero
7874 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm1, %ymm22
7875 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7876 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %ymm2
7877 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0,13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0]
7878 ; AVX512F-ONLY-FAST-NEXT: # ymm5 = mem[0,1,0,1]
7879 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm1
7880 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm5, %ymm29
7881 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm2, %ymm31
7882 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7883 ; AVX512F-ONLY-FAST-NEXT: vporq %ymm0, %ymm1, %ymm23
7884 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdx), %xmm5
7885 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rcx), %xmm1
7886 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
7887 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm0
7888 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm2, %xmm18
7889 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm1, %xmm20
7890 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
7891 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm5, %xmm1
7892 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm5, %xmm21
7893 ; AVX512F-ONLY-FAST-NEXT: vpor %xmm0, %xmm1, %xmm0
7894 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7895 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rdi), %xmm11
7896 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rsi), %xmm1
7897 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm14 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
7898 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm14, %xmm1, %xmm0
7899 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm1, %xmm28
7900 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
7901 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm11, %xmm5
7902 ; AVX512F-ONLY-FAST-NEXT: vpor %xmm0, %xmm5, %xmm0
7903 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7904 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r9), %xmm0
7905 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%r8), %xmm9
7906 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm8 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
7907 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm8, %xmm0, %xmm12
7908 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm0, %xmm16
7909 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
7910 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm0, %xmm9, %xmm13
7911 ; AVX512F-ONLY-FAST-NEXT: vpor %xmm12, %xmm13, %xmm12
7912 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7913 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm7, %ymm6
7914 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm7, %ymm24
7915 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm17, %ymm13
7916 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm25, %ymm7
7917 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm7, %ymm13, %ymm7
7918 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm6, %ymm7, %ymm6
7919 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7920 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,ymm15[14],zero,zero,zero,zero,zero,zero,ymm15[15],zero,zero,zero,zero,zero,zero,ymm15[16],zero,zero,zero,zero,zero,zero,ymm15[17],zero,zero,zero,zero,zero,zero,ymm15[18]
7921 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm3[0,1,14],zero,ymm3[12,13,0,1,14,15],zero,ymm3[3,12,13,2,3,16],zero,ymm3[30,31,28,29,16,17],zero,ymm3[31,18,19,28,29,18],zero
7922 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm6, %ymm7, %ymm3
7923 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7924 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm19, %ymm3
7925 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %ymm19, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7926 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,zero,zero,zero,ymm3[14],zero,zero,zero,zero,zero,zero,ymm3[15],zero,zero,zero,zero,zero,zero,ymm3[16],zero,zero,zero,zero,zero,zero,ymm3[17],zero,zero,zero,zero
7927 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm29, %ymm6
7928 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm4, %ymm6
7929 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm3, %ymm6, %ymm3
7930 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7931 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %xmm4
7932 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm14, %xmm4, %xmm3
7933 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm4, %xmm17
7934 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %xmm7
7935 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm7, %xmm1
7936 ; AVX512F-ONLY-FAST-NEXT: vpor %xmm3, %xmm1, %xmm1
7937 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7938 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %xmm3
7939 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm18, %xmm1
7940 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm3, %xmm1
7941 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm3, %xmm12
7942 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm5
7943 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm2, %xmm5, %xmm2
7944 ; AVX512F-ONLY-FAST-NEXT: vpor %xmm1, %xmm2, %xmm1
7945 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7946 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %xmm2
7947 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm8, %xmm2, %xmm1
7948 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm2, %xmm3
7949 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7950 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %xmm4
7951 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm0, %xmm4, %xmm2
7952 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7953 ; AVX512F-ONLY-FAST-NEXT: vpor %xmm1, %xmm2, %xmm0
7954 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7955 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7956 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero,zero,zero,ymm0[27],zero,ymm0[25]
7957 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm26, %ymm6
7958 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm6[21],zero,ymm6[19],zero,zero,zero,zero,ymm6[22],zero,ymm6[20],zero,zero
7959 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm0
7960 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7961 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm30, %ymm15
7962 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm15[25],zero,ymm15[23],zero,zero,zero,zero,ymm15[26],zero,ymm15[24],zero,zero,zero,zero
7963 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20]
7964 ; AVX512F-ONLY-FAST-NEXT: # ymm0 = mem[0,1,0,1]
7965 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm15, %ymm2
7966 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm0, %ymm25
7967 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm30, %ymm8
7968 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm0
7969 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7970 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm14 = [24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27,24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27]
7971 ; AVX512F-ONLY-FAST-NEXT: # ymm14 = mem[0,1,0,1]
7972 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128,18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128]
7973 ; AVX512F-ONLY-FAST-NEXT: # ymm0 = mem[0,1,0,1]
7974 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm10, %ymm1
7975 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm10, %ymm2
7976 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm0, %ymm29
7977 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm0
7978 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7979 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm23, %zmm0, %zmm1
7980 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
7981 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm4 = <u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10>
7982 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm2
7983 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm2[0,1,0,1],zmm1[4,5,6,7]
7984 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7985 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm23 = [2,2,3,3,2,2,3,3]
7986 ; AVX512F-ONLY-FAST-NEXT: # ymm23 = mem[0,1,2,3,0,1,2,3]
7987 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rax), %xmm0
7988 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7989 ; AVX512F-ONLY-FAST-NEXT: vpshufhw {{.*#+}} xmm2 = xmm0[0,1,2,3,4,5,5,6]
7990 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm2, %ymm23, %ymm2
7991 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
7992 ; AVX512F-ONLY-FAST-NEXT: vpandn %ymm2, %ymm3, %ymm2
7993 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rax), %ymm0
7994 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7995 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [128,13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128]
7996 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm3
7997 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm1, %ymm19
7998 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm18
7999 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm10[30],zero,ymm10[28],zero,zero,zero,zero,ymm10[31],zero,ymm10[29],zero,zero
8000 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm0, %ymm26
8001 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq {{.*#+}} ymm1 = [13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14]
8002 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm31, %ymm0
8003 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm2
8004 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm1, %ymm30
8005 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
8006 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm22, %ymm0
8007 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[27],zero,zero,zero,zero,ymm0[30],zero,ymm0[28],zero,zero,zero,zero,ymm0[31],zero,ymm0[29]
8008 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
8009 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm31 = [18374967954648269055,71777218572844800,18374967954648269055,71777218572844800]
8010 ; AVX512F-ONLY-FAST-NEXT: # ymm31 = mem[0,1,2,3,0,1,2,3]
8011 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, %ymm31, %ymm2, %ymm3
8012 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm16, %xmm10
8013 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3],xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
8014 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
8015 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm16 = zmm3[0,1,2,3],zmm2[0,1,0,1]
8016 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm21, %xmm0
8017 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm20, %xmm1
8018 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
8019 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm2, %xmm22
8020 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
8021 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm12[8],xmm5[8],xmm12[9],xmm5[9],xmm12[10],xmm5[10],xmm12[11],xmm5[11],xmm12[12],xmm5[12],xmm12[13],xmm5[13],xmm12[14],xmm5[14],xmm12[15],xmm5[15]
8022 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm5, %xmm20
8023 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm12, %xmm21
8024 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
8025 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm0, %xmm3, %xmm1
8026 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8027 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm0, %xmm2, %xmm0
8028 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8029 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm0
8030 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8031 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm28, %xmm0
8032 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm11[0],xmm0[0],xmm11[1],xmm0[1],xmm11[2],xmm0[2],xmm11[3],xmm0[3],xmm11[4],xmm0[4],xmm11[5],xmm0[5],xmm11[6],xmm0[6],xmm11[7],xmm0[7]
8033 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm11[8],xmm0[9],xmm11[9],xmm0[10],xmm11[10],xmm0[11],xmm11[11],xmm0[12],xmm11[12],xmm0[13],xmm11[13],xmm0[14],xmm11[14],xmm0[15],xmm11[15]
8034 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm17, %xmm3
8035 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm7[8],xmm3[9],xmm7[9],xmm3[10],xmm7[10],xmm3[11],xmm7[11],xmm3[12],xmm7[12],xmm3[13],xmm7[13],xmm3[14],xmm7[14],xmm3[15],xmm7[15]
8036 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
8037 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm2, %xmm2
8038 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8039 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
8040 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8041 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm0
8042 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8043 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm9[8],xmm10[8],xmm9[9],xmm10[9],xmm9[10],xmm10[10],xmm9[11],xmm10[11],xmm9[12],xmm10[12],xmm9[13],xmm10[13],xmm9[14],xmm10[14],xmm9[15],xmm10[15]
8044 ; AVX512F-ONLY-FAST-NEXT: vpbroadcastq {{.*#+}} ymm1 = [11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12]
8045 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm1, %ymm8, %ymm12
8046 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm24, %ymm11
8047 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm1, %ymm11, %ymm1
8048 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm27 = ymm1[2,3,2,3]
8049 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm13, %ymm1
8050 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm28 = ymm1[2,3,2,3]
8051 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm4, %xmm0, %xmm0
8052 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8053 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,0,1],zmm0[0,1,0,1]
8054 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8055 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 32(%rax), %xmm0
8056 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8057 ; AVX512F-ONLY-FAST-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,5,6]
8058 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm0, %ymm23, %ymm0
8059 ; AVX512F-ONLY-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8060 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm25, %ymm0
8061 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm11, %ymm10
8062 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
8063 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm15[9,u,7,u,u,u,u,10,u,8,u,u,u,u,11,u,25,u,23,u,u,u,u,26,u,24,u,u,u,u,27,u]
8064 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm24 = ymm0[2,3,2,3]
8065 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm29, %ymm0
8066 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm13, %ymm8
8067 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm15[21],zero,ymm15[19],zero,zero,zero,zero,ymm15[22],zero,ymm15[20],zero,zero
8068 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8069 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm19, %ymm1
8070 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm4
8071 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
8072 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [4,5,4,5,5,7,4,5]
8073 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
8074 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} ymm25 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
8075 ; AVX512F-ONLY-FAST-NEXT: vpandnq %ymm0, %ymm25, %ymm0
8076 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm4, %zmm23
8077 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [28,29,30,128,28,128,30,31,30,31,128,29,128,31,28,29,28,29,30,128,28,128,30,31,30,31,128,29,128,31,28,29]
8078 ; AVX512F-ONLY-FAST-NEXT: # ymm0 = mem[0,1,0,1]
8079 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
8080 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm15, %ymm9
8081 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm17 = ymm9[2,3,2,3]
8082 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8083 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm30, %ymm2
8084 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm2, %ymm1, %ymm13
8085 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm1, %ymm29
8086 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm25 = ymm13[2,3,2,3]
8087 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3],xmm7[4],xmm3[4],xmm7[5],xmm3[5],xmm7[6],xmm3[6],xmm7[7],xmm3[7]
8088 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm13 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
8089 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm13, %xmm5, %xmm5
8090 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm13, %xmm7, %xmm7
8091 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm1 # 16-byte Folded Reload
8092 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8093 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8094 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm1[23],zero,ymm1[23,24,25,26],zero,ymm1[24],zero,ymm1[30,31]
8095 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm1, %ymm30
8096 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm19 = ymm7[2,3,2,3]
8097 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm6, %ymm2
8098 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm6[30],zero,ymm6[28],zero,zero,zero,zero,ymm6[31],zero,ymm6[29],zero,zero,zero
8099 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm7[2,3,2,3]
8100 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
8101 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm3, %ymm0
8102 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm0[2,3,2,3]
8103 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
8104 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm12[2,3,2,3]
8105 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm26[2,3,2,3]
8106 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm20, %xmm1
8107 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm21, %xmm4
8108 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3],xmm1[4],xmm4[4],xmm1[5],xmm4[5],xmm1[6],xmm4[6],xmm1[7],xmm4[7]
8109 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
8110 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm22, %xmm4
8111 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm4, %xmm13
8112 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,0,1]
8113 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
8114 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
8115 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm11[2,3,2,3]
8116 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm1, %xmm12, %xmm1
8117 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm12 = [18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23,18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23]
8118 ; AVX512F-ONLY-FAST-NEXT: # ymm12 = mem[0,1,0,1]
8119 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm12, %ymm15, %ymm9
8120 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
8121 ; AVX512F-ONLY-FAST-NEXT: vinserti32x4 $2, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm11 # 16-byte Folded Reload
8122 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[9,u,7,u,u,u,u,10,u,8,u,u,u,u,11,u,25,u,23,u,u,u,u,26,u,24,u,u,u,u,27,u]
8123 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
8124 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm12, %ymm3, %ymm12
8125 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
8126 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm6, %ymm14, %ymm2
8127 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm2, %zmm2
8128 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [18374966859431673855,18446463693966278655,18374966859431673855,18446463693966278655]
8129 ; AVX512F-ONLY-FAST-NEXT: # ymm5 = mem[0,1,0,1]
8130 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, %ymm5, %ymm7, %ymm0
8131 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm13, %zmm0, %zmm7
8132 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
8133 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm2, %zmm0, %zmm7
8134 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm10, %ymm8, %ymm2
8135 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8136 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
8137 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm4, %ymm9, %ymm3
8138 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
8139 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm6
8140 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm2, %zmm0, %zmm6
8141 ; AVX512F-ONLY-FAST-NEXT: vpandq %ymm5, %ymm27, %ymm0
8142 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm28, %zmm0
8143 ; AVX512F-ONLY-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
8144 ; AVX512F-ONLY-FAST-NEXT: # zmm2 = mem[2,3,2,3,6,7,6,7]
8145 ; AVX512F-ONLY-FAST-NEXT: vporq %zmm2, %zmm0, %zmm0
8146 ; AVX512F-ONLY-FAST-NEXT: vpandq %ymm31, %ymm24, %ymm2
8147 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm17, %zmm2, %zmm2
8148 ; AVX512F-ONLY-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Folded Reload
8149 ; AVX512F-ONLY-FAST-NEXT: # zmm3 = mem[2,3,2,3,6,7,6,7]
8150 ; AVX512F-ONLY-FAST-NEXT: vporq %zmm3, %zmm2, %zmm2
8151 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
8152 ; AVX512F-ONLY-FAST-NEXT: vpandq %ymm31, %ymm25, %ymm0
8153 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm19, %zmm0
8154 ; AVX512F-ONLY-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Folded Reload
8155 ; AVX512F-ONLY-FAST-NEXT: # zmm3 = mem[2,3,2,3,6,7,6,7]
8156 ; AVX512F-ONLY-FAST-NEXT: vporq %zmm3, %zmm0, %zmm3
8157 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255]
8158 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $184, %zmm2, %zmm0, %zmm3
8159 ; AVX512F-ONLY-FAST-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
8160 ; AVX512F-ONLY-FAST-NEXT: # zmm2 = mem[0,1,0,1,4,5,4,5]
8161 ; AVX512F-ONLY-FAST-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Folded Reload
8162 ; AVX512F-ONLY-FAST-NEXT: # zmm8 = mem[0,1,0,1,4,5,4,5]
8163 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm2, %zmm0, %zmm8
8164 ; AVX512F-ONLY-FAST-NEXT: vpandq %ymm31, %ymm1, %ymm1
8165 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm12, %zmm1
8166 ; AVX512F-ONLY-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
8167 ; AVX512F-ONLY-FAST-NEXT: # zmm2 = mem[2,3,2,3,6,7,6,7]
8168 ; AVX512F-ONLY-FAST-NEXT: vporq %zmm2, %zmm1, %zmm1
8169 ; AVX512F-ONLY-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
8170 ; AVX512F-ONLY-FAST-NEXT: # zmm2 = mem[2,3,2,3,6,7,6,7]
8171 ; AVX512F-ONLY-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Folded Reload
8172 ; AVX512F-ONLY-FAST-NEXT: # zmm5 = mem[2,3,2,3,6,7,6,7]
8173 ; AVX512F-ONLY-FAST-NEXT: vporq %zmm2, %zmm5, %zmm22
8174 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $226, %zmm1, %zmm0, %zmm22
8175 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8176 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
8177 ; AVX512F-ONLY-FAST-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
8178 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
8179 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm26 # 64-byte Folded Reload
8180 ; AVX512F-ONLY-FAST-NEXT: # zmm26 = zmm0[0,1,0,1],mem[0,1,0,1]
8181 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8182 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm1 = xmm4[1,1,0,0,4,5,6,7]
8183 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,1,0,1,2,0,0,1]
8184 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm1, %ymm2, %ymm19
8185 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8186 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} xmm5 = xmm0[1,1,0,0,4,5,6,7]
8187 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm5, %ymm2, %ymm17
8188 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm5 = [4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
8189 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm5, %xmm4, %xmm10
8190 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm5, %xmm0, %xmm5
8191 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm12 = [128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22]
8192 ; AVX512F-ONLY-FAST-NEXT: # ymm12 = mem[0,1,0,1]
8193 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm29, %ymm0
8194 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm12, %ymm0, %ymm13
8195 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8196 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm0[25],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero
8197 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm12, %ymm0, %ymm12
8198 ; AVX512F-ONLY-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128,20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128]
8199 ; AVX512F-ONLY-FAST-NEXT: # ymm0 = mem[0,1,0,1]
8200 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm30, %ymm1
8201 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm1, %ymm2
8202 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
8203 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm9[23],zero,ymm9[23,24,25,26],zero,ymm9[24],zero,ymm9[30,31]
8204 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm0, %ymm9, %ymm0
8205 ; AVX512F-ONLY-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
8206 ; AVX512F-ONLY-FAST-NEXT: vpshuflw {{.*#+}} ymm15 = ymm4[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
8207 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [4,5,4,5,5,7,4,5]
8208 ; AVX512F-ONLY-FAST-NEXT: vpermd %ymm15, %ymm9, %ymm20
8209 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm15 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
8210 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
8211 ; AVX512F-ONLY-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Folded Reload
8212 ; AVX512F-ONLY-FAST-NEXT: # zmm24 = mem[2,3,2,3,6,7,6,7]
8213 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm24
8214 ; AVX512F-ONLY-FAST-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
8215 ; AVX512F-ONLY-FAST-NEXT: # ymm3 = mem[0,1,0,1]
8216 ; AVX512F-ONLY-FAST-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm25 # 32-byte Folded Reload
8217 ; AVX512F-ONLY-FAST-NEXT: # ymm25 = mem[0,1,0,1]
8218 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,3,2,3]
8219 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,1,0]
8220 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
8221 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
8222 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,1,0]
8223 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
8224 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
8225 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
8226 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
8227 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
8228 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 32-byte Folded Reload
8229 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm25 # 32-byte Folded Reload
8230 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm25
8231 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8232 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm18
8233 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm25, %zmm18
8234 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm19, %zmm15, %zmm3
8235 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm16
8236 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm16
8237 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm3 # 32-byte Folded Reload
8238 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
8239 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm3
8240 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm3
8241 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm2, %ymm13, %ymm2
8242 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm2
8243 ; AVX512F-ONLY-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
8244 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm4[0,1,2,3],zmm2[4,5,6,7]
8245 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm23
8246 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm23
8247 ; AVX512F-ONLY-FAST-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
8248 ; AVX512F-ONLY-FAST-NEXT: # zmm2 = mem[0,1,0,1,4,5,4,5]
8249 ; AVX512F-ONLY-FAST-NEXT: vpermq {{.*#+}} zmm6 = zmm11[0,1,0,1,4,5,4,5]
8250 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm6
8251 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm17, %zmm2
8252 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm26, %zmm2
8253 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm2
8254 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm1, %ymm14, %ymm1
8255 ; AVX512F-ONLY-FAST-NEXT: vpor %ymm0, %ymm12, %ymm0
8256 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
8257 ; AVX512F-ONLY-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm1[4,5,6,7]
8258 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm20, %zmm1
8259 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
8260 ; AVX512F-ONLY-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm22, %zmm1
8261 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
8262 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 128(%rax)
8263 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm2, (%rax)
8264 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm23, 320(%rax)
8265 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, 256(%rax)
8266 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm16, 192(%rax)
8267 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, 64(%rax)
8268 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm24, 384(%rax)
8269 ; AVX512F-ONLY-FAST-NEXT: addq $1496, %rsp # imm = 0x5D8
8270 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
8271 ; AVX512F-ONLY-FAST-NEXT: retq
8272 ;
8273 ; AVX512DQ-FAST-LABEL: store_i8_stride7_vf64:
8274 ; AVX512DQ-FAST: # %bb.0:
8275 ; AVX512DQ-FAST-NEXT: subq $1496, %rsp # imm = 0x5D8
8276 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
8277 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rcx), %ymm7
8278 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdx), %ymm15
8279 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm15[30],zero,ymm15[28],zero,zero,zero,zero,ymm15[31],zero,ymm15[29],zero,zero
8280 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm15, %ymm17
8281 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm7[25],zero,ymm7[23],zero,zero,zero,zero,ymm7[26],zero,ymm7[24],zero,zero,zero,zero
8282 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
8283 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8284 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rsi), %ymm15
8285 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdi), %ymm3
8286 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm15[30],zero,ymm15[28],zero,zero,zero,zero,ymm15[31],zero,ymm15[29],zero,zero,zero
8287 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8288 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero,zero,zero,ymm3[27],zero,ymm3[25]
8289 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8290 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
8291 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8292 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r9), %ymm4
8293 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r8), %ymm1
8294 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm1[27],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29]
8295 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm1, %ymm19
8296 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm4[25],zero,ymm4[23],zero,zero,zero,zero,ymm4[26],zero,ymm4[24],zero,zero
8297 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8298 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
8299 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8300 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rax), %ymm1
8301 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8302 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
8303 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
8304 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
8305 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8306 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %ymm1
8307 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm1[14],zero,zero,zero,zero,zero,zero,ymm1[15],zero,zero,zero,zero,zero,zero,ymm1[16],zero,zero,zero,zero,zero,zero,ymm1[17],zero,zero,zero,zero,zero,zero,ymm1[18]
8308 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm1, %ymm26
8309 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %ymm1
8310 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8311 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[0,1,14],zero,ymm1[12,13,0,1,14,15],zero,ymm1[3,12,13,2,3,16],zero,ymm1[30,31,28,29,16,17],zero,ymm1[31,18,19,28,29,18],zero
8312 ; AVX512DQ-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
8313 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8314 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %ymm1
8315 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
8316 ; AVX512DQ-FAST-NEXT: vpshufb %ymm6, %ymm1, %ymm0
8317 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm1, %ymm30
8318 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %ymm10
8319 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
8320 ; AVX512DQ-FAST-NEXT: vpshufb %ymm2, %ymm10, %ymm1
8321 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm2, %ymm25
8322 ; AVX512DQ-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
8323 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8324 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %ymm1
8325 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,zero,zero,ymm1[14],zero,zero,zero,zero,zero,zero,ymm1[15],zero,zero,zero,zero,zero,zero,ymm1[16],zero,zero,zero,zero,zero,zero,ymm1[17],zero,zero,zero,zero
8326 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm1, %ymm22
8327 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8328 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %ymm2
8329 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0,13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0]
8330 ; AVX512DQ-FAST-NEXT: # ymm5 = mem[0,1,0,1]
8331 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm1
8332 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm5, %ymm29
8333 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm2, %ymm31
8334 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8335 ; AVX512DQ-FAST-NEXT: vporq %ymm0, %ymm1, %ymm23
8336 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdx), %xmm5
8337 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rcx), %xmm1
8338 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
8339 ; AVX512DQ-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm0
8340 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm2, %xmm18
8341 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm1, %xmm20
8342 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
8343 ; AVX512DQ-FAST-NEXT: vpshufb %xmm2, %xmm5, %xmm1
8344 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm5, %xmm21
8345 ; AVX512DQ-FAST-NEXT: vpor %xmm0, %xmm1, %xmm0
8346 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8347 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rdi), %xmm11
8348 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rsi), %xmm1
8349 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm14 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
8350 ; AVX512DQ-FAST-NEXT: vpshufb %xmm14, %xmm1, %xmm0
8351 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm1, %xmm28
8352 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
8353 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm11, %xmm5
8354 ; AVX512DQ-FAST-NEXT: vpor %xmm0, %xmm5, %xmm0
8355 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8356 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r9), %xmm0
8357 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%r8), %xmm9
8358 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm8 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
8359 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm0, %xmm12
8360 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm0, %xmm16
8361 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
8362 ; AVX512DQ-FAST-NEXT: vpshufb %xmm0, %xmm9, %xmm13
8363 ; AVX512DQ-FAST-NEXT: vpor %xmm12, %xmm13, %xmm12
8364 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8365 ; AVX512DQ-FAST-NEXT: vpshufb %ymm6, %ymm7, %ymm6
8366 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm7, %ymm24
8367 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm17, %ymm13
8368 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm25, %ymm7
8369 ; AVX512DQ-FAST-NEXT: vpshufb %ymm7, %ymm13, %ymm7
8370 ; AVX512DQ-FAST-NEXT: vpor %ymm6, %ymm7, %ymm6
8371 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8372 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,ymm15[14],zero,zero,zero,zero,zero,zero,ymm15[15],zero,zero,zero,zero,zero,zero,ymm15[16],zero,zero,zero,zero,zero,zero,ymm15[17],zero,zero,zero,zero,zero,zero,ymm15[18]
8373 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm3[0,1,14],zero,ymm3[12,13,0,1,14,15],zero,ymm3[3,12,13,2,3,16],zero,ymm3[30,31,28,29,16,17],zero,ymm3[31,18,19,28,29,18],zero
8374 ; AVX512DQ-FAST-NEXT: vpor %ymm6, %ymm7, %ymm3
8375 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8376 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm19, %ymm3
8377 ; AVX512DQ-FAST-NEXT: vmovdqu64 %ymm19, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8378 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,zero,zero,zero,ymm3[14],zero,zero,zero,zero,zero,zero,ymm3[15],zero,zero,zero,zero,zero,zero,ymm3[16],zero,zero,zero,zero,zero,zero,ymm3[17],zero,zero,zero,zero
8379 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm29, %ymm6
8380 ; AVX512DQ-FAST-NEXT: vpshufb %ymm6, %ymm4, %ymm6
8381 ; AVX512DQ-FAST-NEXT: vpor %ymm3, %ymm6, %ymm3
8382 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8383 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %xmm4
8384 ; AVX512DQ-FAST-NEXT: vpshufb %xmm14, %xmm4, %xmm3
8385 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm4, %xmm17
8386 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %xmm7
8387 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm7, %xmm1
8388 ; AVX512DQ-FAST-NEXT: vpor %xmm3, %xmm1, %xmm1
8389 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8390 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %xmm3
8391 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm18, %xmm1
8392 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm3, %xmm1
8393 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm3, %xmm12
8394 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm5
8395 ; AVX512DQ-FAST-NEXT: vpshufb %xmm2, %xmm5, %xmm2
8396 ; AVX512DQ-FAST-NEXT: vpor %xmm1, %xmm2, %xmm1
8397 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8398 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %xmm2
8399 ; AVX512DQ-FAST-NEXT: vpshufb %xmm8, %xmm2, %xmm1
8400 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm2, %xmm3
8401 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8402 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %xmm4
8403 ; AVX512DQ-FAST-NEXT: vpshufb %xmm0, %xmm4, %xmm2
8404 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8405 ; AVX512DQ-FAST-NEXT: vpor %xmm1, %xmm2, %xmm0
8406 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8407 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8408 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero,zero,zero,ymm0[27],zero,ymm0[25]
8409 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm26, %ymm6
8410 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm6[21],zero,ymm6[19],zero,zero,zero,zero,ymm6[22],zero,ymm6[20],zero,zero
8411 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm0
8412 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8413 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm30, %ymm15
8414 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm15[25],zero,ymm15[23],zero,zero,zero,zero,ymm15[26],zero,ymm15[24],zero,zero,zero,zero
8415 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20]
8416 ; AVX512DQ-FAST-NEXT: # ymm0 = mem[0,1,0,1]
8417 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm15, %ymm2
8418 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm0, %ymm25
8419 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm30, %ymm8
8420 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm0
8421 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8422 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm14 = [24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27,24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27]
8423 ; AVX512DQ-FAST-NEXT: # ymm14 = mem[0,1,0,1]
8424 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128,18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128]
8425 ; AVX512DQ-FAST-NEXT: # ymm0 = mem[0,1,0,1]
8426 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm10, %ymm1
8427 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm10, %ymm2
8428 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm0, %ymm29
8429 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm0
8430 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8431 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm23, %zmm0, %zmm1
8432 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
8433 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm4 = <u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10>
8434 ; AVX512DQ-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm2
8435 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm2[0,1,0,1],zmm1[4,5,6,7]
8436 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8437 ; AVX512DQ-FAST-NEXT: vbroadcasti32x4 {{.*#+}} ymm23 = [2,2,3,3,2,2,3,3]
8438 ; AVX512DQ-FAST-NEXT: # ymm23 = mem[0,1,2,3,0,1,2,3]
8439 ; AVX512DQ-FAST-NEXT: vmovdqa (%rax), %xmm0
8440 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8441 ; AVX512DQ-FAST-NEXT: vpshufhw {{.*#+}} xmm2 = xmm0[0,1,2,3,4,5,5,6]
8442 ; AVX512DQ-FAST-NEXT: vpermd %ymm2, %ymm23, %ymm2
8443 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
8444 ; AVX512DQ-FAST-NEXT: vpandn %ymm2, %ymm3, %ymm2
8445 ; AVX512DQ-FAST-NEXT: vmovdqa (%rax), %ymm0
8446 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8447 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [128,13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128]
8448 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm3
8449 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm1, %ymm19
8450 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm18
8451 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm10[30],zero,ymm10[28],zero,zero,zero,zero,ymm10[31],zero,ymm10[29],zero,zero
8452 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm0, %ymm26
8453 ; AVX512DQ-FAST-NEXT: vpbroadcastq {{.*#+}} ymm1 = [13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14]
8454 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm31, %ymm0
8455 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm2
8456 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm1, %ymm30
8457 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
8458 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm22, %ymm0
8459 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[27],zero,zero,zero,zero,ymm0[30],zero,ymm0[28],zero,zero,zero,zero,ymm0[31],zero,ymm0[29]
8460 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
8461 ; AVX512DQ-FAST-NEXT: vbroadcasti64x2 {{.*#+}} ymm31 = [18374967954648269055,71777218572844800,18374967954648269055,71777218572844800]
8462 ; AVX512DQ-FAST-NEXT: # ymm31 = mem[0,1,0,1]
8463 ; AVX512DQ-FAST-NEXT: vpternlogq $248, %ymm31, %ymm2, %ymm3
8464 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm16, %xmm10
8465 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3],xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
8466 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
8467 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm16 = zmm3[0,1,2,3],zmm2[0,1,0,1]
8468 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm21, %xmm0
8469 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm20, %xmm1
8470 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
8471 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm2, %xmm22
8472 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
8473 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm12[8],xmm5[8],xmm12[9],xmm5[9],xmm12[10],xmm5[10],xmm12[11],xmm5[11],xmm12[12],xmm5[12],xmm12[13],xmm5[13],xmm12[14],xmm5[14],xmm12[15],xmm5[15]
8474 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm5, %xmm20
8475 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm12, %xmm21
8476 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
8477 ; AVX512DQ-FAST-NEXT: vpshufb %xmm0, %xmm3, %xmm1
8478 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8479 ; AVX512DQ-FAST-NEXT: vpshufb %xmm0, %xmm2, %xmm0
8480 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8481 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm0
8482 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8483 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm28, %xmm0
8484 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm11[0],xmm0[0],xmm11[1],xmm0[1],xmm11[2],xmm0[2],xmm11[3],xmm0[3],xmm11[4],xmm0[4],xmm11[5],xmm0[5],xmm11[6],xmm0[6],xmm11[7],xmm0[7]
8485 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm11[8],xmm0[9],xmm11[9],xmm0[10],xmm11[10],xmm0[11],xmm11[11],xmm0[12],xmm11[12],xmm0[13],xmm11[13],xmm0[14],xmm11[14],xmm0[15],xmm11[15]
8486 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm17, %xmm3
8487 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm7[8],xmm3[9],xmm7[9],xmm3[10],xmm7[10],xmm3[11],xmm7[11],xmm3[12],xmm7[12],xmm3[13],xmm7[13],xmm3[14],xmm7[14],xmm3[15],xmm7[15]
8488 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
8489 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm2, %xmm2
8490 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8491 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm0, %xmm0
8492 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8493 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm0
8494 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8495 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm9[8],xmm10[8],xmm9[9],xmm10[9],xmm9[10],xmm10[10],xmm9[11],xmm10[11],xmm9[12],xmm10[12],xmm9[13],xmm10[13],xmm9[14],xmm10[14],xmm9[15],xmm10[15]
8496 ; AVX512DQ-FAST-NEXT: vpbroadcastq {{.*#+}} ymm1 = [11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12]
8497 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm8, %ymm12
8498 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm24, %ymm11
8499 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm11, %ymm1
8500 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm27 = ymm1[2,3,2,3]
8501 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm13, %ymm1
8502 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm28 = ymm1[2,3,2,3]
8503 ; AVX512DQ-FAST-NEXT: vpshufb %xmm4, %xmm0, %xmm0
8504 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
8505 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,0,1],zmm0[0,1,0,1]
8506 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8507 ; AVX512DQ-FAST-NEXT: vmovdqa 32(%rax), %xmm0
8508 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8509 ; AVX512DQ-FAST-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,5,6]
8510 ; AVX512DQ-FAST-NEXT: vpermd %ymm0, %ymm23, %ymm0
8511 ; AVX512DQ-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8512 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm25, %ymm0
8513 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm11, %ymm10
8514 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
8515 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm15[9,u,7,u,u,u,u,10,u,8,u,u,u,u,11,u,25,u,23,u,u,u,u,26,u,24,u,u,u,u,27,u]
8516 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm24 = ymm0[2,3,2,3]
8517 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm29, %ymm0
8518 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm13, %ymm8
8519 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm15[21],zero,ymm15[19],zero,zero,zero,zero,ymm15[22],zero,ymm15[20],zero,zero
8520 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8521 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm19, %ymm1
8522 ; AVX512DQ-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm4
8523 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm0 = ymm0[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
8524 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [4,5,4,5,5,7,4,5]
8525 ; AVX512DQ-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
8526 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} ymm25 = [255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255]
8527 ; AVX512DQ-FAST-NEXT: vpandnq %ymm0, %ymm25, %ymm0
8528 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm4, %zmm23
8529 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [28,29,30,128,28,128,30,31,30,31,128,29,128,31,28,29,28,29,30,128,28,128,30,31,30,31,128,29,128,31,28,29]
8530 ; AVX512DQ-FAST-NEXT: # ymm0 = mem[0,1,0,1]
8531 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
8532 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm15, %ymm9
8533 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm17 = ymm9[2,3,2,3]
8534 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8535 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm30, %ymm2
8536 ; AVX512DQ-FAST-NEXT: vpshufb %ymm2, %ymm1, %ymm13
8537 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm1, %ymm29
8538 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm25 = ymm13[2,3,2,3]
8539 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3],xmm7[4],xmm3[4],xmm7[5],xmm3[5],xmm7[6],xmm3[6],xmm7[7],xmm3[7]
8540 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm13 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
8541 ; AVX512DQ-FAST-NEXT: vpshufb %xmm13, %xmm5, %xmm5
8542 ; AVX512DQ-FAST-NEXT: vpshufb %xmm13, %xmm7, %xmm7
8543 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm1 # 16-byte Folded Reload
8544 ; AVX512DQ-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8545 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8546 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm1[23],zero,ymm1[23,24,25,26],zero,ymm1[24],zero,ymm1[30,31]
8547 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm1, %ymm30
8548 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm19 = ymm7[2,3,2,3]
8549 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm6, %ymm2
8550 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm6[30],zero,ymm6[28],zero,zero,zero,zero,ymm6[31],zero,ymm6[29],zero,zero,zero
8551 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm7[2,3,2,3]
8552 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
8553 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm3, %ymm0
8554 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm0[2,3,2,3]
8555 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,1,0,1]
8556 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm12[2,3,2,3]
8557 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm26[2,3,2,3]
8558 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm20, %xmm1
8559 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm21, %xmm4
8560 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3],xmm1[4],xmm4[4],xmm1[5],xmm4[5],xmm1[6],xmm4[6],xmm1[7],xmm4[7]
8561 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
8562 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm22, %xmm4
8563 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm4, %xmm13
8564 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,1,0,1]
8565 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
8566 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
8567 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm11[2,3,2,3]
8568 ; AVX512DQ-FAST-NEXT: vpshufb %xmm1, %xmm12, %xmm1
8569 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm12 = [18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23,18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23]
8570 ; AVX512DQ-FAST-NEXT: # ymm12 = mem[0,1,0,1]
8571 ; AVX512DQ-FAST-NEXT: vpshufb %ymm12, %ymm15, %ymm9
8572 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
8573 ; AVX512DQ-FAST-NEXT: vinserti32x4 $2, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm11 # 16-byte Folded Reload
8574 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm2[9,u,7,u,u,u,u,10,u,8,u,u,u,u,11,u,25,u,23,u,u,u,u,26,u,24,u,u,u,u,27,u]
8575 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
8576 ; AVX512DQ-FAST-NEXT: vpshufb %ymm12, %ymm3, %ymm12
8577 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
8578 ; AVX512DQ-FAST-NEXT: vpor %ymm6, %ymm14, %ymm2
8579 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm2, %zmm2
8580 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [18374966859431673855,18446463693966278655,18374966859431673855,18446463693966278655]
8581 ; AVX512DQ-FAST-NEXT: # ymm5 = mem[0,1,0,1]
8582 ; AVX512DQ-FAST-NEXT: vpternlogq $248, %ymm5, %ymm7, %ymm0
8583 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm13, %zmm0, %zmm7
8584 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
8585 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm2, %zmm0, %zmm7
8586 ; AVX512DQ-FAST-NEXT: vpor %ymm10, %ymm8, %ymm2
8587 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8588 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
8589 ; AVX512DQ-FAST-NEXT: vpor %ymm4, %ymm9, %ymm3
8590 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
8591 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm6
8592 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm2, %zmm0, %zmm6
8593 ; AVX512DQ-FAST-NEXT: vpandq %ymm5, %ymm27, %ymm0
8594 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm28, %zmm0
8595 ; AVX512DQ-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
8596 ; AVX512DQ-FAST-NEXT: # zmm2 = mem[2,3,2,3,6,7,6,7]
8597 ; AVX512DQ-FAST-NEXT: vporq %zmm2, %zmm0, %zmm0
8598 ; AVX512DQ-FAST-NEXT: vpandq %ymm31, %ymm24, %ymm2
8599 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm17, %zmm2, %zmm2
8600 ; AVX512DQ-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Folded Reload
8601 ; AVX512DQ-FAST-NEXT: # zmm3 = mem[2,3,2,3,6,7,6,7]
8602 ; AVX512DQ-FAST-NEXT: vporq %zmm3, %zmm2, %zmm2
8603 ; AVX512DQ-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
8604 ; AVX512DQ-FAST-NEXT: vpandq %ymm31, %ymm25, %ymm0
8605 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm19, %zmm0
8606 ; AVX512DQ-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Folded Reload
8607 ; AVX512DQ-FAST-NEXT: # zmm3 = mem[2,3,2,3,6,7,6,7]
8608 ; AVX512DQ-FAST-NEXT: vporq %zmm3, %zmm0, %zmm3
8609 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255]
8610 ; AVX512DQ-FAST-NEXT: vpternlogq $184, %zmm2, %zmm0, %zmm3
8611 ; AVX512DQ-FAST-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
8612 ; AVX512DQ-FAST-NEXT: # zmm2 = mem[0,1,0,1,4,5,4,5]
8613 ; AVX512DQ-FAST-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Folded Reload
8614 ; AVX512DQ-FAST-NEXT: # zmm8 = mem[0,1,0,1,4,5,4,5]
8615 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm2, %zmm0, %zmm8
8616 ; AVX512DQ-FAST-NEXT: vpandq %ymm31, %ymm1, %ymm1
8617 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm12, %zmm1
8618 ; AVX512DQ-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
8619 ; AVX512DQ-FAST-NEXT: # zmm2 = mem[2,3,2,3,6,7,6,7]
8620 ; AVX512DQ-FAST-NEXT: vporq %zmm2, %zmm1, %zmm1
8621 ; AVX512DQ-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
8622 ; AVX512DQ-FAST-NEXT: # zmm2 = mem[2,3,2,3,6,7,6,7]
8623 ; AVX512DQ-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Folded Reload
8624 ; AVX512DQ-FAST-NEXT: # zmm5 = mem[2,3,2,3,6,7,6,7]
8625 ; AVX512DQ-FAST-NEXT: vporq %zmm2, %zmm5, %zmm22
8626 ; AVX512DQ-FAST-NEXT: vpternlogq $226, %zmm1, %zmm0, %zmm22
8627 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8628 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
8629 ; AVX512DQ-FAST-NEXT: # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
8630 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
8631 ; AVX512DQ-FAST-NEXT: vshufi64x2 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm26 # 64-byte Folded Reload
8632 ; AVX512DQ-FAST-NEXT: # zmm26 = zmm0[0,1,0,1],mem[0,1,0,1]
8633 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
8634 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm1 = xmm4[1,1,0,0,4,5,6,7]
8635 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,1,0,1,2,0,0,1]
8636 ; AVX512DQ-FAST-NEXT: vpermd %ymm1, %ymm2, %ymm19
8637 ; AVX512DQ-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
8638 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} xmm5 = xmm0[1,1,0,0,4,5,6,7]
8639 ; AVX512DQ-FAST-NEXT: vpermd %ymm5, %ymm2, %ymm17
8640 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm5 = [4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
8641 ; AVX512DQ-FAST-NEXT: vpshufb %xmm5, %xmm4, %xmm10
8642 ; AVX512DQ-FAST-NEXT: vpshufb %xmm5, %xmm0, %xmm5
8643 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm12 = [128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22]
8644 ; AVX512DQ-FAST-NEXT: # ymm12 = mem[0,1,0,1]
8645 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm29, %ymm0
8646 ; AVX512DQ-FAST-NEXT: vpshufb %ymm12, %ymm0, %ymm13
8647 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
8648 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm0[25],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero
8649 ; AVX512DQ-FAST-NEXT: vpshufb %ymm12, %ymm0, %ymm12
8650 ; AVX512DQ-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128,20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128]
8651 ; AVX512DQ-FAST-NEXT: # ymm0 = mem[0,1,0,1]
8652 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm30, %ymm1
8653 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm1, %ymm2
8654 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
8655 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm9[23],zero,ymm9[23,24,25,26],zero,ymm9[24],zero,ymm9[30,31]
8656 ; AVX512DQ-FAST-NEXT: vpshufb %ymm0, %ymm9, %ymm0
8657 ; AVX512DQ-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
8658 ; AVX512DQ-FAST-NEXT: vpshuflw {{.*#+}} ymm15 = ymm4[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
8659 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [4,5,4,5,5,7,4,5]
8660 ; AVX512DQ-FAST-NEXT: vpermd %ymm15, %ymm9, %ymm20
8661 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm15 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
8662 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm9 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
8663 ; AVX512DQ-FAST-NEXT: vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Folded Reload
8664 ; AVX512DQ-FAST-NEXT: # zmm24 = mem[2,3,2,3,6,7,6,7]
8665 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm24
8666 ; AVX512DQ-FAST-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
8667 ; AVX512DQ-FAST-NEXT: # ymm3 = mem[0,1,0,1]
8668 ; AVX512DQ-FAST-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm25 # 32-byte Folded Reload
8669 ; AVX512DQ-FAST-NEXT: # ymm25 = mem[0,1,0,1]
8670 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,3,2,3]
8671 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,1,0]
8672 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
8673 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
8674 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,1,0]
8675 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
8676 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
8677 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
8678 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
8679 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
8680 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm3 # 32-byte Folded Reload
8681 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm25 # 32-byte Folded Reload
8682 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm25
8683 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8684 ; AVX512DQ-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm18
8685 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm25, %zmm18
8686 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm19, %zmm15, %zmm3
8687 ; AVX512DQ-FAST-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm16
8688 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm16
8689 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm3 # 32-byte Folded Reload
8690 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
8691 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm3
8692 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm3
8693 ; AVX512DQ-FAST-NEXT: vpor %ymm2, %ymm13, %ymm2
8694 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm2
8695 ; AVX512DQ-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
8696 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm4[0,1,2,3],zmm2[4,5,6,7]
8697 ; AVX512DQ-FAST-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm23
8698 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm23
8699 ; AVX512DQ-FAST-NEXT: vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
8700 ; AVX512DQ-FAST-NEXT: # zmm2 = mem[0,1,0,1,4,5,4,5]
8701 ; AVX512DQ-FAST-NEXT: vpermq {{.*#+}} zmm6 = zmm11[0,1,0,1,4,5,4,5]
8702 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm6
8703 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm17, %zmm2
8704 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm26, %zmm2
8705 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm2
8706 ; AVX512DQ-FAST-NEXT: vpor %ymm1, %ymm14, %ymm1
8707 ; AVX512DQ-FAST-NEXT: vpor %ymm0, %ymm12, %ymm0
8708 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
8709 ; AVX512DQ-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm1[4,5,6,7]
8710 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm20, %zmm1
8711 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
8712 ; AVX512DQ-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm22, %zmm1
8713 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
8714 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, 128(%rax)
8715 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, (%rax)
8716 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm23, 320(%rax)
8717 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, 256(%rax)
8718 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, 192(%rax)
8719 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm18, 64(%rax)
8720 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm24, 384(%rax)
8721 ; AVX512DQ-FAST-NEXT: addq $1496, %rsp # imm = 0x5D8
8722 ; AVX512DQ-FAST-NEXT: vzeroupper
8723 ; AVX512DQ-FAST-NEXT: retq
8724 ;
8725 ; AVX512BW-ONLY-SLOW-LABEL: store_i8_stride7_vf64:
8726 ; AVX512BW-ONLY-SLOW: # %bb.0:
8727 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
8728 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rax), %ymm15
8729 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 32(%rax), %ymm2
8730 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
8731 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm9, %ymm15, %ymm0
8732 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6]
8733 ; AVX512BW-ONLY-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
8734 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm15, %ymm1, %ymm1
8735 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm3
8736 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r9), %ymm10
8737 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm20 = [13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128]
8738 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm20, %ymm10, %ymm0
8739 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r8), %ymm11
8740 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm24 = [128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128]
8741 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm24, %ymm11, %ymm1
8742 ; AVX512BW-ONLY-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm5
8743 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r9), %xmm0
8744 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r8), %xmm1
8745 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
8746 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10>
8747 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm4, %xmm6, %xmm6
8748 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
8749 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm6, %zmm21
8750 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $2323999253380730912, %r10 # imm = 0x2040810204081020
8751 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %r10, %k1
8752 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm3, %zmm21 {%k1}
8753 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %ymm16
8754 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm14 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
8755 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm14, %ymm16, %ymm3
8756 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %ymm17
8757 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm23 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
8758 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm23, %ymm17, %ymm5
8759 ; AVX512BW-ONLY-SLOW-NEXT: vpor %ymm3, %ymm5, %ymm3
8760 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm5
8761 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rcx), %xmm6
8762 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm6[8],xmm5[8],xmm6[9],xmm5[9],xmm6[10],xmm5[10],xmm6[11],xmm5[11],xmm6[12],xmm5[12],xmm6[13],xmm5[13],xmm6[14],xmm5[14],xmm6[15],xmm5[15]
8763 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm12 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
8764 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm12, %xmm7, %xmm7
8765 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
8766 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm7, %zmm22
8767 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %ymm18
8768 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm25 = [0,1,14,128,12,13,0,1,14,15,128,3,12,13,2,3,16,128,30,31,28,29,16,17,128,31,18,19,28,29,18,128]
8769 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm25, %ymm18, %ymm3
8770 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %ymm19
8771 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm26 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
8772 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm26, %ymm19, %ymm7
8773 ; AVX512BW-ONLY-SLOW-NEXT: vpor %ymm3, %ymm7, %ymm3
8774 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm7
8775 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm8
8776 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm27 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
8777 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm13 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
8778 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm13, %xmm27, %xmm27
8779 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm27 = ymm27[0,1,0,1]
8780 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm27, %zmm3
8781 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $435749860008887046, %r10 # imm = 0x60C183060C18306
8782 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %r10, %k1
8783 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm22, %zmm3 {%k1}
8784 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $4066998693416279096, %r10 # imm = 0x3870E1C3870E1C38
8785 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %r10, %k1
8786 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm21, %zmm3 {%k1}
8787 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm21 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10]
8788 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm2, %ymm21, %ymm21
8789 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm9, %ymm2, %ymm9
8790 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm21, %zmm9, %zmm9
8791 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%r9), %ymm21
8792 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm20, %ymm21, %ymm20
8793 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%r8), %ymm22
8794 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm24, %ymm22, %ymm24
8795 ; AVX512BW-ONLY-SLOW-NEXT: vporq %ymm20, %ymm24, %ymm20
8796 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm24 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm22[20],zero,ymm22[18],zero,ymm22[20,21,20,21],zero,ymm22[19],zero,ymm22[19,20,21,22],zero
8797 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm24 = ymm24[2,3,2,3]
8798 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm27 = ymm21[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm21[20],zero,ymm21[18],zero,zero,zero,zero,ymm21[21],zero,ymm21[19],zero,zero,zero,zero,ymm21[22]
8799 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
8800 ; AVX512BW-ONLY-SLOW-NEXT: vporq %ymm24, %ymm27, %ymm24
8801 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm24, %zmm20, %zmm24
8802 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $145249953336295682, %r10 # imm = 0x204081020408102
8803 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %r10, %k1
8804 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm9, %zmm24 {%k1}
8805 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rsi), %ymm27
8806 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rdi), %ymm28
8807 ; AVX512BW-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm9 = ymm28[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
8808 ; AVX512BW-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[0,0,1,1,4,4,5,5]
8809 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastd {{.*#+}} ymm20 = [5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6]
8810 ; AVX512BW-ONLY-SLOW-NEXT: movl $676341840, %r10d # imm = 0x28502850
8811 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k1
8812 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm20, %ymm27, %ymm9 {%k1}
8813 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
8814 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm25, %ymm28, %ymm25
8815 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm26, %ymm27, %ymm26
8816 ; AVX512BW-ONLY-SLOW-NEXT: vporq %ymm25, %ymm26, %ymm25
8817 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm25, %zmm9
8818 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rdx), %ymm26
8819 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm14, %ymm26, %ymm14
8820 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rcx), %ymm29
8821 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm23, %ymm29, %ymm23
8822 ; AVX512BW-ONLY-SLOW-NEXT: vporq %ymm14, %ymm23, %ymm14
8823 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm23 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm26[18],zero,ymm26[18,19,20,21],zero,ymm26[19],zero,ymm26[25,26,27,22],zero,ymm26[20],zero
8824 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm23 = ymm23[2,3,2,3]
8825 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm25 = ymm29[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm29[18],zero,zero,zero,zero,ymm29[21],zero,ymm29[19],zero,zero,zero,zero,ymm29[22],zero,ymm29[20]
8826 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm25 = ymm25[2,3,2,3]
8827 ; AVX512BW-ONLY-SLOW-NEXT: vporq %ymm23, %ymm25, %ymm23
8828 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm23, %zmm14, %zmm14
8829 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $3485998880071096368, %r10 # imm = 0x3060C183060C1830
8830 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %r10, %k2
8831 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm14, %zmm9 {%k2}
8832 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $-4357498600088870461, %r10 # imm = 0xC3870E1C3870E1C3
8833 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %r10, %k3
8834 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm24, %zmm9 {%k3}
8835 ; AVX512BW-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm14 = ymm28[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
8836 ; AVX512BW-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm14[2,2,3,3,6,6,7,7]
8837 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastd {{.*#+}} ymm24 = [13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14]
8838 ; AVX512BW-ONLY-SLOW-NEXT: movl $338170920, %r10d # imm = 0x14281428
8839 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k4
8840 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm24, %ymm27, %ymm14 {%k4}
8841 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
8842 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm23 = [25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128]
8843 ; AVX512BW-ONLY-SLOW-NEXT: # ymm23 = mem[0,1,2,3,0,1,2,3]
8844 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm23, %ymm27, %ymm25
8845 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm25 = ymm25[2,3,2,3]
8846 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm27 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25]
8847 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm27, %ymm28, %ymm28
8848 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm28 = ymm28[2,3,2,3]
8849 ; AVX512BW-ONLY-SLOW-NEXT: vporq %ymm25, %ymm28, %ymm25
8850 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm25, %zmm14
8851 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm25 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm26[24,25],zero,ymm26[23],zero,ymm26[21,22,23,26],zero,ymm26[24],zero,ymm26[28,29,26,27]
8852 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm25 = ymm25[2,3,2,3]
8853 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm28 = ymm29[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm29[25],zero,ymm29[23],zero,zero,zero,zero,ymm29[26],zero,ymm29[24],zero,zero,zero,zero
8854 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm28 = ymm28[2,3,2,3]
8855 ; AVX512BW-ONLY-SLOW-NEXT: vporq %ymm25, %ymm28, %ymm28
8856 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm25 = [11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12]
8857 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm25, %ymm29, %ymm29
8858 ; AVX512BW-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm26 = ymm26[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
8859 ; AVX512BW-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm26 = ymm26[0,2,3,3,4,6,7,7]
8860 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %ymm26, %ymm29 {%k1}
8861 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %zmm26
8862 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm29 = ymm29[2,3,2,3]
8863 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm29, %zmm28, %zmm28
8864 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm29
8865 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $1742999440035548184, %r10 # imm = 0x183060C183060C18
8866 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %r10, %k3
8867 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm28, %zmm14 {%k3}
8868 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm28
8869 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm28[4,5,6,7]
8870 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} zmm22 = zmm22[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,zmm22[23],zero,zmm22[23,24,25,26],zero,zmm22[24],zero,zmm22[30,31,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,61],zero,zmm22[59],zero,zero,zero,zero,zmm22[62],zero,zmm22[60],zero,zero,zero,zero,zmm22[63],zero
8871 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm22 = zmm22[2,3,2,3,6,7,6,7]
8872 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm21 = zmm21[0,1,2,3],zmm29[4,5,6,7]
8873 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} zmm21 = zmm21[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm21[25],zero,zmm21[23],zero,zero,zero,zero,zmm21[26],zero,zmm21[24],zero,zero,zmm21[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm21[59],zero,zero,zero,zero,zmm21[62],zero,zmm21[60],zero,zero,zero,zero,zmm21[63],zero,zmm21[61]
8874 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm21 = zmm21[2,3,2,3,6,7,6,7]
8875 ; AVX512BW-ONLY-SLOW-NEXT: vporq %zmm22, %zmm21, %zmm22
8876 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rax), %zmm21
8877 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $6971997760142192736, %rax # imm = 0x60C183060C183060
8878 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rax, %k3
8879 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm22, %zmm14 {%k3}
8880 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = [11,13,12,11,12,13,13,12,11,13,12,11,12,13,13,12,62,61,62,63,63,62,62,63,62,61,62,63,63,62,62,63]
8881 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2w %zmm21, %zmm2, %zmm22
8882 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $-9150747060186627967, %rax # imm = 0x8102040810204081
8883 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rax, %k5
8884 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm22, %zmm14 {%k5}
8885 ; AVX512BW-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} ymm22 = ymm18[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
8886 ; AVX512BW-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm30 = ymm22[0,0,1,1,4,4,5,5]
8887 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm20, %ymm19, %ymm30 {%k1}
8888 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %zmm20
8889 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm23, %ymm19, %ymm22
8890 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm22 = ymm22[2,3,2,3]
8891 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm27, %ymm18, %ymm23
8892 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm23 = ymm23[2,3,2,3]
8893 ; AVX512BW-ONLY-SLOW-NEXT: vporq %ymm22, %ymm23, %ymm23
8894 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%r9), %xmm22
8895 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm27 = ymm30[2,3,2,3]
8896 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm23, %zmm27, %zmm27
8897 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%r8), %xmm23
8898 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm26, %zmm26
8899 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} zmm26 = zmm26[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,zmm26[18,19,20,21],zero,zmm26[19],zero,zmm26[25,26,27,22],zero,zmm26[20],zero,zmm26[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57],zero,zmm26[55],zero,zmm26[53,54,55,58],zero,zmm26[56],zero,zmm26[60,61,58,59]
8900 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm26 = zmm26[2,3,2,3,6,7,6,7]
8901 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm17, %zmm20, %zmm20
8902 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} zmm20 = zmm20[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm20[18],zero,zero,zero,zero,zmm20[21],zero,zmm20[19],zero,zero,zero,zero,zmm20[22],zero,zmm20[20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zmm20[57],zero,zmm20[55],zero,zero,zero,zero,zmm20[58],zero,zmm20[56],zero,zero,zero,zero
8903 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm20 = zmm20[2,3,2,3,6,7,6,7]
8904 ; AVX512BW-ONLY-SLOW-NEXT: vporq %zmm26, %zmm20, %zmm20
8905 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rdx), %xmm26
8906 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm27, %zmm20 {%k3}
8907 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rcx), %xmm27
8908 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm29, %zmm29
8909 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} zmm29 = zmm29[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,zmm29[18],zero,zmm29[20,21,20,21],zero,zmm29[19],zero,zmm29[19,20,21,22],zero,zmm29[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57,56,57],zero,zmm29[55],zero,zmm29[55,56,57,58],zero,zmm29[56],zero,zmm29[62,63]
8910 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm29 = zmm29[2,3,2,3,6,7,6,7]
8911 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm28, %zmm28
8912 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} zmm28 = zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm28[20],zero,zmm28[18],zero,zero,zero,zero,zmm28[21],zero,zmm28[19],zero,zero,zero,zero,zmm28[22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm28[57],zero,zmm28[55],zero,zero,zero,zero,zmm28[58],zero,zmm28[56],zero,zero
8913 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm28 = zmm28[2,3,2,3,6,7,6,7]
8914 ; AVX512BW-ONLY-SLOW-NEXT: vporq %zmm29, %zmm28, %zmm29
8915 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm21, %zmm15
8916 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,27,29,28,27,28,29,29,28,27,29,28,27,28,29,29,28]
8917 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %zmm15, %zmm28, %zmm28
8918 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $1161999626690365456, %rax # imm = 0x1020408102040810
8919 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rax, %k5
8920 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm28, %zmm29 {%k5}
8921 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rdi), %xmm28
8922 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $2033499346708139548, %rax # imm = 0x1C3870E1C3870E1C
8923 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rax, %k5
8924 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm29, %zmm20 {%k5}
8925 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rsi), %xmm29
8926 ; AVX512BW-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm18 = ymm18[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
8927 ; AVX512BW-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm30 = ymm18[2,2,3,3,6,6,7,7]
8928 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm24, %ymm19, %ymm30 {%k4}
8929 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm19 = xmm28[0],xmm29[0],xmm28[1],xmm29[1],xmm28[2],xmm29[2],xmm28[3],xmm29[3],xmm28[4],xmm29[4],xmm28[5],xmm29[5],xmm28[6],xmm29[6],xmm28[7],xmm29[7]
8930 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} xmm18 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
8931 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm18, %xmm19, %xmm19
8932 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm30[2,3,2,3],zmm19[0,1,0,1]
8933 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm24 = xmm26[0],xmm27[0],xmm26[1],xmm27[1],xmm26[2],xmm27[2],xmm26[3],xmm27[3],xmm26[4],xmm27[4],xmm26[5],xmm27[5],xmm26[6],xmm27[6],xmm26[7],xmm27[7]
8934 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm25, %ymm17, %ymm25
8935 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} xmm17 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
8936 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm17, %xmm24, %xmm24
8937 ; AVX512BW-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} ymm16 = ymm16[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
8938 ; AVX512BW-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm16 = ymm16[0,2,3,3,4,6,7,7]
8939 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %ymm16, %ymm25 {%k1}
8940 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm16 = zmm25[2,3,2,3],zmm24[0,1,0,1]
8941 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm19, %zmm16 {%k2}
8942 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm10[27],zero,zero,zero,zero,ymm10[30],zero,ymm10[28],zero,zero,zero,zero,ymm10[31],zero
8943 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
8944 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm11[27],zero,zero,zero,zero,ymm11[30],zero,ymm11[28],zero,zero,zero,zero,ymm11[31],zero,ymm11[29]
8945 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
8946 ; AVX512BW-ONLY-SLOW-NEXT: vpor %ymm10, %ymm11, %ymm11
8947 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm19 = xmm23[0],xmm22[0],xmm23[1],xmm22[1],xmm23[2],xmm22[2],xmm23[3],xmm22[3],xmm23[4],xmm22[4],xmm23[5],xmm22[5],xmm23[6],xmm22[6],xmm23[7],xmm22[7]
8948 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm10 = <u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u>
8949 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm10, %xmm19, %xmm19
8950 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm19 = ymm19[0,1,0,1]
8951 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm19, %zmm11, %zmm11
8952 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm19 = [14,13,14,15,15,14,14,15,14,13,14,15,15,14,14,15,17,17,16,16,17,17,16,16,20,21,17,17,17,17,16,16]
8953 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %zmm21, %zmm19, %zmm19
8954 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $580999813345182728, %rax # imm = 0x810204081020408
8955 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rax, %k1
8956 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm19, %zmm11 {%k1}
8957 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $1016749673354069774, %rax # imm = 0xE1C3870E1C3870E
8958 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rax, %k1
8959 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm11, %zmm16 {%k1}
8960 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} xmm19 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
8961 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm19, %xmm27, %xmm11
8962 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} xmm24 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
8963 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm24, %xmm26, %xmm25
8964 ; AVX512BW-ONLY-SLOW-NEXT: vporq %xmm11, %xmm25, %xmm11
8965 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm25 = xmm27[8],xmm26[8],xmm27[9],xmm26[9],xmm27[10],xmm26[10],xmm27[11],xmm26[11],xmm27[12],xmm26[12],xmm27[13],xmm26[13],xmm27[14],xmm26[14],xmm27[15],xmm26[15]
8966 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm12, %xmm25, %xmm12
8967 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm12, %zmm11, %zmm11
8968 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm12 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
8969 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm12, %xmm29, %xmm25
8970 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} xmm26 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
8971 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm26, %xmm28, %xmm27
8972 ; AVX512BW-ONLY-SLOW-NEXT: vporq %xmm25, %xmm27, %xmm25
8973 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm27 = xmm29[8],xmm28[8],xmm29[9],xmm28[9],xmm29[10],xmm28[10],xmm29[11],xmm28[11],xmm29[12],xmm28[12],xmm29[13],xmm28[13],xmm29[14],xmm28[14],xmm29[15],xmm28[15]
8974 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm28 = zmm11[0,1,0,1,4,5,4,5]
8975 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm13, %xmm27, %xmm11
8976 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm11, %zmm25, %zmm11
8977 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm11 = zmm11[0,1,0,1,4,5,4,5]
8978 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm28, %zmm11 {%k3}
8979 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm13 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
8980 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm13, %xmm22, %xmm25
8981 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm27 = [2,2,2,4,2,2,2,4,3,3,3,3,2,2,2,4,52,53,52,53,53,54,53,54,52,53,52,53,53,54,53,54]
8982 ; AVX512BW-ONLY-SLOW-NEXT: vpermi2w %zmm21, %zmm2, %zmm27
8983 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} xmm2 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
8984 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm2, %xmm23, %xmm21
8985 ; AVX512BW-ONLY-SLOW-NEXT: vporq %xmm25, %xmm21, %xmm21
8986 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm22 = xmm23[8],xmm22[8],xmm23[9],xmm22[9],xmm23[10],xmm22[10],xmm23[11],xmm22[11],xmm23[12],xmm22[12],xmm23[13],xmm22[13],xmm23[14],xmm22[14],xmm23[15],xmm22[15]
8987 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm4, %xmm22, %xmm4
8988 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm4, %zmm21, %zmm4
8989 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm4 = zmm4[0,1,0,1,4,5,4,5]
8990 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $290499906672591364, %rax # imm = 0x408102040810204
8991 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rax, %k1
8992 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm27, %zmm4 {%k1}
8993 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $-8714997200177740921, %rax # imm = 0x870E1C3870E1C387
8994 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rax, %k1
8995 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm4, %zmm11 {%k1}
8996 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm19, %xmm6, %xmm4
8997 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm24, %xmm5, %xmm19
8998 ; AVX512BW-ONLY-SLOW-NEXT: vporq %xmm4, %xmm19, %xmm4
8999 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3],xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
9000 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm17, %xmm5, %xmm5
9001 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm4, %zmm5, %zmm4
9002 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm12, %xmm8, %xmm5
9003 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm26, %xmm7, %xmm6
9004 ; AVX512BW-ONLY-SLOW-NEXT: vpor %xmm5, %xmm6, %xmm5
9005 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm6 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
9006 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm18, %xmm6, %xmm6
9007 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm5, %zmm6, %zmm5
9008 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm4 = zmm4[0,1,0,1,4,5,4,5]
9009 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm5 = zmm5[0,1,0,1,4,5,4,5]
9010 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $871499720017774092, %rax # imm = 0xC183060C183060C
9011 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rax, %k1
9012 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm4, %zmm5 {%k1}
9013 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm13, %xmm0, %xmm4
9014 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm2, %xmm1, %xmm2
9015 ; AVX512BW-ONLY-SLOW-NEXT: vpor %xmm4, %xmm2, %xmm2
9016 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
9017 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm10, %xmm0, %xmm0
9018 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm0
9019 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20]
9020 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %zmm15, %zmm1, %zmm1
9021 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm0 = zmm0[0,1,0,1,4,5,4,5]
9022 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $4647998506761461824, %rax # imm = 0x4081020408102040
9023 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rax, %k1
9024 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm1, %zmm0 {%k1}
9025 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $8133997386832558192, %rax # imm = 0x70E1C3870E1C3870
9026 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rax, %k1
9027 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm0, %zmm5 {%k1}
9028 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
9029 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm5, (%rax)
9030 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 320(%rax)
9031 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm11, 256(%rax)
9032 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 192(%rax)
9033 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, 128(%rax)
9034 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm3, 64(%rax)
9035 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm14, 384(%rax)
9036 ; AVX512BW-ONLY-SLOW-NEXT: vzeroupper
9037 ; AVX512BW-ONLY-SLOW-NEXT: retq
9039 ; AVX512BW-FAST-LABEL: store_i8_stride7_vf64:
9040 ; AVX512BW-FAST: # %bb.0:
9041 ; AVX512BW-FAST-NEXT: subq $200, %rsp
9042 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
9043 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rdi), %zmm9
9044 ; AVX512BW-FAST-NEXT: vmovdqu64 %zmm9, (%rsp) # 64-byte Spill
9045 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rsi), %zmm5
9046 ; AVX512BW-FAST-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9047 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rdx), %zmm2
9048 ; AVX512BW-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9049 ; AVX512BW-FAST-NEXT: vmovdqa (%rax), %ymm4
9050 ; AVX512BW-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9051 ; AVX512BW-FAST-NEXT: vmovdqa 32(%rax), %ymm13
9052 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
9053 ; AVX512BW-FAST-NEXT: vpshufb %ymm0, %ymm4, %ymm1
9054 ; AVX512BW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6]
9055 ; AVX512BW-FAST-NEXT: # ymm3 = mem[0,1,0,1]
9056 ; AVX512BW-FAST-NEXT: vpermw %ymm4, %ymm3, %ymm3
9057 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm3, %zmm6
9058 ; AVX512BW-FAST-NEXT: vmovdqa (%r9), %ymm15
9059 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} ymm17 = [13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128]
9060 ; AVX512BW-FAST-NEXT: vpshufb %ymm17, %ymm15, %ymm7
9061 ; AVX512BW-FAST-NEXT: vmovdqa (%r8), %ymm1
9062 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} ymm20 = [128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128]
9063 ; AVX512BW-FAST-NEXT: vpshufb %ymm20, %ymm1, %ymm8
9064 ; AVX512BW-FAST-NEXT: vmovdqa64 %ymm1, %ymm24
9065 ; AVX512BW-FAST-NEXT: vpor %ymm7, %ymm8, %ymm7
9066 ; AVX512BW-FAST-NEXT: vmovdqa64 (%r9), %xmm25
9067 ; AVX512BW-FAST-NEXT: vmovdqa (%r8), %xmm10
9068 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm10[8],xmm25[8],xmm10[9],xmm25[9],xmm10[10],xmm25[10],xmm10[11],xmm25[11],xmm10[12],xmm25[12],xmm10[13],xmm25[13],xmm10[14],xmm25[14],xmm10[15],xmm25[15]
9069 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
9070 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,1,0,1]
9071 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm8, %zmm22
9072 ; AVX512BW-FAST-NEXT: movabsq $2323999253380730912, %r10 # imm = 0x2040810204081020
9073 ; AVX512BW-FAST-NEXT: kmovq %r10, %k1
9074 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm6, %zmm22 {%k1}
9075 ; AVX512BW-FAST-NEXT: vmovdqa (%rdx), %ymm1
9076 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} ymm21 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
9077 ; AVX512BW-FAST-NEXT: vpshufb %ymm21, %ymm1, %ymm6
9078 ; AVX512BW-FAST-NEXT: vmovdqa %ymm1, %ymm7
9079 ; AVX512BW-FAST-NEXT: vmovdqa (%rcx), %ymm1
9080 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} ymm23 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
9081 ; AVX512BW-FAST-NEXT: vpshufb %ymm23, %ymm1, %ymm11
9082 ; AVX512BW-FAST-NEXT: vmovdqa %ymm1, %ymm8
9083 ; AVX512BW-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9084 ; AVX512BW-FAST-NEXT: vpor %ymm6, %ymm11, %ymm6
9085 ; AVX512BW-FAST-NEXT: vmovdqa (%rdx), %xmm14
9086 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rcx), %xmm16
9087 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm16[8],xmm14[8],xmm16[9],xmm14[9],xmm16[10],xmm14[10],xmm16[11],xmm14[11],xmm16[12],xmm14[12],xmm16[13],xmm14[13],xmm16[14],xmm14[14],xmm16[15],xmm14[15]
9088 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm11 = xmm11[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
9089 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,1,0,1]
9090 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm11, %zmm26
9091 ; AVX512BW-FAST-NEXT: vmovdqa (%rdi), %ymm11
9092 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} ymm28 = [0,1,14,128,12,13,0,1,14,15,128,3,12,13,2,3,16,128,30,31,28,29,16,17,128,31,18,19,28,29,18,128]
9093 ; AVX512BW-FAST-NEXT: vpshufb %ymm28, %ymm11, %ymm6
9094 ; AVX512BW-FAST-NEXT: vmovdqa (%rsi), %ymm12
9095 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} ymm29 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
9096 ; AVX512BW-FAST-NEXT: vpshufb %ymm29, %ymm12, %ymm18
9097 ; AVX512BW-FAST-NEXT: vporq %ymm6, %ymm18, %ymm6
9098 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rdi), %xmm18
9099 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rsi), %xmm19
9100 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm27 = xmm19[8],xmm18[8],xmm19[9],xmm18[9],xmm19[10],xmm18[10],xmm19[11],xmm18[11],xmm19[12],xmm18[12],xmm19[13],xmm18[13],xmm19[14],xmm18[14],xmm19[15],xmm18[15]
9101 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm27 = xmm27[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
9102 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm27 = ymm27[0,1,0,1]
9103 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm27, %zmm6
9104 ; AVX512BW-FAST-NEXT: movabsq $435749860008887046, %r10 # imm = 0x60C183060C18306
9105 ; AVX512BW-FAST-NEXT: kmovq %r10, %k1
9106 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm26, %zmm6 {%k1}
9107 ; AVX512BW-FAST-NEXT: movabsq $4066998693416279096, %r10 # imm = 0x3870E1C3870E1C38
9108 ; AVX512BW-FAST-NEXT: kmovq %r10, %k1
9109 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm22, %zmm6 {%k1}
9110 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} ymm22 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10]
9111 ; AVX512BW-FAST-NEXT: vpermw %ymm13, %ymm22, %ymm22
9112 ; AVX512BW-FAST-NEXT: vpshufb %ymm0, %ymm13, %ymm0
9113 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm22, %zmm0, %zmm22
9114 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%r9), %ymm27
9115 ; AVX512BW-FAST-NEXT: vpshufb %ymm17, %ymm27, %ymm17
9116 ; AVX512BW-FAST-NEXT: vmovdqa 32(%r8), %ymm1
9117 ; AVX512BW-FAST-NEXT: vpshufb %ymm20, %ymm1, %ymm20
9118 ; AVX512BW-FAST-NEXT: vporq %ymm17, %ymm20, %ymm17
9119 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm20 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm1[20],zero,ymm1[18],zero,ymm1[20,21,20,21],zero,ymm1[19],zero,ymm1[19,20,21,22],zero
9120 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,3,2,3]
9121 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm26 = ymm27[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm27[20],zero,ymm27[18],zero,zero,zero,zero,ymm27[21],zero,ymm27[19],zero,zero,zero,zero,ymm27[22]
9122 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm26 = ymm26[2,3,2,3]
9123 ; AVX512BW-FAST-NEXT: vporq %ymm20, %ymm26, %ymm20
9124 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm20, %zmm17, %zmm26
9125 ; AVX512BW-FAST-NEXT: movabsq $145249953336295682, %r10 # imm = 0x204081020408102
9126 ; AVX512BW-FAST-NEXT: kmovq %r10, %k1
9127 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm22, %zmm26 {%k1}
9128 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rdx), %ymm22
9129 ; AVX512BW-FAST-NEXT: vpshufb %ymm21, %ymm22, %ymm17
9130 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rcx), %ymm30
9131 ; AVX512BW-FAST-NEXT: vpshufb %ymm23, %ymm30, %ymm20
9132 ; AVX512BW-FAST-NEXT: vporq %ymm17, %ymm20, %ymm17
9133 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm20 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm22[18],zero,ymm22[18,19,20,21],zero,ymm22[19],zero,ymm22[25,26,27,22],zero,ymm22[20],zero
9134 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,3,2,3]
9135 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm21 = ymm30[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm30[18],zero,zero,zero,zero,ymm30[21],zero,ymm30[19],zero,zero,zero,zero,ymm30[22],zero,ymm30[20]
9136 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm21 = ymm21[2,3,2,3]
9137 ; AVX512BW-FAST-NEXT: vporq %ymm20, %ymm21, %ymm20
9138 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm20, %zmm17, %zmm21
9139 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rdi), %ymm31
9140 ; AVX512BW-FAST-NEXT: vpshufb %ymm28, %ymm31, %ymm17
9141 ; AVX512BW-FAST-NEXT: vmovdqa 32(%rsi), %ymm0
9142 ; AVX512BW-FAST-NEXT: vpshufb %ymm29, %ymm0, %ymm20
9143 ; AVX512BW-FAST-NEXT: vporq %ymm17, %ymm20, %ymm17
9144 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm20 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm31[18,19,20,21],zero,ymm31[19],zero,ymm31[21,20,21,22],zero,ymm31[20],zero,ymm31[22,23]
9145 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,3,2,3]
9146 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm23 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm0[21],zero,ymm0[19],zero,zero,zero,zero,ymm0[22],zero,ymm0[20],zero,zero
9147 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm23 = ymm23[2,3,2,3]
9148 ; AVX512BW-FAST-NEXT: vporq %ymm20, %ymm23, %ymm20
9149 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rcx), %zmm23
9150 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm20, %zmm17, %zmm17
9151 ; AVX512BW-FAST-NEXT: vmovdqa64 (%r8), %zmm3
9152 ; AVX512BW-FAST-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9153 ; AVX512BW-FAST-NEXT: movabsq $3485998880071096368, %r10 # imm = 0x3060C183060C1830
9154 ; AVX512BW-FAST-NEXT: kmovq %r10, %k2
9155 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm21, %zmm17 {%k2}
9156 ; AVX512BW-FAST-NEXT: vmovdqa64 (%r9), %zmm21
9157 ; AVX512BW-FAST-NEXT: movabsq $-4357498600088870461, %r10 # imm = 0xC3870E1C3870E1C3
9158 ; AVX512BW-FAST-NEXT: kmovq %r10, %k1
9159 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm26, %zmm17 {%k1}
9160 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rax), %zmm26
9161 ; AVX512BW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm23[4,5,6,7]
9162 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm22 = zmm22[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,zmm22[23],zero,zmm22[21,22,23,26],zero,zmm22[24],zero,zmm22[28,29,26,27,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,59],zero,zero,zero,zero,zmm22[62],zero,zmm22[60],zero,zero,zero,zero,zmm22[63],zero,zmm22[61],zero
9163 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm22 = zmm22[2,3,2,3,6,7,6,7]
9164 ; AVX512BW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm28 = zmm30[0,1,2,3],zmm2[4,5,6,7]
9165 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm28 = zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zmm28[25],zero,zmm28[23],zero,zero,zero,zero,zmm28[26],zero,zmm28[24],zero,zero,zero,zero,zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm28[62],zero,zmm28[60],zero,zero,zero,zero,zmm28[63],zero,zmm28[61],zero,zero
9166 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm28 = zmm28[2,3,2,3,6,7,6,7]
9167 ; AVX512BW-FAST-NEXT: vporq %zmm22, %zmm28, %zmm29
9168 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rdi), %xmm28
9169 ; AVX512BW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm9[4,5,6,7]
9170 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm0 = zmm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,zmm0[23],zero,zero,zero,zero,zmm0[26],zero,zmm0[24],zero,zero,zero,zero,zmm0[27],zero,zmm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,60,61,62],zero,zmm0[60],zero,zmm0[62,63,62,63],zero,zmm0[61],zero,zmm0[63,60,61]
9171 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm0 = zmm0[2,3,2,3,6,7,6,7]
9172 ; AVX512BW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm31[0,1,2,3],zmm5[4,5,6,7]
9173 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm22 = zmm22[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm22[23],zero,zero,zero,zero,zmm22[26],zero,zmm22[24],zero,zero,zero,zero,zmm22[27],zero,zmm22[25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zmm22[62],zero,zmm22[60],zero,zero,zero,zero,zmm22[63],zero,zmm22[61],zero,zero,zero
9174 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm22 = zmm22[2,3,2,3,6,7,6,7]
9175 ; AVX512BW-FAST-NEXT: vporq %zmm0, %zmm22, %zmm22
9176 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rsi), %xmm30
9177 ; AVX512BW-FAST-NEXT: movabsq $1742999440035548184, %rax # imm = 0x183060C183060C18
9178 ; AVX512BW-FAST-NEXT: kmovq %rax, %k1
9179 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm29, %zmm22 {%k1}
9180 ; AVX512BW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm21[4,5,6,7]
9181 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm0 = zmm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,zmm0[23],zero,zmm0[23,24,25,26],zero,zmm0[24],zero,zmm0[30,31,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,61],zero,zmm0[59],zero,zero,zero,zero,zmm0[62],zero,zmm0[60],zero,zero,zero,zero,zmm0[63],zero
9182 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm0 = zmm0[2,3,2,3,6,7,6,7]
9183 ; AVX512BW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm27[0,1,2,3],zmm3[4,5,6,7]
9184 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm1 = zmm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm1[25],zero,zmm1[23],zero,zero,zero,zero,zmm1[26],zero,zmm1[24],zero,zero,zmm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm1[59],zero,zero,zero,zero,zmm1[62],zero,zmm1[60],zero,zero,zero,zero,zmm1[63],zero,zmm1[61]
9185 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm1 = zmm1[2,3,2,3,6,7,6,7]
9186 ; AVX512BW-FAST-NEXT: vporq %zmm0, %zmm1, %zmm0
9187 ; AVX512BW-FAST-NEXT: movabsq $6971997760142192736, %rax # imm = 0x60C183060C183060
9188 ; AVX512BW-FAST-NEXT: kmovq %rax, %k1
9189 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm0, %zmm22 {%k1}
9190 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm0 = [11,13,12,11,12,13,13,12,11,13,12,11,12,13,13,12,62,61,62,63,63,62,62,63,62,61,62,63,63,62,62,63]
9191 ; AVX512BW-FAST-NEXT: vpermi2w %zmm26, %zmm13, %zmm0
9192 ; AVX512BW-FAST-NEXT: movabsq $-9150747060186627967, %rax # imm = 0x8102040810204081
9193 ; AVX512BW-FAST-NEXT: kmovq %rax, %k3
9194 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm0, %zmm22 {%k3}
9195 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,30],zero,ymm11[28],zero,ymm11[30,31,30,31],zero,ymm11[29],zero,ymm11[31,28,29]
9196 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
9197 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm12[30],zero,ymm12[28],zero,zero,zero,zero,ymm12[31],zero,ymm12[29],zero,zero,zero
9198 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
9199 ; AVX512BW-FAST-NEXT: vpor %ymm0, %ymm1, %ymm0
9200 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm28[0],xmm30[0],xmm28[1],xmm30[1],xmm28[2],xmm30[2],xmm28[3],xmm30[3],xmm28[4],xmm30[4],xmm28[5],xmm30[5],xmm28[6],xmm30[6],xmm28[7],xmm30[7]
9201 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
9202 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
9203 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm3
9204 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm8[30],zero,ymm8[28],zero,zero,zero,zero,ymm8[31],zero,ymm8[29],zero
9205 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
9206 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm27 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm7[30],zero,ymm7[28],zero,zero,zero,zero,ymm7[31],zero,ymm7[29],zero,zero
9207 ; AVX512BW-FAST-NEXT: vmovdqa64 %ymm7, %ymm20
9208 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
9209 ; AVX512BW-FAST-NEXT: vporq %ymm0, %ymm27, %ymm27
9210 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rdx), %xmm31
9211 ; AVX512BW-FAST-NEXT: vmovdqa 32(%rcx), %xmm1
9212 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm31[0],xmm1[0],xmm31[1],xmm1[1],xmm31[2],xmm1[2],xmm31[3],xmm1[3],xmm31[4],xmm1[4],xmm31[5],xmm1[5],xmm31[6],xmm1[6],xmm31[7],xmm1[7]
9213 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} xmm7 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
9214 ; AVX512BW-FAST-NEXT: vpshufb %xmm7, %xmm0, %xmm0
9215 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
9216 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm27, %zmm27
9217 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm3, %zmm27 {%k2}
9218 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm15[27],zero,zero,zero,zero,ymm15[30],zero,ymm15[28],zero,zero,zero,zero,ymm15[31],zero
9219 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
9220 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm24[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm24[27],zero,zero,zero,zero,ymm24[30],zero,ymm24[28],zero,zero,zero,zero,ymm24[31],zero,ymm24[29]
9221 ; AVX512BW-FAST-NEXT: vmovdqa64 %ymm24, %ymm9
9222 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
9223 ; AVX512BW-FAST-NEXT: vpor %ymm0, %ymm3, %ymm2
9224 ; AVX512BW-FAST-NEXT: vmovdqa 32(%r9), %xmm4
9225 ; AVX512BW-FAST-NEXT: vmovdqa 32(%r8), %xmm3
9226 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
9227 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} xmm8 = <u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u>
9228 ; AVX512BW-FAST-NEXT: vpshufb %xmm8, %xmm0, %xmm0
9229 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,1,0,1]
9230 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
9231 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = [14,13,14,15,15,14,14,15,14,13,14,15,15,14,14,15,17,17,16,16,17,17,16,16,20,21,17,17,17,17,16,16]
9232 ; AVX512BW-FAST-NEXT: vpermw %zmm26, %zmm2, %zmm2
9233 ; AVX512BW-FAST-NEXT: movabsq $580999813345182728, %rax # imm = 0x810204081020408
9234 ; AVX512BW-FAST-NEXT: kmovq %rax, %k2
9235 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm2, %zmm0 {%k2}
9236 ; AVX512BW-FAST-NEXT: movabsq $1016749673354069774, %rax # imm = 0xE1C3870E1C3870E
9237 ; AVX512BW-FAST-NEXT: kmovq %rax, %k2
9238 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm0, %zmm27 {%k2}
9239 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} xmm5 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
9240 ; AVX512BW-FAST-NEXT: vpshufb %xmm5, %xmm1, %xmm2
9241 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
9242 ; AVX512BW-FAST-NEXT: vpshufb %xmm0, %xmm31, %xmm24
9243 ; AVX512BW-FAST-NEXT: vporq %xmm2, %xmm24, %xmm2
9244 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm31[8],xmm1[9],xmm31[9],xmm1[10],xmm31[10],xmm1[11],xmm31[11],xmm1[12],xmm31[12],xmm1[13],xmm31[13],xmm1[14],xmm31[14],xmm1[15],xmm31[15]
9245 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
9246 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm2, %zmm1
9247 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
9248 ; AVX512BW-FAST-NEXT: vpshufb %xmm2, %xmm30, %xmm24
9249 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} xmm31 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
9250 ; AVX512BW-FAST-NEXT: vpshufb %xmm31, %xmm28, %xmm29
9251 ; AVX512BW-FAST-NEXT: vporq %xmm24, %xmm29, %xmm24
9252 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm28 = xmm30[8],xmm28[8],xmm30[9],xmm28[9],xmm30[10],xmm28[10],xmm30[11],xmm28[11],xmm30[12],xmm28[12],xmm30[13],xmm28[13],xmm30[14],xmm28[14],xmm30[15],xmm28[15]
9253 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm28 = xmm28[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
9254 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm28, %zmm24, %zmm24
9255 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm1 = zmm1[0,1,0,1,4,5,4,5]
9256 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm24 = zmm24[0,1,0,1,4,5,4,5]
9257 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm1, %zmm24 {%k1}
9258 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
9259 ; AVX512BW-FAST-NEXT: vpshufb %xmm1, %xmm4, %xmm28
9260 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} xmm29 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
9261 ; AVX512BW-FAST-NEXT: vpshufb %xmm29, %xmm3, %xmm30
9262 ; AVX512BW-FAST-NEXT: vporq %xmm28, %xmm30, %xmm28
9263 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm4[8],xmm3[9],xmm4[9],xmm3[10],xmm4[10],xmm3[11],xmm4[11],xmm3[12],xmm4[12],xmm3[13],xmm4[13],xmm3[14],xmm4[14],xmm3[15],xmm4[15]
9264 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
9265 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm3, %zmm28, %zmm3
9266 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = [2,2,2,4,2,2,2,4,3,3,3,3,2,2,2,4,52,53,52,53,53,54,53,54,52,53,52,53,53,54,53,54]
9267 ; AVX512BW-FAST-NEXT: vpermi2w %zmm26, %zmm13, %zmm4
9268 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm3 = zmm3[0,1,0,1,4,5,4,5]
9269 ; AVX512BW-FAST-NEXT: movabsq $290499906672591364, %rax # imm = 0x408102040810204
9270 ; AVX512BW-FAST-NEXT: kmovq %rax, %k2
9271 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm4, %zmm3 {%k2}
9272 ; AVX512BW-FAST-NEXT: movabsq $-8714997200177740921, %rax # imm = 0x870E1C3870E1C387
9273 ; AVX512BW-FAST-NEXT: kmovq %rax, %k2
9274 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm3, %zmm24 {%k2}
9275 ; AVX512BW-FAST-NEXT: vpshufb %xmm5, %xmm16, %xmm3
9276 ; AVX512BW-FAST-NEXT: vpshufb %xmm0, %xmm14, %xmm0
9277 ; AVX512BW-FAST-NEXT: vpor %xmm3, %xmm0, %xmm0
9278 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm14[0],xmm16[0],xmm14[1],xmm16[1],xmm14[2],xmm16[2],xmm14[3],xmm16[3],xmm14[4],xmm16[4],xmm14[5],xmm16[5],xmm14[6],xmm16[6],xmm14[7],xmm16[7]
9279 ; AVX512BW-FAST-NEXT: vpshufb %xmm7, %xmm3, %xmm3
9280 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm3, %zmm0
9281 ; AVX512BW-FAST-NEXT: vpshufb %xmm2, %xmm19, %xmm2
9282 ; AVX512BW-FAST-NEXT: vpshufb %xmm31, %xmm18, %xmm3
9283 ; AVX512BW-FAST-NEXT: vpor %xmm2, %xmm3, %xmm2
9284 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm18[0],xmm19[0],xmm18[1],xmm19[1],xmm18[2],xmm19[2],xmm18[3],xmm19[3],xmm18[4],xmm19[4],xmm18[5],xmm19[5],xmm18[6],xmm19[6],xmm18[7],xmm19[7]
9285 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
9286 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm3, %zmm2
9287 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm3 = zmm0[0,1,0,1,4,5,4,5]
9288 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm0 = zmm2[0,1,0,1,4,5,4,5]
9289 ; AVX512BW-FAST-NEXT: movabsq $871499720017774092, %rax # imm = 0xC183060C183060C
9290 ; AVX512BW-FAST-NEXT: kmovq %rax, %k2
9291 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm3, %zmm0 {%k2}
9292 ; AVX512BW-FAST-NEXT: vpshufb %xmm1, %xmm25, %xmm1
9293 ; AVX512BW-FAST-NEXT: vpshufb %xmm29, %xmm10, %xmm2
9294 ; AVX512BW-FAST-NEXT: vpor %xmm1, %xmm2, %xmm1
9295 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm10[0],xmm25[0],xmm10[1],xmm25[1],xmm10[2],xmm25[2],xmm10[3],xmm25[3],xmm10[4],xmm25[4],xmm10[5],xmm25[5],xmm10[6],xmm25[6],xmm10[7],xmm25[7]
9296 ; AVX512BW-FAST-NEXT: vpshufb %xmm8, %xmm2, %xmm2
9297 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm2, %zmm1
9298 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm1 = zmm1[0,1,0,1,4,5,4,5]
9299 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm26, %zmm2 # 32-byte Folded Reload
9300 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20]
9301 ; AVX512BW-FAST-NEXT: vpermw %zmm2, %zmm3, %zmm3
9302 ; AVX512BW-FAST-NEXT: movabsq $4647998506761461824, %rax # imm = 0x4081020408102040
9303 ; AVX512BW-FAST-NEXT: kmovq %rax, %k2
9304 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm3, %zmm1 {%k2}
9305 ; AVX512BW-FAST-NEXT: movabsq $8133997386832558192, %rax # imm = 0x70E1C3870E1C3870
9306 ; AVX512BW-FAST-NEXT: kmovq %rax, %k2
9307 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm1, %zmm0 {%k2}
9308 ; AVX512BW-FAST-NEXT: vmovdqu64 (%rsp), %zmm1 # 64-byte Reload
9309 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm12, %zmm1, %zmm1
9310 ; AVX512BW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9311 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm3, %zmm3
9312 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm1 = zmm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21],zero,zmm1[19],zero,zmm1[21,20,21,22],zero,zmm1[20],zero,zmm1[22,23,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,57],zero,zmm1[55],zero,zero,zero,zero,zmm1[58],zero,zmm1[56],zero,zero,zero,zero,zmm1[59],zero
9313 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm1 = zmm1[2,3,2,3,6,7,6,7]
9314 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm3[21],zero,zmm3[19],zero,zero,zero,zero,zmm3[22],zero,zmm3[20],zero,zero,zmm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm3[55],zero,zero,zero,zero,zmm3[58],zero,zmm3[56],zero,zero,zero,zero,zmm3[59],zero,zmm3[57]
9315 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm3 = zmm3[2,3,2,3,6,7,6,7]
9316 ; AVX512BW-FAST-NEXT: vporq %zmm1, %zmm3, %zmm1
9317 ; AVX512BW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9318 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm20, %zmm3, %zmm3
9319 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm23, %zmm4 # 32-byte Folded Reload
9320 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,zmm3[18,19,20,21],zero,zmm3[19],zero,zmm3[25,26,27,22],zero,zmm3[20],zero,zmm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57],zero,zmm3[55],zero,zmm3[53,54,55,58],zero,zmm3[56],zero,zmm3[60,61,58,59]
9321 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm3 = zmm3[2,3,2,3,6,7,6,7]
9322 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm4 = zmm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm4[18],zero,zero,zero,zero,zmm4[21],zero,zmm4[19],zero,zero,zero,zero,zmm4[22],zero,zmm4[20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zmm4[57],zero,zmm4[55],zero,zero,zero,zero,zmm4[58],zero,zmm4[56],zero,zero,zero,zero
9323 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm4 = zmm4[2,3,2,3,6,7,6,7]
9324 ; AVX512BW-FAST-NEXT: vporq %zmm3, %zmm4, %zmm3
9325 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm1, %zmm3 {%k1}
9326 ; AVX512BW-FAST-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
9327 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm1, %zmm1
9328 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm15, %zmm21, %zmm4
9329 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm1 = zmm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,zmm1[18],zero,zmm1[20,21,20,21],zero,zmm1[19],zero,zmm1[19,20,21,22],zero,zmm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57,56,57],zero,zmm1[55],zero,zmm1[55,56,57,58],zero,zmm1[56],zero,zmm1[62,63]
9330 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm1 = zmm1[2,3,2,3,6,7,6,7]
9331 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm4 = zmm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm4[20],zero,zmm4[18],zero,zero,zero,zero,zmm4[21],zero,zmm4[19],zero,zero,zero,zero,zmm4[22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm4[57],zero,zmm4[55],zero,zero,zero,zero,zmm4[58],zero,zmm4[56],zero,zero
9332 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm4 = zmm4[2,3,2,3,6,7,6,7]
9333 ; AVX512BW-FAST-NEXT: vporq %zmm1, %zmm4, %zmm1
9334 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,27,29,28,27,28,29,29,28,27,29,28,27,28,29,29,28]
9335 ; AVX512BW-FAST-NEXT: vpermw %zmm2, %zmm4, %zmm2
9336 ; AVX512BW-FAST-NEXT: movabsq $1161999626690365456, %rax # imm = 0x1020408102040810
9337 ; AVX512BW-FAST-NEXT: kmovq %rax, %k1
9338 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm2, %zmm1 {%k1}
9339 ; AVX512BW-FAST-NEXT: movabsq $2033499346708139548, %rax # imm = 0x1C3870E1C3870E1C
9340 ; AVX512BW-FAST-NEXT: kmovq %rax, %k1
9341 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm1, %zmm3 {%k1}
9342 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
9343 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm3, 128(%rax)
9344 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm17, 320(%rax)
9345 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm0, (%rax)
9346 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm24, 256(%rax)
9347 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm27, 192(%rax)
9348 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm6, 64(%rax)
9349 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm22, 384(%rax)
9350 ; AVX512BW-FAST-NEXT: addq $200, %rsp
9351 ; AVX512BW-FAST-NEXT: vzeroupper
9352 ; AVX512BW-FAST-NEXT: retq
9354 ; AVX512DQBW-SLOW-LABEL: store_i8_stride7_vf64:
9355 ; AVX512DQBW-SLOW: # %bb.0:
9356 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
9357 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rax), %ymm15
9358 ; AVX512DQBW-SLOW-NEXT: vmovdqa 32(%rax), %ymm2
9359 ; AVX512DQBW-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
9360 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm9, %ymm15, %ymm0
9361 ; AVX512DQBW-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6]
9362 ; AVX512DQBW-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
9363 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm15, %ymm1, %ymm1
9364 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm3
9365 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r9), %ymm10
9366 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm20 = [13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128]
9367 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm20, %ymm10, %ymm0
9368 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r8), %ymm11
9369 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm24 = [128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128]
9370 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm24, %ymm11, %ymm1
9371 ; AVX512DQBW-SLOW-NEXT: vpor %ymm0, %ymm1, %ymm5
9372 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r9), %xmm0
9373 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r8), %xmm1
9374 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
9375 ; AVX512DQBW-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10>
9376 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm4, %xmm6, %xmm6
9377 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
9378 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm6, %zmm21
9379 ; AVX512DQBW-SLOW-NEXT: movabsq $2323999253380730912, %r10 # imm = 0x2040810204081020
9380 ; AVX512DQBW-SLOW-NEXT: kmovq %r10, %k1
9381 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm3, %zmm21 {%k1}
9382 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %ymm16
9383 ; AVX512DQBW-SLOW-NEXT: vmovdqa {{.*#+}} ymm14 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
9384 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm14, %ymm16, %ymm3
9385 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rcx), %ymm17
9386 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm23 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
9387 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm23, %ymm17, %ymm5
9388 ; AVX512DQBW-SLOW-NEXT: vpor %ymm3, %ymm5, %ymm3
9389 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdx), %xmm5
9390 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rcx), %xmm6
9391 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm6[8],xmm5[8],xmm6[9],xmm5[9],xmm6[10],xmm5[10],xmm6[11],xmm5[11],xmm6[12],xmm5[12],xmm6[13],xmm5[13],xmm6[14],xmm5[14],xmm6[15],xmm5[15]
9392 ; AVX512DQBW-SLOW-NEXT: vmovdqa {{.*#+}} xmm12 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
9393 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm12, %xmm7, %xmm7
9394 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
9395 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm7, %zmm22
9396 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdi), %ymm18
9397 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm25 = [0,1,14,128,12,13,0,1,14,15,128,3,12,13,2,3,16,128,30,31,28,29,16,17,128,31,18,19,28,29,18,128]
9398 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm25, %ymm18, %ymm3
9399 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rsi), %ymm19
9400 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm26 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
9401 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm26, %ymm19, %ymm7
9402 ; AVX512DQBW-SLOW-NEXT: vpor %ymm3, %ymm7, %ymm3
9403 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdi), %xmm7
9404 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rsi), %xmm8
9405 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm27 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
9406 ; AVX512DQBW-SLOW-NEXT: vmovdqa {{.*#+}} xmm13 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
9407 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm13, %xmm27, %xmm27
9408 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm27 = ymm27[0,1,0,1]
9409 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm27, %zmm3
9410 ; AVX512DQBW-SLOW-NEXT: movabsq $435749860008887046, %r10 # imm = 0x60C183060C18306
9411 ; AVX512DQBW-SLOW-NEXT: kmovq %r10, %k1
9412 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm22, %zmm3 {%k1}
9413 ; AVX512DQBW-SLOW-NEXT: movabsq $4066998693416279096, %r10 # imm = 0x3870E1C3870E1C38
9414 ; AVX512DQBW-SLOW-NEXT: kmovq %r10, %k1
9415 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm21, %zmm3 {%k1}
9416 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm21 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10]
9417 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm2, %ymm21, %ymm21
9418 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm9, %ymm2, %ymm9
9419 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm21, %zmm9, %zmm9
9420 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%r9), %ymm21
9421 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm20, %ymm21, %ymm20
9422 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%r8), %ymm22
9423 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm24, %ymm22, %ymm24
9424 ; AVX512DQBW-SLOW-NEXT: vporq %ymm20, %ymm24, %ymm20
9425 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} ymm24 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm22[20],zero,ymm22[18],zero,ymm22[20,21,20,21],zero,ymm22[19],zero,ymm22[19,20,21,22],zero
9426 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm24 = ymm24[2,3,2,3]
9427 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} ymm27 = ymm21[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm21[20],zero,ymm21[18],zero,zero,zero,zero,ymm21[21],zero,ymm21[19],zero,zero,zero,zero,ymm21[22]
9428 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
9429 ; AVX512DQBW-SLOW-NEXT: vporq %ymm24, %ymm27, %ymm24
9430 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm24, %zmm20, %zmm24
9431 ; AVX512DQBW-SLOW-NEXT: movabsq $145249953336295682, %r10 # imm = 0x204081020408102
9432 ; AVX512DQBW-SLOW-NEXT: kmovq %r10, %k1
9433 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm9, %zmm24 {%k1}
9434 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rsi), %ymm27
9435 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rdi), %ymm28
9436 ; AVX512DQBW-SLOW-NEXT: vpshuflw {{.*#+}} ymm9 = ymm28[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
9437 ; AVX512DQBW-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[0,0,1,1,4,4,5,5]
9438 ; AVX512DQBW-SLOW-NEXT: vpbroadcastd {{.*#+}} ymm20 = [5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6]
9439 ; AVX512DQBW-SLOW-NEXT: movl $676341840, %r10d # imm = 0x28502850
9440 ; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k1
9441 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm20, %ymm27, %ymm9 {%k1}
9442 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
9443 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm25, %ymm28, %ymm25
9444 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm26, %ymm27, %ymm26
9445 ; AVX512DQBW-SLOW-NEXT: vporq %ymm25, %ymm26, %ymm25
9446 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm25, %zmm9
9447 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rdx), %ymm26
9448 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm14, %ymm26, %ymm14
9449 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rcx), %ymm29
9450 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm23, %ymm29, %ymm23
9451 ; AVX512DQBW-SLOW-NEXT: vporq %ymm14, %ymm23, %ymm14
9452 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} ymm23 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm26[18],zero,ymm26[18,19,20,21],zero,ymm26[19],zero,ymm26[25,26,27,22],zero,ymm26[20],zero
9453 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm23 = ymm23[2,3,2,3]
9454 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} ymm25 = ymm29[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm29[18],zero,zero,zero,zero,ymm29[21],zero,ymm29[19],zero,zero,zero,zero,ymm29[22],zero,ymm29[20]
9455 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm25 = ymm25[2,3,2,3]
9456 ; AVX512DQBW-SLOW-NEXT: vporq %ymm23, %ymm25, %ymm23
9457 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm23, %zmm14, %zmm14
9458 ; AVX512DQBW-SLOW-NEXT: movabsq $3485998880071096368, %r10 # imm = 0x3060C183060C1830
9459 ; AVX512DQBW-SLOW-NEXT: kmovq %r10, %k2
9460 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm14, %zmm9 {%k2}
9461 ; AVX512DQBW-SLOW-NEXT: movabsq $-4357498600088870461, %r10 # imm = 0xC3870E1C3870E1C3
9462 ; AVX512DQBW-SLOW-NEXT: kmovq %r10, %k3
9463 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm24, %zmm9 {%k3}
9464 ; AVX512DQBW-SLOW-NEXT: vpshufhw {{.*#+}} ymm14 = ymm28[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
9465 ; AVX512DQBW-SLOW-NEXT: vpshufd {{.*#+}} ymm14 = ymm14[2,2,3,3,6,6,7,7]
9466 ; AVX512DQBW-SLOW-NEXT: vpbroadcastd {{.*#+}} ymm24 = [13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14]
9467 ; AVX512DQBW-SLOW-NEXT: movl $338170920, %r10d # imm = 0x14281428
9468 ; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k4
9469 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm24, %ymm27, %ymm14 {%k4}
9470 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
9471 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm23 = [25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128]
9472 ; AVX512DQBW-SLOW-NEXT: # ymm23 = mem[0,1,0,1]
9473 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm23, %ymm27, %ymm25
9474 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm25 = ymm25[2,3,2,3]
9475 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm27 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25]
9476 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm27, %ymm28, %ymm28
9477 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm28 = ymm28[2,3,2,3]
9478 ; AVX512DQBW-SLOW-NEXT: vporq %ymm25, %ymm28, %ymm25
9479 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm25, %zmm14
9480 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} ymm25 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm26[24,25],zero,ymm26[23],zero,ymm26[21,22,23,26],zero,ymm26[24],zero,ymm26[28,29,26,27]
9481 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm25 = ymm25[2,3,2,3]
9482 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} ymm28 = ymm29[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm29[25],zero,ymm29[23],zero,zero,zero,zero,ymm29[26],zero,ymm29[24],zero,zero,zero,zero
9483 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm28 = ymm28[2,3,2,3]
9484 ; AVX512DQBW-SLOW-NEXT: vporq %ymm25, %ymm28, %ymm28
9485 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm25 = [11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12]
9486 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm25, %ymm29, %ymm29
9487 ; AVX512DQBW-SLOW-NEXT: vpshufhw {{.*#+}} ymm26 = ymm26[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
9488 ; AVX512DQBW-SLOW-NEXT: vpshufd {{.*#+}} ymm26 = ymm26[0,2,3,3,4,6,7,7]
9489 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %ymm26, %ymm29 {%k1}
9490 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %zmm26
9491 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm29 = ymm29[2,3,2,3]
9492 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm29, %zmm28, %zmm28
9493 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %zmm29
9494 ; AVX512DQBW-SLOW-NEXT: movabsq $1742999440035548184, %r10 # imm = 0x183060C183060C18
9495 ; AVX512DQBW-SLOW-NEXT: kmovq %r10, %k3
9496 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm28, %zmm14 {%k3}
9497 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r9), %zmm28
9498 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm28[4,5,6,7]
9499 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} zmm22 = zmm22[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,zmm22[23],zero,zmm22[23,24,25,26],zero,zmm22[24],zero,zmm22[30,31,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,61],zero,zmm22[59],zero,zero,zero,zero,zmm22[62],zero,zmm22[60],zero,zero,zero,zero,zmm22[63],zero
9500 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm22 = zmm22[2,3,2,3,6,7,6,7]
9501 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm21 = zmm21[0,1,2,3],zmm29[4,5,6,7]
9502 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} zmm21 = zmm21[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm21[25],zero,zmm21[23],zero,zero,zero,zero,zmm21[26],zero,zmm21[24],zero,zero,zmm21[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm21[59],zero,zero,zero,zero,zmm21[62],zero,zmm21[60],zero,zero,zero,zero,zmm21[63],zero,zmm21[61]
9503 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm21 = zmm21[2,3,2,3,6,7,6,7]
9504 ; AVX512DQBW-SLOW-NEXT: vporq %zmm22, %zmm21, %zmm22
9505 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rax), %zmm21
9506 ; AVX512DQBW-SLOW-NEXT: movabsq $6971997760142192736, %rax # imm = 0x60C183060C183060
9507 ; AVX512DQBW-SLOW-NEXT: kmovq %rax, %k3
9508 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm22, %zmm14 {%k3}
9509 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm22 = [11,13,12,11,12,13,13,12,11,13,12,11,12,13,13,12,62,61,62,63,63,62,62,63,62,61,62,63,63,62,62,63]
9510 ; AVX512DQBW-SLOW-NEXT: vpermi2w %zmm21, %zmm2, %zmm22
9511 ; AVX512DQBW-SLOW-NEXT: movabsq $-9150747060186627967, %rax # imm = 0x8102040810204081
9512 ; AVX512DQBW-SLOW-NEXT: kmovq %rax, %k5
9513 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm22, %zmm14 {%k5}
9514 ; AVX512DQBW-SLOW-NEXT: vpshuflw {{.*#+}} ymm22 = ymm18[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
9515 ; AVX512DQBW-SLOW-NEXT: vpshufd {{.*#+}} ymm30 = ymm22[0,0,1,1,4,4,5,5]
9516 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm20, %ymm19, %ymm30 {%k1}
9517 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rcx), %zmm20
9518 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm23, %ymm19, %ymm22
9519 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm22 = ymm22[2,3,2,3]
9520 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm27, %ymm18, %ymm23
9521 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm23 = ymm23[2,3,2,3]
9522 ; AVX512DQBW-SLOW-NEXT: vporq %ymm22, %ymm23, %ymm23
9523 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%r9), %xmm22
9524 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm27 = ymm30[2,3,2,3]
9525 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm23, %zmm27, %zmm27
9526 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%r8), %xmm23
9527 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm26, %zmm26
9528 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} zmm26 = zmm26[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,zmm26[18,19,20,21],zero,zmm26[19],zero,zmm26[25,26,27,22],zero,zmm26[20],zero,zmm26[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57],zero,zmm26[55],zero,zmm26[53,54,55,58],zero,zmm26[56],zero,zmm26[60,61,58,59]
9529 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm26 = zmm26[2,3,2,3,6,7,6,7]
9530 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm17, %zmm20, %zmm20
9531 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} zmm20 = zmm20[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm20[18],zero,zero,zero,zero,zmm20[21],zero,zmm20[19],zero,zero,zero,zero,zmm20[22],zero,zmm20[20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zmm20[57],zero,zmm20[55],zero,zero,zero,zero,zmm20[58],zero,zmm20[56],zero,zero,zero,zero
9532 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm20 = zmm20[2,3,2,3,6,7,6,7]
9533 ; AVX512DQBW-SLOW-NEXT: vporq %zmm26, %zmm20, %zmm20
9534 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rdx), %xmm26
9535 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm27, %zmm20 {%k3}
9536 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rcx), %xmm27
9537 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm29, %zmm29
9538 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} zmm29 = zmm29[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,zmm29[18],zero,zmm29[20,21,20,21],zero,zmm29[19],zero,zmm29[19,20,21,22],zero,zmm29[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57,56,57],zero,zmm29[55],zero,zmm29[55,56,57,58],zero,zmm29[56],zero,zmm29[62,63]
9539 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm29 = zmm29[2,3,2,3,6,7,6,7]
9540 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm28, %zmm28
9541 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} zmm28 = zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm28[20],zero,zmm28[18],zero,zero,zero,zero,zmm28[21],zero,zmm28[19],zero,zero,zero,zero,zmm28[22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm28[57],zero,zmm28[55],zero,zero,zero,zero,zmm28[58],zero,zmm28[56],zero,zero
9542 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm28 = zmm28[2,3,2,3,6,7,6,7]
9543 ; AVX512DQBW-SLOW-NEXT: vporq %zmm29, %zmm28, %zmm29
9544 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm21, %zmm15
9545 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,27,29,28,27,28,29,29,28,27,29,28,27,28,29,29,28]
9546 ; AVX512DQBW-SLOW-NEXT: vpermw %zmm15, %zmm28, %zmm28
9547 ; AVX512DQBW-SLOW-NEXT: movabsq $1161999626690365456, %rax # imm = 0x1020408102040810
9548 ; AVX512DQBW-SLOW-NEXT: kmovq %rax, %k5
9549 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm28, %zmm29 {%k5}
9550 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rdi), %xmm28
9551 ; AVX512DQBW-SLOW-NEXT: movabsq $2033499346708139548, %rax # imm = 0x1C3870E1C3870E1C
9552 ; AVX512DQBW-SLOW-NEXT: kmovq %rax, %k5
9553 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm29, %zmm20 {%k5}
9554 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rsi), %xmm29
9555 ; AVX512DQBW-SLOW-NEXT: vpshufhw {{.*#+}} ymm18 = ymm18[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
9556 ; AVX512DQBW-SLOW-NEXT: vpshufd {{.*#+}} ymm30 = ymm18[2,2,3,3,6,6,7,7]
9557 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm24, %ymm19, %ymm30 {%k4}
9558 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm19 = xmm28[0],xmm29[0],xmm28[1],xmm29[1],xmm28[2],xmm29[2],xmm28[3],xmm29[3],xmm28[4],xmm29[4],xmm28[5],xmm29[5],xmm28[6],xmm29[6],xmm28[7],xmm29[7]
9559 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} xmm18 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
9560 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm18, %xmm19, %xmm19
9561 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm19 = zmm30[2,3,2,3],zmm19[0,1,0,1]
9562 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm24 = xmm26[0],xmm27[0],xmm26[1],xmm27[1],xmm26[2],xmm27[2],xmm26[3],xmm27[3],xmm26[4],xmm27[4],xmm26[5],xmm27[5],xmm26[6],xmm27[6],xmm26[7],xmm27[7]
9563 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm25, %ymm17, %ymm25
9564 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} xmm17 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
9565 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm17, %xmm24, %xmm24
9566 ; AVX512DQBW-SLOW-NEXT: vpshufhw {{.*#+}} ymm16 = ymm16[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
9567 ; AVX512DQBW-SLOW-NEXT: vpshufd {{.*#+}} ymm16 = ymm16[0,2,3,3,4,6,7,7]
9568 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %ymm16, %ymm25 {%k1}
9569 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm16 = zmm25[2,3,2,3],zmm24[0,1,0,1]
9570 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm19, %zmm16 {%k2}
9571 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm10[27],zero,zero,zero,zero,ymm10[30],zero,ymm10[28],zero,zero,zero,zero,ymm10[31],zero
9572 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
9573 ; AVX512DQBW-SLOW-NEXT: vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm11[27],zero,zero,zero,zero,ymm11[30],zero,ymm11[28],zero,zero,zero,zero,ymm11[31],zero,ymm11[29]
9574 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
9575 ; AVX512DQBW-SLOW-NEXT: vpor %ymm10, %ymm11, %ymm11
9576 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm19 = xmm23[0],xmm22[0],xmm23[1],xmm22[1],xmm23[2],xmm22[2],xmm23[3],xmm22[3],xmm23[4],xmm22[4],xmm23[5],xmm22[5],xmm23[6],xmm22[6],xmm23[7],xmm22[7]
9577 ; AVX512DQBW-SLOW-NEXT: vmovdqa {{.*#+}} xmm10 = <u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u>
9578 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm10, %xmm19, %xmm19
9579 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm19 = ymm19[0,1,0,1]
9580 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm19, %zmm11, %zmm11
9581 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm19 = [14,13,14,15,15,14,14,15,14,13,14,15,15,14,14,15,17,17,16,16,17,17,16,16,20,21,17,17,17,17,16,16]
9582 ; AVX512DQBW-SLOW-NEXT: vpermw %zmm21, %zmm19, %zmm19
9583 ; AVX512DQBW-SLOW-NEXT: movabsq $580999813345182728, %rax # imm = 0x810204081020408
9584 ; AVX512DQBW-SLOW-NEXT: kmovq %rax, %k1
9585 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm19, %zmm11 {%k1}
9586 ; AVX512DQBW-SLOW-NEXT: movabsq $1016749673354069774, %rax # imm = 0xE1C3870E1C3870E
9587 ; AVX512DQBW-SLOW-NEXT: kmovq %rax, %k1
9588 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm11, %zmm16 {%k1}
9589 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} xmm19 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
9590 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm19, %xmm27, %xmm11
9591 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} xmm24 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
9592 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm24, %xmm26, %xmm25
9593 ; AVX512DQBW-SLOW-NEXT: vporq %xmm11, %xmm25, %xmm11
9594 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm25 = xmm27[8],xmm26[8],xmm27[9],xmm26[9],xmm27[10],xmm26[10],xmm27[11],xmm26[11],xmm27[12],xmm26[12],xmm27[13],xmm26[13],xmm27[14],xmm26[14],xmm27[15],xmm26[15]
9595 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm12, %xmm25, %xmm12
9596 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, %xmm12, %zmm11, %zmm11
9597 ; AVX512DQBW-SLOW-NEXT: vmovdqa {{.*#+}} xmm12 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
9598 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm12, %xmm29, %xmm25
9599 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} xmm26 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
9600 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm26, %xmm28, %xmm27
9601 ; AVX512DQBW-SLOW-NEXT: vporq %xmm25, %xmm27, %xmm25
9602 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm27 = xmm29[8],xmm28[8],xmm29[9],xmm28[9],xmm29[10],xmm28[10],xmm29[11],xmm28[11],xmm29[12],xmm28[12],xmm29[13],xmm28[13],xmm29[14],xmm28[14],xmm29[15],xmm28[15]
9603 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm28 = zmm11[0,1,0,1,4,5,4,5]
9604 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm13, %xmm27, %xmm11
9605 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, %xmm11, %zmm25, %zmm11
9606 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm11 = zmm11[0,1,0,1,4,5,4,5]
9607 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm28, %zmm11 {%k3}
9608 ; AVX512DQBW-SLOW-NEXT: vmovdqa {{.*#+}} xmm13 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
9609 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm13, %xmm22, %xmm25
9610 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm27 = [2,2,2,4,2,2,2,4,3,3,3,3,2,2,2,4,52,53,52,53,53,54,53,54,52,53,52,53,53,54,53,54]
9611 ; AVX512DQBW-SLOW-NEXT: vpermi2w %zmm21, %zmm2, %zmm27
9612 ; AVX512DQBW-SLOW-NEXT: vmovdqa {{.*#+}} xmm2 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
9613 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm2, %xmm23, %xmm21
9614 ; AVX512DQBW-SLOW-NEXT: vporq %xmm25, %xmm21, %xmm21
9615 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm22 = xmm23[8],xmm22[8],xmm23[9],xmm22[9],xmm23[10],xmm22[10],xmm23[11],xmm22[11],xmm23[12],xmm22[12],xmm23[13],xmm22[13],xmm23[14],xmm22[14],xmm23[15],xmm22[15]
9616 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm4, %xmm22, %xmm4
9617 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, %xmm4, %zmm21, %zmm4
9618 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm4 = zmm4[0,1,0,1,4,5,4,5]
9619 ; AVX512DQBW-SLOW-NEXT: movabsq $290499906672591364, %rax # imm = 0x408102040810204
9620 ; AVX512DQBW-SLOW-NEXT: kmovq %rax, %k1
9621 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm27, %zmm4 {%k1}
9622 ; AVX512DQBW-SLOW-NEXT: movabsq $-8714997200177740921, %rax # imm = 0x870E1C3870E1C387
9623 ; AVX512DQBW-SLOW-NEXT: kmovq %rax, %k1
9624 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm4, %zmm11 {%k1}
9625 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm19, %xmm6, %xmm4
9626 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm24, %xmm5, %xmm19
9627 ; AVX512DQBW-SLOW-NEXT: vporq %xmm4, %xmm19, %xmm4
9628 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3],xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
9629 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm17, %xmm5, %xmm5
9630 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, %xmm4, %zmm5, %zmm4
9631 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm12, %xmm8, %xmm5
9632 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm26, %xmm7, %xmm6
9633 ; AVX512DQBW-SLOW-NEXT: vpor %xmm5, %xmm6, %xmm5
9634 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm6 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
9635 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm18, %xmm6, %xmm6
9636 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, %xmm5, %zmm6, %zmm5
9637 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm4 = zmm4[0,1,0,1,4,5,4,5]
9638 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm5 = zmm5[0,1,0,1,4,5,4,5]
9639 ; AVX512DQBW-SLOW-NEXT: movabsq $871499720017774092, %rax # imm = 0xC183060C183060C
9640 ; AVX512DQBW-SLOW-NEXT: kmovq %rax, %k1
9641 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm4, %zmm5 {%k1}
9642 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm13, %xmm0, %xmm4
9643 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm2, %xmm1, %xmm2
9644 ; AVX512DQBW-SLOW-NEXT: vpor %xmm4, %xmm2, %xmm2
9645 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
9646 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm10, %xmm0, %xmm0
9647 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm0, %zmm0
9648 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20]
9649 ; AVX512DQBW-SLOW-NEXT: vpermw %zmm15, %zmm1, %zmm1
9650 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm0 = zmm0[0,1,0,1,4,5,4,5]
9651 ; AVX512DQBW-SLOW-NEXT: movabsq $4647998506761461824, %rax # imm = 0x4081020408102040
9652 ; AVX512DQBW-SLOW-NEXT: kmovq %rax, %k1
9653 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm1, %zmm0 {%k1}
9654 ; AVX512DQBW-SLOW-NEXT: movabsq $8133997386832558192, %rax # imm = 0x70E1C3870E1C3870
9655 ; AVX512DQBW-SLOW-NEXT: kmovq %rax, %k1
9656 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm0, %zmm5 {%k1}
9657 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
9658 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm5, (%rax)
9659 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, 320(%rax)
9660 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm11, 256(%rax)
9661 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, 192(%rax)
9662 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, 128(%rax)
9663 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm3, 64(%rax)
9664 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm14, 384(%rax)
9665 ; AVX512DQBW-SLOW-NEXT: vzeroupper
9666 ; AVX512DQBW-SLOW-NEXT: retq
9667 %in.vec0 = load <64 x i8>, ptr %in.vecptr0, align 64
9668 %in.vec1 = load <64 x i8>, ptr %in.vecptr1, align 64
9669 %in.vec2 = load <64 x i8>, ptr %in.vecptr2, align 64
9670 %in.vec3 = load <64 x i8>, ptr %in.vecptr3, align 64
9671 %in.vec4 = load <64 x i8>, ptr %in.vecptr4, align 64
9672 %in.vec5 = load <64 x i8>, ptr %in.vecptr5, align 64
9673 %in.vec6 = load <64 x i8>, ptr %in.vecptr6, align 64
%1 = shufflevector <64 x i8> %in.vec0, <64 x i8> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
%2 = shufflevector <64 x i8> %in.vec2, <64 x i8> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
%3 = shufflevector <64 x i8> %in.vec4, <64 x i8> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
%4 = shufflevector <128 x i8> %1, <128 x i8> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
%5 = shufflevector <64 x i8> %in.vec6, <64 x i8> poison, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
%6 = shufflevector <128 x i8> %3, <128 x i8> %5, <192 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191>
%7 = shufflevector <192 x i8> %6, <192 x i8> poison, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
%8 = shufflevector <256 x i8> %4, <256 x i8> %7, <448 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383, i32 384, i32 385, i32 386, i32 387, i32 388, i32 389, i32 390, i32 391, i32 392, i32 393, i32 394, i32 395, i32 396, i32 397, i32 398, i32 399, i32 400, i32 401, i32 402, i32 403, i32 404, i32 405, i32 406, i32 407, i32 408, i32 409, i32 410, i32 411, i32 412, i32 413, i32 414, i32 415, i32 416, i32 417, i32 418, i32 419, i32 420, i32 421, i32 422, i32 423, i32 424, i32 425, i32 426, i32 427, i32 428, i32 429, i32 430, i32 431, i32 432, i32 433, i32 434, i32 435, i32 436, i32 437, i32 438, i32 439, i32 440, i32 441, i32 442, i32 443, i32 444, i32 445, i32 446, i32 447>
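; The final shuffle produces the stride-7 interleaving: result element 7*j+k takes element j of source vector k.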
%interleaved.vec = shufflevector <448 x i8> %8, <448 x i8> poison, <448 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 384, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 385, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 386, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 387, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 388, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 389, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 390, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 391, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 392, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 393, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 394, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 395, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 396, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 397, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 398, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 399, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 400, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 401, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 402, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 403, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 404, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 405, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 406, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 407, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 408, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 409, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 410, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 411, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 412, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 413, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 414, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 415, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 416, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 417, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 418, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 419, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 420, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 421, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 422, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 423, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 424, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 425, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 426, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 427, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 428, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 429, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 430, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 431, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 432, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 433, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 434, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 435, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 436, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 437, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 438, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 439, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 440, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 441, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 442, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 443, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 444, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 445, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 446, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383, i32 447>
store <448 x i8> %interleaved.vec, ptr %out.vec, align 64
ret void
}
;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
; AVX512-FAST: {{.*}}
; AVX512-SLOW: {{.*}}
; AVX512BW-ONLY-FAST: {{.*}}
; AVX512DQBW-FAST: {{.*}}
; FALLBACK10: {{.*}}
; FALLBACK11: {{.*}}
; FALLBACK12: {{.*}}