; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12

; These patterns are produced by LoopVectorizer for interleaved stores.
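;
; As a rough, hypothetical illustration (not part of the original test), a
; source loop of the following shape is the kind of input that ends up as the
; shufflevector-plus-wide-store patterns below; the array names and trip count
; are made up for this sketch:
;
;   for (int i = 0; i < n; ++i) {
;     out[6*i+0] = a[i]; out[6*i+1] = b[i]; out[6*i+2] = c[i];
;     out[6*i+3] = d[i]; out[6*i+4] = e[i]; out[6*i+5] = f[i];
;   }
;
; The vectorizer loads a vector from each of the six inputs, concatenates
; them, and emits a single interleaving shufflevector feeding one wide store;
; each function in this file exercises that pattern at a different vector width.
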
define void @store_i8_stride6_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i8_stride6_vf2:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movdqa (%rdi), %xmm0
; SSE-NEXT: movdqa (%rdx), %xmm1
; SSE-NEXT: movdqa (%r8), %xmm2
; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: pxor %xmm1, %xmm1
; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,5,7,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,2,1,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,1]
; SSE-NEXT: packuswb %xmm1, %xmm0
; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,0,65535,65535,0,65535,65535]
; SSE-NEXT: pand %xmm1, %xmm0
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
; SSE-NEXT: pandn %xmm2, %xmm1
; SSE-NEXT: por %xmm0, %xmm1
; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; SSE-NEXT: pand %xmm0, %xmm1
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
; SSE-NEXT: pandn %xmm2, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: movq %xmm0, (%rax)
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
; SSE-NEXT: movd %xmm0, 8(%rax)
; SSE-NEXT: retq
;
; AVX-LABEL: store_i8_stride6_vf2:
; AVX: # %bb.0:
; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX-NEXT: vmovdqa (%rdi), %xmm0
; AVX-NEXT: vmovdqa (%rdx), %xmm1
; AVX-NEXT: vmovdqa (%r9), %xmm2
; AVX-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX-NEXT: vmovq %xmm0, (%rax)
; AVX-NEXT: retq
  %in.vec0 = load <2 x i8>, ptr %in.vecptr0, align 64
  %in.vec1 = load <2 x i8>, ptr %in.vecptr1, align 64
  %in.vec2 = load <2 x i8>, ptr %in.vecptr2, align 64
  %in.vec3 = load <2 x i8>, ptr %in.vecptr3, align 64
  %in.vec4 = load <2 x i8>, ptr %in.vecptr4, align 64
  %in.vec5 = load <2 x i8>, ptr %in.vecptr5, align 64
  %1 = shufflevector <2 x i8> %in.vec0, <2 x i8> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %2 = shufflevector <2 x i8> %in.vec2, <2 x i8> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %3 = shufflevector <2 x i8> %in.vec4, <2 x i8> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %4 = shufflevector <4 x i8> %1, <4 x i8> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %5 = shufflevector <4 x i8> %3, <4 x i8> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <8 x i8> %4, <8 x i8> %5, <12 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11>
  %interleaved.vec = shufflevector <12 x i8> %6, <12 x i8> poison, <12 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11>
  store <12 x i8> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i8_stride6_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i8_stride6_vf4:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movdqa (%rdi), %xmm1
; SSE-NEXT: movdqa (%rdx), %xmm2
; SSE-NEXT: movdqa (%r8), %xmm0
; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
; SSE-NEXT: pxor %xmm3, %xmm3
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3],xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,1,0,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,2,1,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm5[1,3,2,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,4,6]
; SSE-NEXT: packuswb %xmm5, %xmm4
; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,0,65535,65535,0,65535,65535,0]
; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3],xmm1[4],xmm3[4],xmm1[5],xmm3[5],xmm1[6],xmm3[6],xmm1[7],xmm3[7]
; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm1[0,1,1,3]
; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm1[0,2,2,0]
; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm6[0,2,2,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,5,7,5]
; SSE-NEXT: packuswb %xmm3, %xmm6
; SSE-NEXT: pand %xmm5, %xmm6
; SSE-NEXT: pandn %xmm4, %xmm5
; SSE-NEXT: por %xmm6, %xmm5
; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,65535,0,65535,65535,0,65535,65535]
; SSE-NEXT: pand %xmm3, %xmm5
; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[0,0,0,0]
; SSE-NEXT: pandn %xmm4, %xmm3
; SSE-NEXT: por %xmm5, %xmm3
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,5,7,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,3,1,4,5,6,7]
; SSE-NEXT: packuswb %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,3,2,3]
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,0,0,65535,65535,65535,65535,65535]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm0, %xmm2
; SSE-NEXT: movq %xmm2, 16(%rax)
; SSE-NEXT: movdqa %xmm3, (%rax)
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i8_stride6_vf4:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm0
; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm1
; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm2
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,4,8,12],zero,zero,xmm0[1,5,9,13],zero,zero,xmm0[2,6,10,14]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = zero,zero,zero,zero,xmm1[0,4],zero,zero,zero,zero,xmm1[1,5],zero,zero,zero,zero
; AVX1-ONLY-NEXT: vpor %xmm3, %xmm2, %xmm2
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[3,7,11,15],zero,zero,xmm0[u,u,u,u,u,u,u,u]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[2,6],zero,zero,zero,zero,xmm1[3,7,u,u,u,u,u,u,u,u]
; AVX1-ONLY-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX1-ONLY-NEXT: vmovq %xmm0, 16(%rax)
; AVX1-ONLY-NEXT: vmovdqa %xmm2, (%rax)
; AVX1-ONLY-NEXT: retq
;
; AVX2-ONLY-LABEL: store_i8_stride6_vf4:
; AVX2-ONLY: # %bb.0:
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-ONLY-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-ONLY-NEXT: vmovdqa (%r8), %xmm2
; AVX2-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX2-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX2-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX2-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX2-ONLY-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,ymm0[1,5,9,13],zero,zero,ymm0[2,6,10,14,18,22],zero,zero,zero,zero,ymm0[19,23],zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4],zero,zero,zero,zero,ymm0[1,5],zero,zero,zero,zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-ONLY-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX2-ONLY-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-ONLY-NEXT: vmovq %xmm1, 16(%rax)
; AVX2-ONLY-NEXT: vmovdqa %xmm0, (%rax)
; AVX2-ONLY-NEXT: vzeroupper
; AVX2-ONLY-NEXT: retq
;
; AVX512F-LABEL: store_i8_stride6_vf4:
; AVX512F: # %bb.0:
; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-NEXT: vmovdqa (%rdi), %xmm0
; AVX512F-NEXT: vmovdqa (%rdx), %xmm1
; AVX512F-NEXT: vmovdqa (%r8), %xmm2
; AVX512F-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512F-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512F-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512F-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,ymm0[1,5,9,13],zero,zero,ymm0[2,6,10,14,18,22],zero,zero,zero,zero,ymm0[19,23,u,u,u,u,u,u,u,u]
; AVX512F-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX512F-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4],zero,zero,zero,zero,ymm0[1,5],zero,zero,zero,zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX512F-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512F-NEXT: vmovq %xmm1, 16(%rax)
; AVX512F-NEXT: vmovdqa %xmm0, (%rax)
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
; AVX512BW-LABEL: store_i8_stride6_vf4:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512BW-NEXT: vmovdqa (%r8), %xmm2
; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512BW-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12,u,u,1,5,9,13,u,u,2,6,10,14,18,22,u,u,u,u,19,23,u,u,u,u,u,u,u,u]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX512BW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,0,4,u,u,u,u,1,5,u,u,u,u,u,u,19,23,27,31,u,u,u,u,u,u,u,u,u,u]
; AVX512BW-NEXT: movw $1572, %cx # imm = 0x624
; AVX512BW-NEXT: kmovd %ecx, %k1
; AVX512BW-NEXT: vmovdqu16 %ymm0, %ymm1 {%k1}
; AVX512BW-NEXT: vextracti128 $1, %ymm1, %xmm0
; AVX512BW-NEXT: vmovq %xmm0, 16(%rax)
; AVX512BW-NEXT: vmovdqa %xmm1, (%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %in.vec0 = load <4 x i8>, ptr %in.vecptr0, align 64
  %in.vec1 = load <4 x i8>, ptr %in.vecptr1, align 64
  %in.vec2 = load <4 x i8>, ptr %in.vecptr2, align 64
  %in.vec3 = load <4 x i8>, ptr %in.vecptr3, align 64
  %in.vec4 = load <4 x i8>, ptr %in.vecptr4, align 64
  %in.vec5 = load <4 x i8>, ptr %in.vecptr5, align 64
  %1 = shufflevector <4 x i8> %in.vec0, <4 x i8> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %2 = shufflevector <4 x i8> %in.vec2, <4 x i8> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %3 = shufflevector <4 x i8> %in.vec4, <4 x i8> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %4 = shufflevector <8 x i8> %1, <8 x i8> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %5 = shufflevector <8 x i8> %3, <8 x i8> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <16 x i8> %4, <16 x i8> %5, <24 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23>
  %interleaved.vec = shufflevector <24 x i8> %6, <24 x i8> poison, <24 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23>
  store <24 x i8> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i8_stride6_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i8_stride6_vf8:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movq {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
; SSE-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm5 = mem[0],zero
; SSE-NEXT: pxor %xmm4, %xmm4
; SSE-NEXT: movdqa %xmm1, %xmm3
; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3],xmm1[4],xmm5[4],xmm1[5],xmm5[5],xmm1[6],xmm5[6],xmm1[7],xmm5[7]
; SSE-NEXT: movdqa %xmm5, %xmm7
; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3],xmm7[4],xmm4[4],xmm7[5],xmm4[5],xmm7[6],xmm4[6],xmm7[7],xmm4[7]
; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
; SSE-NEXT: movdqa %xmm3, %xmm4
; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm7[0],xmm4[1],xmm7[1],xmm4[2],xmm7[2],xmm4[3],xmm7[3]
; SSE-NEXT: packuswb %xmm4, %xmm4
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,3]
; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,65535,0,65535,65535,0,65535,65535]
; SSE-NEXT: movdqa %xmm4, %xmm8
; SSE-NEXT: pandn %xmm5, %xmm8
; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm0[0,0,1,1]
; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,0,65535,65535,0,65535,65535,0]
; SSE-NEXT: pand %xmm5, %xmm9
; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm2[1,0,2,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm6[0,1,0,1]
; SSE-NEXT: movdqa %xmm5, %xmm6
; SSE-NEXT: pandn %xmm10, %xmm6
; SSE-NEXT: por %xmm9, %xmm6
; SSE-NEXT: pand %xmm4, %xmm6
; SSE-NEXT: por %xmm8, %xmm6
; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm7[4],xmm3[5],xmm7[5],xmm3[6],xmm7[6],xmm3[7],xmm7[7]
; SSE-NEXT: packuswb %xmm3, %xmm3
; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,1,3,3]
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[3,3,3,3]
; SSE-NEXT: movdqa %xmm4, %xmm8
; SSE-NEXT: pandn %xmm7, %xmm8
; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm2[0,1,2,3,5,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,2,2,3]
; SSE-NEXT: pand %xmm4, %xmm7
; SSE-NEXT: por %xmm8, %xmm7
; SSE-NEXT: pand %xmm5, %xmm7
; SSE-NEXT: pandn %xmm3, %xmm5
; SSE-NEXT: por %xmm7, %xmm5
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,2]
; SSE-NEXT: pand %xmm4, %xmm0
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[3,3,3,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
; SSE-NEXT: pandn %xmm2, %xmm4
; SSE-NEXT: por %xmm0, %xmm4
; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
; SSE-NEXT: pand %xmm0, %xmm4
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,2,2]
; SSE-NEXT: pandn %xmm1, %xmm0
; SSE-NEXT: por %xmm4, %xmm0
; SSE-NEXT: movdqa %xmm5, 32(%rax)
; SSE-NEXT: movdqa %xmm0, 16(%rax)
; SSE-NEXT: movdqa %xmm6, (%rax)
; SSE-NEXT: retq
;
; AVX1-ONLY-LABEL: store_i8_stride6_vf8:
; AVX1-ONLY: # %bb.0:
; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm1[u,u],zero,zero,xmm1[3,11,u,u],zero,zero,xmm1[4,12,u,u],zero,zero
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[u,u,3,11],zero,zero,xmm0[u,u,4,12],zero,zero,xmm0[u,u,5,13]
; AVX1-ONLY-NEXT: vpor %xmm3, %xmm4, %xmm3
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm4[0],xmm3[1,2],xmm4[3],xmm3[4,5],xmm4[6],xmm3[7]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = zero,zero,xmm1[0,8,u,u],zero,zero,xmm1[1,9,u,u],zero,zero,xmm1[2,10]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm0[0,8],zero,zero,xmm0[u,u,1,9],zero,zero,xmm0[u,u,2,10],zero,zero
; AVX1-ONLY-NEXT: vpor %xmm4, %xmm5, %xmm4
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm2[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1],xmm5[2],xmm4[3,4],xmm5[5],xmm4[6,7]
; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX1-ONLY-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX1-ONLY-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX1-ONLY-NEXT: vmovaps %ymm3, (%rax)
; AVX1-ONLY-NEXT: vzeroupper
; AVX1-ONLY-NEXT: retq
;
; AVX2-ONLY-LABEL: store_i8_stride6_vf8:
; AVX2-ONLY: # %bb.0:
; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-ONLY-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX2-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX2-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-ONLY-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX2-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX2-ONLY-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm2
; AVX2-ONLY-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-ONLY-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm4 = ymm2[0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u]
; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,0,1]
; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,19,27,u,u,u,u,20,28,u,u,u,u,21,29]
; AVX2-ONLY-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2,3],ymm2[4],ymm4[5,6],ymm2[7],ymm4[8],ymm2[9],ymm4[10,11],ymm2[12],ymm4[13,14],ymm2[15]
; AVX2-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm3[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm5 = [255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255]
; AVX2-ONLY-NEXT: vpblendvb %ymm5, %ymm2, %ymm4, %ymm2
; AVX2-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX2-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX2-ONLY-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX2-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm3[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX2-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX2-ONLY-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX2-ONLY-NEXT: vmovdqa %ymm2, (%rax)
; AVX2-ONLY-NEXT: vzeroupper
; AVX2-ONLY-NEXT: retq
;
; AVX512F-LABEL: store_i8_stride6_vf8:
; AVX512F: # %bb.0:
; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512F-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512F-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512F-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512F-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512F-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512F-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512F-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
; AVX512F-NEXT: vpshufb {{.*#+}} ymm4 = ymm3[0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u]
; AVX512F-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,0,1]
; AVX512F-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,19,27,u,u,u,u,20,28,u,u,u,u,21,29]
; AVX512F-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3],ymm3[4],ymm4[5,6],ymm3[7],ymm4[8],ymm3[9],ymm4[10,11],ymm3[12],ymm4[13,14],ymm3[15]
; AVX512F-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX512F-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX512F-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm3, %ymm4
; AVX512F-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX512F-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX512F-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX512F-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX512F-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX512F-NEXT: vinserti32x4 $2, %xmm0, %zmm4, %zmm1
; AVX512F-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512F-NEXT: vmovdqa %ymm1, (%rax)
; AVX512F-NEXT: vzeroupper
; AVX512F-NEXT: retq
;
; AVX512BW-LABEL: store_i8_stride6_vf8:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512BW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512BW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512BW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
; AVX512BW-NEXT: vpshufb {{.*#+}} ymm4 = ymm3[0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,0,1]
; AVX512BW-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,19,27,u,u,u,u,20,28,u,u,u,u,21,29]
; AVX512BW-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3],ymm3[4],ymm4[5,6],ymm3[7],ymm4[8],ymm3[9],ymm4[10,11],ymm3[12],ymm4[13,14],ymm3[15]
; AVX512BW-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX512BW-NEXT: movw $18724, %cx # imm = 0x4924
; AVX512BW-NEXT: kmovd %ecx, %k1
; AVX512BW-NEXT: vmovdqu16 %ymm4, %ymm3 {%k1}
; AVX512BW-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX512BW-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX512BW-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX512BW-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX512BW-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX512BW-NEXT: vinserti32x4 $2, %xmm0, %zmm3, %zmm1
; AVX512BW-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512BW-NEXT: vmovdqa %ymm1, (%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %in.vec0 = load <8 x i8>, ptr %in.vecptr0, align 64
  %in.vec1 = load <8 x i8>, ptr %in.vecptr1, align 64
  %in.vec2 = load <8 x i8>, ptr %in.vecptr2, align 64
  %in.vec3 = load <8 x i8>, ptr %in.vecptr3, align 64
  %in.vec4 = load <8 x i8>, ptr %in.vecptr4, align 64
  %in.vec5 = load <8 x i8>, ptr %in.vecptr5, align 64
  %1 = shufflevector <8 x i8> %in.vec0, <8 x i8> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %2 = shufflevector <8 x i8> %in.vec2, <8 x i8> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %3 = shufflevector <8 x i8> %in.vec4, <8 x i8> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %4 = shufflevector <16 x i8> %1, <16 x i8> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
  %5 = shufflevector <16 x i8> %3, <16 x i8> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <32 x i8> %4, <32 x i8> %5, <48 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47>
  %interleaved.vec = shufflevector <48 x i8> %6, <48 x i8> poison, <48 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47>
  store <48 x i8> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

444 define void @store_i8_stride6_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
445 ; SSE-LABEL: store_i8_stride6_vf16:
447 ; SSE-NEXT: movdqa (%rdi), %xmm12
448 ; SSE-NEXT: movdqa (%rsi), %xmm8
449 ; SSE-NEXT: movdqa (%rdx), %xmm13
450 ; SSE-NEXT: movdqa (%rcx), %xmm2
451 ; SSE-NEXT: movdqa (%r8), %xmm11
452 ; SSE-NEXT: movdqa (%r9), %xmm10
453 ; SSE-NEXT: movdqa %xmm12, %xmm0
454 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm8[0],xmm0[1],xmm8[1],xmm0[2],xmm8[2],xmm0[3],xmm8[3],xmm0[4],xmm8[4],xmm0[5],xmm8[5],xmm0[6],xmm8[6],xmm0[7],xmm8[7]
455 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
456 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
457 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,0,65535,65535,0,65535,65535,0]
458 ; SSE-NEXT: pand %xmm4, %xmm0
459 ; SSE-NEXT: movdqa %xmm13, %xmm7
460 ; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm2[0],xmm7[1],xmm2[1],xmm7[2],xmm2[2],xmm7[3],xmm2[3],xmm7[4],xmm2[4],xmm7[5],xmm2[5],xmm7[6],xmm2[6],xmm7[7],xmm2[7]
461 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm7[1,0,2,2,4,5,6,7]
462 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
463 ; SSE-NEXT: movdqa %xmm4, %xmm3
464 ; SSE-NEXT: pandn %xmm1, %xmm3
465 ; SSE-NEXT: por %xmm0, %xmm3
466 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,0,65535,65535,0,65535,65535]
467 ; SSE-NEXT: pand %xmm1, %xmm3
468 ; SSE-NEXT: movdqa %xmm11, %xmm6
469 ; SSE-NEXT: punpcklbw {{.*#+}} xmm6 = xmm6[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
470 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,0,0,0]
471 ; SSE-NEXT: movdqa %xmm1, %xmm9
472 ; SSE-NEXT: pandn %xmm0, %xmm9
473 ; SSE-NEXT: por %xmm3, %xmm9
474 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
475 ; SSE-NEXT: pand %xmm3, %xmm9
476 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm10[0],xmm0[1],xmm10[1],xmm0[2],xmm10[2],xmm0[3],xmm10[3],xmm0[4],xmm10[4],xmm0[5],xmm10[5],xmm0[6],xmm10[6],xmm0[7],xmm10[7]
477 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
478 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm0[0,0,0,0]
479 ; SSE-NEXT: movdqa %xmm3, %xmm0
480 ; SSE-NEXT: pandn %xmm14, %xmm0
481 ; SSE-NEXT: por %xmm9, %xmm0
482 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
483 ; SSE-NEXT: punpckhbw {{.*#+}} xmm12 = xmm12[8],xmm8[8],xmm12[9],xmm8[9],xmm12[10],xmm8[10],xmm12[11],xmm8[11],xmm12[12],xmm8[12],xmm12[13],xmm8[13],xmm12[14],xmm8[14],xmm12[15],xmm8[15]
484 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm12[1,1,2,2]
485 ; SSE-NEXT: pand %xmm1, %xmm8
486 ; SSE-NEXT: punpckhbw {{.*#+}} xmm13 = xmm13[8],xmm2[8],xmm13[9],xmm2[9],xmm13[10],xmm2[10],xmm13[11],xmm2[11],xmm13[12],xmm2[12],xmm13[13],xmm2[13],xmm13[14],xmm2[14],xmm13[15],xmm2[15]
487 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm13[3,3,3,3,4,5,6,7]
488 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
489 ; SSE-NEXT: movdqa %xmm1, %xmm9
490 ; SSE-NEXT: pandn %xmm2, %xmm9
491 ; SSE-NEXT: por %xmm8, %xmm9
492 ; SSE-NEXT: movdqa {{.*#+}} xmm8 = [0,65535,65535,0,65535,65535,0,65535]
493 ; SSE-NEXT: pand %xmm8, %xmm9
494 ; SSE-NEXT: punpckhbw {{.*#+}} xmm11 = xmm11[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
495 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm11[1,1,2,2]
496 ; SSE-NEXT: movdqa %xmm8, %xmm14
497 ; SSE-NEXT: pandn %xmm2, %xmm14
498 ; SSE-NEXT: por %xmm9, %xmm14
499 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
500 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm10[8],xmm2[9],xmm10[9],xmm2[10],xmm10[10],xmm2[11],xmm10[11],xmm2[12],xmm10[12],xmm2[13],xmm10[13],xmm2[14],xmm10[14],xmm2[15],xmm10[15]
501 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm2[1,1,2,2]
502 ; SSE-NEXT: movdqa %xmm9, %xmm10
503 ; SSE-NEXT: pandn %xmm15, %xmm10
504 ; SSE-NEXT: pand %xmm9, %xmm14
505 ; SSE-NEXT: por %xmm14, %xmm10
506 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm12[3,3,3,3]
507 ; SSE-NEXT: movdqa %xmm1, %xmm15
508 ; SSE-NEXT: pandn %xmm14, %xmm15
509 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm13[0,1,2,3,5,6,7,7]
510 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[2,2,2,3]
511 ; SSE-NEXT: pand %xmm1, %xmm14
512 ; SSE-NEXT: por %xmm15, %xmm14
513 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm11[2,2,3,3]
514 ; SSE-NEXT: movdqa %xmm4, %xmm5
515 ; SSE-NEXT: pandn %xmm15, %xmm5
516 ; SSE-NEXT: pand %xmm4, %xmm14
517 ; SSE-NEXT: por %xmm14, %xmm5
518 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
519 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[2,2,3,3]
520 ; SSE-NEXT: movdqa %xmm14, %xmm15
521 ; SSE-NEXT: pandn %xmm0, %xmm15
522 ; SSE-NEXT: pand %xmm14, %xmm5
523 ; SSE-NEXT: por %xmm5, %xmm15
524 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[1,0,2,2,4,5,6,7]
525 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
526 ; SSE-NEXT: movdqa %xmm4, %xmm5
527 ; SSE-NEXT: pandn %xmm0, %xmm5
528 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm12[0,0,1,1]
529 ; SSE-NEXT: pand %xmm4, %xmm0
530 ; SSE-NEXT: por %xmm0, %xmm5
531 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[0,0,0,0]
532 ; SSE-NEXT: movdqa %xmm1, %xmm11
533 ; SSE-NEXT: pandn %xmm0, %xmm11
534 ; SSE-NEXT: pand %xmm1, %xmm5
535 ; SSE-NEXT: por %xmm5, %xmm11
536 ; SSE-NEXT: pand %xmm3, %xmm11
537 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,0,0,0]
538 ; SSE-NEXT: pandn %xmm0, %xmm3
539 ; SSE-NEXT: por %xmm11, %xmm3
540 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
541 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[3,3,3,3]
542 ; SSE-NEXT: movdqa %xmm1, %xmm2
543 ; SSE-NEXT: pandn %xmm0, %xmm2
544 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm7[0,1,2,3,5,6,7,7]
545 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
546 ; SSE-NEXT: pand %xmm1, %xmm0
547 ; SSE-NEXT: por %xmm2, %xmm0
548 ; SSE-NEXT: pand %xmm4, %xmm0
549 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm6[2,2,3,3]
550 ; SSE-NEXT: pandn %xmm2, %xmm4
551 ; SSE-NEXT: por %xmm0, %xmm4
552 ; SSE-NEXT: pand %xmm14, %xmm4
553 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
554 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[2,2,3,3]
555 ; SSE-NEXT: pandn %xmm0, %xmm14
556 ; SSE-NEXT: por %xmm4, %xmm14
557 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[1,1,2,2]
558 ; SSE-NEXT: pand %xmm1, %xmm0
559 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm7[3,3,3,3,4,5,6,7]
560 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
561 ; SSE-NEXT: pandn %xmm2, %xmm1
562 ; SSE-NEXT: por %xmm0, %xmm1
563 ; SSE-NEXT: pand %xmm8, %xmm1
564 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[1,1,2,2]
565 ; SSE-NEXT: pandn %xmm0, %xmm8
566 ; SSE-NEXT: por %xmm1, %xmm8
567 ; SSE-NEXT: pand %xmm9, %xmm8
568 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[1,1,2,2]
569 ; SSE-NEXT: pandn %xmm0, %xmm9
570 ; SSE-NEXT: por %xmm8, %xmm9
571 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
572 ; SSE-NEXT: movdqa %xmm9, 16(%rax)
573 ; SSE-NEXT: movdqa %xmm14, 32(%rax)
574 ; SSE-NEXT: movdqa %xmm3, 48(%rax)
575 ; SSE-NEXT: movdqa %xmm15, 80(%rax)
576 ; SSE-NEXT: movdqa %xmm10, 64(%rax)
577 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
578 ; SSE-NEXT: movaps %xmm0, (%rax)
581 ; AVX1-ONLY-LABEL: store_i8_stride6_vf16:
582 ; AVX1-ONLY: # %bb.0:
583 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
584 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm1
585 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm2
586 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm3
587 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm4
588 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm5
589 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm6
590 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
591 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm7[1,1,2,2]
592 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
593 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm9 = xmm8[3,3,3,3,4,5,6,7]
594 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,4,4,4]
595 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm9[2],xmm0[3,4],xmm9[5],xmm0[6,7]
596 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3],xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
597 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[1,1,2,2]
598 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm10[0],xmm0[1,2],xmm10[3],xmm0[4,5],xmm10[6],xmm0[7]
599 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm7[0,0,1,1]
600 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm11 = xmm8[1,0,2,2,4,5,6,7]
601 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[0,1,0,1]
602 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0],xmm11[1],xmm10[2,3],xmm11[4],xmm10[5,6],xmm11[7]
603 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm9[0,0,0,0]
604 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1],xmm11[2],xmm10[3,4],xmm11[5],xmm10[6,7]
605 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm10, %ymm0
606 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm2[8],xmm1[9],xmm2[9],xmm1[10],xmm2[10],xmm1[11],xmm2[11],xmm1[12],xmm2[12],xmm1[13],xmm2[13],xmm1[14],xmm2[14],xmm1[15],xmm2[15]
607 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
608 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm4[8],xmm3[9],xmm4[9],xmm3[10],xmm4[10],xmm3[11],xmm4[11],xmm3[12],xmm4[12],xmm3[13],xmm4[13],xmm3[14],xmm4[14],xmm3[15],xmm4[15]
609 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm3[1,0,2,2,4,5,6,7]
610 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[0,1,0,1]
611 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm4[1],xmm2[2,3],xmm4[4],xmm2[5,6],xmm4[7]
612 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
613 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm4[0,0,0,0]
614 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm5[2],xmm2[3,4],xmm5[5],xmm2[6,7]
615 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm7[3,3,3,3]
616 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm8[0,1,2,3,5,6,7,7]
617 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,2,3]
618 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2],xmm6[3,4],xmm5[5],xmm6[6,7]
619 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm9[2,2,3,3]
620 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0],xmm6[1],xmm5[2,3],xmm6[4],xmm5[5,6],xmm6[7]
621 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm5, %ymm2
622 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm1[3,3,3,3]
623 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm3[0,1,2,3,5,6,7,7]
624 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,2,3]
625 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2],xmm6[3,4],xmm5[5],xmm6[6,7]
626 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[2,2,3,3]
627 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0],xmm6[1],xmm5[2,3],xmm6[4],xmm5[5,6],xmm6[7]
628 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,1,2,2]
629 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[3,3,3,3,4,5,6,7]
630 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
631 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm3[2],xmm1[3,4],xmm3[5],xmm1[6,7]
632 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm4[1,1,2,2]
633 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0],xmm1[1,2],xmm3[3],xmm1[4,5],xmm3[6],xmm1[7]
634 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm1, %ymm1
635 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 64(%rax)
636 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 32(%rax)
637 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
638 ; AVX1-ONLY-NEXT: vzeroupper
639 ; AVX1-ONLY-NEXT: retq
641 ; AVX2-ONLY-LABEL: store_i8_stride6_vf16:
642 ; AVX2-ONLY: # %bb.0:
643 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
644 ; AVX2-ONLY-NEXT: vmovdqa (%rdi), %xmm0
645 ; AVX2-ONLY-NEXT: vmovdqa (%rdx), %xmm1
646 ; AVX2-ONLY-NEXT: vmovdqa (%r8), %xmm2
647 ; AVX2-ONLY-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
648 ; AVX2-ONLY-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
649 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm2
650 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
651 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm3[0,8],zero,zero,zero,zero,ymm3[1,9],zero,zero,zero,zero,ymm3[2,10],zero,zero,zero,zero,ymm3[19,27],zero,zero,zero,zero,ymm3[20,28],zero,zero,zero,zero
652 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
653 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,8],zero,zero,zero,zero,ymm4[1,9],zero,zero,zero,zero,ymm4[2,10],zero,zero,zero,zero,ymm4[19,27],zero,zero,zero,zero,ymm4[20,28],zero,zero,zero,zero,ymm4[21,29]
654 ; AVX2-ONLY-NEXT: vpor %ymm3, %ymm4, %ymm3
655 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,0,2]
656 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,19,27,u,u,u,u,20,28,u,u]
657 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm5 = [255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255]
658 ; AVX2-ONLY-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
659 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
660 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,ymm4[6,14],zero,zero,zero,zero,ymm4[7,15],zero,zero,zero,zero,ymm4[16,24],zero,zero,zero,zero,ymm4[17,25],zero,zero,zero,zero,ymm4[18,26],zero,zero
661 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
662 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[5,13],zero,zero,zero,zero,ymm5[6,14],zero,zero,zero,zero,ymm5[7,15],zero,zero,zero,zero,ymm5[16,24],zero,zero,zero,zero,ymm5[17,25],zero,zero,zero,zero,ymm5[18,26]
663 ; AVX2-ONLY-NEXT: vpor %ymm4, %ymm5, %ymm4
664 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
665 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15,u,u,u,u,16,24,u,u,u,u,17,25,u,u,u,u]
666 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255]
667 ; AVX2-ONLY-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
668 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
669 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[3,11],zero,zero,zero,zero,ymm1[4,12],zero,zero,zero,zero,ymm1[21,29],zero,zero,zero,zero,ymm1[22,30],zero,zero,zero,zero,ymm1[23,31],zero,zero
670 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
671 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,ymm0[3,11],zero,zero,zero,zero,ymm0[4,12],zero,zero,zero,zero,ymm0[5,13],zero,zero,zero,zero,ymm0[22,30],zero,zero,zero,zero,ymm0[23,31],zero,zero,zero,zero
672 ; AVX2-ONLY-NEXT: vpor %ymm1, %ymm0, %ymm0
673 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
674 ; AVX2-ONLY-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31]
675 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm2 = [0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0]
676 ; AVX2-ONLY-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
677 ; AVX2-ONLY-NEXT: vmovdqa %ymm0, 64(%rax)
678 ; AVX2-ONLY-NEXT: vmovdqa %ymm4, 32(%rax)
679 ; AVX2-ONLY-NEXT: vmovdqa %ymm3, (%rax)
680 ; AVX2-ONLY-NEXT: vzeroupper
681 ; AVX2-ONLY-NEXT: retq
683 ; AVX512F-LABEL: store_i8_stride6_vf16:
685 ; AVX512F-NEXT: movq {{[0-9]+}}(%rsp), %rax
686 ; AVX512F-NEXT: vmovdqa (%rdi), %xmm0
687 ; AVX512F-NEXT: vmovdqa (%rdx), %xmm1
688 ; AVX512F-NEXT: vmovdqa (%r8), %xmm2
689 ; AVX512F-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
690 ; AVX512F-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
691 ; AVX512F-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
692 ; AVX512F-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
693 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm3[0,8,u,u],zero,zero,ymm3[1,9,u,u],zero,zero,ymm3[2,10,u,u],zero,zero,ymm3[19,27,u,u],zero,zero,ymm3[20,28,u,u],zero,zero
694 ; AVX512F-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
695 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,ymm4[u,u,6,14],zero,zero,ymm4[u,u,7,15],zero,zero,ymm4[u,u,16,24],zero,zero,ymm4[u,u,17,25],zero,zero,ymm4[u,u,18,26],zero,zero
696 ; AVX512F-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
697 ; AVX512F-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
698 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,8],zero,zero,ymm4[u,u,1,9],zero,zero,ymm4[u,u,2,10],zero,zero,ymm4[u,u,19,27],zero,zero,ymm4[u,u,20,28],zero,zero,ymm4[u,u,21,29]
699 ; AVX512F-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
700 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[5,13,u,u],zero,zero,ymm5[6,14,u,u],zero,zero,ymm5[7,15,u,u],zero,zero,ymm5[16,24,u,u],zero,zero,ymm5[17,25,u,u],zero,zero,ymm5[18,26]
701 ; AVX512F-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
702 ; AVX512F-NEXT: vporq %zmm3, %zmm4, %zmm3
703 ; AVX512F-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,0,2]
704 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,ymm4[0,8],zero,zero,zero,zero,ymm4[1,9],zero,zero,zero,zero,ymm4[18,26],zero,zero,zero,zero,ymm4[19,27],zero,zero,zero,zero,ymm4[20,28],zero,zero
705 ; AVX512F-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
706 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,ymm5[5,13],zero,zero,zero,zero,ymm5[6,14],zero,zero,zero,zero,ymm5[7,15],zero,zero,zero,zero,ymm5[16,24],zero,zero,zero,zero,ymm5[17,25],zero,zero,zero,zero
707 ; AVX512F-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
708 ; AVX512F-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm4
709 ; AVX512F-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
710 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u],zero,zero,ymm1[3,11,u,u],zero,zero,ymm1[4,12,u,u],zero,zero,ymm1[21,29,u,u],zero,zero,ymm1[22,30,u,u],zero,zero,ymm1[23,31,u,u]
711 ; AVX512F-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
712 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,3,11],zero,zero,ymm0[u,u,4,12],zero,zero,ymm0[u,u,5,13],zero,zero,ymm0[u,u,22,30],zero,zero,ymm0[u,u,23,31],zero,zero,ymm0[u,u]
713 ; AVX512F-NEXT: vpor %ymm1, %ymm0, %ymm0
714 ; AVX512F-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
715 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10],zero,zero,zero,zero,ymm1[3,11],zero,zero,zero,zero,ymm1[4,12],zero,zero,zero,zero,ymm1[21,29],zero,zero,zero,zero,ymm1[22,30],zero,zero,zero,zero,ymm1[23,31]
716 ; AVX512F-NEXT: vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1
717 ; AVX512F-NEXT: vmovdqa %ymm1, 64(%rax)
718 ; AVX512F-NEXT: vmovdqa64 %zmm4, (%rax)
719 ; AVX512F-NEXT: vzeroupper
722 ; AVX512BW-SLOW-LABEL: store_i8_stride6_vf16:
723 ; AVX512BW-SLOW: # %bb.0:
724 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
725 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdi), %xmm0
726 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdx), %xmm1
727 ; AVX512BW-SLOW-NEXT: vmovdqa (%r8), %xmm2
728 ; AVX512BW-SLOW-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
729 ; AVX512BW-SLOW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
730 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
731 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
732 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
733 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
734 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[u,u,0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58,u,u]
735 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
736 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
737 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
738 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} zmm4 = zmm4[0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,21,29,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58]
739 ; AVX512BW-SLOW-NEXT: movl $1227105426, %ecx # imm = 0x49242492
740 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
741 ; AVX512BW-SLOW-NEXT: vmovdqu16 %zmm3, %zmm4 {%k1}
742 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm2[0,2,0,2]
743 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
744 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm3, %zmm3
745 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u]
746 ; AVX512BW-SLOW-NEXT: movl $613566756, %ecx # imm = 0x24924924
747 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
748 ; AVX512BW-SLOW-NEXT: vmovdqu16 %zmm3, %zmm4 {%k1}
749 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
750 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31,u,u]
751 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
752 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,3,11,u,u,u,u,4,12,u,u,u,u,5,13,u,u,u,u,22,30,u,u,u,u,23,31,u,u,u,u]
753 ; AVX512BW-SLOW-NEXT: movw $18724, %cx # imm = 0x4924
754 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
755 ; AVX512BW-SLOW-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
756 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
757 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31]
758 ; AVX512BW-SLOW-NEXT: movw $-28087, %cx # imm = 0x9249
759 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
760 ; AVX512BW-SLOW-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
761 ; AVX512BW-SLOW-NEXT: vmovdqa %ymm0, 64(%rax)
762 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm4, (%rax)
763 ; AVX512BW-SLOW-NEXT: vzeroupper
764 ; AVX512BW-SLOW-NEXT: retq
766 ; AVX512BW-FAST-LABEL: store_i8_stride6_vf16:
767 ; AVX512BW-FAST: # %bb.0:
768 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
769 ; AVX512BW-FAST-NEXT: vmovdqa (%rdi), %xmm0
770 ; AVX512BW-FAST-NEXT: vmovdqa (%rdx), %xmm1
771 ; AVX512BW-FAST-NEXT: vmovdqa (%r8), %xmm2
772 ; AVX512BW-FAST-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
773 ; AVX512BW-FAST-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
774 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm3
775 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
776 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,2,0,2,8,10,9,11]
777 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm1, %zmm5
778 ; AVX512BW-FAST-NEXT: vpermt2q %zmm3, %zmm4, %zmm5
779 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm5 = zmm5[u,u,0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58,u,u]
780 ; AVX512BW-FAST-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
781 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,21,29,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58]
782 ; AVX512BW-FAST-NEXT: movl $1227105426, %ecx # imm = 0x49242492
783 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
784 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm5, %zmm3 {%k1}
785 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,2,0,2,0,2,1,3]
786 ; AVX512BW-FAST-NEXT: vpermq %zmm2, %zmm4, %zmm4
787 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} zmm4 = zmm4[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u]
788 ; AVX512BW-FAST-NEXT: movl $613566756, %ecx # imm = 0x24924924
789 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
790 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm4, %zmm3 {%k1}
791 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
792 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31,u,u]
793 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
794 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,3,11,u,u,u,u,4,12,u,u,u,u,5,13,u,u,u,u,22,30,u,u,u,u,23,31,u,u,u,u]
795 ; AVX512BW-FAST-NEXT: movw $18724, %cx # imm = 0x4924
796 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
797 ; AVX512BW-FAST-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
798 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
799 ; AVX512BW-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31]
800 ; AVX512BW-FAST-NEXT: movw $-28087, %cx # imm = 0x9249
801 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
802 ; AVX512BW-FAST-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
803 ; AVX512BW-FAST-NEXT: vmovdqa %ymm0, 64(%rax)
804 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm3, (%rax)
805 ; AVX512BW-FAST-NEXT: vzeroupper
806 ; AVX512BW-FAST-NEXT: retq
807 %in.vec0 = load <16 x i8>, ptr %in.vecptr0, align 64
808 %in.vec1 = load <16 x i8>, ptr %in.vecptr1, align 64
809 %in.vec2 = load <16 x i8>, ptr %in.vecptr2, align 64
810 %in.vec3 = load <16 x i8>, ptr %in.vecptr3, align 64
811 %in.vec4 = load <16 x i8>, ptr %in.vecptr4, align 64
812 %in.vec5 = load <16 x i8>, ptr %in.vecptr5, align 64
813 %1 = shufflevector <16 x i8> %in.vec0, <16 x i8> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
814 %2 = shufflevector <16 x i8> %in.vec2, <16 x i8> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
815 %3 = shufflevector <16 x i8> %in.vec4, <16 x i8> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
816 %4 = shufflevector <32 x i8> %1, <32 x i8> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
817 %5 = shufflevector <32 x i8> %3, <32 x i8> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
818 %6 = shufflevector <64 x i8> %4, <64 x i8> %5, <96 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95>
819 %interleaved.vec = shufflevector <96 x i8> %6, <96 x i8> poison, <96 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95>
820 store <96 x i8> %interleaved.vec, ptr %out.vec, align 64
821 ret void
822 }
824 define void @store_i8_stride6_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
825 ; SSE-LABEL: store_i8_stride6_vf32:
826 ; SSE: # %bb.0:
827 ; SSE-NEXT: subq $200, %rsp
828 ; SSE-NEXT: movdqa 16(%rdi), %xmm8
829 ; SSE-NEXT: movdqa 16(%rsi), %xmm5
830 ; SSE-NEXT: movdqa 16(%rdx), %xmm12
831 ; SSE-NEXT: movdqa 16(%rcx), %xmm4
832 ; SSE-NEXT: movdqa 16(%r8), %xmm11
833 ; SSE-NEXT: movdqa 16(%r9), %xmm0
834 ; SSE-NEXT: movdqa %xmm8, %xmm1
835 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
836 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
837 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
838 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [65535,0,65535,65535,0,65535,65535,0]
839 ; SSE-NEXT: pand %xmm9, %xmm2
840 ; SSE-NEXT: movdqa %xmm12, %xmm1
841 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm4[8],xmm1[9],xmm4[9],xmm1[10],xmm4[10],xmm1[11],xmm4[11],xmm1[12],xmm4[12],xmm1[13],xmm4[13],xmm1[14],xmm4[14],xmm1[15],xmm4[15]
842 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
843 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm1[1,0,2,2,4,5,6,7]
844 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
845 ; SSE-NEXT: movdqa %xmm9, %xmm6
846 ; SSE-NEXT: pandn %xmm3, %xmm6
847 ; SSE-NEXT: por %xmm2, %xmm6
848 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,65535,0,65535,65535,0,65535,65535]
849 ; SSE-NEXT: pand %xmm3, %xmm6
850 ; SSE-NEXT: movdqa %xmm11, %xmm1
851 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
852 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
853 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,0,0]
854 ; SSE-NEXT: movdqa %xmm3, %xmm7
855 ; SSE-NEXT: pandn %xmm2, %xmm7
856 ; SSE-NEXT: por %xmm6, %xmm7
857 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
858 ; SSE-NEXT: pand %xmm2, %xmm7
859 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
860 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
861 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm1[0,0,0,0]
862 ; SSE-NEXT: movdqa %xmm2, %xmm1
863 ; SSE-NEXT: pandn %xmm6, %xmm1
864 ; SSE-NEXT: por %xmm7, %xmm1
865 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
866 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm5[0],xmm8[1],xmm5[1],xmm8[2],xmm5[2],xmm8[3],xmm5[3],xmm8[4],xmm5[4],xmm8[5],xmm5[5],xmm8[6],xmm5[6],xmm8[7],xmm5[7]
867 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
868 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm8[3,3,3,3]
869 ; SSE-NEXT: movdqa %xmm3, %xmm6
870 ; SSE-NEXT: pandn %xmm5, %xmm6
871 ; SSE-NEXT: punpcklbw {{.*#+}} xmm12 = xmm12[0],xmm4[0],xmm12[1],xmm4[1],xmm12[2],xmm4[2],xmm12[3],xmm4[3],xmm12[4],xmm4[4],xmm12[5],xmm4[5],xmm12[6],xmm4[6],xmm12[7],xmm4[7]
872 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
873 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm12[0,1,2,3,5,6,7,7]
874 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
875 ; SSE-NEXT: pand %xmm3, %xmm4
876 ; SSE-NEXT: por %xmm6, %xmm4
877 ; SSE-NEXT: pand %xmm9, %xmm4
878 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
879 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
880 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm11[2,2,3,3]
881 ; SSE-NEXT: movdqa %xmm9, %xmm6
882 ; SSE-NEXT: pandn %xmm5, %xmm6
883 ; SSE-NEXT: por %xmm4, %xmm6
884 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
885 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
886 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
887 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
888 ; SSE-NEXT: movdqa %xmm10, %xmm1
889 ; SSE-NEXT: pandn %xmm0, %xmm1
890 ; SSE-NEXT: movdqa (%rdi), %xmm0
891 ; SSE-NEXT: pand %xmm10, %xmm6
892 ; SSE-NEXT: por %xmm6, %xmm1
893 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
894 ; SSE-NEXT: movdqa (%rsi), %xmm14
895 ; SSE-NEXT: movdqa %xmm0, %xmm1
896 ; SSE-NEXT: movdqa %xmm0, %xmm8
897 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm14[8],xmm1[9],xmm14[9],xmm1[10],xmm14[10],xmm1[11],xmm14[11],xmm1[12],xmm14[12],xmm1[13],xmm14[13],xmm1[14],xmm14[14],xmm1[15],xmm14[15]
898 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[3,3,3,3]
899 ; SSE-NEXT: movdqa %xmm1, %xmm2
900 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
901 ; SSE-NEXT: movdqa %xmm3, %xmm6
902 ; SSE-NEXT: pandn %xmm0, %xmm6
903 ; SSE-NEXT: movdqa (%rdx), %xmm11
904 ; SSE-NEXT: movdqa (%rcx), %xmm12
905 ; SSE-NEXT: movdqa %xmm11, %xmm1
906 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm12[8],xmm1[9],xmm12[9],xmm1[10],xmm12[10],xmm1[11],xmm12[11],xmm1[12],xmm12[12],xmm1[13],xmm12[13],xmm1[14],xmm12[14],xmm1[15],xmm12[15]
907 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,5,6,7,7]
908 ; SSE-NEXT: movdqa %xmm1, %xmm5
909 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
910 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
911 ; SSE-NEXT: pand %xmm3, %xmm0
912 ; SSE-NEXT: por %xmm6, %xmm0
913 ; SSE-NEXT: movdqa (%r8), %xmm13
914 ; SSE-NEXT: movdqa %xmm13, %xmm7
915 ; SSE-NEXT: punpckhbw {{.*#+}} xmm7 = xmm7[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
916 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm7[2,2,3,3]
917 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
918 ; SSE-NEXT: movdqa %xmm9, %xmm15
919 ; SSE-NEXT: pandn %xmm6, %xmm15
920 ; SSE-NEXT: pand %xmm9, %xmm0
921 ; SSE-NEXT: por %xmm0, %xmm15
922 ; SSE-NEXT: movdqa (%r9), %xmm6
923 ; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm6[8],xmm4[9],xmm6[9],xmm4[10],xmm6[10],xmm4[11],xmm6[11],xmm4[12],xmm6[12],xmm4[13],xmm6[13],xmm4[14],xmm6[14],xmm4[15],xmm6[15]
924 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm4[2,2,3,3]
925 ; SSE-NEXT: movdqa %xmm4, (%rsp) # 16-byte Spill
926 ; SSE-NEXT: movdqa %xmm10, %xmm1
927 ; SSE-NEXT: pandn %xmm0, %xmm1
928 ; SSE-NEXT: pand %xmm10, %xmm15
929 ; SSE-NEXT: por %xmm15, %xmm1
930 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
931 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm5[3,3,3,3,4,5,6,7]
932 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
933 ; SSE-NEXT: movdqa %xmm3, %xmm15
934 ; SSE-NEXT: pandn %xmm0, %xmm15
935 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,2,2]
936 ; SSE-NEXT: pand %xmm3, %xmm0
937 ; SSE-NEXT: por %xmm0, %xmm15
938 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [0,65535,65535,0,65535,65535,0,65535]
939 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[1,1,2,2]
940 ; SSE-NEXT: movdqa %xmm1, %xmm0
941 ; SSE-NEXT: pandn %xmm2, %xmm0
942 ; SSE-NEXT: pand %xmm1, %xmm15
943 ; SSE-NEXT: por %xmm15, %xmm0
944 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
945 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[1,1,2,2]
946 ; SSE-NEXT: movdqa %xmm5, %xmm15
947 ; SSE-NEXT: pandn %xmm2, %xmm15
948 ; SSE-NEXT: pand %xmm5, %xmm0
949 ; SSE-NEXT: por %xmm0, %xmm15
950 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
951 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3],xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
952 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3],xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
953 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[3,3,3,3,4,5,6,7]
954 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
955 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
956 ; SSE-NEXT: movdqa %xmm3, %xmm2
957 ; SSE-NEXT: pandn %xmm0, %xmm2
958 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm8[1,1,2,2]
959 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
960 ; SSE-NEXT: pand %xmm3, %xmm0
961 ; SSE-NEXT: por %xmm0, %xmm2
962 ; SSE-NEXT: punpcklbw {{.*#+}} xmm13 = xmm13[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
963 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm13[1,1,2,2]
964 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
965 ; SSE-NEXT: movdqa %xmm1, %xmm4
966 ; SSE-NEXT: pandn %xmm0, %xmm4
967 ; SSE-NEXT: pand %xmm1, %xmm2
968 ; SSE-NEXT: por %xmm2, %xmm4
969 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1],xmm2[2],xmm6[2],xmm2[3],xmm6[3],xmm2[4],xmm6[4],xmm2[5],xmm6[5],xmm2[6],xmm6[6],xmm2[7],xmm6[7]
970 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,2,2]
971 ; SSE-NEXT: movdqa %xmm2, %xmm6
972 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
973 ; SSE-NEXT: movdqa %xmm5, %xmm12
974 ; SSE-NEXT: pandn %xmm0, %xmm12
975 ; SSE-NEXT: pand %xmm5, %xmm4
976 ; SSE-NEXT: por %xmm4, %xmm12
977 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[1,0,2,2,4,5,6,7]
978 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
979 ; SSE-NEXT: movdqa %xmm9, %xmm2
980 ; SSE-NEXT: pandn %xmm0, %xmm2
981 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm8[0,0,1,1]
982 ; SSE-NEXT: pand %xmm9, %xmm0
983 ; SSE-NEXT: por %xmm0, %xmm2
984 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm13[0,0,0,0]
985 ; SSE-NEXT: movdqa %xmm3, %xmm4
986 ; SSE-NEXT: pandn %xmm0, %xmm4
987 ; SSE-NEXT: pand %xmm3, %xmm2
988 ; SSE-NEXT: por %xmm2, %xmm4
989 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,0,0,0]
990 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
991 ; SSE-NEXT: movdqa %xmm7, %xmm11
992 ; SSE-NEXT: pandn %xmm0, %xmm11
993 ; SSE-NEXT: pand %xmm7, %xmm4
994 ; SSE-NEXT: por %xmm4, %xmm11
995 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
996 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm4[3,3,3,3]
997 ; SSE-NEXT: movdqa %xmm3, %xmm2
998 ; SSE-NEXT: pandn %xmm0, %xmm2
999 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
1000 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm13[0,1,2,3,5,6,7,7]
1001 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
1002 ; SSE-NEXT: pand %xmm3, %xmm0
1003 ; SSE-NEXT: por %xmm2, %xmm0
1004 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
1005 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm14[2,2,3,3]
1006 ; SSE-NEXT: movdqa %xmm9, %xmm8
1007 ; SSE-NEXT: pandn %xmm2, %xmm8
1008 ; SSE-NEXT: pand %xmm9, %xmm0
1009 ; SSE-NEXT: por %xmm0, %xmm8
1010 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
1011 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm15[2,2,3,3]
1012 ; SSE-NEXT: movdqa %xmm10, %xmm6
1013 ; SSE-NEXT: pandn %xmm0, %xmm6
1014 ; SSE-NEXT: pand %xmm10, %xmm8
1015 ; SSE-NEXT: por %xmm8, %xmm6
1016 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[3,3,3,3,4,5,6,7]
1017 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
1018 ; SSE-NEXT: movdqa %xmm3, %xmm2
1019 ; SSE-NEXT: pandn %xmm0, %xmm2
1020 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm4[1,1,2,2]
1021 ; SSE-NEXT: pand %xmm3, %xmm0
1022 ; SSE-NEXT: por %xmm0, %xmm2
1023 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[1,1,2,2]
1024 ; SSE-NEXT: movdqa %xmm1, %xmm8
1025 ; SSE-NEXT: pandn %xmm0, %xmm8
1026 ; SSE-NEXT: pand %xmm1, %xmm2
1027 ; SSE-NEXT: por %xmm2, %xmm8
1028 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm15[1,1,2,2]
1029 ; SSE-NEXT: movdqa %xmm5, %xmm15
1030 ; SSE-NEXT: pandn %xmm0, %xmm15
1031 ; SSE-NEXT: pand %xmm5, %xmm8
1032 ; SSE-NEXT: por %xmm8, %xmm15
1033 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1034 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[3,3,3,3,4,5,6,7]
1035 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
1036 ; SSE-NEXT: movdqa %xmm3, %xmm8
1037 ; SSE-NEXT: pandn %xmm0, %xmm8
1038 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
1039 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm13[1,1,2,2]
1040 ; SSE-NEXT: pand %xmm3, %xmm0
1041 ; SSE-NEXT: por %xmm0, %xmm8
1042 ; SSE-NEXT: pand %xmm1, %xmm8
1043 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
1044 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[1,1,2,2]
1045 ; SSE-NEXT: pandn %xmm0, %xmm1
1046 ; SSE-NEXT: por %xmm8, %xmm1
1047 ; SSE-NEXT: pand %xmm5, %xmm1
1048 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
1049 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm4[1,1,2,2]
1050 ; SSE-NEXT: pandn %xmm0, %xmm5
1051 ; SSE-NEXT: por %xmm1, %xmm5
1052 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[1,0,2,2,4,5,6,7]
1053 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
1054 ; SSE-NEXT: movdqa %xmm9, %xmm1
1055 ; SSE-NEXT: pandn %xmm0, %xmm1
1056 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm13[0,0,1,1]
1057 ; SSE-NEXT: pand %xmm9, %xmm0
1058 ; SSE-NEXT: por %xmm0, %xmm1
1059 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[0,0,0,0]
1060 ; SSE-NEXT: movdqa %xmm3, %xmm8
1061 ; SSE-NEXT: pandn %xmm0, %xmm8
1062 ; SSE-NEXT: pand %xmm3, %xmm1
1063 ; SSE-NEXT: por %xmm1, %xmm8
1064 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[0,0,0,0]
1065 ; SSE-NEXT: movdqa %xmm7, %xmm0
1066 ; SSE-NEXT: pandn %xmm1, %xmm0
1067 ; SSE-NEXT: pand %xmm7, %xmm8
1068 ; SSE-NEXT: por %xmm8, %xmm0
1069 ; SSE-NEXT: pshuflw $161, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1070 ; SSE-NEXT: # xmm1 = mem[1,0,2,2,4,5,6,7]
1071 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
1072 ; SSE-NEXT: movdqa %xmm9, %xmm8
1073 ; SSE-NEXT: pandn %xmm1, %xmm8
1074 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1075 ; SSE-NEXT: # xmm1 = mem[0,0,1,1]
1076 ; SSE-NEXT: pand %xmm9, %xmm1
1077 ; SSE-NEXT: por %xmm1, %xmm8
1078 ; SSE-NEXT: pshufd $0, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1079 ; SSE-NEXT: # xmm1 = mem[0,0,0,0]
1080 ; SSE-NEXT: movdqa %xmm3, %xmm13
1081 ; SSE-NEXT: pandn %xmm1, %xmm13
1082 ; SSE-NEXT: pand %xmm3, %xmm8
1083 ; SSE-NEXT: por %xmm8, %xmm13
1084 ; SSE-NEXT: pand %xmm7, %xmm13
1085 ; SSE-NEXT: pshufd $0, (%rsp), %xmm1 # 16-byte Folded Reload
1086 ; SSE-NEXT: # xmm1 = mem[0,0,0,0]
1087 ; SSE-NEXT: pandn %xmm1, %xmm7
1088 ; SSE-NEXT: por %xmm13, %xmm7
1089 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1090 ; SSE-NEXT: # xmm1 = mem[3,3,3,3]
1091 ; SSE-NEXT: pshufhw $249, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
1092 ; SSE-NEXT: # xmm8 = mem[0,1,2,3,5,6,7,7]
1093 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[2,2,2,3]
1094 ; SSE-NEXT: pand %xmm3, %xmm8
1095 ; SSE-NEXT: pandn %xmm1, %xmm3
1096 ; SSE-NEXT: por %xmm8, %xmm3
1097 ; SSE-NEXT: pand %xmm9, %xmm3
1098 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1099 ; SSE-NEXT: # xmm1 = mem[2,2,3,3]
1100 ; SSE-NEXT: pandn %xmm1, %xmm9
1101 ; SSE-NEXT: por %xmm3, %xmm9
1102 ; SSE-NEXT: pand %xmm10, %xmm9
1103 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1104 ; SSE-NEXT: # xmm1 = mem[2,2,3,3]
1105 ; SSE-NEXT: pandn %xmm1, %xmm10
1106 ; SSE-NEXT: por %xmm9, %xmm10
1107 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1108 ; SSE-NEXT: movdqa %xmm10, 32(%rax)
1109 ; SSE-NEXT: movdqa %xmm7, 48(%rax)
1110 ; SSE-NEXT: movdqa %xmm0, 96(%rax)
1111 ; SSE-NEXT: movdqa %xmm5, 112(%rax)
1112 ; SSE-NEXT: movdqa %xmm15, 160(%rax)
1113 ; SSE-NEXT: movdqa %xmm6, 176(%rax)
1114 ; SSE-NEXT: movdqa %xmm11, (%rax)
1115 ; SSE-NEXT: movdqa %xmm12, 16(%rax)
1116 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1117 ; SSE-NEXT: movaps %xmm0, 64(%rax)
1118 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1119 ; SSE-NEXT: movaps %xmm0, 80(%rax)
1120 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1121 ; SSE-NEXT: movaps %xmm0, 128(%rax)
1122 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1123 ; SSE-NEXT: movaps %xmm0, 144(%rax)
1124 ; SSE-NEXT: addq $200, %rsp
1125 ; SSE-NEXT: retq
1126 ;
1127 ; AVX1-ONLY-LABEL: store_i8_stride6_vf32:
1128 ; AVX1-ONLY: # %bb.0:
1129 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm0
1130 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm1
1131 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
1132 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm11[3,3,3,3]
1133 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
1134 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
1135 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
1136 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm10 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
1137 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm10, %ymm2
1138 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm1
1139 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm3
1140 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
1141 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm14[0,1,2,3,5,6,7,7]
1142 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
1143 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
1144 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm1[1,0,2,2,4,5,6,7]
1145 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
1146 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
1147 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm10, %ymm3
1148 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm3, %ymm2
1149 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm3
1150 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm12
1151 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = zero,zero,zero,zero,xmm12[8,u],zero,zero,zero,zero,xmm12[9,u],zero,zero,zero,zero
1152 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm4[2],xmm3[3,4],xmm4[5],xmm3[6,7]
1153 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,4],zero,xmm3[6,7,8,9,10],zero,xmm3[12,13,14,15]
1154 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm13
1155 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = zero,zero,zero,zero,zero,xmm13[8],zero,zero,zero,zero,zero,xmm13[9],zero,zero,zero,zero
1156 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm3, %xmm3
1157 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1158 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = zero,zero,xmm12[5,u],zero,zero,zero,zero,xmm12[6,u],zero,zero,zero,zero,xmm12[7,u]
1159 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1],xmm2[2,3],xmm3[4],xmm2[5,6],xmm3[7]
1160 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm9 = [0,1,2,128,4,5,6,7,8,128,10,11,12,13,14,128]
1161 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm2
1162 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = zero,zero,zero,xmm13[5],zero,zero,zero,zero,zero,xmm13[6],zero,zero,zero,zero,zero,xmm13[7]
1163 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm2, %xmm2
1164 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1165 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[1,1,2,2]
1166 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
1167 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
1168 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm10, %ymm0
1169 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm1[3,3,3,3,4,5,6,7]
1170 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
1171 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,7,7]
1172 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,2,3]
1173 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
1174 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm10, %ymm1
1175 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
1176 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm12[10,u],zero,zero,zero,zero,xmm12[11,u],zero,zero,zero,zero,xmm12[12,u],zero,zero
1177 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm0[1,2],xmm1[3],xmm0[4,5],xmm1[6],xmm0[7]
1178 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0],zero,xmm1[2,3,4,5,6],zero,xmm1[8,9,10,11,12],zero,xmm1[14,15]
1179 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = zero,xmm13[10],zero,zero,zero,zero,zero,xmm13[11],zero,zero,zero,zero,zero,xmm13[12],zero,zero
1180 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm1, %xmm1
1181 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1182 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm0
1183 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm2 = <128,128,13,u,128,128,128,128,14,u,128,128,128,128,15,u>
1184 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm12, %xmm1
1185 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
1186 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm0, %xmm0
1187 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,xmm13[13],zero,zero,zero,zero,zero,xmm13[14],zero,zero,zero,zero,zero,xmm13[15]
1188 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm0, %xmm0
1189 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1190 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm8
1191 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm6
1192 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm6[8],xmm8[8],xmm6[9],xmm8[9],xmm6[10],xmm8[10],xmm6[11],xmm8[11],xmm6[12],xmm8[12],xmm6[13],xmm8[13],xmm6[14],xmm8[14],xmm6[15],xmm8[15]
1193 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm5[1,1,2,2]
1194 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm5[3,3,3,3]
1195 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
1196 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm7
1197 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm4
1198 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm4[8],xmm7[8],xmm4[9],xmm7[9],xmm4[10],xmm7[10],xmm4[11],xmm7[11],xmm4[12],xmm7[12],xmm4[13],xmm7[13],xmm4[14],xmm7[14],xmm4[15],xmm7[15]
1199 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm3[3,3,3,3,4,5,6,7]
1200 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
1201 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm15 = xmm3[0,1,2,3,5,6,7,7]
1202 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,2,3]
1203 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm1, %ymm1
1204 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm10, %ymm0
1205 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm10, %ymm1
1206 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm15
1207 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm1
1208 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm1, %xmm0
1209 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm15, %xmm2
1210 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0],xmm0[1],xmm2[2,3],xmm0[4],xmm2[5,6],xmm0[7]
1211 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm0, %xmm2
1212 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm0
1213 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm9 = zero,zero,zero,xmm0[13],zero,zero,zero,zero,zero,xmm0[14],zero,zero,zero,zero,zero,xmm0[15]
1214 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm9, %xmm2
1215 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1216 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm1[10,u],zero,zero,zero,zero,xmm1[11,u],zero,zero,zero,zero,xmm1[12,u],zero,zero
1217 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm15[1,2],xmm2[3],xmm15[4,5],xmm2[6],xmm15[7]
1218 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0],zero,xmm2[2,3,4,5,6],zero,xmm2[8,9,10,11,12],zero,xmm2[14,15]
1219 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm9 = zero,xmm0[10],zero,zero,zero,zero,zero,xmm0[11],zero,zero,zero,zero,zero,xmm0[12],zero,zero
1220 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm9, %xmm2
1221 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1222 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm11[0,0,1,1]
1223 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm11[1,1,2,2]
1224 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm2, %ymm9
1225 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm14[1,0,2,2,4,5,6,7]
1226 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,0,1]
1227 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm11 = xmm14[3,3,3,3,4,5,6,7]
1228 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,4,4,4]
1229 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm2, %ymm11
1230 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm2 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
1231 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm9, %ymm9
1232 ; AVX1-ONLY-NEXT: vandnps %ymm11, %ymm2, %ymm11
1233 ; AVX1-ONLY-NEXT: vorps %ymm11, %ymm9, %ymm15
1234 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm11 = zero,zero,zero,zero,xmm12[0,u],zero,zero,zero,zero,xmm12[1,u],zero,zero,zero,zero
1235 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm15[0,1],xmm11[2],xmm15[3,4],xmm11[5],xmm15[6,7]
1236 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm14 = [0,1,2,3,4,128,6,7,8,9,10,128,12,13,14,15]
1237 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm11, %xmm11
1238 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm10 = zero,zero,zero,zero,zero,xmm13[0],zero,zero,zero,zero,zero,xmm13[1],zero,zero,zero,zero
1239 ; AVX1-ONLY-NEXT: vpor %xmm10, %xmm11, %xmm11
1240 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm15, %xmm10
1241 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm15 = <2,u,128,128,128,128,3,u,128,128,128,128,4,u,128,128>
1242 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm12, %xmm12
1243 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm12[0],xmm10[1,2],xmm12[3],xmm10[4,5],xmm12[6],xmm10[7]
1244 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm9 = [0,128,2,3,4,5,6,128,8,9,10,11,12,128,14,15]
1245 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm10, %xmm10
1246 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm12 = [128,2,128,128,128,128,128,3,128,128,128,128,128,4,128,128]
1247 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm13, %xmm13
1248 ; AVX1-ONLY-NEXT: vpor %xmm13, %xmm10, %xmm10
1249 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm6 = xmm6[0],xmm8[0],xmm6[1],xmm8[1],xmm6[2],xmm8[2],xmm6[3],xmm8[3],xmm6[4],xmm8[4],xmm6[5],xmm8[5],xmm6[6],xmm8[6],xmm6[7],xmm8[7]
1250 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm6[0,0,1,1]
1251 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm13 = xmm6[1,1,2,2]
1252 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm8, %ymm8
1253 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm4[0],xmm7[0],xmm4[1],xmm7[1],xmm4[2],xmm7[2],xmm4[3],xmm7[3],xmm4[4],xmm7[4],xmm4[5],xmm7[5],xmm4[6],xmm7[6],xmm4[7],xmm7[7]
1254 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm4[1,0,2,2,4,5,6,7]
1255 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,1,0,1]
1256 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm13 = xmm4[3,3,3,3,4,5,6,7]
1257 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,4,4,4]
1258 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm7, %ymm7
1259 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm8, %ymm8
1260 ; AVX1-ONLY-NEXT: vandnps %ymm7, %ymm2, %ymm2
1261 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm8, %ymm7
1262 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm1, %xmm2
1263 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm7, %xmm8
1264 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm8[1,2],xmm2[3],xmm8[4,5],xmm2[6],xmm8[7]
1265 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm2
1266 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm0, %xmm8
1267 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm8, %xmm2
1268 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm8 = zero,zero,zero,zero,xmm1[0,u],zero,zero,zero,zero,xmm1[1,u],zero,zero,zero,zero
1269 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1],xmm8[2],xmm7[3,4],xmm8[5],xmm7[6,7]
1270 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm7, %xmm7
1271 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm8 = zero,zero,zero,zero,zero,xmm0[0],zero,zero,zero,zero,zero,xmm0[1],zero,zero,zero,zero
1272 ; AVX1-ONLY-NEXT: vpor %xmm7, %xmm8, %xmm7
1273 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[3,3,3,3]
1274 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,0,1,1]
1275 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm5
1276 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,7,7]
1277 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
1278 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,0,2,2,4,5,6,7]
1279 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
1280 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
1281 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm6 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
1282 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm6, %ymm4
1283 ; AVX1-ONLY-NEXT: vandps %ymm6, %ymm3, %ymm3
1284 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm3, %ymm3
1285 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = zero,zero,xmm1[5,u],zero,zero,zero,zero,xmm1[6,u],zero,zero,zero,zero,xmm1[7,u]
1286 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm3[0],xmm4[1],xmm3[2,3],xmm4[4],xmm3[5,6],xmm4[7]
1287 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[0,1,2],zero,xmm4[4,5,6,7,8],zero,xmm4[10,11,12,13,14],zero
1288 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = zero,zero,zero,xmm0[5],zero,zero,zero,zero,zero,xmm0[6],zero,zero,zero,zero,zero,xmm0[7]
1289 ; AVX1-ONLY-NEXT: vpor %xmm5, %xmm4, %xmm4
1290 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,xmm1[8,u],zero,zero,zero,zero,xmm1[9,u],zero,zero,zero,zero
1291 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm3, %xmm3
1292 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1],xmm1[2],xmm3[3,4],xmm1[5],xmm3[6,7]
1293 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm1, %xmm1
1294 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,zero,zero,zero,xmm0[8],zero,zero,zero,zero,zero,xmm0[9],zero,zero,zero,zero
1295 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm1, %xmm0
1296 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1297 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, 48(%rax)
1298 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, 32(%rax)
1299 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, (%rax)
1300 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, 16(%rax)
1301 ; AVX1-ONLY-NEXT: vmovdqa %xmm10, 112(%rax)
1302 ; AVX1-ONLY-NEXT: vmovdqa %xmm11, 96(%rax)
1303 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1304 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 64(%rax)
1305 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1306 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 80(%rax)
1307 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1308 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 176(%rax)
1309 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1310 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 160(%rax)
1311 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1312 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 128(%rax)
1313 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1314 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 144(%rax)
1315 ; AVX1-ONLY-NEXT: vzeroupper
1316 ; AVX1-ONLY-NEXT: retq
1317 ;
1318 ; AVX2-SLOW-LABEL: store_i8_stride6_vf32:
1319 ; AVX2-SLOW: # %bb.0:
1320 ; AVX2-SLOW-NEXT: pushq %rax
1321 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm1
1322 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm3
1323 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm0
1324 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1325 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm2
1326 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1327 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm4
1328 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm6
1329 ; AVX2-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm5 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
1330 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm6, %xmm7
1331 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm8
1332 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm8, %xmm5
1333 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1],xmm5[2],xmm7[2],xmm5[3],xmm7[3],xmm5[4],xmm7[4],xmm5[5],xmm7[5],xmm5[6],xmm7[6],xmm5[7],xmm7[7]
1334 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
1335 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm11
1336 ; AVX2-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm7 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
1337 ; AVX2-SLOW-NEXT: vpshufb %xmm7, %xmm11, %xmm9
1338 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm13
1339 ; AVX2-SLOW-NEXT: vpshufb %xmm7, %xmm13, %xmm7
1340 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm7[8],xmm9[8],xmm7[9],xmm9[9],xmm7[10],xmm9[10],xmm7[11],xmm9[11],xmm7[12],xmm9[12],xmm7[13],xmm9[13],xmm7[14],xmm9[14],xmm7[15],xmm9[15]
1341 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,0,1]
1342 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = <255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255>
1343 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm5, %ymm7, %ymm7
1344 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm5
1345 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm10 = xmm5[6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u]
1346 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,0,1]
1347 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm12 = [255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255]
1348 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm7, %ymm10, %ymm14
1349 ; AVX2-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm7 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
1350 ; AVX2-SLOW-NEXT: vpshufb %ymm7, %ymm2, %ymm10
1351 ; AVX2-SLOW-NEXT: vpshufb %ymm7, %ymm0, %ymm7
1352 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm10 = ymm7[0],ymm10[0],ymm7[1],ymm10[1],ymm7[2],ymm10[2],ymm7[3],ymm10[3],ymm7[4],ymm10[4],ymm7[5],ymm10[5],ymm7[6],ymm10[6],ymm7[7],ymm10[7],ymm7[16],ymm10[16],ymm7[17],ymm10[17],ymm7[18],ymm10[18],ymm7[19],ymm10[19],ymm7[20],ymm10[20],ymm7[21],ymm10[21],ymm7[22],ymm10[22],ymm7[23],ymm10[23]
1353 ; AVX2-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm7 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
1354 ; AVX2-SLOW-NEXT: vpshufb %ymm7, %ymm3, %ymm15
1355 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, %ymm2
1356 ; AVX2-SLOW-NEXT: vpshufb %ymm7, %ymm1, %ymm7
1357 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm15 = ymm7[0],ymm15[0],ymm7[1],ymm15[1],ymm7[2],ymm15[2],ymm7[3],ymm15[3],ymm7[4],ymm15[4],ymm7[5],ymm15[5],ymm7[6],ymm15[6],ymm7[7],ymm15[7],ymm7[16],ymm15[16],ymm7[17],ymm15[17],ymm7[18],ymm15[18],ymm7[19],ymm15[19],ymm7[20],ymm15[20],ymm7[21],ymm15[21],ymm7[22],ymm15[22],ymm7[23],ymm15[23]
1358 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm7
1359 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,2,2,3]
1360 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1361 ; AVX2-SLOW-NEXT: vpblendvb %ymm9, %ymm10, %ymm15, %ymm9
1362 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm10
1363 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm15 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u]
1364 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1365 ; AVX2-SLOW-NEXT: vpblendvb %ymm12, %ymm9, %ymm15, %ymm12
1366 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm10[u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
1367 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,0,1]
1368 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
1369 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm14, %ymm9, %ymm0
1370 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1371 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm14 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u]
1372 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
1373 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm12, %ymm14, %ymm0
1374 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1375 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3],xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
1376 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm14[0,3,2,1,4,5,6,7]
1377 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,5,6,5]
1378 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
1379 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3],xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
1380 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm15[1,0,3,2,4,5,6,7]
1381 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,4,4,4]
1382 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,0,1]
1383 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255>
1384 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm14, %ymm15, %ymm14
1385 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm15 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[4],ymm3[4],ymm1[5],ymm3[5],ymm1[6],ymm3[6],ymm1[7],ymm3[7],ymm1[16],ymm3[16],ymm1[17],ymm3[17],ymm1[18],ymm3[18],ymm1[19],ymm3[19],ymm1[20],ymm3[20],ymm1[21],ymm3[21],ymm1[22],ymm3[22],ymm1[23],ymm3[23]
1386 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, %ymm12
1387 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm15 = ymm15[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
1388 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm15 = ymm15[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
1389 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1390 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
1391 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
1392 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm9 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[4],ymm1[4],ymm3[5],ymm1[5],ymm3[6],ymm1[6],ymm3[7],ymm1[7],ymm3[16],ymm1[16],ymm3[17],ymm1[17],ymm3[18],ymm1[18],ymm3[19],ymm1[19],ymm3[20],ymm1[20],ymm3[21],ymm1[21],ymm3[22],ymm1[22],ymm3[23],ymm1[23]
1393 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm9 = ymm9[1,0,3,2,4,5,6,7,9,8,11,10,12,13,14,15]
1394 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm9 = ymm9[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
1395 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
1396 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm15, %ymm9, %ymm0
1397 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm9 = xmm5[2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u]
1398 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,0,1]
1399 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255]
1400 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm14, %ymm9, %ymm9
1401 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm14 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u,u]
1402 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
1403 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm0, %ymm14, %ymm0
1404 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm14 = xmm10[u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
1405 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
1406 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
1407 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm9, %ymm14, %ymm14
1408 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u]
1409 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
1410 ; AVX2-SLOW-NEXT: vpblendvb %ymm15, %ymm0, %ymm9, %ymm15
1411 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm13[8],xmm11[8],xmm13[9],xmm11[9],xmm13[10],xmm11[10],xmm13[11],xmm11[11],xmm13[12],xmm11[12],xmm13[13],xmm11[13],xmm13[14],xmm11[14],xmm13[15],xmm11[15]
1412 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
1413 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
1414 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm8[8],xmm6[8],xmm8[9],xmm6[9],xmm8[10],xmm6[10],xmm8[11],xmm6[11],xmm8[12],xmm6[12],xmm8[13],xmm6[13],xmm8[14],xmm6[14],xmm8[15],xmm6[15]
1415 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
1416 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
1417 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u>
1418 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm0, %ymm6, %ymm0
1419 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm2 = ymm2[8],ymm12[8],ymm2[9],ymm12[9],ymm2[10],ymm12[10],ymm2[11],ymm12[11],ymm2[12],ymm12[12],ymm2[13],ymm12[13],ymm2[14],ymm12[14],ymm2[15],ymm12[15],ymm2[24],ymm12[24],ymm2[25],ymm12[25],ymm2[26],ymm12[26],ymm2[27],ymm12[27],ymm2[28],ymm12[28],ymm2[29],ymm12[29],ymm2[30],ymm12[30],ymm2[31],ymm12[31]
1420 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm3[8],ymm1[8],ymm3[9],ymm1[9],ymm3[10],ymm1[10],ymm3[11],ymm1[11],ymm3[12],ymm1[12],ymm3[13],ymm1[13],ymm3[14],ymm1[14],ymm3[15],ymm1[15],ymm3[24],ymm1[24],ymm3[25],ymm1[25],ymm3[26],ymm1[26],ymm3[27],ymm1[27],ymm3[28],ymm1[28],ymm3[29],ymm1[29],ymm3[30],ymm1[30],ymm3[31],ymm1[31]
1421 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
1422 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
1423 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
1424 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
1425 ; AVX2-SLOW-NEXT: vpblendvb %ymm8, %ymm2, %ymm1, %ymm1
1426 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm2 = xmm5[10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u]
1427 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
1428 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = [0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0]
1429 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
1430 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31,u]
1431 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
1432 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
1433 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} xmm2 = xmm10[u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
1434 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
1435 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm3 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
1436 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
1437 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31]
1438 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
1439 ; AVX2-SLOW-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
1440 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1441 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
1442 ; AVX2-SLOW-NEXT: vmovaps %ymm2, 128(%rax)
1443 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 160(%rax)
1444 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 64(%rax)
1445 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1446 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
1447 ; AVX2-SLOW-NEXT: vmovdqa %ymm15, 96(%rax)
1448 ; AVX2-SLOW-NEXT: vmovdqa %ymm14, (%rax)
1449 ; AVX2-SLOW-NEXT: popq %rax
1450 ; AVX2-SLOW-NEXT: vzeroupper
1451 ; AVX2-SLOW-NEXT: retq
1452 ;
1453 ; AVX2-FAST-LABEL: store_i8_stride6_vf32:
1454 ; AVX2-FAST: # %bb.0:
1455 ; AVX2-FAST-NEXT: subq $40, %rsp
1456 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm2
1457 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm6
1458 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm3
1459 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm4
1460 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm0
1461 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1462 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm1
1463 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1464 ; AVX2-FAST-NEXT: vpbroadcastq {{.*#+}} xmm7 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
1465 ; AVX2-FAST-NEXT: vpshufb %xmm7, %xmm1, %xmm8
1466 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm1
1467 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1468 ; AVX2-FAST-NEXT: vpshufb %xmm7, %xmm1, %xmm7
1469 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
1470 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,0,1]
1471 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm1
1472 ; AVX2-FAST-NEXT: vpbroadcastq {{.*#+}} xmm9 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
1473 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm1, %xmm11
1474 ; AVX2-FAST-NEXT: vmovdqa %xmm1, %xmm10
1475 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm5
1476 ; AVX2-FAST-NEXT: vpshufb %xmm9, %xmm5, %xmm9
1477 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm9[8],xmm11[8],xmm9[9],xmm11[9],xmm9[10],xmm11[10],xmm9[11],xmm11[11],xmm9[12],xmm11[12],xmm9[13],xmm11[13],xmm9[14],xmm11[14],xmm9[15],xmm11[15]
1478 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,0,1]
1479 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = <255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255>
1480 ; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm7, %ymm9, %ymm9
1481 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm7
1482 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm12 = xmm7[6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u]
1483 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,0,1]
1484 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255]
1485 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm9, %ymm12, %ymm14
1486 ; AVX2-FAST-NEXT: vpbroadcastq {{.*#+}} ymm9 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
1487 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm4, %ymm12
1488 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm3, %ymm9
1489 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} ymm12 = ymm9[0],ymm12[0],ymm9[1],ymm12[1],ymm9[2],ymm12[2],ymm9[3],ymm12[3],ymm9[4],ymm12[4],ymm9[5],ymm12[5],ymm9[6],ymm12[6],ymm9[7],ymm12[7],ymm9[16],ymm12[16],ymm9[17],ymm12[17],ymm9[18],ymm12[18],ymm9[19],ymm12[19],ymm9[20],ymm12[20],ymm9[21],ymm12[21],ymm9[22],ymm12[22],ymm9[23],ymm12[23]
1490 ; AVX2-FAST-NEXT: vpbroadcastq {{.*#+}} ymm9 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
1491 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm6, %ymm15
1492 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm9
1493 ; AVX2-FAST-NEXT: vmovdqa %ymm2, %ymm8
1494 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} ymm15 = ymm9[0],ymm15[0],ymm9[1],ymm15[1],ymm9[2],ymm15[2],ymm9[3],ymm15[3],ymm9[4],ymm15[4],ymm9[5],ymm15[5],ymm9[6],ymm15[6],ymm9[7],ymm15[7],ymm9[16],ymm15[16],ymm9[17],ymm15[17],ymm9[18],ymm15[18],ymm9[19],ymm15[19],ymm9[20],ymm15[20],ymm9[21],ymm15[21],ymm9[22],ymm15[22],ymm9[23],ymm15[23]
1495 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm1
1496 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1497 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,2,2,3]
1498 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1499 ; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm12, %ymm15, %ymm11
1500 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm12
1501 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm15 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u]
1502 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1503 ; AVX2-FAST-NEXT: vpblendvb %ymm13, %ymm11, %ymm15, %ymm13
1504 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm11 = xmm12[u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
1505 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
1506 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
1507 ; AVX2-FAST-NEXT: vpblendvb %ymm15, %ymm14, %ymm11, %ymm0
1508 ; AVX2-FAST-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
1509 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u]
1510 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
1511 ; AVX2-FAST-NEXT: vpblendvb %ymm15, %ymm13, %ymm14, %ymm0
1512 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1513 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm5[8],xmm10[8],xmm5[9],xmm10[9],xmm5[10],xmm10[10],xmm5[11],xmm10[11],xmm5[12],xmm10[12],xmm5[13],xmm10[13],xmm5[14],xmm10[14],xmm5[15],xmm10[15]
1514 ; AVX2-FAST-NEXT: vmovdqa %xmm10, %xmm13
1515 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
1516 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
1517 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1518 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1519 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm15 = xmm1[8],xmm2[8],xmm1[9],xmm2[9],xmm1[10],xmm2[10],xmm1[11],xmm2[11],xmm1[12],xmm2[12],xmm1[13],xmm2[13],xmm1[14],xmm2[14],xmm1[15],xmm2[15]
1520 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm15 = xmm15[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
1521 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,0,1]
1522 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u>
1523 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm14, %ymm15, %ymm14
1524 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} ymm15 = ymm8[8],ymm6[8],ymm8[9],ymm6[9],ymm8[10],ymm6[10],ymm8[11],ymm6[11],ymm8[12],ymm6[12],ymm8[13],ymm6[13],ymm8[14],ymm6[14],ymm8[15],ymm6[15],ymm8[24],ymm6[24],ymm8[25],ymm6[25],ymm8[26],ymm6[26],ymm8[27],ymm6[27],ymm8[28],ymm6[28],ymm8[29],ymm6[29],ymm8[30],ymm6[30],ymm8[31],ymm6[31]
1525 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm15 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
1526 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1527 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} ymm11 = ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15],ymm3[24],ymm4[24],ymm3[25],ymm4[25],ymm3[26],ymm4[26],ymm3[27],ymm4[27],ymm3[28],ymm4[28],ymm3[29],ymm4[29],ymm3[30],ymm4[30],ymm3[31],ymm4[31]
1528 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
1529 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
1530 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm15, %ymm11, %ymm0
1531 ; AVX2-FAST-NEXT: vmovdqa %xmm7, %xmm9
1532 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm11 = xmm7[10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u]
1533 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
1534 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0]
1535 ; AVX2-FAST-NEXT: vpblendvb %ymm15, %ymm14, %ymm11, %ymm11
1536 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
1537 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31,u]
1538 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
1539 ; AVX2-FAST-NEXT: vpblendvb %ymm15, %ymm0, %ymm14, %ymm0
1540 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm14 = xmm12[u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
1541 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
1542 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
1543 ; AVX2-FAST-NEXT: vpblendvb %ymm15, %ymm11, %ymm14, %ymm14
1544 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
1545 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm11 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31]
1546 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
1547 ; AVX2-FAST-NEXT: vpblendvb %ymm15, %ymm0, %ymm11, %ymm15
1548 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm5[0],xmm13[0],xmm5[1],xmm13[1],xmm5[2],xmm13[2],xmm5[3],xmm13[3],xmm5[4],xmm13[4],xmm5[5],xmm13[5],xmm5[6],xmm13[6],xmm5[7],xmm13[7]
1549 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
1550 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
1551 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
1552 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[2,3,0,1,6,7,4,5,8,9,8,9,8,9,8,9]
1553 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
1554 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = <255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255>
1555 ; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm0, %ymm5, %ymm0
1556 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm8[0],ymm6[0],ymm8[1],ymm6[1],ymm8[2],ymm6[2],ymm8[3],ymm6[3],ymm8[4],ymm6[4],ymm8[5],ymm6[5],ymm8[6],ymm6[6],ymm8[7],ymm6[7],ymm8[16],ymm6[16],ymm8[17],ymm6[17],ymm8[18],ymm6[18],ymm8[19],ymm6[19],ymm8[20],ymm6[20],ymm8[21],ymm6[21],ymm8[22],ymm6[22],ymm8[23],ymm6[23]
1557 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} ymm1 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[16],ymm4[16],ymm3[17],ymm4[17],ymm3[18],ymm4[18],ymm3[19],ymm4[19],ymm3[20],ymm4[20],ymm3[21],ymm4[21],ymm3[22],ymm4[22],ymm3[23],ymm4[23]
1558 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
1559 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
1560 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25]
1561 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
1562 ; AVX2-FAST-NEXT: vpblendvb %ymm11, %ymm2, %ymm1, %ymm1
1563 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm9[2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u]
1564 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
1565 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255]
1566 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
1567 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u,u]
1568 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
1569 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
1570 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm2 = xmm12[u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
1571 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
1572 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
1573 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
1574 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u]
1575 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
1576 ; AVX2-FAST-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
1577 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1578 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
1579 ; AVX2-FAST-NEXT: vmovaps %ymm2, 128(%rax)
1580 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 96(%rax)
1581 ; AVX2-FAST-NEXT: vmovdqa %ymm15, 160(%rax)
1582 ; AVX2-FAST-NEXT: vmovdqa %ymm14, 64(%rax)
1583 ; AVX2-FAST-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
1584 ; AVX2-FAST-NEXT: vmovaps %ymm1, 32(%rax)
1585 ; AVX2-FAST-NEXT: vmovdqa %ymm0, (%rax)
1586 ; AVX2-FAST-NEXT: addq $40, %rsp
1587 ; AVX2-FAST-NEXT: vzeroupper
1588 ; AVX2-FAST-NEXT: retq
1589 ;
1590 ; AVX2-FAST-PERLANE-LABEL: store_i8_stride6_vf32:
1591 ; AVX2-FAST-PERLANE: # %bb.0:
1592 ; AVX2-FAST-PERLANE-NEXT: subq $40, %rsp
1593 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm2
1594 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm6
1595 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm3
1596 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm4
1597 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm0
1598 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1599 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm1
1600 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1601 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq {{.*#+}} xmm7 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
1602 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm7, %xmm1, %xmm8
1603 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm1
1604 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1605 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm7, %xmm1, %xmm7
1606 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
1607 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,0,1]
1608 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm1
1609 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq {{.*#+}} xmm9 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
1610 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm9, %xmm1, %xmm11
1611 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, %xmm10
1612 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm5
1613 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm9, %xmm5, %xmm9
1614 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm9[8],xmm11[8],xmm9[9],xmm11[9],xmm9[10],xmm11[10],xmm9[11],xmm11[11],xmm9[12],xmm11[12],xmm9[13],xmm11[13],xmm9[14],xmm11[14],xmm9[15],xmm11[15]
1615 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,0,1]
1616 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = <255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255>
1617 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm7, %ymm9, %ymm9
1618 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm7
1619 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm12 = xmm7[6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u]
1620 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,0,1]
1621 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = [255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255]
1622 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm9, %ymm12, %ymm14
1623 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq {{.*#+}} ymm9 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
1624 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm4, %ymm12
1625 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm3, %ymm9
1626 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} ymm12 = ymm9[0],ymm12[0],ymm9[1],ymm12[1],ymm9[2],ymm12[2],ymm9[3],ymm12[3],ymm9[4],ymm12[4],ymm9[5],ymm12[5],ymm9[6],ymm12[6],ymm9[7],ymm12[7],ymm9[16],ymm12[16],ymm9[17],ymm12[17],ymm9[18],ymm12[18],ymm9[19],ymm12[19],ymm9[20],ymm12[20],ymm9[21],ymm12[21],ymm9[22],ymm12[22],ymm9[23],ymm12[23]
1627 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq {{.*#+}} ymm9 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
1628 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm6, %ymm15
1629 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm2, %ymm9
1630 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, %ymm8
1631 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} ymm15 = ymm9[0],ymm15[0],ymm9[1],ymm15[1],ymm9[2],ymm15[2],ymm9[3],ymm15[3],ymm9[4],ymm15[4],ymm9[5],ymm15[5],ymm9[6],ymm15[6],ymm9[7],ymm15[7],ymm9[16],ymm15[16],ymm9[17],ymm15[17],ymm9[18],ymm15[18],ymm9[19],ymm15[19],ymm9[20],ymm15[20],ymm9[21],ymm15[21],ymm9[22],ymm15[22],ymm9[23],ymm15[23]
1632 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm1
1633 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1634 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,2,2,3]
1635 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1636 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm12, %ymm15, %ymm11
1637 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm12
1638 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm15 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u]
1639 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1640 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm13, %ymm11, %ymm15, %ymm13
1641 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm11 = xmm12[u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
1642 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
1643 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
1644 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm14, %ymm11, %ymm0
1645 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
1646 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm14 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u]
1647 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
1648 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm13, %ymm14, %ymm0
1649 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1650 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm5[8],xmm10[8],xmm5[9],xmm10[9],xmm5[10],xmm10[10],xmm5[11],xmm10[11],xmm5[12],xmm10[12],xmm5[13],xmm10[13],xmm5[14],xmm10[14],xmm5[15],xmm10[15]
1651 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm10, %xmm13
1652 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
1653 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
1654 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1655 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1656 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm15 = xmm1[8],xmm2[8],xmm1[9],xmm2[9],xmm1[10],xmm2[10],xmm1[11],xmm2[11],xmm1[12],xmm2[12],xmm1[13],xmm2[13],xmm1[14],xmm2[14],xmm1[15],xmm2[15]
1657 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm15 = xmm15[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
1658 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,0,1]
1659 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u>
1660 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm14, %ymm15, %ymm14
1661 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} ymm15 = ymm8[8],ymm6[8],ymm8[9],ymm6[9],ymm8[10],ymm6[10],ymm8[11],ymm6[11],ymm8[12],ymm6[12],ymm8[13],ymm6[13],ymm8[14],ymm6[14],ymm8[15],ymm6[15],ymm8[24],ymm6[24],ymm8[25],ymm6[25],ymm8[26],ymm6[26],ymm8[27],ymm6[27],ymm8[28],ymm6[28],ymm8[29],ymm6[29],ymm8[30],ymm6[30],ymm8[31],ymm6[31]
1662 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm15 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
1663 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1664 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} ymm11 = ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15],ymm3[24],ymm4[24],ymm3[25],ymm4[25],ymm3[26],ymm4[26],ymm3[27],ymm4[27],ymm3[28],ymm4[28],ymm3[29],ymm4[29],ymm3[30],ymm4[30],ymm3[31],ymm4[31]
1665 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
1666 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
1667 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm15, %ymm11, %ymm0
1668 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm7, %xmm9
1669 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm11 = xmm7[10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u]
1670 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
1671 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = [0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0]
1672 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm14, %ymm11, %ymm11
1673 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
1674 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm14 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31,u]
1675 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
1676 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm0, %ymm14, %ymm0
1677 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm14 = xmm12[u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
1678 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
1679 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
1680 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm11, %ymm14, %ymm14
1681 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
1682 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm11 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31]
1683 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
1684 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm15, %ymm0, %ymm11, %ymm15
1685 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm5[0],xmm13[0],xmm5[1],xmm13[1],xmm5[2],xmm13[2],xmm5[3],xmm13[3],xmm5[4],xmm13[4],xmm5[5],xmm13[5],xmm5[6],xmm13[6],xmm5[7],xmm13[7]
1686 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
1687 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
1688 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
1689 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[2,3,0,1,6,7,4,5,8,9,8,9,8,9,8,9]
1690 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
1691 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = <255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255>
1692 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm0, %ymm5, %ymm0
1693 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm8[0],ymm6[0],ymm8[1],ymm6[1],ymm8[2],ymm6[2],ymm8[3],ymm6[3],ymm8[4],ymm6[4],ymm8[5],ymm6[5],ymm8[6],ymm6[6],ymm8[7],ymm6[7],ymm8[16],ymm6[16],ymm8[17],ymm6[17],ymm8[18],ymm6[18],ymm8[19],ymm6[19],ymm8[20],ymm6[20],ymm8[21],ymm6[21],ymm8[22],ymm6[22],ymm8[23],ymm6[23]
1694 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} ymm1 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[16],ymm4[16],ymm3[17],ymm4[17],ymm3[18],ymm4[18],ymm3[19],ymm4[19],ymm3[20],ymm4[20],ymm3[21],ymm4[21],ymm3[22],ymm4[22],ymm3[23],ymm4[23]
1695 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
1696 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
1697 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25]
1698 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
1699 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm11, %ymm2, %ymm1, %ymm1
1700 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm2 = xmm9[2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u]
1701 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
1702 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255]
1703 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
1704 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u,u]
1705 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
1706 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
1707 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm2 = xmm12[u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
1708 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
1709 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
1710 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
1711 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u]
1712 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
1713 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
1714 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1715 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
1716 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm2, 128(%rax)
1717 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 96(%rax)
1718 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm15, 160(%rax)
1719 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm14, 64(%rax)
1720 ; AVX2-FAST-PERLANE-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
1721 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm1, 32(%rax)
1722 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, (%rax)
1723 ; AVX2-FAST-PERLANE-NEXT: addq $40, %rsp
1724 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
1725 ; AVX2-FAST-PERLANE-NEXT: retq
1727 ; AVX512F-SLOW-LABEL: store_i8_stride6_vf32:
1728 ; AVX512F-SLOW: # %bb.0:
1729 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1730 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %ymm2
1731 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %ymm3
1732 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %ymm4
1733 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %ymm5
1734 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %ymm0
1735 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %ymm1
1736 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %xmm7
1737 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %xmm8
1738 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
1739 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
1740 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
1741 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm9 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[16],ymm5[16],ymm4[17],ymm5[17],ymm4[18],ymm5[18],ymm4[19],ymm5[19],ymm4[20],ymm5[20],ymm4[21],ymm5[21],ymm4[22],ymm5[22],ymm4[23],ymm5[23]
1742 ; AVX512F-SLOW-NEXT: vprold $16, %ymm9, %ymm9
1743 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
1744 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm6, %zmm6
1745 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %xmm9
1746 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %xmm10
1747 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm10[8],xmm9[8],xmm10[9],xmm9[9],xmm10[10],xmm9[10],xmm10[11],xmm9[11],xmm10[12],xmm9[12],xmm10[13],xmm9[13],xmm10[14],xmm9[14],xmm10[15],xmm9[15]
1748 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm11 = xmm11[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
1749 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
1750 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm12 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[4],ymm3[4],ymm2[5],ymm3[5],ymm2[6],ymm3[6],ymm2[7],ymm3[7],ymm2[16],ymm3[16],ymm2[17],ymm3[17],ymm2[18],ymm3[18],ymm2[19],ymm3[19],ymm2[20],ymm3[20],ymm2[21],ymm3[21],ymm2[22],ymm3[22],ymm2[23],ymm3[23]
1751 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm12[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
1752 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
1753 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,2,2,3]
1754 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm13
1755 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
1756 ; AVX512F-SLOW-NEXT: vpternlogq $226, %zmm6, %zmm14, %zmm13
1757 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %xmm11
1758 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %xmm12
1759 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm12[8],xmm11[8],xmm12[9],xmm11[9],xmm12[10],xmm11[10],xmm12[11],xmm11[11],xmm12[12],xmm11[12],xmm12[13],xmm11[13],xmm12[14],xmm11[14],xmm12[15],xmm11[15]
1760 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[4,5,10,11,8,9,6,7,12,13,10,11,12,13,14,15]
1761 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
1762 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm15 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
1763 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm15 = ymm15[2,1,0,3,4,5,6,7,10,9,8,11,12,13,14,15]
1764 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} ymm15 = ymm15[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
1765 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
1766 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm6, %zmm6
1767 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm6
1768 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm13 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
1769 ; AVX512F-SLOW-NEXT: vpshufb %xmm13, %xmm9, %xmm15
1770 ; AVX512F-SLOW-NEXT: vpshufb %xmm13, %xmm10, %xmm13
1771 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm13 = xmm13[8],xmm15[8],xmm13[9],xmm15[9],xmm13[10],xmm15[10],xmm13[11],xmm15[11],xmm13[12],xmm15[12],xmm13[13],xmm15[13],xmm13[14],xmm15[14],xmm13[15],xmm15[15]
1772 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
1773 ; AVX512F-SLOW-NEXT: vprold $16, %xmm15, %xmm15
1774 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm15, %zmm13
1775 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm13 = zmm13[0,0,0,1,4,4,4,5]
1776 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm15 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
1777 ; AVX512F-SLOW-NEXT: vpshufb %xmm15, %xmm7, %xmm7
1778 ; AVX512F-SLOW-NEXT: vpshufb %xmm15, %xmm8, %xmm8
1779 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
1780 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3],xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
1781 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[0,3,2,1,4,5,6,7]
1782 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,6,5]
1783 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm8, %zmm7
1784 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm7 = zmm7[0,0,0,1,4,4,4,5]
1785 ; AVX512F-SLOW-NEXT: vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm7
1786 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm8 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
1787 ; AVX512F-SLOW-NEXT: vpshufb %xmm8, %xmm11, %xmm9
1788 ; AVX512F-SLOW-NEXT: vpshufb %xmm8, %xmm12, %xmm8
1789 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3],xmm8[4],xmm9[4],xmm8[5],xmm9[5],xmm8[6],xmm9[6],xmm8[7],xmm9[7]
1790 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
1791 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm9[2,1,0,3,4,5,6,7]
1792 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,4,4,4]
1793 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm9, %zmm8
1794 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm8 = zmm8[0,0,0,1,4,4,4,5]
1795 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm7, %zmm14, %zmm8
1796 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm7 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
1797 ; AVX512F-SLOW-NEXT: vpshufb %ymm7, %ymm3, %ymm9
1798 ; AVX512F-SLOW-NEXT: vpshufb %ymm7, %ymm2, %ymm7
1799 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm7 = ymm7[0],ymm9[0],ymm7[1],ymm9[1],ymm7[2],ymm9[2],ymm7[3],ymm9[3],ymm7[4],ymm9[4],ymm7[5],ymm9[5],ymm7[6],ymm9[6],ymm7[7],ymm9[7],ymm7[16],ymm9[16],ymm7[17],ymm9[17],ymm7[18],ymm9[18],ymm7[19],ymm9[19],ymm7[20],ymm9[20],ymm7[21],ymm9[21],ymm7[22],ymm9[22],ymm7[23],ymm9[23]
1800 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15],ymm4[24],ymm5[24],ymm4[25],ymm5[25],ymm4[26],ymm5[26],ymm4[27],ymm5[27],ymm4[28],ymm5[28],ymm4[29],ymm5[29],ymm4[30],ymm5[30],ymm4[31],ymm5[31]
1801 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
1802 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm7, %zmm7
1803 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm7 = zmm7[2,2,2,3,6,6,6,7]
1804 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm9 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
1805 ; AVX512F-SLOW-NEXT: vpshufb %ymm9, %ymm5, %ymm5
1806 ; AVX512F-SLOW-NEXT: vpshufb %ymm9, %ymm4, %ymm4
1807 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[16],ymm5[16],ymm4[17],ymm5[17],ymm4[18],ymm5[18],ymm4[19],ymm5[19],ymm4[20],ymm5[20],ymm4[21],ymm5[21],ymm4[22],ymm5[22],ymm4[23],ymm5[23]
1808 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm2 = ymm2[8],ymm3[8],ymm2[9],ymm3[9],ymm2[10],ymm3[10],ymm2[11],ymm3[11],ymm2[12],ymm3[12],ymm2[13],ymm3[13],ymm2[14],ymm3[14],ymm2[15],ymm3[15],ymm2[24],ymm3[24],ymm2[25],ymm3[25],ymm2[26],ymm3[26],ymm2[27],ymm3[27],ymm2[28],ymm3[28],ymm2[29],ymm3[29],ymm2[30],ymm3[30],ymm2[31],ymm3[31]
1809 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
1810 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm4, %zmm2
1811 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm2 = zmm2[2,2,2,3,6,6,6,7]
1812 ; AVX512F-SLOW-NEXT: vbroadcasti64x4 {{.*#+}} zmm3 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
1813 ; AVX512F-SLOW-NEXT: # zmm3 = mem[0,1,2,3,0,1,2,3]
1814 ; AVX512F-SLOW-NEXT: vpternlogq $202, %zmm7, %zmm2, %zmm3
1815 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm2 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
1816 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm1, %ymm4
1817 ; AVX512F-SLOW-NEXT: vpshufb %ymm2, %ymm0, %ymm2
1818 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[4],ymm4[4],ymm2[5],ymm4[5],ymm2[6],ymm4[6],ymm2[7],ymm4[7],ymm2[16],ymm4[16],ymm2[17],ymm4[17],ymm2[18],ymm4[18],ymm2[19],ymm4[19],ymm2[20],ymm4[20],ymm2[21],ymm4[21],ymm2[22],ymm4[22],ymm2[23],ymm4[23]
1819 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm0 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
1820 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,26,27,24,25,22,23,28,29,26,27,28,29,30,31]
1821 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
1822 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm0 = zmm0[2,2,2,3,6,6,6,7]
1823 ; AVX512F-SLOW-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm0
1824 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, 128(%rax)
1825 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm8, (%rax)
1826 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm6, 64(%rax)
1827 ; AVX512F-SLOW-NEXT: vzeroupper
1828 ; AVX512F-SLOW-NEXT: retq
1830 ; AVX512F-FAST-LABEL: store_i8_stride6_vf32:
1831 ; AVX512F-FAST: # %bb.0:
1832 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
1833 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %ymm2
1834 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %ymm3
1835 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %ymm4
1836 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %ymm5
1837 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %ymm0
1838 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %ymm1
1839 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} ymm6 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
1840 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm3, %ymm7
1841 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm2, %ymm6
1842 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[4],ymm7[4],ymm6[5],ymm7[5],ymm6[6],ymm7[6],ymm6[7],ymm7[7],ymm6[16],ymm7[16],ymm6[17],ymm7[17],ymm6[18],ymm7[18],ymm6[19],ymm7[19],ymm6[20],ymm7[20],ymm6[21],ymm7[21],ymm6[22],ymm7[22],ymm6[23],ymm7[23]
1843 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} ymm7 = ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15],ymm4[24],ymm5[24],ymm4[25],ymm5[25],ymm4[26],ymm5[26],ymm4[27],ymm5[27],ymm4[28],ymm5[28],ymm4[29],ymm5[29],ymm4[30],ymm5[30],ymm4[31],ymm5[31]
1844 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
1845 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm6, %zmm6
1846 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm6 = zmm6[2,2,2,3,6,6,6,7]
1847 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} ymm7 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
1848 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm5, %ymm8
1849 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm4, %ymm7
1850 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm7 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[4],ymm8[4],ymm7[5],ymm8[5],ymm7[6],ymm8[6],ymm7[7],ymm8[7],ymm7[16],ymm8[16],ymm7[17],ymm8[17],ymm7[18],ymm8[18],ymm7[19],ymm8[19],ymm7[20],ymm8[20],ymm7[21],ymm8[21],ymm7[22],ymm8[22],ymm7[23],ymm8[23]
1851 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} ymm8 = ymm2[8],ymm3[8],ymm2[9],ymm3[9],ymm2[10],ymm3[10],ymm2[11],ymm3[11],ymm2[12],ymm3[12],ymm2[13],ymm3[13],ymm2[14],ymm3[14],ymm2[15],ymm3[15],ymm2[24],ymm3[24],ymm2[25],ymm3[25],ymm2[26],ymm3[26],ymm2[27],ymm3[27],ymm2[28],ymm3[28],ymm2[29],ymm3[29],ymm2[30],ymm3[30],ymm2[31],ymm3[31]
1852 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
1853 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm7, %zmm7
1854 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm7 = zmm7[2,2,2,3,6,6,6,7]
1855 ; AVX512F-FAST-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
1856 ; AVX512F-FAST-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
1857 ; AVX512F-FAST-NEXT: vpternlogq $202, %zmm6, %zmm7, %zmm8
1858 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} ymm6 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
1859 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm1, %ymm7
1860 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm0, %ymm6
1861 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[4],ymm7[4],ymm6[5],ymm7[5],ymm6[6],ymm7[6],ymm6[7],ymm7[7],ymm6[16],ymm7[16],ymm6[17],ymm7[17],ymm6[18],ymm7[18],ymm6[19],ymm7[19],ymm6[20],ymm7[20],ymm6[21],ymm7[21],ymm6[22],ymm7[22],ymm6[23],ymm7[23]
1862 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} ymm7 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
1863 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,26,27,24,25,22,23,28,29,26,27,28,29,30,31]
1864 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm7, %zmm6, %zmm6
1865 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm6 = zmm6[2,2,2,3,6,6,6,7]
1866 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm6
1867 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %xmm9
1868 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} xmm7 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
1869 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm9, %xmm8
1870 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm10
1871 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm10, %xmm7
1872 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
1873 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %xmm7
1874 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm8
1875 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
1876 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm12 = xmm12[0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
1877 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm11, %zmm12, %zmm11
1878 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm11 = zmm11[0,0,0,1,4,4,4,5]
1879 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} xmm12 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
1880 ; AVX512F-FAST-NEXT: vpshufb %xmm12, %xmm7, %xmm13
1881 ; AVX512F-FAST-NEXT: vpshufb %xmm12, %xmm8, %xmm12
1882 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm12 = xmm12[8],xmm13[8],xmm12[9],xmm13[9],xmm12[10],xmm13[10],xmm12[11],xmm13[11],xmm12[12],xmm13[12],xmm12[13],xmm13[13],xmm12[14],xmm13[14],xmm12[15],xmm13[15]
1883 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3],xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
1884 ; AVX512F-FAST-NEXT: vprold $16, %xmm13, %xmm13
1885 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm12, %zmm13, %zmm12
1886 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm13 = zmm12[0,0,0,1,4,4,4,5]
1887 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm11, %zmm13
1888 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %xmm11
1889 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} xmm14 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
1890 ; AVX512F-FAST-NEXT: vpshufb %xmm14, %xmm11, %xmm15
1891 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %xmm12
1892 ; AVX512F-FAST-NEXT: vpshufb %xmm14, %xmm12, %xmm14
1893 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3],xmm14[4],xmm15[4],xmm14[5],xmm15[5],xmm14[6],xmm15[6],xmm14[7],xmm15[7]
1894 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
1895 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm15 = xmm15[4,5,2,3,0,1,6,7,8,9,8,9,8,9,8,9]
1896 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm14, %zmm15, %zmm14
1897 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm14 = zmm14[0,0,0,1,4,4,4,5]
1898 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm15 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
1899 ; AVX512F-FAST-NEXT: vpternlogq $184, %zmm13, %zmm15, %zmm14
1900 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[16],ymm5[16],ymm4[17],ymm5[17],ymm4[18],ymm5[18],ymm4[19],ymm5[19],ymm4[20],ymm5[20],ymm4[21],ymm5[21],ymm4[22],ymm5[22],ymm4[23],ymm5[23]
1901 ; AVX512F-FAST-NEXT: vprold $16, %ymm4, %ymm4
1902 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm10[8],xmm9[8],xmm10[9],xmm9[9],xmm10[10],xmm9[10],xmm10[11],xmm9[11],xmm10[12],xmm9[12],xmm10[13],xmm9[13],xmm10[14],xmm9[14],xmm10[15],xmm9[15]
1903 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
1904 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = [0,0,0,1,10,10,10,11]
1905 ; AVX512F-FAST-NEXT: vpermt2q %zmm4, %zmm9, %zmm5
1906 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[4],ymm3[4],ymm2[5],ymm3[5],ymm2[6],ymm3[6],ymm2[7],ymm3[7],ymm2[16],ymm3[16],ymm2[17],ymm3[17],ymm2[18],ymm3[18],ymm2[19],ymm3[19],ymm2[20],ymm3[20],ymm2[21],ymm3[21],ymm2[22],ymm3[22],ymm2[23],ymm3[23]
1907 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
1908 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
1909 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
1910 ; AVX512F-FAST-NEXT: vpermt2q %zmm2, %zmm9, %zmm3
1911 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm5, %zmm15, %zmm3
1912 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
1913 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,18,19,16,17,22,23,24,25,24,25,24,25,24,25]
1914 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm12[8],xmm11[8],xmm12[9],xmm11[9],xmm12[10],xmm11[10],xmm12[11],xmm11[11],xmm12[12],xmm11[12],xmm12[13],xmm11[13],xmm12[14],xmm11[14],xmm12[15],xmm11[15]
1915 ; AVX512F-FAST-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[4,5,10,11,8,9,6,7,12,13,10,11,12,13,14,15]
1916 ; AVX512F-FAST-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
1917 ; AVX512F-FAST-NEXT: vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm1
1918 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, 64(%rax)
1919 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm14, (%rax)
1920 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm6, 128(%rax)
1921 ; AVX512F-FAST-NEXT: vzeroupper
1922 ; AVX512F-FAST-NEXT: retq
1924 ; AVX512BW-SLOW-LABEL: store_i8_stride6_vf32:
1925 ; AVX512BW-SLOW: # %bb.0:
1926 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1927 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdi), %ymm4
1928 ; AVX512BW-SLOW-NEXT: vmovdqa (%rsi), %ymm5
1929 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdx), %ymm2
1930 ; AVX512BW-SLOW-NEXT: vmovdqa (%rcx), %ymm3
1931 ; AVX512BW-SLOW-NEXT: vmovdqa (%r8), %ymm0
1932 ; AVX512BW-SLOW-NEXT: vmovdqa (%r9), %ymm1
1933 ; AVX512BW-SLOW-NEXT: vmovdqa (%rsi), %xmm7
1934 ; AVX512BW-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm6 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
1935 ; AVX512BW-SLOW-NEXT: vpshufb %xmm6, %xmm7, %xmm8
1936 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdi), %xmm9
1937 ; AVX512BW-SLOW-NEXT: vpshufb %xmm6, %xmm9, %xmm6
1938 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm6[8],xmm8[8],xmm6[9],xmm8[9],xmm6[10],xmm8[10],xmm6[11],xmm8[11],xmm6[12],xmm8[12],xmm6[13],xmm8[13],xmm6[14],xmm8[14],xmm6[15],xmm8[15]
1939 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
1940 ; AVX512BW-SLOW-NEXT: vmovdqa (%rcx), %xmm8
1941 ; AVX512BW-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm10 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
1942 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm8, %xmm11
1943 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdx), %xmm12
1944 ; AVX512BW-SLOW-NEXT: vpshufb %xmm10, %xmm12, %xmm10
1945 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3],xmm10[4],xmm11[4],xmm10[5],xmm11[5],xmm10[6],xmm11[6],xmm10[7],xmm11[7]
1946 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,0,1]
1947 ; AVX512BW-SLOW-NEXT: movw $18724, %cx # imm = 0x4924
1948 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
1949 ; AVX512BW-SLOW-NEXT: vmovdqu16 %ymm6, %ymm10 {%k1}
1950 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm0, %zmm6
1951 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
1952 ; AVX512BW-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
1953 ; AVX512BW-SLOW-NEXT: vpermw %ymm10, %ymm11, %ymm10
1954 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm12[0],xmm8[0],xmm12[1],xmm8[1],xmm12[2],xmm8[2],xmm12[3],xmm8[3],xmm12[4],xmm8[4],xmm12[5],xmm8[5],xmm12[6],xmm8[6],xmm12[7],xmm8[7]
1955 ; AVX512BW-SLOW-NEXT: vprold $16, %xmm11, %xmm11
1956 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
1957 ; AVX512BW-SLOW-NEXT: movw $9362, %cx # imm = 0x2492
1958 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k2
1959 ; AVX512BW-SLOW-NEXT: vmovdqu16 %ymm11, %ymm10 {%k2}
1960 ; AVX512BW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm6 = zmm10[0,1,2,3],zmm6[4,5,6,7]
1961 ; AVX512BW-SLOW-NEXT: vmovdqa (%r9), %xmm10
1962 ; AVX512BW-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm11 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
1963 ; AVX512BW-SLOW-NEXT: vpshufb %xmm11, %xmm10, %xmm13
1964 ; AVX512BW-SLOW-NEXT: vmovdqa (%r8), %xmm14
1965 ; AVX512BW-SLOW-NEXT: vpshufb %xmm11, %xmm14, %xmm11
1966 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm11[0],xmm13[0],xmm11[1],xmm13[1],xmm11[2],xmm13[2],xmm11[3],xmm13[3],xmm11[4],xmm13[4],xmm11[5],xmm13[5],xmm11[6],xmm13[6],xmm11[7],xmm13[7]
1967 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
1968 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3],xmm14[4],xmm10[4],xmm14[5],xmm10[5],xmm14[6],xmm10[6],xmm14[7],xmm10[7]
1969 ; AVX512BW-SLOW-NEXT: vmovdqa {{.*#+}} ymm15 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4]
1970 ; AVX512BW-SLOW-NEXT: vpermw %ymm13, %ymm15, %ymm13
1971 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm13, %zmm11
1972 ; AVX512BW-SLOW-NEXT: movl $613566756, %ecx # imm = 0x24924924
1973 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k3
1974 ; AVX512BW-SLOW-NEXT: vmovdqu16 %zmm11, %zmm6 {%k3}
1975 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm11 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[16],ymm5[16],ymm4[17],ymm5[17],ymm4[18],ymm5[18],ymm4[19],ymm5[19],ymm4[20],ymm5[20],ymm4[21],ymm5[21],ymm4[22],ymm5[22],ymm4[23],ymm5[23]
1976 ; AVX512BW-SLOW-NEXT: vmovdqa {{.*#+}} ymm13 = [8,11,10,9,8,11,10,9,8,11,10,9,12,13,14,13]
1977 ; AVX512BW-SLOW-NEXT: vpermw %ymm11, %ymm13, %ymm11
1978 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm13 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[4],ymm3[4],ymm2[5],ymm3[5],ymm2[6],ymm3[6],ymm2[7],ymm3[7],ymm2[16],ymm3[16],ymm2[17],ymm3[17],ymm2[18],ymm3[18],ymm2[19],ymm3[19],ymm2[20],ymm3[20],ymm2[21],ymm3[21],ymm2[22],ymm3[22],ymm2[23],ymm3[23]
1979 ; AVX512BW-SLOW-NEXT: vprold $16, %ymm13, %ymm13
1980 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,2,2,3]
1981 ; AVX512BW-SLOW-NEXT: vmovdqu16 %ymm13, %ymm11 {%k2}
1982 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm0, %zmm11
1983 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm12[8],xmm8[8],xmm12[9],xmm8[9],xmm12[10],xmm8[10],xmm12[11],xmm8[11],xmm12[12],xmm8[12],xmm12[13],xmm8[13],xmm12[14],xmm8[14],xmm12[15],xmm8[15]
1984 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm9[8],xmm7[8],xmm9[9],xmm7[9],xmm9[10],xmm7[10],xmm9[11],xmm7[11],xmm9[12],xmm7[12],xmm9[13],xmm7[13],xmm9[14],xmm7[14],xmm9[15],xmm7[15]
1985 ; AVX512BW-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7]
1986 ; AVX512BW-SLOW-NEXT: vpermw %ymm7, %ymm9, %ymm7
1987 ; AVX512BW-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7]
1988 ; AVX512BW-SLOW-NEXT: vpermw %ymm8, %ymm9, %ymm7 {%k1}
1989 ; AVX512BW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm11[4,5,6,7]
1990 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm8 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
1991 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm14[8],xmm10[8],xmm14[9],xmm10[9],xmm14[10],xmm10[10],xmm14[11],xmm10[11],xmm14[12],xmm10[12],xmm14[13],xmm10[13],xmm14[14],xmm10[14],xmm14[15],xmm10[15]
1992 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm9, %zmm8
1993 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7,26,25,24,27,26,25,24,27,26,25,24,27,28,28,28,28]
1994 ; AVX512BW-SLOW-NEXT: movl $1227133513, %ecx # imm = 0x49249249
1995 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k2
1996 ; AVX512BW-SLOW-NEXT: vpermw %zmm8, %zmm9, %zmm7 {%k2}
1997 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm8 = ymm2[8],ymm3[8],ymm2[9],ymm3[9],ymm2[10],ymm3[10],ymm2[11],ymm3[11],ymm2[12],ymm3[12],ymm2[13],ymm3[13],ymm2[14],ymm3[14],ymm2[15],ymm3[15],ymm2[24],ymm3[24],ymm2[25],ymm3[25],ymm2[26],ymm3[26],ymm2[27],ymm3[27],ymm2[28],ymm3[28],ymm2[29],ymm3[29],ymm2[30],ymm3[30],ymm2[31],ymm3[31]
1998 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15],ymm4[24],ymm5[24],ymm4[25],ymm5[25],ymm4[26],ymm5[26],ymm4[27],ymm5[27],ymm4[28],ymm5[28],ymm4[29],ymm5[29],ymm4[30],ymm5[30],ymm4[31],ymm5[31]
1999 ; AVX512BW-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
2000 ; AVX512BW-SLOW-NEXT: vpermw %ymm9, %ymm10, %ymm9
2001 ; AVX512BW-SLOW-NEXT: vmovdqa {{.*#+}} ymm10 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
2002 ; AVX512BW-SLOW-NEXT: vpermw %ymm8, %ymm10, %ymm9 {%k1}
2003 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm8
2004 ; AVX512BW-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm9 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
2005 ; AVX512BW-SLOW-NEXT: vpshufb %ymm9, %ymm5, %ymm5
2006 ; AVX512BW-SLOW-NEXT: vpshufb %ymm9, %ymm4, %ymm4
2007 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[16],ymm5[16],ymm4[17],ymm5[17],ymm4[18],ymm5[18],ymm4[19],ymm5[19],ymm4[20],ymm5[20],ymm4[21],ymm5[21],ymm4[22],ymm5[22],ymm4[23],ymm5[23]
2008 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
2009 ; AVX512BW-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm5 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
2010 ; AVX512BW-SLOW-NEXT: vpshufb %ymm5, %ymm3, %ymm3
2011 ; AVX512BW-SLOW-NEXT: vpshufb %ymm5, %ymm2, %ymm2
2012 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[4],ymm3[4],ymm2[5],ymm3[5],ymm2[6],ymm3[6],ymm2[7],ymm3[7],ymm2[16],ymm3[16],ymm2[17],ymm3[17],ymm2[18],ymm3[18],ymm2[19],ymm3[19],ymm2[20],ymm3[20],ymm2[21],ymm3[21],ymm2[22],ymm3[22],ymm2[23],ymm3[23]
2013 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2014 ; AVX512BW-SLOW-NEXT: vmovdqu16 %ymm4, %ymm2 {%k1}
2015 ; AVX512BW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm2[0,1,2,3],zmm8[4,5,6,7]
2016 ; AVX512BW-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm3 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
2017 ; AVX512BW-SLOW-NEXT: vpshufb %ymm3, %ymm1, %ymm4
2018 ; AVX512BW-SLOW-NEXT: vpshufb %ymm3, %ymm0, %ymm3
2019 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[16],ymm4[16],ymm3[17],ymm4[17],ymm3[18],ymm4[18],ymm3[19],ymm4[19],ymm3[20],ymm4[20],ymm3[21],ymm4[21],ymm3[22],ymm4[22],ymm3[23],ymm4[23]
2020 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
2021 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm0 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
2022 ; AVX512BW-SLOW-NEXT: vmovdqa {{.*#+}} ymm1 = [10,13,12,11,10,13,12,11,10,13,12,11,14,13,14,15]
2023 ; AVX512BW-SLOW-NEXT: vpermw %ymm0, %ymm1, %ymm0
2024 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
2025 ; AVX512BW-SLOW-NEXT: movl $-1840700270, %ecx # imm = 0x92492492
2026 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
2027 ; AVX512BW-SLOW-NEXT: vmovdqu16 %zmm0, %zmm2 {%k1}
2028 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm2, 128(%rax)
2029 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm7, 64(%rax)
2030 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm6, (%rax)
2031 ; AVX512BW-SLOW-NEXT: vzeroupper
2032 ; AVX512BW-SLOW-NEXT: retq
2034 ; AVX512BW-FAST-LABEL: store_i8_stride6_vf32:
2035 ; AVX512BW-FAST: # %bb.0:
2036 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2037 ; AVX512BW-FAST-NEXT: vmovdqa (%rdi), %ymm4
2038 ; AVX512BW-FAST-NEXT: vmovdqa (%rsi), %ymm5
2039 ; AVX512BW-FAST-NEXT: vmovdqa (%rdx), %ymm2
2040 ; AVX512BW-FAST-NEXT: vmovdqa (%rcx), %ymm3
2041 ; AVX512BW-FAST-NEXT: vmovdqa (%r8), %ymm0
2042 ; AVX512BW-FAST-NEXT: vmovdqa (%r9), %ymm1
2043 ; AVX512BW-FAST-NEXT: vmovdqa (%rcx), %xmm7
2044 ; AVX512BW-FAST-NEXT: vmovdqa (%rdx), %xmm8
2045 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm6 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
2046 ; AVX512BW-FAST-NEXT: vmovdqa (%rsi), %xmm9
2047 ; AVX512BW-FAST-NEXT: vmovdqa (%rdi), %xmm10
2048 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3],xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
2049 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
2050 ; AVX512BW-FAST-NEXT: vpermw %ymm11, %ymm12, %ymm11
2051 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [1,0,3,2,1,0,3,2,1,0,3,2,5,4,7,6]
2052 ; AVX512BW-FAST-NEXT: movw $9362, %cx # imm = 0x2492
2053 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k2
2054 ; AVX512BW-FAST-NEXT: vpermw %ymm6, %ymm12, %ymm11 {%k2}
2055 ; AVX512BW-FAST-NEXT: vpbroadcastq {{.*#+}} xmm6 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
2056 ; AVX512BW-FAST-NEXT: vpshufb %xmm6, %xmm9, %xmm12
2057 ; AVX512BW-FAST-NEXT: vpshufb %xmm6, %xmm10, %xmm6
2058 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm6[8],xmm12[8],xmm6[9],xmm12[9],xmm6[10],xmm12[10],xmm6[11],xmm12[11],xmm6[12],xmm12[12],xmm6[13],xmm12[13],xmm6[14],xmm12[14],xmm6[15],xmm12[15]
2059 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
2060 ; AVX512BW-FAST-NEXT: vpbroadcastq {{.*#+}} xmm12 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
2061 ; AVX512BW-FAST-NEXT: vpshufb %xmm12, %xmm7, %xmm13
2062 ; AVX512BW-FAST-NEXT: vpshufb %xmm12, %xmm8, %xmm12
2063 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3],xmm12[4],xmm13[4],xmm12[5],xmm13[5],xmm12[6],xmm13[6],xmm12[7],xmm13[7]
2064 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,0,1]
2065 ; AVX512BW-FAST-NEXT: movw $18724, %cx # imm = 0x4924
2066 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
2067 ; AVX512BW-FAST-NEXT: vmovdqu16 %ymm6, %ymm12 {%k1}
2068 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm12, %zmm0, %zmm6
2069 ; AVX512BW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm6 = zmm11[0,1,2,3],zmm6[4,5,6,7]
2070 ; AVX512BW-FAST-NEXT: vmovdqa (%r9), %xmm11
2071 ; AVX512BW-FAST-NEXT: vpbroadcastq {{.*#+}} xmm12 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
2072 ; AVX512BW-FAST-NEXT: vpshufb %xmm12, %xmm11, %xmm13
2073 ; AVX512BW-FAST-NEXT: vmovdqa (%r8), %xmm14
2074 ; AVX512BW-FAST-NEXT: vpshufb %xmm12, %xmm14, %xmm12
2075 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3],xmm12[4],xmm13[4],xmm12[5],xmm13[5],xmm12[6],xmm13[6],xmm12[7],xmm13[7]
2076 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,0,1]
2077 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm14[0],xmm11[0],xmm14[1],xmm11[1],xmm14[2],xmm11[2],xmm14[3],xmm11[3],xmm14[4],xmm11[4],xmm14[5],xmm11[5],xmm14[6],xmm11[6],xmm14[7],xmm11[7]
2078 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4]
2079 ; AVX512BW-FAST-NEXT: vpermw %ymm13, %ymm15, %ymm13
2080 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm12, %zmm13, %zmm12
2081 ; AVX512BW-FAST-NEXT: movl $613566756, %ecx # imm = 0x24924924
2082 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k3
2083 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm12, %zmm6 {%k3}
2084 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm12 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[4],ymm3[4],ymm2[5],ymm3[5],ymm2[6],ymm3[6],ymm2[7],ymm3[7],ymm2[16],ymm3[16],ymm2[17],ymm3[17],ymm2[18],ymm3[18],ymm2[19],ymm3[19],ymm2[20],ymm3[20],ymm2[21],ymm3[21],ymm2[22],ymm3[22],ymm2[23],ymm3[23]
2085 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm13 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[16],ymm5[16],ymm4[17],ymm5[17],ymm4[18],ymm5[18],ymm4[19],ymm5[19],ymm4[20],ymm5[20],ymm4[21],ymm5[21],ymm4[22],ymm5[22],ymm4[23],ymm5[23]
2086 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [8,11,10,9,8,11,10,9,8,11,10,9,12,13,14,13]
2087 ; AVX512BW-FAST-NEXT: vpermw %ymm13, %ymm15, %ymm13
2088 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [9,8,11,10,9,8,11,10,9,8,11,10,13,12,15,14]
2089 ; AVX512BW-FAST-NEXT: vpermw %ymm12, %ymm15, %ymm13 {%k2}
2090 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm13, %zmm0, %zmm12
2091 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
2092 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm10[8],xmm9[8],xmm10[9],xmm9[9],xmm10[10],xmm9[10],xmm10[11],xmm9[11],xmm10[12],xmm9[12],xmm10[13],xmm9[13],xmm10[14],xmm9[14],xmm10[15],xmm9[15]
2093 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7]
2094 ; AVX512BW-FAST-NEXT: vpermw %ymm8, %ymm9, %ymm8
2095 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7]
2096 ; AVX512BW-FAST-NEXT: vpermw %ymm7, %ymm9, %ymm8 {%k1}
2097 ; AVX512BW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm7 = zmm8[0,1,2,3],zmm12[4,5,6,7]
2098 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm8 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
2099 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm14[8],xmm11[8],xmm14[9],xmm11[9],xmm14[10],xmm11[10],xmm14[11],xmm11[11],xmm14[12],xmm11[12],xmm14[13],xmm11[13],xmm14[14],xmm11[14],xmm14[15],xmm11[15]
2100 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm9, %zmm8
2101 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7,26,25,24,27,26,25,24,27,26,25,24,27,28,28,28,28]
2102 ; AVX512BW-FAST-NEXT: movl $1227133513, %ecx # imm = 0x49249249
2103 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k2
2104 ; AVX512BW-FAST-NEXT: vpermw %zmm8, %zmm9, %zmm7 {%k2}
2105 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} ymm8 = ymm2[8],ymm3[8],ymm2[9],ymm3[9],ymm2[10],ymm3[10],ymm2[11],ymm3[11],ymm2[12],ymm3[12],ymm2[13],ymm3[13],ymm2[14],ymm3[14],ymm2[15],ymm3[15],ymm2[24],ymm3[24],ymm2[25],ymm3[25],ymm2[26],ymm3[26],ymm2[27],ymm3[27],ymm2[28],ymm3[28],ymm2[29],ymm3[29],ymm2[30],ymm3[30],ymm2[31],ymm3[31]
2106 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15],ymm4[24],ymm5[24],ymm4[25],ymm5[25],ymm4[26],ymm5[26],ymm4[27],ymm5[27],ymm4[28],ymm5[28],ymm4[29],ymm5[29],ymm4[30],ymm5[30],ymm4[31],ymm5[31]
2107 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
2108 ; AVX512BW-FAST-NEXT: vpermw %ymm9, %ymm10, %ymm9
2109 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
2110 ; AVX512BW-FAST-NEXT: vpermw %ymm8, %ymm10, %ymm9 {%k1}
2111 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm8
2112 ; AVX512BW-FAST-NEXT: vpbroadcastq {{.*#+}} ymm9 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
2113 ; AVX512BW-FAST-NEXT: vpshufb %ymm9, %ymm5, %ymm5
2114 ; AVX512BW-FAST-NEXT: vpshufb %ymm9, %ymm4, %ymm4
2115 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[16],ymm5[16],ymm4[17],ymm5[17],ymm4[18],ymm5[18],ymm4[19],ymm5[19],ymm4[20],ymm5[20],ymm4[21],ymm5[21],ymm4[22],ymm5[22],ymm4[23],ymm5[23]
2116 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
2117 ; AVX512BW-FAST-NEXT: vpbroadcastq {{.*#+}} ymm5 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
2118 ; AVX512BW-FAST-NEXT: vpshufb %ymm5, %ymm3, %ymm3
2119 ; AVX512BW-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm2
2120 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[4],ymm3[4],ymm2[5],ymm3[5],ymm2[6],ymm3[6],ymm2[7],ymm3[7],ymm2[16],ymm3[16],ymm2[17],ymm3[17],ymm2[18],ymm3[18],ymm2[19],ymm3[19],ymm2[20],ymm3[20],ymm2[21],ymm3[21],ymm2[22],ymm3[22],ymm2[23],ymm3[23]
2121 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2122 ; AVX512BW-FAST-NEXT: vmovdqu16 %ymm4, %ymm2 {%k1}
2123 ; AVX512BW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm2[0,1,2,3],zmm8[4,5,6,7]
2124 ; AVX512BW-FAST-NEXT: vpbroadcastq {{.*#+}} ymm3 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
2125 ; AVX512BW-FAST-NEXT: vpshufb %ymm3, %ymm1, %ymm4
2126 ; AVX512BW-FAST-NEXT: vpshufb %ymm3, %ymm0, %ymm3
2127 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[16],ymm4[16],ymm3[17],ymm4[17],ymm3[18],ymm4[18],ymm3[19],ymm4[19],ymm3[20],ymm4[20],ymm3[21],ymm4[21],ymm3[22],ymm4[22],ymm3[23],ymm4[23]
2128 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
2129 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} ymm0 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
2130 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [10,13,12,11,10,13,12,11,10,13,12,11,14,13,14,15]
2131 ; AVX512BW-FAST-NEXT: vpermw %ymm0, %ymm1, %ymm0
2132 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
2133 ; AVX512BW-FAST-NEXT: movl $-1840700270, %ecx # imm = 0x92492492
2134 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
2135 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm0, %zmm2 {%k1}
2136 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm2, 128(%rax)
2137 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm7, 64(%rax)
2138 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm6, (%rax)
2139 ; AVX512BW-FAST-NEXT: vzeroupper
2140 ; AVX512BW-FAST-NEXT: retq
2141 %in.vec0 = load <32 x i8>, ptr %in.vecptr0, align 64
2142 %in.vec1 = load <32 x i8>, ptr %in.vecptr1, align 64
2143 %in.vec2 = load <32 x i8>, ptr %in.vecptr2, align 64
2144 %in.vec3 = load <32 x i8>, ptr %in.vecptr3, align 64
2145 %in.vec4 = load <32 x i8>, ptr %in.vecptr4, align 64
2146 %in.vec5 = load <32 x i8>, ptr %in.vecptr5, align 64
2147 %1 = shufflevector <32 x i8> %in.vec0, <32 x i8> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
2148 %2 = shufflevector <32 x i8> %in.vec2, <32 x i8> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
2149 %3 = shufflevector <32 x i8> %in.vec4, <32 x i8> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
2150 %4 = shufflevector <64 x i8> %1, <64 x i8> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
2151 %5 = shufflevector <64 x i8> %3, <64 x i8> poison, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2152 %6 = shufflevector <128 x i8> %4, <128 x i8> %5, <192 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191>
2153 %interleaved.vec = shufflevector <192 x i8> %6, <192 x i8> poison, <192 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191>
2154 store <192 x i8> %interleaved.vec, ptr %out.vec, align 64
2155 ret void
2156 }
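; store_i8_stride6_vf64 below repeats the same stride-6 interleaving check with 64 lanes per input vector, so the six inputs are woven into a 384-byte output block (64 rows x 6 bytes).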
2158 define void @store_i8_stride6_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
2159 ; SSE-LABEL: store_i8_stride6_vf64:
2160 ; SSE: # %bb.0:
2161 ; SSE-NEXT: subq $184, %rsp
2162 ; SSE-NEXT: movdqa (%rdi), %xmm1
2163 ; SSE-NEXT: movdqa (%rsi), %xmm5
2164 ; SSE-NEXT: movdqa (%rdx), %xmm4
2165 ; SSE-NEXT: movdqa (%rcx), %xmm9
2166 ; SSE-NEXT: movdqa (%r8), %xmm6
2167 ; SSE-NEXT: movdqa (%r9), %xmm2
2168 ; SSE-NEXT: movdqa %xmm1, %xmm8
2169 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm5[0],xmm8[1],xmm5[1],xmm8[2],xmm5[2],xmm8[3],xmm5[3],xmm8[4],xmm5[4],xmm8[5],xmm5[5],xmm8[6],xmm5[6],xmm8[7],xmm5[7]
2170 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm8[0,0,1,1]
2171 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,0,65535,65535,0,65535,65535,0]
2172 ; SSE-NEXT: pand %xmm12, %xmm3
2173 ; SSE-NEXT: movdqa %xmm4, %xmm10
2174 ; SSE-NEXT: punpcklbw {{.*#+}} xmm10 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3],xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
2175 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm10[1,0,2,2,4,5,6,7]
2176 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,1,0,1]
2177 ; SSE-NEXT: pandn %xmm7, %xmm12
2178 ; SSE-NEXT: por %xmm3, %xmm12
2179 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,65535,0,65535,65535,0,65535,65535]
2180 ; SSE-NEXT: pand %xmm3, %xmm12
2181 ; SSE-NEXT: movdqa %xmm6, %xmm11
2182 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2183 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm11[0,0,0,0]
2184 ; SSE-NEXT: movdqa %xmm3, %xmm13
2185 ; SSE-NEXT: pandn %xmm7, %xmm13
2186 ; SSE-NEXT: por %xmm12, %xmm13
2187 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
2188 ; SSE-NEXT: pand %xmm12, %xmm13
2189 ; SSE-NEXT: punpcklbw {{.*#+}} xmm15 = xmm15[0],xmm2[0],xmm15[1],xmm2[1],xmm15[2],xmm2[2],xmm15[3],xmm2[3],xmm15[4],xmm2[4],xmm15[5],xmm2[5],xmm15[6],xmm2[6],xmm15[7],xmm2[7]
2190 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm15[0,0,0,0]
2191 ; SSE-NEXT: movdqa %xmm12, %xmm0
2192 ; SSE-NEXT: pandn %xmm7, %xmm0
2193 ; SSE-NEXT: por %xmm13, %xmm0
2194 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2195 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm10[3,3,3,3,4,5,6,7]
2196 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,4,4,4]
2197 ; SSE-NEXT: movdqa %xmm3, %xmm13
2198 ; SSE-NEXT: pandn %xmm7, %xmm13
2199 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm8[1,1,2,2]
2200 ; SSE-NEXT: pand %xmm3, %xmm7
2201 ; SSE-NEXT: por %xmm7, %xmm13
2202 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
2203 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm11[1,1,2,2]
2204 ; SSE-NEXT: movdqa %xmm0, %xmm14
2205 ; SSE-NEXT: pandn %xmm7, %xmm14
2206 ; SSE-NEXT: pand %xmm0, %xmm13
2207 ; SSE-NEXT: por %xmm13, %xmm14
2208 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
2209 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm15[1,1,2,2]
2210 ; SSE-NEXT: movdqa %xmm13, %xmm0
2211 ; SSE-NEXT: pandn %xmm7, %xmm0
2212 ; SSE-NEXT: pand %xmm13, %xmm14
2213 ; SSE-NEXT: por %xmm14, %xmm0
2214 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2215 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm8[3,3,3,3]
2216 ; SSE-NEXT: movdqa %xmm3, %xmm8
2217 ; SSE-NEXT: pandn %xmm7, %xmm8
2218 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm10[0,1,2,3,5,6,7,7]
2219 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,2,2,3]
2220 ; SSE-NEXT: pand %xmm3, %xmm7
2221 ; SSE-NEXT: por %xmm8, %xmm7
2222 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm11[2,2,3,3]
2223 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,0,65535,65535,0]
2224 ; SSE-NEXT: movdqa %xmm0, %xmm10
2225 ; SSE-NEXT: pandn %xmm8, %xmm10
2226 ; SSE-NEXT: pand %xmm0, %xmm7
2227 ; SSE-NEXT: movdqa %xmm0, %xmm11
2228 ; SSE-NEXT: por %xmm7, %xmm10
2229 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
2230 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm15[2,2,3,3]
2231 ; SSE-NEXT: movdqa %xmm14, %xmm0
2232 ; SSE-NEXT: pandn %xmm7, %xmm0
2233 ; SSE-NEXT: pand %xmm14, %xmm10
2234 ; SSE-NEXT: por %xmm10, %xmm0
2235 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2236 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
2237 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,0,1,1]
2238 ; SSE-NEXT: pand %xmm11, %xmm5
2239 ; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm9[8],xmm4[9],xmm9[9],xmm4[10],xmm9[10],xmm4[11],xmm9[11],xmm4[12],xmm9[12],xmm4[13],xmm9[13],xmm4[14],xmm9[14],xmm4[15],xmm9[15]
2240 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm4[1,0,2,2,4,5,6,7]
2241 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,1,0,1]
2242 ; SSE-NEXT: movdqa %xmm11, %xmm8
2243 ; SSE-NEXT: pandn %xmm7, %xmm8
2244 ; SSE-NEXT: por %xmm5, %xmm8
2245 ; SSE-NEXT: pand %xmm3, %xmm8
2246 ; SSE-NEXT: punpckhbw {{.*#+}} xmm6 = xmm6[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
2247 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm6[0,0,0,0]
2248 ; SSE-NEXT: movdqa %xmm3, %xmm7
2249 ; SSE-NEXT: pandn %xmm5, %xmm7
2250 ; SSE-NEXT: por %xmm8, %xmm7
2251 ; SSE-NEXT: pand %xmm12, %xmm7
2252 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
2253 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,0,0,0]
2254 ; SSE-NEXT: movdqa %xmm12, %xmm0
2255 ; SSE-NEXT: pandn %xmm5, %xmm0
2256 ; SSE-NEXT: por %xmm7, %xmm0
2257 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2258 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm4[3,3,3,3,4,5,6,7]
2259 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,4,4,4]
2260 ; SSE-NEXT: movdqa %xmm3, %xmm7
2261 ; SSE-NEXT: pandn %xmm5, %xmm7
2262 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[1,1,2,2]
2263 ; SSE-NEXT: pand %xmm3, %xmm5
2264 ; SSE-NEXT: por %xmm5, %xmm7
2265 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm6[1,1,2,2]
2266 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
2267 ; SSE-NEXT: movdqa %xmm0, %xmm8
2268 ; SSE-NEXT: pandn %xmm5, %xmm8
2269 ; SSE-NEXT: pand %xmm0, %xmm7
2270 ; SSE-NEXT: por %xmm7, %xmm8
2271 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[1,1,2,2]
2272 ; SSE-NEXT: movdqa %xmm13, %xmm0
2273 ; SSE-NEXT: pandn %xmm5, %xmm0
2274 ; SSE-NEXT: pand %xmm13, %xmm8
2275 ; SSE-NEXT: por %xmm8, %xmm0
2276 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2277 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,3,3,3]
2278 ; SSE-NEXT: movdqa %xmm3, %xmm5
2279 ; SSE-NEXT: pandn %xmm1, %xmm5
2280 ; SSE-NEXT: movdqa 16(%rdx), %xmm1
2281 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,7,7]
2282 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
2283 ; SSE-NEXT: pand %xmm3, %xmm4
2284 ; SSE-NEXT: por %xmm5, %xmm4
2285 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm6[2,2,3,3]
2286 ; SSE-NEXT: movdqa %xmm11, %xmm6
2287 ; SSE-NEXT: pandn %xmm5, %xmm6
2288 ; SSE-NEXT: movdqa 16(%rcx), %xmm5
2289 ; SSE-NEXT: pand %xmm11, %xmm4
2290 ; SSE-NEXT: por %xmm4, %xmm6
2291 ; SSE-NEXT: pand %xmm14, %xmm6
2292 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
2293 ; SSE-NEXT: movdqa %xmm14, %xmm0
2294 ; SSE-NEXT: pandn %xmm2, %xmm0
2295 ; SSE-NEXT: por %xmm6, %xmm0
2296 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2297 ; SSE-NEXT: movdqa %xmm1, %xmm9
2298 ; SSE-NEXT: punpcklbw {{.*#+}} xmm9 = xmm9[0],xmm5[0],xmm9[1],xmm5[1],xmm9[2],xmm5[2],xmm9[3],xmm5[3],xmm9[4],xmm5[4],xmm9[5],xmm5[5],xmm9[6],xmm5[6],xmm9[7],xmm5[7]
2299 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm9[1,0,2,2,4,5,6,7]
2300 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,0,1]
2301 ; SSE-NEXT: movdqa %xmm11, %xmm0
2302 ; SSE-NEXT: movdqa %xmm11, %xmm6
2303 ; SSE-NEXT: pandn %xmm2, %xmm6
2304 ; SSE-NEXT: movdqa 16(%rdi), %xmm2
2305 ; SSE-NEXT: movdqa 16(%rsi), %xmm8
2306 ; SSE-NEXT: movdqa %xmm2, %xmm11
2307 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm8[0],xmm11[1],xmm8[1],xmm11[2],xmm8[2],xmm11[3],xmm8[3],xmm11[4],xmm8[4],xmm11[5],xmm8[5],xmm11[6],xmm8[6],xmm11[7],xmm8[7]
2308 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm11[0,0,1,1]
2309 ; SSE-NEXT: pand %xmm0, %xmm4
2310 ; SSE-NEXT: por %xmm4, %xmm6
2311 ; SSE-NEXT: movdqa 16(%r8), %xmm4
2312 ; SSE-NEXT: movdqa %xmm4, %xmm10
2313 ; SSE-NEXT: punpcklbw {{.*#+}} xmm10 = xmm10[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2314 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm10[0,0,0,0]
2315 ; SSE-NEXT: movdqa %xmm3, %xmm13
2316 ; SSE-NEXT: pandn %xmm7, %xmm13
2317 ; SSE-NEXT: pand %xmm3, %xmm6
2318 ; SSE-NEXT: por %xmm6, %xmm13
2319 ; SSE-NEXT: movdqa 16(%r9), %xmm6
2320 ; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
2321 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm7[0,0,0,0]
2322 ; SSE-NEXT: movdqa %xmm12, %xmm0
2323 ; SSE-NEXT: pandn %xmm15, %xmm0
2324 ; SSE-NEXT: pand %xmm12, %xmm13
2325 ; SSE-NEXT: por %xmm13, %xmm0
2326 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2327 ; SSE-NEXT: pshuflw {{.*#+}} xmm13 = xmm9[3,3,3,3,4,5,6,7]
2328 ; SSE-NEXT: pshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,4,4,4]
2329 ; SSE-NEXT: movdqa %xmm3, %xmm15
2330 ; SSE-NEXT: pandn %xmm13, %xmm15
2331 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm11[1,1,2,2]
2332 ; SSE-NEXT: pand %xmm3, %xmm13
2333 ; SSE-NEXT: por %xmm13, %xmm15
2334 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm10[1,1,2,2]
2335 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
2336 ; SSE-NEXT: movdqa %xmm0, %xmm12
2337 ; SSE-NEXT: pandn %xmm13, %xmm12
2338 ; SSE-NEXT: pand %xmm0, %xmm15
2339 ; SSE-NEXT: por %xmm15, %xmm12
2340 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm7[1,1,2,2]
2341 ; SSE-NEXT: movdqa {{.*#+}} xmm15 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
2342 ; SSE-NEXT: movdqa %xmm15, %xmm0
2343 ; SSE-NEXT: pandn %xmm13, %xmm0
2344 ; SSE-NEXT: pand %xmm15, %xmm12
2345 ; SSE-NEXT: movdqa %xmm15, %xmm13
2346 ; SSE-NEXT: por %xmm12, %xmm0
2347 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2348 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[3,3,3,3]
2349 ; SSE-NEXT: movdqa %xmm3, %xmm12
2350 ; SSE-NEXT: pandn %xmm11, %xmm12
2351 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,5,6,7,7]
2352 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[2,2,2,3]
2353 ; SSE-NEXT: pand %xmm3, %xmm9
2354 ; SSE-NEXT: por %xmm12, %xmm9
2355 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
2356 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,0,65535,65535,0]
2357 ; SSE-NEXT: movdqa %xmm0, %xmm11
2358 ; SSE-NEXT: pandn %xmm10, %xmm11
2359 ; SSE-NEXT: pand %xmm0, %xmm9
2360 ; SSE-NEXT: movdqa %xmm0, %xmm10
2361 ; SSE-NEXT: por %xmm9, %xmm11
2362 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
2363 ; SSE-NEXT: movdqa %xmm14, %xmm0
2364 ; SSE-NEXT: pandn %xmm7, %xmm0
2365 ; SSE-NEXT: pand %xmm14, %xmm11
2366 ; SSE-NEXT: por %xmm11, %xmm0
2367 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2368 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm8[8],xmm2[9],xmm8[9],xmm2[10],xmm8[10],xmm2[11],xmm8[11],xmm2[12],xmm8[12],xmm2[13],xmm8[13],xmm2[14],xmm8[14],xmm2[15],xmm8[15]
2369 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
2370 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm1[1,0,2,2,4,5,6,7]
2371 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,1,0,1]
2372 ; SSE-NEXT: movdqa %xmm10, %xmm7
2373 ; SSE-NEXT: pandn %xmm5, %xmm7
2374 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,0,1,1]
2375 ; SSE-NEXT: pand %xmm10, %xmm5
2376 ; SSE-NEXT: por %xmm5, %xmm7
2377 ; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
2378 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,0,0]
2379 ; SSE-NEXT: movdqa %xmm3, %xmm8
2380 ; SSE-NEXT: pandn %xmm5, %xmm8
2381 ; SSE-NEXT: pand %xmm3, %xmm7
2382 ; SSE-NEXT: por %xmm7, %xmm8
2383 ; SSE-NEXT: punpckhbw {{.*#+}} xmm5 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
2384 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,0,0]
2385 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
2386 ; SSE-NEXT: movdqa %xmm0, %xmm7
2387 ; SSE-NEXT: pandn %xmm6, %xmm7
2388 ; SSE-NEXT: pand %xmm0, %xmm8
2389 ; SSE-NEXT: movdqa %xmm0, %xmm15
2390 ; SSE-NEXT: por %xmm8, %xmm7
2391 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2392 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm1[3,3,3,3,4,5,6,7]
2393 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,4,4,4]
2394 ; SSE-NEXT: movdqa %xmm3, %xmm7
2395 ; SSE-NEXT: pandn %xmm6, %xmm7
2396 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm2[1,1,2,2]
2397 ; SSE-NEXT: pand %xmm3, %xmm6
2398 ; SSE-NEXT: por %xmm6, %xmm7
2399 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm4[1,1,2,2]
2400 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
2401 ; SSE-NEXT: movdqa %xmm0, %xmm8
2402 ; SSE-NEXT: pandn %xmm6, %xmm8
2403 ; SSE-NEXT: pand %xmm0, %xmm7
2404 ; SSE-NEXT: por %xmm7, %xmm8
2405 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[1,1,2,2]
2406 ; SSE-NEXT: movdqa %xmm13, %xmm0
2407 ; SSE-NEXT: pandn %xmm6, %xmm0
2408 ; SSE-NEXT: pand %xmm13, %xmm8
2409 ; SSE-NEXT: por %xmm8, %xmm0
2410 ; SSE-NEXT: movdqa %xmm0, (%rsp) # 16-byte Spill
2411 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[3,3,3,3]
2412 ; SSE-NEXT: movdqa %xmm3, %xmm6
2413 ; SSE-NEXT: pandn %xmm2, %xmm6
2414 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,7,7]
2415 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,3]
2416 ; SSE-NEXT: pand %xmm3, %xmm1
2417 ; SSE-NEXT: por %xmm6, %xmm1
2418 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[2,2,3,3]
2419 ; SSE-NEXT: movdqa %xmm10, %xmm4
2420 ; SSE-NEXT: pandn %xmm2, %xmm4
2421 ; SSE-NEXT: pand %xmm10, %xmm1
2422 ; SSE-NEXT: por %xmm1, %xmm4
2423 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[2,2,3,3]
2424 ; SSE-NEXT: movdqa %xmm14, %xmm0
2425 ; SSE-NEXT: pandn %xmm1, %xmm0
2426 ; SSE-NEXT: pand %xmm14, %xmm4
2427 ; SSE-NEXT: por %xmm4, %xmm0
2428 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2429 ; SSE-NEXT: movdqa 32(%rdx), %xmm1
2430 ; SSE-NEXT: movdqa 32(%rcx), %xmm5
2431 ; SSE-NEXT: movdqa %xmm1, %xmm9
2432 ; SSE-NEXT: punpcklbw {{.*#+}} xmm9 = xmm9[0],xmm5[0],xmm9[1],xmm5[1],xmm9[2],xmm5[2],xmm9[3],xmm5[3],xmm9[4],xmm5[4],xmm9[5],xmm5[5],xmm9[6],xmm5[6],xmm9[7],xmm5[7]
2433 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm9[1,0,2,2,4,5,6,7]
2434 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,0,1]
2435 ; SSE-NEXT: movdqa %xmm10, %xmm6
2436 ; SSE-NEXT: pandn %xmm2, %xmm6
2437 ; SSE-NEXT: movdqa 32(%rdi), %xmm2
2438 ; SSE-NEXT: movdqa 32(%rsi), %xmm8
2439 ; SSE-NEXT: movdqa %xmm2, %xmm11
2440 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm8[0],xmm11[1],xmm8[1],xmm11[2],xmm8[2],xmm11[3],xmm8[3],xmm11[4],xmm8[4],xmm11[5],xmm8[5],xmm11[6],xmm8[6],xmm11[7],xmm8[7]
2441 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm11[0,0,1,1]
2442 ; SSE-NEXT: pand %xmm10, %xmm4
2443 ; SSE-NEXT: por %xmm4, %xmm6
2444 ; SSE-NEXT: movdqa 32(%r8), %xmm4
2445 ; SSE-NEXT: movdqa %xmm4, %xmm10
2446 ; SSE-NEXT: punpcklbw {{.*#+}} xmm10 = xmm10[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2447 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm10[0,0,0,0]
2448 ; SSE-NEXT: movdqa %xmm3, %xmm12
2449 ; SSE-NEXT: pandn %xmm7, %xmm12
2450 ; SSE-NEXT: pand %xmm3, %xmm6
2451 ; SSE-NEXT: por %xmm6, %xmm12
2452 ; SSE-NEXT: movdqa 32(%r9), %xmm6
2453 ; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
2454 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm7[0,0,0,0]
2455 ; SSE-NEXT: movdqa %xmm15, %xmm0
2456 ; SSE-NEXT: pandn %xmm13, %xmm0
2457 ; SSE-NEXT: pand %xmm15, %xmm12
2458 ; SSE-NEXT: por %xmm12, %xmm0
2459 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2460 ; SSE-NEXT: pshuflw {{.*#+}} xmm12 = xmm9[3,3,3,3,4,5,6,7]
2461 ; SSE-NEXT: pshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,4,4,4]
2462 ; SSE-NEXT: movdqa %xmm3, %xmm13
2463 ; SSE-NEXT: pandn %xmm12, %xmm13
2464 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm11[1,1,2,2]
2465 ; SSE-NEXT: pand %xmm3, %xmm12
2466 ; SSE-NEXT: por %xmm12, %xmm13
2467 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm10[1,1,2,2]
2468 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
2469 ; SSE-NEXT: movdqa %xmm0, %xmm15
2470 ; SSE-NEXT: pandn %xmm12, %xmm15
2471 ; SSE-NEXT: pand %xmm0, %xmm13
2472 ; SSE-NEXT: por %xmm13, %xmm15
2473 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm7[1,1,2,2]
2474 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
2475 ; SSE-NEXT: movdqa %xmm13, %xmm0
2476 ; SSE-NEXT: pandn %xmm12, %xmm0
2477 ; SSE-NEXT: pand %xmm13, %xmm15
2478 ; SSE-NEXT: por %xmm15, %xmm0
2479 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2480 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[3,3,3,3]
2481 ; SSE-NEXT: movdqa %xmm3, %xmm12
2482 ; SSE-NEXT: pandn %xmm11, %xmm12
2483 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,5,6,7,7]
2484 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[2,2,2,3]
2485 ; SSE-NEXT: pand %xmm3, %xmm9
2486 ; SSE-NEXT: por %xmm12, %xmm9
2487 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
2488 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,0,65535,65535,0]
2489 ; SSE-NEXT: movdqa %xmm0, %xmm11
2490 ; SSE-NEXT: pandn %xmm10, %xmm11
2491 ; SSE-NEXT: pand %xmm0, %xmm9
2492 ; SSE-NEXT: movdqa %xmm0, %xmm10
2493 ; SSE-NEXT: por %xmm9, %xmm11
2494 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
2495 ; SSE-NEXT: movdqa %xmm14, %xmm0
2496 ; SSE-NEXT: pandn %xmm7, %xmm0
2497 ; SSE-NEXT: pand %xmm14, %xmm11
2498 ; SSE-NEXT: por %xmm11, %xmm0
2499 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2500 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm8[8],xmm2[9],xmm8[9],xmm2[10],xmm8[10],xmm2[11],xmm8[11],xmm2[12],xmm8[12],xmm2[13],xmm8[13],xmm2[14],xmm8[14],xmm2[15],xmm8[15]
2501 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
2502 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm1[1,0,2,2,4,5,6,7]
2503 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,1,0,1]
2504 ; SSE-NEXT: movdqa %xmm10, %xmm7
2505 ; SSE-NEXT: pandn %xmm5, %xmm7
2506 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,0,1,1]
2507 ; SSE-NEXT: pand %xmm10, %xmm5
2508 ; SSE-NEXT: por %xmm5, %xmm7
2509 ; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
2510 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,0,0]
2511 ; SSE-NEXT: movdqa %xmm3, %xmm8
2512 ; SSE-NEXT: pandn %xmm5, %xmm8
2513 ; SSE-NEXT: pand %xmm3, %xmm7
2514 ; SSE-NEXT: por %xmm7, %xmm8
2515 ; SSE-NEXT: punpckhbw {{.*#+}} xmm5 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
2516 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,0,0]
2517 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
2518 ; SSE-NEXT: movdqa %xmm0, %xmm7
2519 ; SSE-NEXT: pandn %xmm6, %xmm7
2520 ; SSE-NEXT: pand %xmm0, %xmm8
2521 ; SSE-NEXT: movdqa %xmm0, %xmm15
2522 ; SSE-NEXT: por %xmm8, %xmm7
2523 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2524 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm1[3,3,3,3,4,5,6,7]
2525 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,4,4,4]
2526 ; SSE-NEXT: movdqa %xmm3, %xmm7
2527 ; SSE-NEXT: pandn %xmm6, %xmm7
2528 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm2[1,1,2,2]
2529 ; SSE-NEXT: pand %xmm3, %xmm6
2530 ; SSE-NEXT: por %xmm6, %xmm7
2531 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm4[1,1,2,2]
2532 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
2533 ; SSE-NEXT: movdqa %xmm0, %xmm8
2534 ; SSE-NEXT: pandn %xmm6, %xmm8
2535 ; SSE-NEXT: pand %xmm0, %xmm7
2536 ; SSE-NEXT: por %xmm7, %xmm8
2537 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[1,1,2,2]
2538 ; SSE-NEXT: movdqa %xmm13, %xmm7
2539 ; SSE-NEXT: pandn %xmm6, %xmm7
2540 ; SSE-NEXT: pand %xmm13, %xmm8
2541 ; SSE-NEXT: por %xmm8, %xmm7
2542 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2543 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[3,3,3,3]
2544 ; SSE-NEXT: movdqa %xmm3, %xmm6
2545 ; SSE-NEXT: pandn %xmm2, %xmm6
2546 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,7,7]
2547 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,3]
2548 ; SSE-NEXT: pand %xmm3, %xmm1
2549 ; SSE-NEXT: por %xmm6, %xmm1
2550 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[2,2,3,3]
2551 ; SSE-NEXT: movdqa %xmm10, %xmm4
2552 ; SSE-NEXT: pandn %xmm2, %xmm4
2553 ; SSE-NEXT: pand %xmm10, %xmm1
2554 ; SSE-NEXT: por %xmm1, %xmm4
2555 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[2,2,3,3]
2556 ; SSE-NEXT: movdqa %xmm14, %xmm0
2557 ; SSE-NEXT: pandn %xmm1, %xmm0
2558 ; SSE-NEXT: pand %xmm14, %xmm4
2559 ; SSE-NEXT: por %xmm4, %xmm0
2560 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2561 ; SSE-NEXT: movdqa 48(%rdx), %xmm9
2562 ; SSE-NEXT: movdqa 48(%rcx), %xmm0
2563 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2564 ; SSE-NEXT: movdqa %xmm9, %xmm8
2565 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm0[0],xmm8[1],xmm0[1],xmm8[2],xmm0[2],xmm8[3],xmm0[3],xmm8[4],xmm0[4],xmm8[5],xmm0[5],xmm8[6],xmm0[6],xmm8[7],xmm0[7]
2566 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm8[1,0,2,2,4,5,6,7]
2567 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
2568 ; SSE-NEXT: movdqa %xmm10, %xmm0
2569 ; SSE-NEXT: movdqa %xmm10, %xmm4
2570 ; SSE-NEXT: pandn %xmm1, %xmm4
2571 ; SSE-NEXT: movdqa 48(%rdi), %xmm6
2572 ; SSE-NEXT: movdqa 48(%rsi), %xmm10
2573 ; SSE-NEXT: movdqa %xmm6, %xmm1
2574 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm10[0],xmm1[1],xmm10[1],xmm1[2],xmm10[2],xmm1[3],xmm10[3],xmm1[4],xmm10[4],xmm1[5],xmm10[5],xmm1[6],xmm10[6],xmm1[7],xmm10[7]
2575 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
2576 ; SSE-NEXT: pand %xmm0, %xmm2
2577 ; SSE-NEXT: por %xmm2, %xmm4
2578 ; SSE-NEXT: movdqa 48(%r8), %xmm7
2579 ; SSE-NEXT: movdqa %xmm7, %xmm2
2580 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
2581 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm2[0,0,0,0]
2582 ; SSE-NEXT: movdqa %xmm3, %xmm12
2583 ; SSE-NEXT: pandn %xmm11, %xmm12
2584 ; SSE-NEXT: pand %xmm3, %xmm4
2585 ; SSE-NEXT: por %xmm4, %xmm12
2586 ; SSE-NEXT: movdqa 48(%r9), %xmm11
2587 ; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0],xmm11[0],xmm5[1],xmm11[1],xmm5[2],xmm11[2],xmm5[3],xmm11[3],xmm5[4],xmm11[4],xmm5[5],xmm11[5],xmm5[6],xmm11[6],xmm5[7],xmm11[7]
2588 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm5[0,0,0,0]
2589 ; SSE-NEXT: movdqa %xmm15, %xmm0
2590 ; SSE-NEXT: pandn %xmm13, %xmm15
2591 ; SSE-NEXT: pand %xmm0, %xmm12
2592 ; SSE-NEXT: por %xmm12, %xmm15
2593 ; SSE-NEXT: pshuflw {{.*#+}} xmm12 = xmm8[3,3,3,3,4,5,6,7]
2594 ; SSE-NEXT: pshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,4,4,4]
2595 ; SSE-NEXT: movdqa %xmm3, %xmm13
2596 ; SSE-NEXT: pandn %xmm12, %xmm13
2597 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm1[1,1,2,2]
2598 ; SSE-NEXT: pand %xmm3, %xmm12
2599 ; SSE-NEXT: por %xmm12, %xmm13
2600 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm2[1,1,2,2]
2601 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [0,65535,65535,0,65535,65535,0,65535]
2602 ; SSE-NEXT: movdqa %xmm4, %xmm0
2603 ; SSE-NEXT: pandn %xmm12, %xmm0
2604 ; SSE-NEXT: pand %xmm4, %xmm13
2605 ; SSE-NEXT: por %xmm13, %xmm0
2606 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm5[1,1,2,2]
2607 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
2608 ; SSE-NEXT: movdqa %xmm4, %xmm13
2609 ; SSE-NEXT: pandn %xmm12, %xmm13
2610 ; SSE-NEXT: pand %xmm4, %xmm0
2611 ; SSE-NEXT: por %xmm0, %xmm13
2612 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[3,3,3,3]
2613 ; SSE-NEXT: movdqa %xmm3, %xmm1
2614 ; SSE-NEXT: pandn %xmm0, %xmm1
2615 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm8[0,1,2,3,5,6,7,7]
2616 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
2617 ; SSE-NEXT: pand %xmm3, %xmm0
2618 ; SSE-NEXT: por %xmm1, %xmm0
2619 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
2620 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,0,65535,65535,0,65535,65535,0]
2621 ; SSE-NEXT: movdqa %xmm12, %xmm2
2622 ; SSE-NEXT: pandn %xmm1, %xmm2
2623 ; SSE-NEXT: pand %xmm12, %xmm0
2624 ; SSE-NEXT: por %xmm0, %xmm2
2625 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[2,2,3,3]
2626 ; SSE-NEXT: movdqa %xmm14, %xmm8
2627 ; SSE-NEXT: pandn %xmm0, %xmm8
2628 ; SSE-NEXT: pand %xmm14, %xmm2
2629 ; SSE-NEXT: por %xmm2, %xmm8
2630 ; SSE-NEXT: punpckhbw {{.*#+}} xmm6 = xmm6[8],xmm10[8],xmm6[9],xmm10[9],xmm6[10],xmm10[10],xmm6[11],xmm10[11],xmm6[12],xmm10[12],xmm6[13],xmm10[13],xmm6[14],xmm10[14],xmm6[15],xmm10[15]
2631 ; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
2632 ; SSE-NEXT: # xmm9 = xmm9[8],mem[8],xmm9[9],mem[9],xmm9[10],mem[10],xmm9[11],mem[11],xmm9[12],mem[12],xmm9[13],mem[13],xmm9[14],mem[14],xmm9[15],mem[15]
2633 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[1,0,2,2,4,5,6,7]
2634 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
2635 ; SSE-NEXT: movdqa %xmm12, %xmm1
2636 ; SSE-NEXT: pandn %xmm0, %xmm1
2637 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,0,1,1]
2638 ; SSE-NEXT: pand %xmm12, %xmm0
2639 ; SSE-NEXT: por %xmm0, %xmm1
2640 ; SSE-NEXT: punpckhbw {{.*#+}} xmm7 = xmm7[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
2641 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[0,0,0,0]
2642 ; SSE-NEXT: movdqa %xmm3, %xmm2
2643 ; SSE-NEXT: pandn %xmm0, %xmm2
2644 ; SSE-NEXT: pand %xmm3, %xmm1
2645 ; SSE-NEXT: por %xmm1, %xmm2
2646 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
2647 ; SSE-NEXT: pand %xmm4, %xmm2
2648 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm11[8],xmm1[9],xmm11[9],xmm1[10],xmm11[10],xmm1[11],xmm11[11],xmm1[12],xmm11[12],xmm1[13],xmm11[13],xmm1[14],xmm11[14],xmm1[15],xmm11[15]
2649 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,0,0,0]
2650 ; SSE-NEXT: pandn %xmm0, %xmm4
2651 ; SSE-NEXT: por %xmm2, %xmm4
2652 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[3,3,3,3,4,5,6,7]
2653 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
2654 ; SSE-NEXT: movdqa %xmm3, %xmm2
2655 ; SSE-NEXT: pandn %xmm0, %xmm2
2656 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[1,1,2,2]
2657 ; SSE-NEXT: pand %xmm3, %xmm0
2658 ; SSE-NEXT: por %xmm0, %xmm2
2659 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [0,65535,65535,0,65535,65535,0,65535]
2660 ; SSE-NEXT: pand %xmm10, %xmm2
2661 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[1,1,2,2]
2662 ; SSE-NEXT: pandn %xmm0, %xmm10
2663 ; SSE-NEXT: por %xmm2, %xmm10
2664 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
2665 ; SSE-NEXT: pand %xmm2, %xmm10
2666 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,2]
2667 ; SSE-NEXT: pandn %xmm0, %xmm2
2668 ; SSE-NEXT: por %xmm10, %xmm2
2669 ; SSE-NEXT: movdqa %xmm2, %xmm10
2670 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[3,3,3,3]
2671 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm9[0,1,2,3,5,6,7,7]
2672 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,2,2,3]
2673 ; SSE-NEXT: pand %xmm3, %xmm2
2674 ; SSE-NEXT: pandn %xmm0, %xmm3
2675 ; SSE-NEXT: por %xmm2, %xmm3
2676 ; SSE-NEXT: movdqa %xmm12, %xmm2
2677 ; SSE-NEXT: pand %xmm12, %xmm3
2678 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[2,2,3,3]
2679 ; SSE-NEXT: pandn %xmm0, %xmm2
2680 ; SSE-NEXT: por %xmm3, %xmm2
2681 ; SSE-NEXT: pand %xmm14, %xmm2
2682 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,2,3,3]
2683 ; SSE-NEXT: pandn %xmm0, %xmm14
2684 ; SSE-NEXT: por %xmm2, %xmm14
2685 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2686 ; SSE-NEXT: movdqa %xmm14, 368(%rax)
2687 ; SSE-NEXT: movdqa %xmm10, 352(%rax)
2688 ; SSE-NEXT: movdqa %xmm4, 336(%rax)
2689 ; SSE-NEXT: movdqa %xmm8, 320(%rax)
2690 ; SSE-NEXT: movdqa %xmm13, 304(%rax)
2691 ; SSE-NEXT: movdqa %xmm15, 288(%rax)
2692 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2693 ; SSE-NEXT: movaps %xmm0, 272(%rax)
2694 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2695 ; SSE-NEXT: movaps %xmm0, 256(%rax)
2696 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2697 ; SSE-NEXT: movaps %xmm0, 240(%rax)
2698 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2699 ; SSE-NEXT: movaps %xmm0, 224(%rax)
2700 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2701 ; SSE-NEXT: movaps %xmm0, 208(%rax)
2702 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2703 ; SSE-NEXT: movaps %xmm0, 192(%rax)
2704 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2705 ; SSE-NEXT: movaps %xmm0, 176(%rax)
2706 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
2707 ; SSE-NEXT: movaps %xmm0, 160(%rax)
2708 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2709 ; SSE-NEXT: movaps %xmm0, 144(%rax)
2710 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2711 ; SSE-NEXT: movaps %xmm0, 128(%rax)
2712 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2713 ; SSE-NEXT: movaps %xmm0, 112(%rax)
2714 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2715 ; SSE-NEXT: movaps %xmm0, 96(%rax)
2716 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2717 ; SSE-NEXT: movaps %xmm0, 80(%rax)
2718 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2719 ; SSE-NEXT: movaps %xmm0, 64(%rax)
2720 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2721 ; SSE-NEXT: movaps %xmm0, 48(%rax)
2722 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2723 ; SSE-NEXT: movaps %xmm0, 32(%rax)
2724 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2725 ; SSE-NEXT: movaps %xmm0, 16(%rax)
2726 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2727 ; SSE-NEXT: movaps %xmm0, (%rax)
2728 ; SSE-NEXT: addq $184, %rsp
2729 ; SSE-NEXT: retq
2731 ; AVX1-ONLY-LABEL: store_i8_stride6_vf64:
2732 ; AVX1-ONLY: # %bb.0:
2733 ; AVX1-ONLY-NEXT: subq $200, %rsp
2734 ; AVX1-ONLY-NEXT: vmovdqa 48(%rsi), %xmm1
2735 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm2
2736 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
2737 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[3,3,3,3]
2738 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
2739 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
2740 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
2741 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm10 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
2742 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm10, %ymm2
2743 ; AVX1-ONLY-NEXT: vmovdqa 48(%rcx), %xmm4
2744 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdx), %xmm5
2745 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
2746 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm3[0,1,2,3,5,6,7,7]
2747 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,2,3]
2748 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
2749 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm4[1,0,2,2,4,5,6,7]
2750 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,1,0,1]
2751 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm5
2752 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm10, %ymm5
2753 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm5, %ymm6
2754 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm5
2755 ; AVX1-ONLY-NEXT: vmovdqa 48(%r8), %xmm2
2756 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = zero,zero,zero,zero,xmm2[8,u],zero,zero,zero,zero,xmm2[9,u],zero,zero,zero,zero
2757 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1],xmm7[2],xmm5[3,4],xmm7[5],xmm5[6,7]
2758 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = xmm5[0,1,2,3,4],zero,xmm5[6,7,8,9,10],zero,xmm5[12,13,14,15]
2759 ; AVX1-ONLY-NEXT: vmovdqa 48(%r9), %xmm5
2760 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm8 = zero,zero,zero,zero,zero,xmm5[8],zero,zero,zero,zero,zero,xmm5[9],zero,zero,zero,zero
2761 ; AVX1-ONLY-NEXT: vpor %xmm7, %xmm8, %xmm7
2762 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2763 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = zero,zero,xmm2[5,u],zero,zero,zero,zero,xmm2[6,u],zero,zero,zero,zero,xmm2[7,u]
2764 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0],xmm7[1],xmm6[2,3],xmm7[4],xmm6[5,6],xmm7[7]
2765 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm7 = [0,1,2,128,4,5,6,7,8,128,10,11,12,13,14,128]
2766 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm6, %xmm6
2767 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, %xmm8
2768 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = zero,zero,zero,xmm5[5],zero,zero,zero,zero,zero,xmm5[6],zero,zero,zero,zero,zero,xmm5[7]
2769 ; AVX1-ONLY-NEXT: vpor %xmm7, %xmm6, %xmm6
2770 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2771 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm1[1,1,2,2]
2772 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[3,3,3,3]
2773 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm6, %ymm1
2774 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm10, %ymm1
2775 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm4[3,3,3,3,4,5,6,7]
2776 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,4,4,4]
2777 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,7,7]
2778 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
2779 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm4
2780 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm10, %ymm4
2781 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm1, %ymm1
2782 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm4
2783 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm7 = <128,128,13,u,128,128,128,128,14,u,128,128,128,128,15,u>
2784 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm2, %xmm6
2785 ; AVX1-ONLY-NEXT: vmovdqa %xmm7, %xmm15
2786 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm6[1],xmm4[2,3],xmm6[4],xmm4[5,6],xmm6[7]
2787 ; AVX1-ONLY-NEXT: vpshufb %xmm8, %xmm4, %xmm4
2788 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = zero,zero,zero,xmm5[13],zero,zero,zero,zero,zero,xmm5[14],zero,zero,zero,zero,zero,xmm5[15]
2789 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm4, %xmm4
2790 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2791 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[10,u],zero,zero,zero,zero,xmm2[11,u],zero,zero,zero,zero,xmm2[12,u],zero,zero
2792 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm4[0],xmm1[1,2],xmm4[3],xmm1[4,5],xmm4[6],xmm1[7]
2793 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0],zero,xmm1[2,3,4,5,6],zero,xmm1[8,9,10,11,12],zero,xmm1[14,15]
2794 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm13 = [128,10,128,128,128,128,128,11,128,128,128,128,128,12,128,128]
2795 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm5, %xmm4
2796 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm1, %xmm1
2797 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2798 ; AVX1-ONLY-NEXT: vmovdqa 32(%rsi), %xmm6
2799 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm11
2800 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm11[8],xmm6[8],xmm11[9],xmm6[9],xmm11[10],xmm6[10],xmm11[11],xmm6[11],xmm11[12],xmm6[12],xmm11[13],xmm6[13],xmm11[14],xmm6[14],xmm11[15],xmm6[15]
2801 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm7[1,1,2,2]
2802 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm7[3,3,3,3]
2803 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm1
2804 ; AVX1-ONLY-NEXT: vmovdqa 32(%rcx), %xmm12
2805 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdx), %xmm14
2806 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm14[8],xmm12[8],xmm14[9],xmm12[9],xmm14[10],xmm12[10],xmm14[11],xmm12[11],xmm14[12],xmm12[12],xmm14[13],xmm12[13],xmm14[14],xmm12[14],xmm14[15],xmm12[15]
2807 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm8 = xmm4[3,3,3,3,4,5,6,7]
2808 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,4,4,4]
2809 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm9 = xmm4[0,1,2,3,5,6,7,7]
2810 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,2,3]
2811 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm8, %ymm8
2812 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm10, %ymm1
2813 ; AVX1-ONLY-NEXT: vandnps %ymm8, %ymm10, %ymm8
2814 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm8, %ymm9
2815 ; AVX1-ONLY-NEXT: vmovdqa 32(%r8), %xmm1
2816 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm9, %xmm8
2817 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm1, %xmm10
2818 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0],xmm10[1],xmm8[2,3],xmm10[4],xmm8[5,6],xmm10[7]
2819 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm10 = xmm8[0,1,2],zero,xmm8[4,5,6,7,8],zero,xmm8[10,11,12,13,14],zero
2820 ; AVX1-ONLY-NEXT: vmovdqa 32(%r9), %xmm8
2821 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm15 = zero,zero,zero,xmm8[13],zero,zero,zero,zero,zero,xmm8[14],zero,zero,zero,zero,zero,xmm8[15]
2822 ; AVX1-ONLY-NEXT: vpor %xmm15, %xmm10, %xmm10
2823 ; AVX1-ONLY-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2824 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm10 = xmm1[10,u],zero,zero,zero,zero,xmm1[11,u],zero,zero,zero,zero,xmm1[12,u],zero,zero
2825 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm10[0],xmm9[1,2],xmm10[3],xmm9[4,5],xmm10[6],xmm9[7]
2826 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm15 = [0,128,2,3,4,5,6,128,8,9,10,11,12,128,14,15]
2827 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm9, %xmm9
2828 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm8, %xmm10
2829 ; AVX1-ONLY-NEXT: vpor %xmm10, %xmm9, %xmm9
2830 ; AVX1-ONLY-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2831 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm0[0,0,1,1]
2832 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,2,2]
2833 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm9, %ymm0
2834 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm9 = xmm3[1,0,2,2,4,5,6,7]
2835 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[0,1,0,1]
2836 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[3,3,3,3,4,5,6,7]
2837 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
2838 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm9, %ymm3
2839 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm9 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
2840 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm9, %ymm0
2841 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm9, %ymm3
2842 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm0, %ymm0
2843 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm3
2844 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm9 = xmm2[2,u],zero,zero,zero,zero,xmm2[3,u],zero,zero,zero,zero,xmm2[4,u],zero,zero
2845 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm9[0],xmm3[1,2],xmm9[3],xmm3[4,5],xmm9[6],xmm3[7]
2846 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm3, %xmm3
2847 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm13 = [128,2,128,128,128,128,128,3,128,128,128,128,128,4,128,128]
2848 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm5, %xmm9
2849 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm9, %xmm3
2850 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2851 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm10 = <128,128,128,128,0,u,128,128,128,128,1,u,128,128,128,128>
2852 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm2, %xmm2
2853 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm2[2],xmm0[3,4],xmm2[5],xmm0[6,7]
2854 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,128,6,7,8,9,10,128,12,13,14,15]
2855 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm0, %xmm0
2856 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm3 = [128,128,128,128,128,0,128,128,128,128,128,1,128,128,128,128]
2857 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm5, %xmm2
2858 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, %xmm15
2859 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm0, %xmm0
2860 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2861 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm11[0],xmm6[0],xmm11[1],xmm6[1],xmm11[2],xmm6[2],xmm11[3],xmm6[3],xmm11[4],xmm6[4],xmm11[5],xmm6[5],xmm11[6],xmm6[6],xmm11[7],xmm6[7]
2862 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,0,1,1]
2863 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[1,1,2,2]
2864 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
2865 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3],xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
2866 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm3[1,0,2,2,4,5,6,7]
2867 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,1,0,1]
2868 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm3[3,3,3,3,4,5,6,7]
2869 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,4,4,4]
2870 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
2871 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm6 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
2872 ; AVX1-ONLY-NEXT: vandps %ymm6, %ymm2, %ymm2
2873 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm6, %ymm5
2874 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm2, %ymm2
2875 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm2, %xmm5
2876 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm6 = xmm1[2,u],zero,zero,zero,zero,xmm1[3,u],zero,zero,zero,zero,xmm1[4,u],zero,zero
2877 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0],xmm5[1,2],xmm6[3],xmm5[4,5],xmm6[6],xmm5[7]
2878 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[0],zero,xmm5[2,3,4,5,6],zero,xmm5[8,9,10,11,12],zero,xmm5[14,15]
2879 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm8, %xmm6
2880 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm5, %xmm5
2881 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2882 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm1, %xmm5
2883 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm5[2],xmm2[3,4],xmm5[5],xmm2[6,7]
2884 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm2
2885 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm8, %xmm5
2886 ; AVX1-ONLY-NEXT: vpor %xmm5, %xmm2, %xmm2
2887 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2888 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
2889 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm7[0,0,1,1]
2890 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
2891 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,5,6,7,7]
2892 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,2,3]
2893 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm4[1,0,2,2,4,5,6,7]
2894 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
2895 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
2896 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm13 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
2897 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm13, %ymm0
2898 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm13, %ymm2
2899 ; AVX1-ONLY-NEXT: vmovaps %ymm13, %ymm15
2900 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm2, %ymm0
2901 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm2
2902 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm5 = <128,128,128,128,8,u,128,128,128,128,9,u,128,128,128,128>
2903 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm1, %xmm3
2904 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm3[2],xmm2[3,4],xmm3[5],xmm2[6,7]
2905 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm2
2906 ; AVX1-ONLY-NEXT: vmovdqa %xmm9, %xmm14
2907 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm6 = [128,128,128,128,128,8,128,128,128,128,128,9,128,128,128,128]
2908 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm8, %xmm3
2909 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm2, %xmm2
2910 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2911 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm7 = <128,128,5,u,128,128,128,128,6,u,128,128,128,128,7,u>
2912 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm1, %xmm1
2913 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
2914 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm13 = [0,1,2,128,4,5,6,7,8,128,10,11,12,13,14,128]
2915 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm0, %xmm0
2916 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm9 = [128,128,128,5,128,128,128,128,128,6,128,128,128,128,128,7]
2917 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm8, %xmm1
2918 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm0, %xmm0
2919 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
2920 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm0
2921 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm1
2922 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
2923 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
2924 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm11[3,3,3,3]
2925 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,0,1,1]
2926 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
2927 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm2
2928 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm3
2929 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
2930 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
2931 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm10[0,1,2,3,5,6,7,7]
2932 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,2,3]
2933 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm4[1,0,2,2,4,5,6,7]
2934 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
2935 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
2936 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm15, %ymm1
2937 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm15, %ymm2
2938 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm2, %ymm1
2939 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm3
2940 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm1, %xmm2
2941 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm3, %xmm5
2942 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm5[2],xmm2[3,4],xmm5[5],xmm2[6,7]
2943 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm2, %xmm5
2944 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm2
2945 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm2, %xmm6
2946 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm5, %xmm5
2947 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2948 ; AVX1-ONLY-NEXT: vpshufb %xmm7, %xmm3, %xmm5
2949 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm5[1],xmm1[2,3],xmm5[4],xmm1[5,6],xmm5[7]
2950 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm1, %xmm1
2951 ; AVX1-ONLY-NEXT: vpshufb %xmm9, %xmm2, %xmm5
2952 ; AVX1-ONLY-NEXT: vpor %xmm5, %xmm1, %xmm1
2953 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2954 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,2]
2955 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
2956 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
2957 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm4[3,3,3,3,4,5,6,7]
2958 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
2959 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,7,7]
2960 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
2961 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm1
2962 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm15, %ymm0
2963 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm15, %ymm1
2964 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm0
2965 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm0, %xmm1
2966 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm14 = <128,128,13,u,128,128,128,128,14,u,128,128,128,128,15,u>
2967 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm3, %xmm4
2968 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm4[1],xmm1[2,3],xmm4[4],xmm1[5,6],xmm4[7]
2969 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm1, %xmm1
2970 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm15 = [128,128,128,13,128,128,128,128,128,14,128,128,128,128,128,15]
2971 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm2, %xmm4
2972 ; AVX1-ONLY-NEXT: vpor %xmm4, %xmm1, %xmm1
2973 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2974 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm13 = <10,u,128,128,128,128,11,u,128,128,128,128,12,u,128,128>
2975 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm3, %xmm1
2976 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2],xmm1[3],xmm0[4,5],xmm1[6],xmm0[7]
2977 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm13 = [0,128,2,3,4,5,6,128,8,9,10,11,12,128,14,15]
2978 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm0, %xmm0
2979 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = zero,xmm2[10],zero,zero,zero,zero,zero,xmm2[11],zero,zero,zero,zero,zero,xmm2[12],zero,zero
2980 ; AVX1-ONLY-NEXT: vpor %xmm1, %xmm0, %xmm0
2981 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2982 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm8
2983 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm7
2984 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm7[8],xmm8[8],xmm7[9],xmm8[9],xmm7[10],xmm8[10],xmm7[11],xmm8[11],xmm7[12],xmm8[12],xmm7[13],xmm8[13],xmm7[14],xmm8[14],xmm7[15],xmm8[15]
2985 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2986 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[1,1,2,2]
2987 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[3,3,3,3]
2988 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
2989 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm6
2990 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm5
2991 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
2992 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm9[3,3,3,3,4,5,6,7]
2993 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
2994 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm12 = xmm9[0,1,2,3,5,6,7,7]
2995 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,2,3]
2996 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm1, %ymm1
2997 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm4 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
2998 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm0, %ymm0
2999 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm4, %ymm1
3000 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm0, %ymm12
3001 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm1
3002 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm1, %xmm0
3003 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm12, %xmm14
3004 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm14[0],xmm0[1],xmm14[2,3],xmm0[4],xmm14[5,6],xmm0[7]
3005 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm14 = xmm0[0,1,2],zero,xmm0[4,5,6,7,8],zero,xmm0[10,11,12,13,14],zero
3006 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm0
3007 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm0, %xmm15
3008 ; AVX1-ONLY-NEXT: vpor %xmm15, %xmm14, %xmm4
3009 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3010 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm15 = xmm1[10,u],zero,zero,zero,zero,xmm1[11,u],zero,zero,zero,zero,xmm1[12,u],zero,zero
3011 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm15[0],xmm12[1,2],xmm15[3],xmm12[4,5],xmm15[6],xmm12[7]
3012 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, %xmm4
3013 ; AVX1-ONLY-NEXT: vpshufb %xmm13, %xmm12, %xmm12
3014 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm15 = zero,xmm0[10],zero,zero,zero,zero,zero,xmm0[11],zero,zero,zero,zero,zero,xmm0[12],zero,zero
3015 ; AVX1-ONLY-NEXT: vpor %xmm15, %xmm12, %xmm12
3016 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3017 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm11[0,0,1,1]
3018 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[1,1,2,2]
3019 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm15, %ymm11
3020 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm15 = xmm10[1,0,2,2,4,5,6,7]
3021 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[0,1,0,1]
3022 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[3,3,3,3,4,5,6,7]
3023 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,4,4,4]
3024 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm15, %ymm10
3025 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm13 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
3026 ; AVX1-ONLY-NEXT: vandps %ymm13, %ymm11, %ymm11
3027 ; AVX1-ONLY-NEXT: vandnps %ymm10, %ymm13, %ymm10
3028 ; AVX1-ONLY-NEXT: vorps %ymm10, %ymm11, %ymm11
3029 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm11, %xmm10
3030 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm12 = <2,u,128,128,128,128,3,u,128,128,128,128,4,u,128,128>
3031 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm3, %xmm15
3032 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm15[0],xmm10[1,2],xmm15[3],xmm10[4,5],xmm15[6],xmm10[7]
3033 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm10, %xmm10
3034 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm14 = [128,2,128,128,128,128,128,3,128,128,128,128,128,4,128,128]
3035 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm2, %xmm15
3036 ; AVX1-ONLY-NEXT: vpor %xmm15, %xmm10, %xmm4
3037 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3038 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm15 = <128,128,128,128,0,u,128,128,128,128,1,u,128,128,128,128>
3039 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm3, %xmm3
3040 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm11[0,1],xmm3[2],xmm11[3,4],xmm3[5],xmm11[6,7]
3041 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm11 = [0,1,2,3,4,128,6,7,8,9,10,128,12,13,14,15]
3042 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm3, %xmm3
3043 ; AVX1-ONLY-NEXT: vmovdqa {{.*#+}} xmm10 = [128,128,128,128,128,0,128,128,128,128,128,1,128,128,128,128]
3044 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm2, %xmm2
3045 ; AVX1-ONLY-NEXT: vpor %xmm2, %xmm3, %xmm2
3046 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
3047 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm7[0,0,1,1]
3048 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm7[1,1,2,2]
3049 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm3, %ymm3
3050 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3],xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
3051 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm4[1,0,2,2,4,5,6,7]
3052 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[0,1,0,1]
3053 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm8 = xmm4[3,3,3,3,4,5,6,7]
3054 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,4,4,4]
3055 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm6, %ymm6
3056 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm13, %ymm3
3057 ; AVX1-ONLY-NEXT: vandnps %ymm6, %ymm13, %ymm6
3058 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm3, %ymm6
3059 ; AVX1-ONLY-NEXT: vpshufb %xmm12, %xmm1, %xmm3
3060 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm6, %xmm8
3061 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0],xmm8[1,2],xmm3[3],xmm8[4,5],xmm3[6],xmm8[7]
3062 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[0],zero,xmm3[2,3,4,5,6],zero,xmm3[8,9,10,11,12],zero,xmm3[14,15]
3063 ; AVX1-ONLY-NEXT: vpshufb %xmm14, %xmm0, %xmm8
3064 ; AVX1-ONLY-NEXT: vpor %xmm3, %xmm8, %xmm3
3065 ; AVX1-ONLY-NEXT: vpshufb %xmm15, %xmm1, %xmm8
3066 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1],xmm8[2],xmm6[3,4],xmm8[5],xmm6[6,7]
3067 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm6, %xmm6
3068 ; AVX1-ONLY-NEXT: vpshufb %xmm10, %xmm0, %xmm8
3069 ; AVX1-ONLY-NEXT: vpor %xmm6, %xmm8, %xmm6
3070 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[3,3,3,3]
3071 ; AVX1-ONLY-NEXT: vpermilps $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
3072 ; AVX1-ONLY-NEXT: # xmm8 = mem[0,0,1,1]
3073 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm7, %ymm7
3074 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,7,7]
3075 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
3076 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm9[1,0,2,2,4,5,6,7]
3077 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,1,0,1]
3078 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
3079 ; AVX1-ONLY-NEXT: vmovaps {{.*#+}} ymm8 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
3080 ; AVX1-ONLY-NEXT: vandnps %ymm7, %ymm8, %ymm5
3081 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm8, %ymm4
3082 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm4, %ymm4
3083 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm5 = zero,zero,zero,zero,xmm1[8,u],zero,zero,zero,zero,xmm1[9,u],zero,zero,zero,zero
3084 ; AVX1-ONLY-NEXT: vextractf128 $1, %ymm4, %xmm7
3085 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm7[0,1],xmm5[2],xmm7[3,4],xmm5[5],xmm7[6,7]
3086 ; AVX1-ONLY-NEXT: vpshufb %xmm11, %xmm5, %xmm5
3087 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm7 = zero,zero,zero,zero,zero,xmm0[8],zero,zero,zero,zero,zero,xmm0[9],zero,zero,zero,zero
3088 ; AVX1-ONLY-NEXT: vpor %xmm7, %xmm5, %xmm5
3089 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,xmm1[5,u],zero,zero,zero,zero,xmm1[6,u],zero,zero,zero,zero,xmm1[7,u]
3090 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm4[0],xmm1[1],xmm4[2,3],xmm1[4],xmm4[5,6],xmm1[7]
3091 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0,1,2],zero,xmm1[4,5,6,7,8],zero,xmm1[10,11,12,13,14],zero
3092 ; AVX1-ONLY-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,zero,xmm0[5],zero,zero,zero,zero,zero,xmm0[6],zero,zero,zero,zero,zero,xmm0[7]
3093 ; AVX1-ONLY-NEXT: vpor %xmm0, %xmm1, %xmm0
3094 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
3095 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, 32(%rax)
3096 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, 48(%rax)
3097 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, (%rax)
3098 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, 16(%rax)
3099 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, 96(%rax)
3100 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3101 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 112(%rax)
3102 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3103 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 64(%rax)
3104 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3105 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 80(%rax)
3106 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3107 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 160(%rax)
3108 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3109 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 176(%rax)
3110 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3111 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 128(%rax)
3112 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3113 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 144(%rax)
3114 ; AVX1-ONLY-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
3115 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 224(%rax)
3116 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3117 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 240(%rax)
3118 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3119 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 192(%rax)
3120 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3121 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 208(%rax)
3122 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3123 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 288(%rax)
3124 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3125 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 304(%rax)
3126 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3127 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 256(%rax)
3128 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3129 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 272(%rax)
3130 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3131 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 352(%rax)
3132 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3133 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 368(%rax)
3134 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3135 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 320(%rax)
3136 ; AVX1-ONLY-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3137 ; AVX1-ONLY-NEXT: vmovaps %xmm0, 336(%rax)
3138 ; AVX1-ONLY-NEXT: addq $200, %rsp
3139 ; AVX1-ONLY-NEXT: vzeroupper
3140 ; AVX1-ONLY-NEXT: retq
3142 ; AVX2-SLOW-LABEL: store_i8_stride6_vf64:
3143 ; AVX2-SLOW: # %bb.0:
3144 ; AVX2-SLOW-NEXT: subq $664, %rsp # imm = 0x298
3145 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %ymm6
3146 ; AVX2-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3147 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %ymm5
3148 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3149 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm1
3150 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %xmm7
3151 ; AVX2-SLOW-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3152 ; AVX2-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm2 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
3153 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm1, %xmm0
3154 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, %xmm8
3155 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3156 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm3
3157 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %xmm9
3158 ; AVX2-SLOW-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3159 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm3, %xmm1
3160 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, %xmm11
3161 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, (%rsp) # 16-byte Spill
3162 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
3163 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
3164 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm4
3165 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %xmm12
3166 ; AVX2-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm3 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3167 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm4, %xmm1
3168 ; AVX2-SLOW-NEXT: vmovdqa %xmm4, %xmm15
3169 ; AVX2-SLOW-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3170 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm10
3171 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm13
3172 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm10, %xmm4
3173 ; AVX2-SLOW-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3174 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm4[8],xmm1[8],xmm4[9],xmm1[9],xmm4[10],xmm1[10],xmm4[11],xmm1[11],xmm4[12],xmm1[12],xmm4[13],xmm1[13],xmm4[14],xmm1[14],xmm4[15],xmm1[15]
3175 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm1[0,0,0,1]
3176 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm1 = <255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255>
3177 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm0, %ymm4, %ymm0
3178 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm7, %xmm4
3179 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm9, %xmm2
3180 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3],xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
3181 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
3182 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm12, %xmm4
3183 ; AVX2-SLOW-NEXT: vmovdqa %xmm12, %xmm14
3184 ; AVX2-SLOW-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3185 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm13, %xmm3
3186 ; AVX2-SLOW-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3187 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm4[8],xmm3[9],xmm4[9],xmm3[10],xmm4[10],xmm3[11],xmm4[11],xmm3[12],xmm4[12],xmm3[13],xmm4[13],xmm3[14],xmm4[14],xmm3[15],xmm4[15]
3188 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
3189 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm2, %ymm3, %ymm2
3190 ; AVX2-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm4 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
3191 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm5, %ymm3
3192 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm6, %ymm5
3193 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm5[0],ymm3[0],ymm5[1],ymm3[1],ymm5[2],ymm3[2],ymm5[3],ymm3[3],ymm5[4],ymm3[4],ymm5[5],ymm3[5],ymm5[6],ymm3[6],ymm5[7],ymm3[7],ymm5[16],ymm3[16],ymm5[17],ymm3[17],ymm5[18],ymm3[18],ymm5[19],ymm3[19],ymm5[20],ymm3[20],ymm5[21],ymm3[21],ymm5[22],ymm3[22],ymm5[23],ymm3[23]
3194 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %ymm7
3195 ; AVX2-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3196 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %ymm6
3197 ; AVX2-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3198 ; AVX2-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm5 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3199 ; AVX2-SLOW-NEXT: vpshufb %ymm5, %ymm6, %ymm6
3200 ; AVX2-SLOW-NEXT: vpshufb %ymm5, %ymm7, %ymm7
3201 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[4],ymm6[4],ymm7[5],ymm6[5],ymm7[6],ymm6[6],ymm7[7],ymm6[7],ymm7[16],ymm6[16],ymm7[17],ymm6[17],ymm7[18],ymm6[18],ymm7[19],ymm6[19],ymm7[20],ymm6[20],ymm7[21],ymm6[21],ymm7[22],ymm6[22],ymm7[23],ymm6[23]
3202 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3203 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
3204 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm3, %ymm6, %ymm3
3205 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %ymm7
3206 ; AVX2-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3207 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %ymm6
3208 ; AVX2-SLOW-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3209 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm6, %ymm6
3210 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm7, %ymm4
3211 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm6[0],ymm4[1],ymm6[1],ymm4[2],ymm6[2],ymm4[3],ymm6[3],ymm4[4],ymm6[4],ymm4[5],ymm6[5],ymm4[6],ymm6[6],ymm4[7],ymm6[7],ymm4[16],ymm6[16],ymm4[17],ymm6[17],ymm4[18],ymm6[18],ymm4[19],ymm6[19],ymm4[20],ymm6[20],ymm4[21],ymm6[21],ymm4[22],ymm6[22],ymm4[23],ymm6[23]
3212 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %ymm7
3213 ; AVX2-SLOW-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3214 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %ymm9
3215 ; AVX2-SLOW-NEXT: vpshufb %ymm5, %ymm9, %ymm6
3216 ; AVX2-SLOW-NEXT: vmovdqa %ymm9, %ymm12
3217 ; AVX2-SLOW-NEXT: vpshufb %ymm5, %ymm7, %ymm5
3218 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[4],ymm6[4],ymm5[5],ymm6[5],ymm5[6],ymm6[6],ymm5[7],ymm6[7],ymm5[16],ymm6[16],ymm5[17],ymm6[17],ymm5[18],ymm6[18],ymm5[19],ymm6[19],ymm5[20],ymm6[20],ymm5[21],ymm6[21],ymm5[22],ymm6[22],ymm5[23],ymm6[23]
3219 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3220 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
3221 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm4, %ymm5, %ymm1
3222 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm6
3223 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u>
3224 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm6, %xmm5
3225 ; AVX2-SLOW-NEXT: vmovdqa %xmm6, %xmm7
3226 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
3227 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255]
3228 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm0, %ymm5, %ymm0
3229 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %xmm5
3230 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3231 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm5, %xmm4
3232 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
3233 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm2, %ymm4, %ymm2
3234 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %ymm5
3235 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3236 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
3237 ; AVX2-SLOW-NEXT: # ymm4 = mem[0,1,0,1]
3238 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm5, %ymm5
3239 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
3240 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm3, %ymm5, %ymm3
3241 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %ymm5
3242 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3243 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm5, %ymm4
3244 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3245 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm1, %ymm4, %ymm1
3246 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm5
3247 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3248 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9>
3249 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm5, %xmm5
3250 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
3251 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
3252 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm0, %ymm5, %ymm0
3253 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3254 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %xmm5
3255 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm5, %xmm0
3256 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, %xmm9
3257 ; AVX2-SLOW-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3258 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
3259 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm2, %ymm0, %ymm0
3260 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3261 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %ymm2
3262 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3263 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
3264 ; AVX2-SLOW-NEXT: # ymm0 = mem[0,1,0,1]
3265 ; AVX2-SLOW-NEXT: vpshufb %ymm0, %ymm2, %ymm2
3266 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
3267 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm3, %ymm2, %ymm2
3268 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3269 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %ymm2
3270 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3271 ; AVX2-SLOW-NEXT: vpshufb %ymm0, %ymm2, %ymm0
3272 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
3273 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm1, %ymm0, %ymm0
3274 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3275 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm10[0],xmm15[0],xmm10[1],xmm15[1],xmm10[2],xmm15[2],xmm10[3],xmm15[3],xmm10[4],xmm15[4],xmm10[5],xmm15[5],xmm10[6],xmm15[6],xmm10[7],xmm15[7]
3276 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,3,2,1,4,5,6,7]
3277 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,6,5]
3278 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,0,1]
3279 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm11[0],xmm8[0],xmm11[1],xmm8[1],xmm11[2],xmm8[2],xmm11[3],xmm8[3],xmm11[4],xmm8[4],xmm11[5],xmm8[5],xmm11[6],xmm8[6],xmm11[7],xmm8[7]
3280 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[1,0,3,2,4,5,6,7]
3281 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
3282 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm0[0,0,0,1]
3283 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255>
3284 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm11
3285 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3],xmm13[4],xmm14[4],xmm13[5],xmm14[5],xmm13[6],xmm14[6],xmm13[7],xmm14[7]
3286 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,3,2,1,4,5,6,7]
3287 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,5]
3288 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
3289 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3290 ; AVX2-SLOW-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
3291 ; AVX2-SLOW-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
3292 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[1,0,3,2,4,5,6,7]
3293 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
3294 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
3295 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm6
3296 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3297 ; AVX2-SLOW-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
3298 ; AVX2-SLOW-NEXT: # ymm1 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[4],mem[4],ymm1[5],mem[5],ymm1[6],mem[6],ymm1[7],mem[7],ymm1[16],mem[16],ymm1[17],mem[17],ymm1[18],mem[18],ymm1[19],mem[19],ymm1[20],mem[20],ymm1[21],mem[21],ymm1[22],mem[22],ymm1[23],mem[23]
3299 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
3300 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
3301 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
3302 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
3303 ; AVX2-SLOW-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm2 # 32-byte Folded Reload
3304 ; AVX2-SLOW-NEXT: # ymm2 = ymm15[0],mem[0],ymm15[1],mem[1],ymm15[2],mem[2],ymm15[3],mem[3],ymm15[4],mem[4],ymm15[5],mem[5],ymm15[6],mem[6],ymm15[7],mem[7],ymm15[16],mem[16],ymm15[17],mem[17],ymm15[18],mem[18],ymm15[19],mem[19],ymm15[20],mem[20],ymm15[21],mem[21],ymm15[22],mem[22],ymm15[23],mem[23]
3305 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[1,0,3,2,4,5,6,7,9,8,11,10,12,13,14,15]
3306 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
3307 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
3308 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
3309 ; AVX2-SLOW-NEXT: vmovdqa %ymm12, %ymm13
3310 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
3311 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm12[0],ymm13[0],ymm12[1],ymm13[1],ymm12[2],ymm13[2],ymm12[3],ymm13[3],ymm12[4],ymm13[4],ymm12[5],ymm13[5],ymm12[6],ymm13[6],ymm12[7],ymm13[7],ymm12[16],ymm13[16],ymm12[17],ymm13[17],ymm12[18],ymm13[18],ymm12[19],ymm13[19],ymm12[20],ymm13[20],ymm12[21],ymm13[21],ymm12[22],ymm13[22],ymm12[23],ymm13[23]
3312 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
3313 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
3314 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
3315 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
3316 ; AVX2-SLOW-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
3317 ; AVX2-SLOW-NEXT: # ymm3 = ymm3[0],mem[0],ymm3[1],mem[1],ymm3[2],mem[2],ymm3[3],mem[3],ymm3[4],mem[4],ymm3[5],mem[5],ymm3[6],mem[6],ymm3[7],mem[7],ymm3[16],mem[16],ymm3[17],mem[17],ymm3[18],mem[18],ymm3[19],mem[19],ymm3[20],mem[20],ymm3[21],mem[21],ymm3[22],mem[22],ymm3[23],mem[23]
3318 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[1,0,3,2,4,5,6,7,9,8,11,10,12,13,14,15]
3319 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
3320 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3321 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm0
3322 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm2 = <2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u>
3323 ; AVX2-SLOW-NEXT: vmovdqa %xmm7, %xmm14
3324 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm7, %xmm3
3325 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
3326 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255]
3327 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm11, %ymm3, %ymm3
3328 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
3329 ; AVX2-SLOW-NEXT: vpshufb %xmm2, %xmm7, %xmm2
3330 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
3331 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm6, %ymm2, %ymm2
3332 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
3333 ; AVX2-SLOW-NEXT: # ymm6 = mem[0,1,0,1]
3334 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
3335 ; AVX2-SLOW-NEXT: vpshufb %ymm6, %ymm5, %ymm11
3336 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
3337 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm1, %ymm11, %ymm1
3338 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
3339 ; AVX2-SLOW-NEXT: vpshufb %ymm6, %ymm8, %ymm6
3340 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
3341 ; AVX2-SLOW-NEXT: vpblendvb %ymm4, %ymm0, %ymm6, %ymm5
3342 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4>
3343 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3344 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm11, %xmm6
3345 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
3346 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
3347 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm3, %ymm6, %ymm3
3348 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3349 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm9, %xmm3
3350 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
3351 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm2
3352 ; AVX2-SLOW-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3353 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
3354 ; AVX2-SLOW-NEXT: # ymm2 = mem[0,1,0,1]
3355 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
3356 ; AVX2-SLOW-NEXT: vpshufb %ymm2, %ymm9, %ymm3
3357 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3358 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm1, %ymm3, %ymm1
3359 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3360 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
3361 ; AVX2-SLOW-NEXT: vpshufb %ymm2, %ymm10, %ymm1
3362 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
3363 ; AVX2-SLOW-NEXT: vpblendvb %ymm0, %ymm5, %ymm1, %ymm0
3364 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3365 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3366 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
3367 ; AVX2-SLOW-NEXT: # xmm0 = xmm0[8],mem[8],xmm0[9],mem[9],xmm0[10],mem[10],xmm0[11],mem[11],xmm0[12],mem[12],xmm0[13],mem[13],xmm0[14],mem[14],xmm0[15],mem[15]
3368 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3369 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
3370 ; AVX2-SLOW-NEXT: # xmm1 = xmm1[8],mem[8],xmm1[9],mem[9],xmm1[10],mem[10],xmm1[11],mem[11],xmm1[12],mem[12],xmm1[13],mem[13],xmm1[14],mem[14],xmm1[15],mem[15]
3371 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm5 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
3372 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm0, %xmm0
3373 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm0[0,0,0,1]
3374 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm3 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
3375 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm1, %xmm1
3376 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm1[0,0,0,1]
3377 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm1 = <u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u>
3378 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm2, %ymm0, %ymm6
3379 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3380 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
3381 ; AVX2-SLOW-NEXT: # xmm0 = xmm0[8],mem[8],xmm0[9],mem[9],xmm0[10],mem[10],xmm0[11],mem[11],xmm0[12],mem[12],xmm0[13],mem[13],xmm0[14],mem[14],xmm0[15],mem[15]
3382 ; AVX2-SLOW-NEXT: vpshufb %xmm5, %xmm0, %xmm0
3383 ; AVX2-SLOW-NEXT: vmovdqa (%rsp), %xmm2 # 16-byte Reload
3384 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm5 # 16-byte Folded Reload
3385 ; AVX2-SLOW-NEXT: # xmm5 = xmm2[8],mem[8],xmm2[9],mem[9],xmm2[10],mem[10],xmm2[11],mem[11],xmm2[12],mem[12],xmm2[13],mem[13],xmm2[14],mem[14],xmm2[15],mem[15]
3386 ; AVX2-SLOW-NEXT: vpshufb %xmm3, %xmm5, %xmm3
3387 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
3388 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
3389 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm0, %ymm3, %ymm4
3390 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3391 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm3 # 32-byte Folded Reload
3392 ; AVX2-SLOW-NEXT: # ymm3 = ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15],ymm0[24],mem[24],ymm0[25],mem[25],ymm0[26],mem[26],ymm0[27],mem[27],ymm0[28],mem[28],ymm0[29],mem[29],ymm0[30],mem[30],ymm0[31],mem[31]
3393 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm5 # 32-byte Folded Reload
3394 ; AVX2-SLOW-NEXT: # ymm5 = ymm15[8],mem[8],ymm15[9],mem[9],ymm15[10],mem[10],ymm15[11],mem[11],ymm15[12],mem[12],ymm15[13],mem[13],ymm15[14],mem[14],ymm15[15],mem[15],ymm15[24],mem[24],ymm15[25],mem[25],ymm15[26],mem[26],ymm15[27],mem[27],ymm15[28],mem[28],ymm15[29],mem[29],ymm15[30],mem[30],ymm15[31],mem[31]
3395 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
3396 ; AVX2-SLOW-NEXT: # ymm0 = mem[0,1,0,1]
3397 ; AVX2-SLOW-NEXT: vpshufb %ymm0, %ymm3, %ymm3
3398 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3399 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
3400 ; AVX2-SLOW-NEXT: # ymm2 = mem[0,1,0,1]
3401 ; AVX2-SLOW-NEXT: vpshufb %ymm2, %ymm5, %ymm5
3402 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
3403 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm3, %ymm5, %ymm3
3404 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm5 = ymm12[8],ymm13[8],ymm12[9],ymm13[9],ymm12[10],ymm13[10],ymm12[11],ymm13[11],ymm12[12],ymm13[12],ymm12[13],ymm13[13],ymm12[14],ymm13[14],ymm12[15],ymm13[15],ymm12[24],ymm13[24],ymm12[25],ymm13[25],ymm12[26],ymm13[26],ymm12[27],ymm13[27],ymm12[28],ymm13[28],ymm12[29],ymm13[29],ymm12[30],ymm13[30],ymm12[31],ymm13[31]
3405 ; AVX2-SLOW-NEXT: vpshufb %ymm0, %ymm5, %ymm0
3406 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
3407 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm5 # 32-byte Folded Reload
3408 ; AVX2-SLOW-NEXT: # ymm5 = ymm5[8],mem[8],ymm5[9],mem[9],ymm5[10],mem[10],ymm5[11],mem[11],ymm5[12],mem[12],ymm5[13],mem[13],ymm5[14],mem[14],ymm5[15],mem[15],ymm5[24],mem[24],ymm5[25],mem[25],ymm5[26],mem[26],ymm5[27],mem[27],ymm5[28],mem[28],ymm5[29],mem[29],ymm5[30],mem[30],ymm5[31],mem[31]
3409 ; AVX2-SLOW-NEXT: vpshufb %ymm2, %ymm5, %ymm2
3410 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
3411 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
3412 ; AVX2-SLOW-NEXT: vpblendvb %ymm1, %ymm0, %ymm2, %ymm0
3413 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm1 = <10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u>
3414 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm7, %xmm2
3415 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
3416 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm5 = [0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0]
3417 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm6, %ymm2, %ymm2
3418 ; AVX2-SLOW-NEXT: vpshufb %xmm1, %xmm14, %xmm1
3419 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
3420 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm4, %ymm1, %ymm1
3421 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
3422 ; AVX2-SLOW-NEXT: # ymm4 = mem[0,1,0,1]
3423 ; AVX2-SLOW-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
3424 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm6, %ymm6
3425 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
3426 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm3, %ymm6, %ymm3
3427 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm8, %ymm4
3428 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3429 ; AVX2-SLOW-NEXT: vpblendvb %ymm5, %ymm0, %ymm4, %ymm0
3430 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15>
3431 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3432 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm5, %xmm5
3433 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
3434 ; AVX2-SLOW-NEXT: vmovdqa {{.*#+}} ymm6 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
3435 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm2, %ymm5, %ymm2
3436 ; AVX2-SLOW-NEXT: vpshufb %xmm4, %xmm11, %xmm4
3437 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
3438 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm1, %ymm4, %ymm1
3439 ; AVX2-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
3440 ; AVX2-SLOW-NEXT: # ymm4 = mem[0,1,0,1]
3441 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm9, %ymm5
3442 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
3443 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm3, %ymm5, %ymm3
3444 ; AVX2-SLOW-NEXT: vpshufb %ymm4, %ymm10, %ymm4
3445 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3446 ; AVX2-SLOW-NEXT: vpblendvb %ymm6, %ymm0, %ymm4, %ymm0
3447 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3448 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 160(%rax)
3449 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3450 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 128(%rax)
3451 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, 352(%rax)
3452 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3453 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 320(%rax)
3454 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3455 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 96(%rax)
3456 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 64(%rax)
3457 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3458 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 224(%rax)
3459 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3460 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 288(%rax)
3461 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, 256(%rax)
3462 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3463 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
3464 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3465 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 192(%rax)
3466 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3467 ; AVX2-SLOW-NEXT: vmovaps %ymm0, (%rax)
3468 ; AVX2-SLOW-NEXT: addq $664, %rsp # imm = 0x298
3469 ; AVX2-SLOW-NEXT: vzeroupper
3470 ; AVX2-SLOW-NEXT: retq
3472 ; AVX2-FAST-LABEL: store_i8_stride6_vf64:
3473 ; AVX2-FAST: # %bb.0:
3474 ; AVX2-FAST-NEXT: subq $680, %rsp # imm = 0x2A8
3475 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %ymm6
3476 ; AVX2-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3477 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %ymm5
3478 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3479 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm0
3480 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3481 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %xmm7
3482 ; AVX2-FAST-NEXT: vpbroadcastq {{.*#+}} xmm2 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
3483 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm0, %xmm0
3484 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm1
3485 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3486 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %xmm8
3487 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm1, %xmm1
3488 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
3489 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
3490 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm4
3491 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %xmm9
3492 ; AVX2-FAST-NEXT: vpbroadcastq {{.*#+}} xmm3 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3493 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm4, %xmm1
3494 ; AVX2-FAST-NEXT: vmovdqa %xmm4, %xmm12
3495 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3496 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm13
3497 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm11
3498 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm13, %xmm4
3499 ; AVX2-FAST-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3500 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm4[8],xmm1[8],xmm4[9],xmm1[9],xmm4[10],xmm1[10],xmm4[11],xmm1[11],xmm4[12],xmm1[12],xmm4[13],xmm1[13],xmm4[14],xmm1[14],xmm4[15],xmm1[15]
3501 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm1[0,0,0,1]
3502 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = <255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255>
3503 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm0, %ymm4, %ymm0
3504 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm7, %xmm4
3505 ; AVX2-FAST-NEXT: vmovdqa %xmm7, %xmm14
3506 ; AVX2-FAST-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3507 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm8, %xmm2
3508 ; AVX2-FAST-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3509 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3],xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
3510 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
3511 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm9, %xmm4
3512 ; AVX2-FAST-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3513 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm11, %xmm3
3514 ; AVX2-FAST-NEXT: vmovdqa %xmm11, (%rsp) # 16-byte Spill
3515 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm4[8],xmm3[9],xmm4[9],xmm3[10],xmm4[10],xmm3[11],xmm4[11],xmm3[12],xmm4[12],xmm3[13],xmm4[13],xmm3[14],xmm4[14],xmm3[15],xmm4[15]
3516 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
3517 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm2, %ymm3, %ymm2
3518 ; AVX2-FAST-NEXT: vpbroadcastq {{.*#+}} ymm4 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
3519 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm5, %ymm3
3520 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm6, %ymm5
3521 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm5[0],ymm3[0],ymm5[1],ymm3[1],ymm5[2],ymm3[2],ymm5[3],ymm3[3],ymm5[4],ymm3[4],ymm5[5],ymm3[5],ymm5[6],ymm3[6],ymm5[7],ymm3[7],ymm5[16],ymm3[16],ymm5[17],ymm3[17],ymm5[18],ymm3[18],ymm5[19],ymm3[19],ymm5[20],ymm3[20],ymm5[21],ymm3[21],ymm5[22],ymm3[22],ymm5[23],ymm3[23]
3522 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %ymm7
3523 ; AVX2-FAST-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3524 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %ymm6
3525 ; AVX2-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3526 ; AVX2-FAST-NEXT: vpbroadcastq {{.*#+}} ymm5 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3527 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm6, %ymm6
3528 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm7, %ymm7
3529 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[4],ymm6[4],ymm7[5],ymm6[5],ymm7[6],ymm6[6],ymm7[7],ymm6[7],ymm7[16],ymm6[16],ymm7[17],ymm6[17],ymm7[18],ymm6[18],ymm7[19],ymm6[19],ymm7[20],ymm6[20],ymm7[21],ymm6[21],ymm7[22],ymm6[22],ymm7[23],ymm6[23]
3530 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3531 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
3532 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm3, %ymm6, %ymm3
3533 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %ymm7
3534 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %ymm6
3535 ; AVX2-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3536 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm6, %ymm6
3537 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm7, %ymm4
3538 ; AVX2-FAST-NEXT: vmovdqa %ymm7, %ymm15
3539 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm6[0],ymm4[1],ymm6[1],ymm4[2],ymm6[2],ymm4[3],ymm6[3],ymm4[4],ymm6[4],ymm4[5],ymm6[5],ymm4[6],ymm6[6],ymm4[7],ymm6[7],ymm4[16],ymm6[16],ymm4[17],ymm6[17],ymm4[18],ymm6[18],ymm4[19],ymm6[19],ymm4[20],ymm6[20],ymm4[21],ymm6[21],ymm4[22],ymm6[22],ymm4[23],ymm6[23]
3540 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %ymm7
3541 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %ymm6
3542 ; AVX2-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3543 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm6, %ymm6
3544 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm7, %ymm5
3545 ; AVX2-FAST-NEXT: vmovdqa %ymm7, %ymm10
3546 ; AVX2-FAST-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3547 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[4],ymm6[4],ymm5[5],ymm6[5],ymm5[6],ymm6[6],ymm5[7],ymm6[7],ymm5[16],ymm6[16],ymm5[17],ymm6[17],ymm5[18],ymm6[18],ymm5[19],ymm6[19],ymm5[20],ymm6[20],ymm5[21],ymm6[21],ymm5[22],ymm6[22],ymm5[23],ymm6[23]
3548 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3549 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
3550 ; AVX2-FAST-NEXT: vpblendvb %ymm1, %ymm4, %ymm5, %ymm1
3551 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm5
3552 ; AVX2-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3553 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm4 = <6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u>
3554 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm5, %xmm5
3555 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
3556 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255]
3557 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm0, %ymm5, %ymm7
3558 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %xmm0
3559 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3560 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm0, %xmm4
3561 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
3562 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm2, %ymm4, %ymm2
3563 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %ymm0
3564 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3565 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
3566 ; AVX2-FAST-NEXT: # ymm4 = mem[0,1,0,1]
3567 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm0, %ymm5
3568 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
3569 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm3, %ymm5, %ymm3
3570 ; AVX2-FAST-NEXT: vmovdqa (%r8), %ymm0
3571 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3572 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm0, %ymm4
3573 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3574 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm1, %ymm4, %ymm1
3575 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm0
3576 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3577 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm4 = <u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9>
3578 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm0, %xmm5
3579 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
3580 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
3581 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm7, %ymm5, %ymm5
3582 ; AVX2-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3583 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %xmm5
3584 ; AVX2-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3585 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm5, %xmm4
3586 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
3587 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm2, %ymm4, %ymm2
3588 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3589 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %ymm4
3590 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3591 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
3592 ; AVX2-FAST-NEXT: # ymm2 = mem[0,1,0,1]
3593 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm4, %ymm4
3594 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3595 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm3, %ymm4, %ymm3
3596 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3597 ; AVX2-FAST-NEXT: vmovdqa (%r9), %ymm3
3598 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3599 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm3, %ymm2
3600 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
3601 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm0
3602 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3603 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
3604 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm2 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
3605 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm0, %xmm0
3606 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
3607 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm8[8],xmm14[8],xmm8[9],xmm14[9],xmm8[10],xmm14[10],xmm8[11],xmm14[11],xmm8[12],xmm14[12],xmm8[13],xmm14[13],xmm8[14],xmm14[14],xmm8[15],xmm14[15]
3608 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
3609 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm1, %xmm1
3610 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
3611 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u>
3612 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm0, %ymm1, %ymm6
3613 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
3614 ; AVX2-FAST-NEXT: vpshufb %xmm2, %xmm0, %xmm0
3615 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3616 ; AVX2-FAST-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
3617 ; AVX2-FAST-NEXT: # xmm1 = xmm1[8],mem[8],xmm1[9],mem[9],xmm1[10],mem[10],xmm1[11],mem[11],xmm1[12],mem[12],xmm1[13],mem[13],xmm1[14],mem[14],xmm1[15],mem[15]
3618 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm1, %xmm1
3619 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
3620 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
3621 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm0, %ymm1, %ymm4
3622 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3623 ; AVX2-FAST-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
3624 ; AVX2-FAST-NEXT: # ymm0 = ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15],ymm0[24],mem[24],ymm0[25],mem[25],ymm0[26],mem[26],ymm0[27],mem[27],ymm0[28],mem[28],ymm0[29],mem[29],ymm0[30],mem[30],ymm0[31],mem[31]
3625 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
3626 ; AVX2-FAST-NEXT: # ymm2 = mem[0,1,0,1]
3627 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm0, %ymm0
3628 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm0[2,2,2,3]
3629 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3630 ; AVX2-FAST-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
3631 ; AVX2-FAST-NEXT: # ymm0 = ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15],ymm0[24],mem[24],ymm0[25],mem[25],ymm0[26],mem[26],ymm0[27],mem[27],ymm0[28],mem[28],ymm0[29],mem[29],ymm0[30],mem[30],ymm0[31],mem[31]
3632 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
3633 ; AVX2-FAST-NEXT: # ymm1 = mem[0,1,0,1]
3634 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm0, %ymm0
3635 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
3636 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm3, %ymm0, %ymm3
3637 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
3638 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} ymm0 = ymm10[8],ymm9[8],ymm10[9],ymm9[9],ymm10[10],ymm9[10],ymm10[11],ymm9[11],ymm10[12],ymm9[12],ymm10[13],ymm9[13],ymm10[14],ymm9[14],ymm10[15],ymm9[15],ymm10[24],ymm9[24],ymm10[25],ymm9[25],ymm10[26],ymm9[26],ymm10[27],ymm9[27],ymm10[28],ymm9[28],ymm10[29],ymm9[29],ymm10[30],ymm9[30],ymm10[31],ymm9[31]
3639 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm0, %ymm0
3640 ; AVX2-FAST-NEXT: vmovdqa %ymm15, %ymm12
3641 ; AVX2-FAST-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm2 # 32-byte Folded Reload
3642 ; AVX2-FAST-NEXT: # ymm2 = ymm15[8],mem[8],ymm15[9],mem[9],ymm15[10],mem[10],ymm15[11],mem[11],ymm15[12],mem[12],ymm15[13],mem[13],ymm15[14],mem[14],ymm15[15],mem[15],ymm15[24],mem[24],ymm15[25],mem[25],ymm15[26],mem[26],ymm15[27],mem[27],ymm15[28],mem[28],ymm15[29],mem[29],ymm15[30],mem[30],ymm15[31],mem[31]
3643 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm2, %ymm1
3644 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
3645 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
3646 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm0, %ymm1, %ymm0
3647 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u>
3648 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3649 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm15, %xmm2
3650 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
3651 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0]
3652 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm6, %ymm2, %ymm2
3653 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
3654 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm7, %xmm1
3655 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
3656 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm4, %ymm1, %ymm1
3657 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
3658 ; AVX2-FAST-NEXT: # ymm4 = mem[0,1,0,1]
3659 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
3660 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm10, %ymm6
3661 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
3662 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm3, %ymm6, %ymm6
3663 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
3664 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm11, %ymm3
3665 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3666 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm0, %ymm3, %ymm0
3667 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm4 = <u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15>
3668 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3669 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm3, %xmm3
3670 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
3671 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
3672 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm2, %ymm3, %ymm2
3673 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3674 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3675 ; AVX2-FAST-NEXT: vpshufb %xmm4, %xmm8, %xmm2
3676 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
3677 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm1, %ymm2, %ymm1
3678 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3679 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
3680 ; AVX2-FAST-NEXT: # ymm4 = mem[0,1,0,1]
3681 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
3682 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm13, %ymm1
3683 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
3684 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm6, %ymm1, %ymm1
3685 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3686 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
3687 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm14, %ymm4
3688 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3689 ; AVX2-FAST-NEXT: vpblendvb %ymm5, %ymm0, %ymm4, %ymm0
3690 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3691 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3692 ; AVX2-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
3693 ; AVX2-FAST-NEXT: # xmm4 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
3694 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3695 ; AVX2-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
3696 ; AVX2-FAST-NEXT: # xmm5 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
3697 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
3698 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm4, %xmm4
3699 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
3700 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = [2,3,0,1,6,7,4,5,8,9,8,9,8,9,8,9]
3701 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm5, %xmm5
3702 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
3703 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255>
3704 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm4, %ymm5, %ymm4
3705 ; AVX2-FAST-NEXT: vmovdqa (%rsp), %xmm2 # 16-byte Reload
3706 ; AVX2-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm5 # 16-byte Folded Reload
3707 ; AVX2-FAST-NEXT: # xmm5 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
3708 ; AVX2-FAST-NEXT: vpshufb %xmm6, %xmm5, %xmm5
3709 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3710 ; AVX2-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm6 # 16-byte Folded Reload
3711 ; AVX2-FAST-NEXT: # xmm6 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
3712 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm6, %xmm1
3713 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
3714 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
3715 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm5, %ymm1, %ymm3
3716 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3717 ; AVX2-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm5 # 32-byte Folded Reload
3718 ; AVX2-FAST-NEXT: # ymm5 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[4],mem[4],ymm1[5],mem[5],ymm1[6],mem[6],ymm1[7],mem[7],ymm1[16],mem[16],ymm1[17],mem[17],ymm1[18],mem[18],ymm1[19],mem[19],ymm1[20],mem[20],ymm1[21],mem[21],ymm1[22],mem[22],ymm1[23],mem[23]
3719 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3720 ; AVX2-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm6 # 32-byte Folded Reload
3721 ; AVX2-FAST-NEXT: # ymm6 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[4],mem[4],ymm1[5],mem[5],ymm1[6],mem[6],ymm1[7],mem[7],ymm1[16],mem[16],ymm1[17],mem[17],ymm1[18],mem[18],ymm1[19],mem[19],ymm1[20],mem[20],ymm1[21],mem[21],ymm1[22],mem[22],ymm1[23],mem[23]
3722 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
3723 ; AVX2-FAST-NEXT: # ymm1 = mem[0,1,0,1]
3724 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm5, %ymm5
3725 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
3726 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25,18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25]
3727 ; AVX2-FAST-NEXT: # ymm2 = mem[0,1,0,1]
3728 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm6, %ymm6
3729 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
3730 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm5, %ymm6, %ymm5
3731 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
3732 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[4],ymm9[4],ymm6[5],ymm9[5],ymm6[6],ymm9[6],ymm6[7],ymm9[7],ymm6[16],ymm9[16],ymm6[17],ymm9[17],ymm6[18],ymm9[18],ymm6[19],ymm9[19],ymm6[20],ymm9[20],ymm6[21],ymm9[21],ymm6[22],ymm9[22],ymm6[23],ymm9[23]
3733 ; AVX2-FAST-NEXT: vpshufb %ymm1, %ymm6, %ymm1
3734 ; AVX2-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm6 # 32-byte Folded Reload
3735 ; AVX2-FAST-NEXT: # ymm6 = ymm12[0],mem[0],ymm12[1],mem[1],ymm12[2],mem[2],ymm12[3],mem[3],ymm12[4],mem[4],ymm12[5],mem[5],ymm12[6],mem[6],ymm12[7],mem[7],ymm12[16],mem[16],ymm12[17],mem[17],ymm12[18],mem[18],ymm12[19],mem[19],ymm12[20],mem[20],ymm12[21],mem[21],ymm12[22],mem[22],ymm12[23],mem[23]
3736 ; AVX2-FAST-NEXT: vpshufb %ymm2, %ymm6, %ymm2
3737 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
3738 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
3739 ; AVX2-FAST-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm0
3740 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u>
3741 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm7, %xmm2
3742 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
3743 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255]
3744 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm4, %ymm2, %ymm2
3745 ; AVX2-FAST-NEXT: vpshufb %xmm1, %xmm15, %xmm1
3746 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
3747 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm3, %ymm1, %ymm1
3748 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
3749 ; AVX2-FAST-NEXT: # ymm3 = mem[0,1,0,1]
3750 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm10, %ymm4
3751 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3752 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm5, %ymm4, %ymm4
3753 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm11, %ymm3
3754 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3755 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm0
3756 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = <u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4>
3757 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm8, %xmm5
3758 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
3759 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
3760 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm2, %ymm5, %ymm2
3761 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3762 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm5, %xmm3
3763 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
3764 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm1, %ymm3, %ymm1
3765 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
3766 ; AVX2-FAST-NEXT: # ymm3 = mem[0,1,0,1]
3767 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm13, %ymm5
3768 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
3769 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
3770 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm14, %ymm3
3771 ; AVX2-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3772 ; AVX2-FAST-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm0
3773 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3774 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 96(%rax)
3775 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3776 ; AVX2-FAST-NEXT: vmovaps %ymm0, 160(%rax)
3777 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3778 ; AVX2-FAST-NEXT: vmovaps %ymm0, 128(%rax)
3779 ; AVX2-FAST-NEXT: vmovdqa %ymm4, 288(%rax)
3780 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3781 ; AVX2-FAST-NEXT: vmovaps %ymm0, 352(%rax)
3782 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3783 ; AVX2-FAST-NEXT: vmovaps %ymm0, 320(%rax)
3784 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3785 ; AVX2-FAST-NEXT: vmovaps %ymm0, 64(%rax)
3786 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3787 ; AVX2-FAST-NEXT: vmovaps %ymm0, 224(%rax)
3788 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 192(%rax)
3789 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3790 ; AVX2-FAST-NEXT: vmovaps %ymm0, 256(%rax)
3791 ; AVX2-FAST-NEXT: vmovdqa %ymm2, (%rax)
3792 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3793 ; AVX2-FAST-NEXT: vmovaps %ymm0, 32(%rax)
3794 ; AVX2-FAST-NEXT: addq $680, %rsp # imm = 0x2A8
3795 ; AVX2-FAST-NEXT: vzeroupper
3796 ; AVX2-FAST-NEXT: retq
3797 ;
3798 ; AVX2-FAST-PERLANE-LABEL: store_i8_stride6_vf64:
3799 ; AVX2-FAST-PERLANE: # %bb.0:
3800 ; AVX2-FAST-PERLANE-NEXT: subq $680, %rsp # imm = 0x2A8
3801 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %ymm6
3802 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3803 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %ymm5
3804 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3805 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm0
3806 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3807 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %xmm7
3808 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq {{.*#+}} xmm2 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
3809 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
3810 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm1
3811 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3812 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %xmm8
3813 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm1, %xmm1
3814 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
3815 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
3816 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm4
3817 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %xmm9
3818 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq {{.*#+}} xmm3 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3819 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm1
3820 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, %xmm12
3821 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3822 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm13
3823 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm11
3824 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm13, %xmm4
3825 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3826 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm4[8],xmm1[8],xmm4[9],xmm1[9],xmm4[10],xmm1[10],xmm4[11],xmm1[11],xmm4[12],xmm1[12],xmm4[13],xmm1[13],xmm4[14],xmm1[14],xmm4[15],xmm1[15]
3827 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm1[0,0,0,1]
3828 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm1 = <255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255,u,u,0,0,255,255>
3829 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm0, %ymm4, %ymm0
3830 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm7, %xmm4
3831 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm7, %xmm14
3832 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3833 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm8, %xmm2
3834 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3835 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3],xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
3836 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
3837 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm9, %xmm4
3838 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3839 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm11, %xmm3
3840 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm11, (%rsp) # 16-byte Spill
3841 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm4[8],xmm3[9],xmm4[9],xmm3[10],xmm4[10],xmm3[11],xmm4[11],xmm3[12],xmm4[12],xmm3[13],xmm4[13],xmm3[14],xmm4[14],xmm3[15],xmm4[15]
3842 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
3843 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm2, %ymm3, %ymm2
3844 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq {{.*#+}} ymm4 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
3845 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm5, %ymm3
3846 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm6, %ymm5
3847 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm5[0],ymm3[0],ymm5[1],ymm3[1],ymm5[2],ymm3[2],ymm5[3],ymm3[3],ymm5[4],ymm3[4],ymm5[5],ymm3[5],ymm5[6],ymm3[6],ymm5[7],ymm3[7],ymm5[16],ymm3[16],ymm5[17],ymm3[17],ymm5[18],ymm3[18],ymm5[19],ymm3[19],ymm5[20],ymm3[20],ymm5[21],ymm3[21],ymm5[22],ymm3[22],ymm5[23],ymm3[23]
3848 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %ymm7
3849 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3850 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %ymm6
3851 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3852 ; AVX2-FAST-PERLANE-NEXT: vpbroadcastq {{.*#+}} ymm5 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3853 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm5, %ymm6, %ymm6
3854 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm5, %ymm7, %ymm7
3855 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[4],ymm6[4],ymm7[5],ymm6[5],ymm7[6],ymm6[6],ymm7[7],ymm6[7],ymm7[16],ymm6[16],ymm7[17],ymm6[17],ymm7[18],ymm6[18],ymm7[19],ymm6[19],ymm7[20],ymm6[20],ymm7[21],ymm6[21],ymm7[22],ymm6[22],ymm7[23],ymm6[23]
3856 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3857 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
3858 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm3, %ymm6, %ymm3
3859 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %ymm7
3860 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %ymm6
3861 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3862 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm6, %ymm6
3863 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm7, %ymm4
3864 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm7, %ymm15
3865 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm6[0],ymm4[1],ymm6[1],ymm4[2],ymm6[2],ymm4[3],ymm6[3],ymm4[4],ymm6[4],ymm4[5],ymm6[5],ymm4[6],ymm6[6],ymm4[7],ymm6[7],ymm4[16],ymm6[16],ymm4[17],ymm6[17],ymm4[18],ymm6[18],ymm4[19],ymm6[19],ymm4[20],ymm6[20],ymm4[21],ymm6[21],ymm4[22],ymm6[22],ymm4[23],ymm6[23]
3866 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %ymm7
3867 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %ymm6
3868 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3869 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm5, %ymm6, %ymm6
3870 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm5, %ymm7, %ymm5
3871 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm7, %ymm10
3872 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3873 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[4],ymm6[4],ymm5[5],ymm6[5],ymm5[6],ymm6[6],ymm5[7],ymm6[7],ymm5[16],ymm6[16],ymm5[17],ymm6[17],ymm5[18],ymm6[18],ymm5[19],ymm6[19],ymm5[20],ymm6[20],ymm5[21],ymm6[21],ymm5[22],ymm6[22],ymm5[23],ymm6[23]
3874 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3875 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
3876 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm1, %ymm4, %ymm5, %ymm1
3877 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm5
3878 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3879 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm4 = <6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u>
3880 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm5, %xmm5
3881 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
3882 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255]
3883 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm0, %ymm5, %ymm7
3884 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %xmm0
3885 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3886 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm0, %xmm4
3887 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
3888 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm2, %ymm4, %ymm2
3889 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %ymm0
3890 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3891 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
3892 ; AVX2-FAST-PERLANE-NEXT: # ymm4 = mem[0,1,0,1]
3893 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm0, %ymm5
3894 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
3895 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm3, %ymm5, %ymm3
3896 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %ymm0
3897 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3898 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm0, %ymm4
3899 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3900 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm1, %ymm4, %ymm1
3901 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm0
3902 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3903 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm4 = <u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9>
3904 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm0, %xmm5
3905 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
3906 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
3907 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm7, %ymm5, %ymm5
3908 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3909 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %xmm5
3910 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3911 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm5, %xmm4
3912 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
3913 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm2, %ymm4, %ymm2
3914 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3915 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %ymm4
3916 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3917 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
3918 ; AVX2-FAST-PERLANE-NEXT: # ymm2 = mem[0,1,0,1]
3919 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm4, %ymm4
3920 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3921 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm3, %ymm4, %ymm3
3922 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3923 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %ymm3
3924 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3925 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm3, %ymm2
3926 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
3927 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm0
3928 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3929 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
3930 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm2 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
3931 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
3932 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
3933 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm8[8],xmm14[8],xmm8[9],xmm14[9],xmm8[10],xmm14[10],xmm8[11],xmm14[11],xmm8[12],xmm14[12],xmm8[13],xmm14[13],xmm8[14],xmm14[14],xmm8[15],xmm14[15]
3934 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
3935 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm1, %xmm1
3936 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
3937 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u>
3938 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm0, %ymm1, %ymm6
3939 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
3940 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm2, %xmm0, %xmm0
3941 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
3942 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
3943 ; AVX2-FAST-PERLANE-NEXT: # xmm1 = xmm1[8],mem[8],xmm1[9],mem[9],xmm1[10],mem[10],xmm1[11],mem[11],xmm1[12],mem[12],xmm1[13],mem[13],xmm1[14],mem[14],xmm1[15],mem[15]
3944 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm1, %xmm1
3945 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
3946 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
3947 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm0, %ymm1, %ymm4
3948 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3949 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
3950 ; AVX2-FAST-PERLANE-NEXT: # ymm0 = ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15],ymm0[24],mem[24],ymm0[25],mem[25],ymm0[26],mem[26],ymm0[27],mem[27],ymm0[28],mem[28],ymm0[29],mem[29],ymm0[30],mem[30],ymm0[31],mem[31]
3951 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
3952 ; AVX2-FAST-PERLANE-NEXT: # ymm2 = mem[0,1,0,1]
3953 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm0, %ymm0
3954 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm0[2,2,2,3]
3955 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3956 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
3957 ; AVX2-FAST-PERLANE-NEXT: # ymm0 = ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15],ymm0[24],mem[24],ymm0[25],mem[25],ymm0[26],mem[26],ymm0[27],mem[27],ymm0[28],mem[28],ymm0[29],mem[29],ymm0[30],mem[30],ymm0[31],mem[31]
3958 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
3959 ; AVX2-FAST-PERLANE-NEXT: # ymm1 = mem[0,1,0,1]
3960 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm0, %ymm0
3961 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
3962 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm3, %ymm0, %ymm3
3963 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
3964 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} ymm0 = ymm10[8],ymm9[8],ymm10[9],ymm9[9],ymm10[10],ymm9[10],ymm10[11],ymm9[11],ymm10[12],ymm9[12],ymm10[13],ymm9[13],ymm10[14],ymm9[14],ymm10[15],ymm9[15],ymm10[24],ymm9[24],ymm10[25],ymm9[25],ymm10[26],ymm9[26],ymm10[27],ymm9[27],ymm10[28],ymm9[28],ymm10[29],ymm9[29],ymm10[30],ymm9[30],ymm10[31],ymm9[31]
3965 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm0, %ymm0
3966 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm15, %ymm12
3967 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm2 # 32-byte Folded Reload
3968 ; AVX2-FAST-PERLANE-NEXT: # ymm2 = ymm15[8],mem[8],ymm15[9],mem[9],ymm15[10],mem[10],ymm15[11],mem[11],ymm15[12],mem[12],ymm15[13],mem[13],ymm15[14],mem[14],ymm15[15],mem[15],ymm15[24],mem[24],ymm15[25],mem[25],ymm15[26],mem[26],ymm15[27],mem[27],ymm15[28],mem[28],ymm15[29],mem[29],ymm15[30],mem[30],ymm15[31],mem[31]
3969 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm2, %ymm1
3970 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
3971 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
3972 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm0, %ymm1, %ymm0
3973 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm1 = <10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u>
3974 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3975 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm15, %xmm2
3976 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
3977 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = [0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0]
3978 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm6, %ymm2, %ymm2
3979 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
3980 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm7, %xmm1
3981 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
3982 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm4, %ymm1, %ymm1
3983 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
3984 ; AVX2-FAST-PERLANE-NEXT: # ymm4 = mem[0,1,0,1]
3985 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
3986 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm10, %ymm6
3987 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
3988 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm3, %ymm6, %ymm6
3989 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
3990 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm11, %ymm3
3991 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
3992 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm0, %ymm3, %ymm0
3993 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm4 = <u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15>
3994 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3995 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm3, %xmm3
3996 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
3997 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
3998 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm2, %ymm3, %ymm2
3999 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4000 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
4001 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm4, %xmm8, %xmm2
4002 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
4003 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm1, %ymm2, %ymm1
4004 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4005 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
4006 ; AVX2-FAST-PERLANE-NEXT: # ymm4 = mem[0,1,0,1]
4007 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
4008 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm13, %ymm1
4009 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
4010 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm6, %ymm1, %ymm1
4011 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4012 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
4013 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm14, %ymm4
4014 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
4015 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm5, %ymm0, %ymm4, %ymm0
4016 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4017 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4018 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
4019 ; AVX2-FAST-PERLANE-NEXT: # xmm4 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
4020 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4021 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
4022 ; AVX2-FAST-PERLANE-NEXT: # xmm5 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
4023 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
4024 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm4, %xmm4
4025 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
4026 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm1 = [2,3,0,1,6,7,4,5,8,9,8,9,8,9,8,9]
4027 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm5, %xmm5
4028 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
4029 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm0 = <255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255,0,0,u,u,255,255>
4030 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm4, %ymm5, %ymm4
4031 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsp), %xmm2 # 16-byte Reload
4032 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm5 # 16-byte Folded Reload
4033 ; AVX2-FAST-PERLANE-NEXT: # xmm5 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
4034 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm6, %xmm5, %xmm5
4035 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4036 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm6 # 16-byte Folded Reload
4037 ; AVX2-FAST-PERLANE-NEXT: # xmm6 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
4038 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm6, %xmm1
4039 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
4040 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
4041 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm5, %ymm1, %ymm3
4042 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4043 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm5 # 32-byte Folded Reload
4044 ; AVX2-FAST-PERLANE-NEXT: # ymm5 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[4],mem[4],ymm1[5],mem[5],ymm1[6],mem[6],ymm1[7],mem[7],ymm1[16],mem[16],ymm1[17],mem[17],ymm1[18],mem[18],ymm1[19],mem[19],ymm1[20],mem[20],ymm1[21],mem[21],ymm1[22],mem[22],ymm1[23],mem[23]
4045 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
4046 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm6 # 32-byte Folded Reload
4047 ; AVX2-FAST-PERLANE-NEXT: # ymm6 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[4],mem[4],ymm1[5],mem[5],ymm1[6],mem[6],ymm1[7],mem[7],ymm1[16],mem[16],ymm1[17],mem[17],ymm1[18],mem[18],ymm1[19],mem[19],ymm1[20],mem[20],ymm1[21],mem[21],ymm1[22],mem[22],ymm1[23],mem[23]
4048 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
4049 ; AVX2-FAST-PERLANE-NEXT: # ymm1 = mem[0,1,0,1]
4050 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm5, %ymm5
4051 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
4052 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25,18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25]
4053 ; AVX2-FAST-PERLANE-NEXT: # ymm2 = mem[0,1,0,1]
4054 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm6, %ymm6
4055 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
4056 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm5, %ymm6, %ymm5
4057 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
4058 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[4],ymm9[4],ymm6[5],ymm9[5],ymm6[6],ymm9[6],ymm6[7],ymm9[7],ymm6[16],ymm9[16],ymm6[17],ymm9[17],ymm6[18],ymm9[18],ymm6[19],ymm9[19],ymm6[20],ymm9[20],ymm6[21],ymm9[21],ymm6[22],ymm9[22],ymm6[23],ymm9[23]
4059 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm1, %ymm6, %ymm1
4060 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm6 # 32-byte Folded Reload
4061 ; AVX2-FAST-PERLANE-NEXT: # ymm6 = ymm12[0],mem[0],ymm12[1],mem[1],ymm12[2],mem[2],ymm12[3],mem[3],ymm12[4],mem[4],ymm12[5],mem[5],ymm12[6],mem[6],ymm12[7],mem[7],ymm12[16],mem[16],ymm12[17],mem[17],ymm12[18],mem[18],ymm12[19],mem[19],ymm12[20],mem[20],ymm12[21],mem[21],ymm12[22],mem[22],ymm12[23],mem[23]
4062 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm2, %ymm6, %ymm2
4063 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
4064 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
4065 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm0
4066 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm1 = <2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u>
4067 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm7, %xmm2
4068 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
4069 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255,255,255,0,0,255,255]
4070 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm4, %ymm2, %ymm2
4071 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm1, %xmm15, %xmm1
4072 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
4073 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm3, %ymm1, %ymm1
4074 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
4075 ; AVX2-FAST-PERLANE-NEXT: # ymm3 = mem[0,1,0,1]
4076 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm3, %ymm10, %ymm4
4077 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
4078 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm5, %ymm4, %ymm4
4079 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm3, %ymm11, %ymm3
4080 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
4081 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm0
4082 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = <u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4>
4083 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm8, %xmm5
4084 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
4085 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
4086 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm2, %ymm5, %ymm2
4087 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
4088 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm5, %xmm3
4089 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
4090 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm1, %ymm3, %ymm1
4091 ; AVX2-FAST-PERLANE-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
4092 ; AVX2-FAST-PERLANE-NEXT: # ymm3 = mem[0,1,0,1]
4093 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm3, %ymm13, %ymm5
4094 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
4095 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
4096 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm3, %ymm14, %ymm3
4097 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
4098 ; AVX2-FAST-PERLANE-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm0
4099 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4100 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 96(%rax)
4101 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4102 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 160(%rax)
4103 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4104 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 128(%rax)
4105 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, 288(%rax)
4106 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4107 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 352(%rax)
4108 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4109 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 320(%rax)
4110 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4111 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 64(%rax)
4112 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4113 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 224(%rax)
4114 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 192(%rax)
4115 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4116 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 256(%rax)
4117 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, (%rax)
4118 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4119 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 32(%rax)
4120 ; AVX2-FAST-PERLANE-NEXT: addq $680, %rsp # imm = 0x2A8
4121 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
4122 ; AVX2-FAST-PERLANE-NEXT: retq
4123 ;
4124 ; AVX512F-SLOW-LABEL: store_i8_stride6_vf64:
4125 ; AVX512F-SLOW: # %bb.0:
4126 ; AVX512F-SLOW-NEXT: subq $264, %rsp # imm = 0x108
4127 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %xmm5
4128 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rsi), %xmm2
4129 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %xmm12
4130 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdi), %xmm3
4131 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %xmm13
4132 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r9), %xmm6
4133 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rcx), %ymm8
4134 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdx), %ymm9
4135 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rsi), %ymm10
4136 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdi), %ymm11
4137 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm4 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
4138 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm2, %xmm0
4139 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm3, %xmm1
4140 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
4141 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
4142 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm3, %xmm26
4143 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm2, %xmm27
4144 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,3,2,1,4,5,6,7]
4145 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,5]
4146 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
4147 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4148 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm0 = <u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9>
4149 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm6, %xmm2
4150 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm1 = <u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4>
4151 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm6, %xmm7
4152 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm6, %xmm28
4153 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm7, %zmm2
4154 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4155 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm5, %xmm2
4156 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm12, %xmm4
4157 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm2[8],xmm4[9],xmm2[9],xmm4[10],xmm2[10],xmm4[11],xmm2[11],xmm4[12],xmm2[12],xmm4[13],xmm2[13],xmm4[14],xmm2[14],xmm4[15],xmm2[15]
4158 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm12[0],xmm5[0],xmm12[1],xmm5[1],xmm12[2],xmm5[2],xmm12[3],xmm5[3],xmm12[4],xmm5[4],xmm12[5],xmm5[5],xmm12[6],xmm5[6],xmm12[7],xmm5[7]
4159 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm5, %xmm29
4160 ; AVX512F-SLOW-NEXT: vmovdqa %xmm12, %xmm5
4161 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,3,2,1,4,5,6,7]
4162 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6,5]
4163 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm4, %zmm2
4164 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4165 ; AVX512F-SLOW-NEXT: vmovdqa %xmm13, %xmm6
4166 ; AVX512F-SLOW-NEXT: vpshufb %xmm0, %xmm13, %xmm0
4167 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm13, %xmm1
4168 ; AVX512F-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm0
4169 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
4170 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm7 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
4171 ; AVX512F-SLOW-NEXT: vpshufb %ymm7, %ymm10, %ymm0
4172 ; AVX512F-SLOW-NEXT: vpshufb %ymm7, %ymm11, %ymm1
4173 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
4174 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm11[8],ymm10[8],ymm11[9],ymm10[9],ymm11[10],ymm10[10],ymm11[11],ymm10[11],ymm11[12],ymm10[12],ymm11[13],ymm10[13],ymm11[14],ymm10[14],ymm11[15],ymm10[15],ymm11[24],ymm10[24],ymm11[25],ymm10[25],ymm11[26],ymm10[26],ymm11[27],ymm10[27],ymm11[28],ymm10[28],ymm11[29],ymm10[29],ymm11[30],ymm10[30],ymm11[31],ymm10[31]
4175 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm13 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
4176 ; AVX512F-SLOW-NEXT: # ymm13 = mem[0,1,0,1]
4177 ; AVX512F-SLOW-NEXT: vpshufb %ymm13, %ymm1, %ymm1
4178 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
4179 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4180 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm12 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
4181 ; AVX512F-SLOW-NEXT: vpshufb %ymm12, %ymm8, %ymm0
4182 ; AVX512F-SLOW-NEXT: vpshufb %ymm12, %ymm9, %ymm1
4183 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
4184 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm9[8],ymm8[8],ymm9[9],ymm8[9],ymm9[10],ymm8[10],ymm9[11],ymm8[11],ymm9[12],ymm8[12],ymm9[13],ymm8[13],ymm9[14],ymm8[14],ymm9[15],ymm8[15],ymm9[24],ymm8[24],ymm9[25],ymm8[25],ymm9[26],ymm8[26],ymm9[27],ymm8[27],ymm9[28],ymm8[28],ymm9[29],ymm8[29],ymm9[30],ymm8[30],ymm9[31],ymm8[31]
4185 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm14 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
4186 ; AVX512F-SLOW-NEXT: # ymm14 = mem[0,1,0,1]
4187 ; AVX512F-SLOW-NEXT: vpshufb %ymm14, %ymm1, %ymm1
4188 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
4189 ; AVX512F-SLOW-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4190 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r9), %ymm4
4191 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
4192 ; AVX512F-SLOW-NEXT: # ymm3 = mem[0,1,0,1]
4193 ; AVX512F-SLOW-NEXT: vpshufb %ymm3, %ymm4, %ymm0
4194 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
4195 ; AVX512F-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
4196 ; AVX512F-SLOW-NEXT: vpshufb %ymm1, %ymm4, %ymm15
4197 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm15, %zmm25
4198 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %ymm15
4199 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %ymm2
4200 ; AVX512F-SLOW-NEXT: vpshufb %ymm7, %ymm15, %ymm0
4201 ; AVX512F-SLOW-NEXT: vpshufb %ymm7, %ymm2, %ymm7
4202 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm7[0],ymm0[0],ymm7[1],ymm0[1],ymm7[2],ymm0[2],ymm7[3],ymm0[3],ymm7[4],ymm0[4],ymm7[5],ymm0[5],ymm7[6],ymm0[6],ymm7[7],ymm0[7],ymm7[16],ymm0[16],ymm7[17],ymm0[17],ymm7[18],ymm0[18],ymm7[19],ymm0[19],ymm7[20],ymm0[20],ymm7[21],ymm0[21],ymm7[22],ymm0[22],ymm7[23],ymm0[23]
4203 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm7 = ymm2[8],ymm15[8],ymm2[9],ymm15[9],ymm2[10],ymm15[10],ymm2[11],ymm15[11],ymm2[12],ymm15[12],ymm2[13],ymm15[13],ymm2[14],ymm15[14],ymm2[15],ymm15[15],ymm2[24],ymm15[24],ymm2[25],ymm15[25],ymm2[26],ymm15[26],ymm2[27],ymm15[27],ymm2[28],ymm15[28],ymm2[29],ymm15[29],ymm2[30],ymm15[30],ymm2[31],ymm15[31]
4204 ; AVX512F-SLOW-NEXT: vpshufb %ymm13, %ymm7, %ymm7
4205 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm0, %zmm24
4206 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %ymm0
4207 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %ymm13
4208 ; AVX512F-SLOW-NEXT: vpshufb %ymm12, %ymm0, %ymm7
4209 ; AVX512F-SLOW-NEXT: vpshufb %ymm12, %ymm13, %ymm12
4210 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm7 = ymm12[0],ymm7[0],ymm12[1],ymm7[1],ymm12[2],ymm7[2],ymm12[3],ymm7[3],ymm12[4],ymm7[4],ymm12[5],ymm7[5],ymm12[6],ymm7[6],ymm12[7],ymm7[7],ymm12[16],ymm7[16],ymm12[17],ymm7[17],ymm12[18],ymm7[18],ymm12[19],ymm7[19],ymm12[20],ymm7[20],ymm12[21],ymm7[21],ymm12[22],ymm7[22],ymm12[23],ymm7[23]
4211 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm12 = ymm13[8],ymm0[8],ymm13[9],ymm0[9],ymm13[10],ymm0[10],ymm13[11],ymm0[11],ymm13[12],ymm0[12],ymm13[13],ymm0[13],ymm13[14],ymm0[14],ymm13[15],ymm0[15],ymm13[24],ymm0[24],ymm13[25],ymm0[25],ymm13[26],ymm0[26],ymm13[27],ymm0[27],ymm13[28],ymm0[28],ymm13[29],ymm0[29],ymm13[30],ymm0[30],ymm13[31],ymm0[31]
4212 ; AVX512F-SLOW-NEXT: vpshufb %ymm14, %ymm12, %ymm12
4213 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm7, %zmm23
4214 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %ymm7
4215 ; AVX512F-SLOW-NEXT: vpshufb %ymm3, %ymm7, %ymm3
4216 ; AVX512F-SLOW-NEXT: vpshufb %ymm1, %ymm7, %ymm1
4217 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm1, %zmm22
4218 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm13[0],ymm0[0],ymm13[1],ymm0[1],ymm13[2],ymm0[2],ymm13[3],ymm0[3],ymm13[4],ymm0[4],ymm13[5],ymm0[5],ymm13[6],ymm0[6],ymm13[7],ymm0[7],ymm13[16],ymm0[16],ymm13[17],ymm0[17],ymm13[18],ymm0[18],ymm13[19],ymm0[19],ymm13[20],ymm0[20],ymm13[21],ymm0[21],ymm13[22],ymm0[22],ymm13[23],ymm0[23]
4219 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm19
4220 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %xmm1
4221 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm29, %xmm0
4222 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm13 = xmm5[8],xmm0[8],xmm5[9],xmm0[9],xmm5[10],xmm0[10],xmm5[11],xmm0[11],xmm5[12],xmm0[12],xmm5[13],xmm0[13],xmm5[14],xmm0[14],xmm5[15],xmm0[15]
4223 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %xmm3
4224 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm12 = ymm2[0],ymm15[0],ymm2[1],ymm15[1],ymm2[2],ymm15[2],ymm2[3],ymm15[3],ymm2[4],ymm15[4],ymm2[5],ymm15[5],ymm2[6],ymm15[6],ymm2[7],ymm15[7],ymm2[16],ymm15[16],ymm2[17],ymm15[17],ymm2[18],ymm15[18],ymm2[19],ymm15[19],ymm2[20],ymm15[20],ymm2[21],ymm15[21],ymm2[22],ymm15[22],ymm2[23],ymm15[23]
4225 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
4226 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm15 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
4227 ; AVX512F-SLOW-NEXT: vpshufb %xmm15, %xmm2, %xmm14
4228 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rcx), %xmm2
4229 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdx), %xmm0
4230 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm0[8],xmm2[8],xmm0[9],xmm2[9],xmm0[10],xmm2[10],xmm0[11],xmm2[11],xmm0[12],xmm2[12],xmm0[13],xmm2[13],xmm0[14],xmm2[14],xmm0[15],xmm2[15]
4231 ; AVX512F-SLOW-NEXT: vpshufb %xmm15, %xmm5, %xmm5
4232 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm5, %ymm29
4233 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm9[0],ymm8[0],ymm9[1],ymm8[1],ymm9[2],ymm8[2],ymm9[3],ymm8[3],ymm9[4],ymm8[4],ymm9[5],ymm8[5],ymm9[6],ymm8[6],ymm9[7],ymm8[7],ymm9[16],ymm8[16],ymm9[17],ymm8[17],ymm9[18],ymm8[18],ymm9[19],ymm8[19],ymm9[20],ymm8[20],ymm9[21],ymm8[21],ymm9[22],ymm8[22],ymm9[23],ymm8[23]
4234 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm5, %ymm30
4235 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm26, %xmm5
4236 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm27, %xmm8
4237 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm5[8],xmm8[8],xmm5[9],xmm8[9],xmm5[10],xmm8[10],xmm5[11],xmm8[11],xmm5[12],xmm8[12],xmm5[13],xmm8[13],xmm5[14],xmm8[14],xmm5[15],xmm8[15]
4238 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm9 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
4239 ; AVX512F-SLOW-NEXT: vpshufb %xmm9, %xmm13, %xmm8
4240 ; AVX512F-SLOW-NEXT: vpshufb %xmm9, %xmm5, %xmm5
4241 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm5, %ymm31
4242 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm11[0],ymm10[0],ymm11[1],ymm10[1],ymm11[2],ymm10[2],ymm11[3],ymm10[3],ymm11[4],ymm10[4],ymm11[5],ymm10[5],ymm11[6],ymm10[6],ymm11[7],ymm10[7],ymm11[16],ymm10[16],ymm11[17],ymm10[17],ymm11[18],ymm10[18],ymm11[19],ymm10[19],ymm11[20],ymm10[20],ymm11[21],ymm10[21],ymm11[22],ymm10[22],ymm11[23],ymm10[23]
4243 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm5, %ymm20
4244 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm5 = <u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15>
4245 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm6, %xmm11
4246 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm28, %xmm6
4247 ; AVX512F-SLOW-NEXT: vpshufb %xmm5, %xmm6, %xmm5
4248 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm5, %ymm28
4249 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
4250 ; AVX512F-SLOW-NEXT: # ymm5 = mem[0,1,0,1]
4251 ; AVX512F-SLOW-NEXT: vpshufb %ymm5, %ymm7, %ymm9
4252 ; AVX512F-SLOW-NEXT: vpshufb %ymm5, %ymm4, %ymm4
4253 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm4, %ymm21
4254 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm4 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
4255 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm2, %xmm5
4256 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm0, %xmm7
4257 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3],xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
4258 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3],xmm0[4],xmm2[4],xmm0[5],xmm2[5],xmm0[6],xmm2[6],xmm0[7],xmm2[7]
4259 ; AVX512F-SLOW-NEXT: vprold $16, %xmm0, %xmm0
4260 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm0, %zmm26
4261 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm1, %xmm0
4262 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm3, %xmm2
4263 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3],xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
4264 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %xmm2
4265 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
4266 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r8), %xmm3
4267 ; AVX512F-SLOW-NEXT: vprold $16, %xmm1, %xmm1
4268 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u>
4269 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm27
4270 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm2, %xmm5
4271 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm3, %xmm15
4272 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm1 = <6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u>
4273 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm3, %xmm13
4274 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm13, %ymm18
4275 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} xmm4 = <2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u>
4276 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm3, %xmm13
4277 ; AVX512F-SLOW-NEXT: vpshufb %xmm1, %xmm2, %xmm0
4278 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm16
4279 ; AVX512F-SLOW-NEXT: vpshufb %xmm4, %xmm2, %xmm0
4280 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm0, %ymm17
4281 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %ymm3
4282 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
4283 ; AVX512F-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
4284 ; AVX512F-SLOW-NEXT: vpshufb %ymm1, %ymm3, %ymm4
4285 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r8), %ymm2
4286 ; AVX512F-SLOW-NEXT: vpshufb %ymm1, %ymm2, %ymm6
4287 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
4288 ; AVX512F-SLOW-NEXT: # ymm1 = mem[0,1,0,1]
4289 ; AVX512F-SLOW-NEXT: vpshufb %ymm1, %ymm2, %ymm10
4290 ; AVX512F-SLOW-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
4291 ; AVX512F-SLOW-NEXT: # ymm0 = mem[0,1,0,1]
4292 ; AVX512F-SLOW-NEXT: vpshufb %ymm0, %ymm2, %ymm7
4293 ; AVX512F-SLOW-NEXT: vpshufb %ymm1, %ymm3, %ymm2
4294 ; AVX512F-SLOW-NEXT: vpshufb %ymm0, %ymm3, %ymm3
4295 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm14[0,0,0,1]
4296 ; AVX512F-SLOW-NEXT: vprold $16, %ymm19, %ymm1
4297 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,0,1]
4298 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm12 = ymm12[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
4299 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
4300 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,2,2,3]
4301 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm4[2,2,2,3]
4302 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
4303 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
4304 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
4305 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
4306 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
4307 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm12, %zmm8, %zmm1
4308 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
4309 ; AVX512F-SLOW-NEXT: vpternlogq $226, %zmm0, %zmm12, %zmm1
4310 ; AVX512F-SLOW-NEXT: vextracti64x4 $1, %zmm1, %ymm0
4311 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm8 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
4312 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm0, %ymm8, %ymm14
4313 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm4 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
4314 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm1, %ymm4, %ymm5
4315 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm0, %zmm0
4316 ; AVX512F-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm5[0,1,2,3],zmm0[4,5,6,7]
4317 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm11, %zmm0
4318 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
4319 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm5, %zmm0
4320 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm29[0,0,0,1]
4321 ; AVX512F-SLOW-NEXT: vprold $16, %ymm30, %ymm9
4322 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
4323 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm9, %zmm1, %zmm1
4324 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm9 = ymm31[0,0,0,1]
4325 ; AVX512F-SLOW-NEXT: vmovdqa64 %ymm20, %ymm11
4326 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} ymm11 = ymm11[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
4327 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
4328 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
4329 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
4330 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm15[0,0,0,1]
4331 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm28[0,0,0,1]
4332 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm28 = ymm21[2,2,2,3]
4333 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm29 = ymm18[0,0,0,1]
4334 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm30 = ymm13[0,0,0,1]
4335 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm13 = ymm16[0,0,0,1]
4336 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm31 = ymm17[0,0,0,1]
4337 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,2,2,3]
4338 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
4339 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
4340 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
4341 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm11, %zmm9, %zmm9
4342 ; AVX512F-SLOW-NEXT: vpternlogq $226, %zmm1, %zmm12, %zmm9
4343 ; AVX512F-SLOW-NEXT: vextracti64x4 $1, %zmm9, %ymm1
4344 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm1, %ymm8, %ymm6
4345 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm9, %ymm4, %ymm14
4346 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm0, %zmm1
4347 ; AVX512F-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm14[0,1,2,3],zmm1[4,5,6,7]
4348 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm28, %zmm15, %zmm6
4349 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm5, %zmm6
4350 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm1 = zmm26[0,0,0,1,4,4,4,5]
4351 ; AVX512F-SLOW-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Folded Reload
4352 ; AVX512F-SLOW-NEXT: # zmm5 = mem[0,0,0,1,4,4,4,5]
4353 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
4354 ; AVX512F-SLOW-NEXT: vpternlogq $226, %zmm1, %zmm9, %zmm5
4355 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm1 = zmm27[0,0,0,1,4,4,4,5]
4356 ; AVX512F-SLOW-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Folded Reload
4357 ; AVX512F-SLOW-NEXT: # zmm11 = mem[0,0,0,1,4,4,4,5]
4358 ; AVX512F-SLOW-NEXT: vpternlogq $226, %zmm1, %zmm9, %zmm11
4359 ; AVX512F-SLOW-NEXT: vextracti64x4 $1, %zmm5, %ymm1
4360 ; AVX512F-SLOW-NEXT: vmovdqa {{.*#+}} ymm9 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
4361 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm1, %ymm9, %ymm29
4362 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm5, %ymm8, %ymm30
4363 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm29, %zmm0, %zmm1
4364 ; AVX512F-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm30[0,1,2,3],zmm1[4,5,6,7]
4365 ; AVX512F-SLOW-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Folded Reload
4366 ; AVX512F-SLOW-NEXT: # zmm5 = mem[0,0,0,1,4,4,4,5]
4367 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm14 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
4368 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm14, %zmm5
4369 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm11, %ymm8, %ymm31
4370 ; AVX512F-SLOW-NEXT: vextracti64x4 $1, %zmm11, %ymm1
4371 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm1, %ymm9, %ymm13
4372 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm0, %zmm1
4373 ; AVX512F-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm31[0,1,2,3],zmm1[4,5,6,7]
4374 ; AVX512F-SLOW-NEXT: vpermq $64, (%rsp), %zmm8 # 64-byte Folded Reload
4375 ; AVX512F-SLOW-NEXT: # zmm8 = mem[0,0,0,1,4,4,4,5]
4376 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm14, %zmm8
4377 ; AVX512F-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Folded Reload
4378 ; AVX512F-SLOW-NEXT: # zmm1 = mem[2,2,2,3,6,6,6,7]
4379 ; AVX512F-SLOW-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Folded Reload
4380 ; AVX512F-SLOW-NEXT: # zmm11 = mem[2,2,2,3,6,6,6,7]
4381 ; AVX512F-SLOW-NEXT: vpternlogq $226, %zmm1, %zmm12, %zmm11
4382 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm1 = zmm24[2,2,2,3,6,6,6,7]
4383 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm13 = zmm23[2,2,2,3,6,6,6,7]
4384 ; AVX512F-SLOW-NEXT: vpternlogq $226, %zmm1, %zmm12, %zmm13
4385 ; AVX512F-SLOW-NEXT: vextracti64x4 $1, %zmm11, %ymm1
4386 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm1, %ymm4, %ymm10
4387 ; AVX512F-SLOW-NEXT: vextracti64x4 $1, %zmm13, %ymm1
4388 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm1, %ymm4, %ymm2
4389 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm11, %ymm9, %ymm7
4390 ; AVX512F-SLOW-NEXT: vpternlogq $184, %ymm13, %ymm9, %ymm3
4391 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm0, %zmm1
4392 ; AVX512F-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm7[0,1,2,3],zmm1[4,5,6,7]
4393 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm4 = zmm25[2,2,2,3,6,6,6,7]
4394 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
4395 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm7, %zmm4
4396 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm1
4397 ; AVX512F-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm3[0,1,2,3],zmm1[4,5,6,7]
4398 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} zmm2 = zmm22[2,2,2,3,6,6,6,7]
4399 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm1, %zmm7, %zmm2
4400 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
4401 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm2, 128(%rax)
4402 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm4, 320(%rax)
4403 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm8, (%rax)
4404 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm5, 192(%rax)
4405 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm6, 256(%rax)
4406 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, 64(%rax)
4407 ; AVX512F-SLOW-NEXT: addq $264, %rsp # imm = 0x108
4408 ; AVX512F-SLOW-NEXT: vzeroupper
4409 ; AVX512F-SLOW-NEXT: retq
4410 ;
4411 ; AVX512F-FAST-LABEL: store_i8_stride6_vf64:
4412 ; AVX512F-FAST: # %bb.0:
4413 ; AVX512F-FAST-NEXT: subq $360, %rsp # imm = 0x168
4414 ; AVX512F-FAST-NEXT: vmovdqa 32(%rsi), %ymm4
4415 ; AVX512F-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4416 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} ymm0 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
4417 ; AVX512F-FAST-NEXT: vpshufb %ymm0, %ymm4, %ymm1
4418 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdi), %ymm3
4419 ; AVX512F-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4420 ; AVX512F-FAST-NEXT: vpshufb %ymm0, %ymm3, %ymm2
4421 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm1 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[4],ymm1[4],ymm2[5],ymm1[5],ymm2[6],ymm1[6],ymm2[7],ymm1[7],ymm2[16],ymm1[16],ymm2[17],ymm1[17],ymm2[18],ymm1[18],ymm2[19],ymm1[19],ymm2[20],ymm1[20],ymm2[21],ymm1[21],ymm2[22],ymm1[22],ymm2[23],ymm1[23]
4422 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} ymm3 = ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15],ymm3[24],ymm4[24],ymm3[25],ymm4[25],ymm3[26],ymm4[26],ymm3[27],ymm4[27],ymm3[28],ymm4[28],ymm3[29],ymm4[29],ymm3[30],ymm4[30],ymm3[31],ymm4[31]
4423 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
4424 ; AVX512F-FAST-NEXT: # ymm2 = mem[0,1,0,1]
4425 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm3, %ymm3
4426 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm1, %zmm1
4427 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4428 ; AVX512F-FAST-NEXT: vmovdqa 32(%rcx), %ymm6
4429 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} ymm1 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
4430 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm6, %ymm3
4431 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdx), %ymm7
4432 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm7, %ymm4
4433 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[16],ymm3[16],ymm4[17],ymm3[17],ymm4[18],ymm3[18],ymm4[19],ymm3[19],ymm4[20],ymm3[20],ymm4[21],ymm3[21],ymm4[22],ymm3[22],ymm4[23],ymm3[23]
4434 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} ymm5 = ymm7[8],ymm6[8],ymm7[9],ymm6[9],ymm7[10],ymm6[10],ymm7[11],ymm6[11],ymm7[12],ymm6[12],ymm7[13],ymm6[13],ymm7[14],ymm6[14],ymm7[15],ymm6[15],ymm7[24],ymm6[24],ymm7[25],ymm6[25],ymm7[26],ymm6[26],ymm7[27],ymm6[27],ymm7[28],ymm6[28],ymm7[29],ymm6[29],ymm7[30],ymm6[30],ymm7[31],ymm6[31]
4435 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm7, %ymm20
4436 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm6, %ymm25
4437 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
4438 ; AVX512F-FAST-NEXT: # ymm3 = mem[0,1,0,1]
4439 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm5, %ymm5
4440 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
4441 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4442 ; AVX512F-FAST-NEXT: vmovdqa 32(%r9), %ymm5
4443 ; AVX512F-FAST-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4444 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
4445 ; AVX512F-FAST-NEXT: # ymm7 = mem[0,1,0,1]
4446 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm5, %ymm4
4447 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
4448 ; AVX512F-FAST-NEXT: # ymm9 = mem[0,1,0,1]
4449 ; AVX512F-FAST-NEXT: vpshufb %ymm9, %ymm5, %ymm5
4450 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm5, %zmm4
4451 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4452 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %ymm14
4453 ; AVX512F-FAST-NEXT: vpshufb %ymm0, %ymm14, %ymm6
4454 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %ymm8
4455 ; AVX512F-FAST-NEXT: vpshufb %ymm0, %ymm8, %ymm0
4456 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[1],ymm6[1],ymm0[2],ymm6[2],ymm0[3],ymm6[3],ymm0[4],ymm6[4],ymm0[5],ymm6[5],ymm0[6],ymm6[6],ymm0[7],ymm6[7],ymm0[16],ymm6[16],ymm0[17],ymm6[17],ymm0[18],ymm6[18],ymm0[19],ymm6[19],ymm0[20],ymm6[20],ymm0[21],ymm6[21],ymm0[22],ymm6[22],ymm0[23],ymm6[23]
4457 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} ymm6 = ymm8[8],ymm14[8],ymm8[9],ymm14[9],ymm8[10],ymm14[10],ymm8[11],ymm14[11],ymm8[12],ymm14[12],ymm8[13],ymm14[13],ymm8[14],ymm14[14],ymm8[15],ymm14[15],ymm8[24],ymm14[24],ymm8[25],ymm14[25],ymm8[26],ymm14[26],ymm8[27],ymm14[27],ymm8[28],ymm14[28],ymm8[29],ymm14[29],ymm8[30],ymm14[30],ymm8[31],ymm14[31]
4458 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm6, %ymm2
4459 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
4460 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4461 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %ymm6
4462 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm6, %ymm0
4463 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %ymm11
4464 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm11, %ymm1
4465 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
4466 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm11[8],ymm6[8],ymm11[9],ymm6[9],ymm11[10],ymm6[10],ymm11[11],ymm6[11],ymm11[12],ymm6[12],ymm11[13],ymm6[13],ymm11[14],ymm6[14],ymm11[15],ymm6[15],ymm11[24],ymm6[24],ymm11[25],ymm6[25],ymm11[26],ymm6[26],ymm11[27],ymm6[27],ymm11[28],ymm6[28],ymm11[29],ymm6[29],ymm11[30],ymm6[30],ymm11[31],ymm6[31]
4467 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm1, %ymm1
4468 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
4469 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4470 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %ymm2
4471 ; AVX512F-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm0
4472 ; AVX512F-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm1
4473 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm2, %ymm16
4474 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
4475 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
4476 ; AVX512F-FAST-NEXT: vmovdqa 32(%rsi), %xmm9
4477 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} xmm0 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
4478 ; AVX512F-FAST-NEXT: vpshufb %xmm0, %xmm9, %xmm1
4479 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdi), %xmm7
4480 ; AVX512F-FAST-NEXT: vpshufb %xmm0, %xmm7, %xmm2
4481 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
4482 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm7[0],xmm9[0],xmm7[1],xmm9[1],xmm7[2],xmm9[2],xmm7[3],xmm9[3],xmm7[4],xmm9[4],xmm7[5],xmm9[5],xmm7[6],xmm9[6],xmm7[7],xmm9[7]
4483 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} xmm13 = [0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
4484 ; AVX512F-FAST-NEXT: vpshufb %xmm13, %xmm2, %xmm2
4485 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm2, %zmm26
4486 ; AVX512F-FAST-NEXT: vmovdqa 32(%r9), %xmm4
4487 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} xmm12 = <u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9>
4488 ; AVX512F-FAST-NEXT: vpshufb %xmm12, %xmm4, %xmm2
4489 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} xmm1 = <u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4>
4490 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm4, %xmm3
4491 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm4, %xmm17
4492 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm3, %zmm27
4493 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %xmm5
4494 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm2
4495 ; AVX512F-FAST-NEXT: vpshufb %xmm0, %xmm5, %xmm10
4496 ; AVX512F-FAST-NEXT: vpshufb %xmm0, %xmm2, %xmm0
4497 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm10[8],xmm0[9],xmm10[9],xmm0[10],xmm10[10],xmm0[11],xmm10[11],xmm0[12],xmm10[12],xmm0[13],xmm10[13],xmm0[14],xmm10[14],xmm0[15],xmm10[15]
4498 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3],xmm2[4],xmm5[4],xmm2[5],xmm5[5],xmm2[6],xmm5[6],xmm2[7],xmm5[7]
4499 ; AVX512F-FAST-NEXT: vpshufb %xmm13, %xmm10, %xmm10
4500 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm10, %zmm28
4501 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %xmm13
4502 ; AVX512F-FAST-NEXT: vpshufb %xmm12, %xmm13, %xmm0
4503 ; AVX512F-FAST-NEXT: vpshufb %xmm1, %xmm13, %xmm1
4504 ; AVX512F-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm29
4505 ; AVX512F-FAST-NEXT: vmovdqa 32(%r8), %ymm12
4506 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
4507 ; AVX512F-FAST-NEXT: # ymm1 = mem[0,1,0,1]
4508 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm12, %ymm0
4509 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm1, %ymm19
4510 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm21 = ymm0[2,2,2,3]
4511 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
4512 ; AVX512F-FAST-NEXT: # ymm1 = mem[0,1,0,1]
4513 ; AVX512F-FAST-NEXT: vpshufb %ymm1, %ymm12, %ymm0
4514 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm1, %ymm18
4515 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm22 = ymm0[2,2,2,3]
4516 ; AVX512F-FAST-NEXT: vmovdqa 32(%rcx), %xmm4
4517 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} xmm10 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
4518 ; AVX512F-FAST-NEXT: vpshufb %xmm10, %xmm4, %xmm0
4519 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdx), %xmm3
4520 ; AVX512F-FAST-NEXT: vpshufb %xmm10, %xmm3, %xmm15
4521 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3],xmm15[4],xmm0[4],xmm15[5],xmm0[5],xmm15[6],xmm0[6],xmm15[7],xmm0[7]
4522 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
4523 ; AVX512F-FAST-NEXT: vprold $16, %xmm15, %xmm15
4524 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm15, %zmm23
4525 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %xmm0
4526 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm15
4527 ; AVX512F-FAST-NEXT: vpshufb %xmm10, %xmm0, %xmm1
4528 ; AVX512F-FAST-NEXT: vpshufb %xmm10, %xmm15, %xmm10
4529 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm10[0],xmm1[0],xmm10[1],xmm1[1],xmm10[2],xmm1[2],xmm10[3],xmm1[3],xmm10[4],xmm1[4],xmm10[5],xmm1[5],xmm10[6],xmm1[6],xmm10[7],xmm1[7]
4530 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3],xmm15[4],xmm0[4],xmm15[5],xmm0[5],xmm15[6],xmm0[6],xmm15[7],xmm0[7]
4531 ; AVX512F-FAST-NEXT: vprold $16, %xmm10, %xmm10
4532 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm10, %zmm24
4533 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm1 = ymm11[0],ymm6[0],ymm11[1],ymm6[1],ymm11[2],ymm6[2],ymm11[3],ymm6[3],ymm11[4],ymm6[4],ymm11[5],ymm6[5],ymm11[6],ymm6[6],ymm11[7],ymm6[7],ymm11[16],ymm6[16],ymm11[17],ymm6[17],ymm11[18],ymm6[18],ymm11[19],ymm6[19],ymm11[20],ymm6[20],ymm11[21],ymm6[21],ymm11[22],ymm6[22],ymm11[23],ymm6[23]
4534 ; AVX512F-FAST-NEXT: vprold $16, %ymm1, %ymm1
4535 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm15[8],xmm0[8],xmm15[9],xmm0[9],xmm15[10],xmm0[10],xmm15[11],xmm0[11],xmm15[12],xmm0[12],xmm15[13],xmm0[13],xmm15[14],xmm0[14],xmm15[15],xmm0[15]
4536 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm8[0],ymm14[0],ymm8[1],ymm14[1],ymm8[2],ymm14[2],ymm8[3],ymm14[3],ymm8[4],ymm14[4],ymm8[5],ymm14[5],ymm8[6],ymm14[6],ymm8[7],ymm14[7],ymm8[16],ymm14[16],ymm8[17],ymm14[17],ymm8[18],ymm14[18],ymm8[19],ymm14[19],ymm8[20],ymm14[20],ymm8[21],ymm14[21],ymm8[22],ymm14[22],ymm8[23],ymm14[23]
4537 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm2[8],xmm5[8],xmm2[9],xmm5[9],xmm2[10],xmm5[10],xmm2[11],xmm5[11],xmm2[12],xmm5[12],xmm2[13],xmm5[13],xmm2[14],xmm5[14],xmm2[15],xmm5[15]
4538 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
4539 ; AVX512F-FAST-NEXT: # ymm5 = mem[0,1,0,1]
4540 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm0, %ymm0
4541 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} xmm11 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
4542 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm2, %xmm6
4543 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm31 = [0,0,0,1,10,10,10,11]
4544 ; AVX512F-FAST-NEXT: vpermt2q %zmm0, %zmm31, %zmm6
4545 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} xmm0 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
4546 ; AVX512F-FAST-NEXT: vpshufb %xmm0, %xmm10, %xmm2
4547 ; AVX512F-FAST-NEXT: vpermt2q %zmm1, %zmm31, %zmm2
4548 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm30 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
4549 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm2, %zmm30, %zmm6
4550 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
4551 ; AVX512F-FAST-NEXT: # ymm10 = mem[0,1,0,1]
4552 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm16, %ymm1
4553 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm1, %ymm1
4554 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} xmm8 = <u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15>
4555 ; AVX512F-FAST-NEXT: vpshufb %xmm8, %xmm13, %xmm2
4556 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm8, %xmm16
4557 ; AVX512F-FAST-NEXT: vpermt2q %zmm1, %zmm31, %zmm2
4558 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm20, %ymm1
4559 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm25, %ymm8
4560 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} ymm1 = ymm1[0],ymm8[0],ymm1[1],ymm8[1],ymm1[2],ymm8[2],ymm1[3],ymm8[3],ymm1[4],ymm8[4],ymm1[5],ymm8[5],ymm1[6],ymm8[6],ymm1[7],ymm8[7],ymm1[16],ymm8[16],ymm1[17],ymm8[17],ymm1[18],ymm8[18],ymm1[19],ymm8[19],ymm1[20],ymm8[20],ymm1[21],ymm8[21],ymm1[22],ymm8[22],ymm1[23],ymm8[23]
4561 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm4[8],xmm3[9],xmm4[9],xmm3[10],xmm4[10],xmm3[11],xmm4[11],xmm3[12],xmm4[12],xmm3[13],xmm4[13],xmm3[14],xmm4[14],xmm3[15],xmm4[15]
4562 ; AVX512F-FAST-NEXT: vprold $16, %ymm1, %ymm1
4563 ; AVX512F-FAST-NEXT: vpshufb %xmm0, %xmm3, %xmm3
4564 ; AVX512F-FAST-NEXT: vpermt2q %zmm1, %zmm31, %zmm3
4565 ; AVX512F-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4566 ; AVX512F-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm4 # 32-byte Folded Reload
4567 ; AVX512F-FAST-NEXT: # ymm4 = ymm0[0],mem[0],ymm0[1],mem[1],ymm0[2],mem[2],ymm0[3],mem[3],ymm0[4],mem[4],ymm0[5],mem[5],ymm0[6],mem[6],ymm0[7],mem[7],ymm0[16],mem[16],ymm0[17],mem[17],ymm0[18],mem[18],ymm0[19],mem[19],ymm0[20],mem[20],ymm0[21],mem[21],ymm0[22],mem[22],ymm0[23],mem[23]
4568 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %ymm13
4569 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm19, %ymm0
4570 ; AVX512F-FAST-NEXT: vpshufb %ymm0, %ymm13, %ymm8
4571 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm18, %ymm0
4572 ; AVX512F-FAST-NEXT: vpshufb %ymm0, %ymm13, %ymm1
4573 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm4, %ymm4
4574 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %xmm15
4575 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm7[8],xmm9[8],xmm7[9],xmm9[9],xmm7[10],xmm9[10],xmm7[11],xmm9[11],xmm7[12],xmm9[12],xmm7[13],xmm9[13],xmm7[14],xmm9[14],xmm7[15],xmm9[15]
4576 ; AVX512F-FAST-NEXT: vmovdqa 32(%r8), %xmm7
4577 ; AVX512F-FAST-NEXT: vpshufb %xmm11, %xmm5, %xmm5
4578 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} xmm14 = <6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u>
4579 ; AVX512F-FAST-NEXT: vpermt2q %zmm4, %zmm31, %zmm5
4580 ; AVX512F-FAST-NEXT: vpshufb %xmm14, %xmm7, %xmm4
4581 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm3, %zmm30, %zmm5
4582 ; AVX512F-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
4583 ; AVX512F-FAST-NEXT: # ymm3 = mem[0,1,0,1]
4584 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm13, %ymm11
4585 ; AVX512F-FAST-NEXT: vpshufb %ymm3, %ymm12, %ymm9
4586 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = <2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u>
4587 ; AVX512F-FAST-NEXT: vpshufb %xmm14, %xmm15, %xmm12
4588 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm7, %xmm13
4589 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm15, %xmm14
4590 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = <10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u>
4591 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm15, %xmm15
4592 ; AVX512F-FAST-NEXT: vpshufb %xmm3, %xmm7, %xmm0
4593 ; AVX512F-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
4594 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm3, %ymm10
4595 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
4596 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
4597 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm3 = ymm4[0,0,0,1]
4598 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm7 = ymm13[0,0,0,1]
4599 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm4 = ymm12[0,0,0,1]
4600 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm12 = ymm14[0,0,0,1]
4601 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm13 = ymm15[0,0,0,1]
4602 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm17, %xmm14
4603 ; AVX512F-FAST-NEXT: vmovdqa64 %xmm16, %xmm15
4604 ; AVX512F-FAST-NEXT: vpshufb %xmm15, %xmm14, %xmm14
4605 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
4606 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm6, %ymm15, %ymm13
4607 ; AVX512F-FAST-NEXT: vextracti64x4 $1, %zmm6, %ymm6
4608 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
4609 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
4610 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm5, %ymm15, %ymm0
4611 ; AVX512F-FAST-NEXT: vextracti64x4 $1, %zmm5, %ymm5
4612 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
4613 ; AVX512F-FAST-NEXT: vpermt2q %zmm10, %zmm31, %zmm14
4614 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
4615 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm5, %ymm10, %ymm9
4616 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm0, %zmm5
4617 ; AVX512F-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm5[4,5,6,7]
4618 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
4619 ; AVX512F-FAST-NEXT: vpternlogq $184, %zmm0, %zmm5, %zmm14
4620 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
4621 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm14, 256(%rax)
4622 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm6, %ymm10, %ymm11
4623 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm0, %zmm0
4624 ; AVX512F-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm13[0,1,2,3],zmm0[4,5,6,7]
4625 ; AVX512F-FAST-NEXT: vpternlogq $184, %zmm0, %zmm5, %zmm2
4626 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm2, 64(%rax)
4627 ; AVX512F-FAST-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
4628 ; AVX512F-FAST-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
4629 ; AVX512F-FAST-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
4630 ; AVX512F-FAST-NEXT: # zmm2 = mem[2,2,2,3,6,6,6,7]
4631 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm0, %zmm30, %zmm2
4632 ; AVX512F-FAST-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
4633 ; AVX512F-FAST-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
4634 ; AVX512F-FAST-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Folded Reload
4635 ; AVX512F-FAST-NEXT: # zmm5 = mem[2,2,2,3,6,6,6,7]
4636 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm0, %zmm30, %zmm5
4637 ; AVX512F-FAST-NEXT: vextracti64x4 $1, %zmm2, %ymm0
4638 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm0, %ymm15, %ymm21
4639 ; AVX512F-FAST-NEXT: vextracti64x4 $1, %zmm5, %ymm0
4640 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm0, %ymm15, %ymm8
4641 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm0 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
4642 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm2, %ymm0, %ymm22
4643 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm21, %zmm0, %zmm2
4644 ; AVX512F-FAST-NEXT: vshufi64x2 {{.*#+}} zmm2 = zmm22[0,1,2,3],zmm2[4,5,6,7]
4645 ; AVX512F-FAST-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Folded Reload
4646 ; AVX512F-FAST-NEXT: # zmm6 = mem[2,2,2,3,6,6,6,7]
4647 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
4648 ; AVX512F-FAST-NEXT: vpternlogq $184, %zmm2, %zmm9, %zmm6
4649 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm5, %ymm0, %ymm1
4650 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm2
4651 ; AVX512F-FAST-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm1[0,1,2,3],zmm2[4,5,6,7]
4652 ; AVX512F-FAST-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
4653 ; AVX512F-FAST-NEXT: # zmm2 = mem[2,2,2,3,6,6,6,7]
4654 ; AVX512F-FAST-NEXT: vpternlogq $184, %zmm1, %zmm9, %zmm2
4655 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm1 = zmm23[0,0,0,1,4,4,4,5]
4656 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm5 = zmm26[0,0,0,1,4,4,4,5]
4657 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm8 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
4658 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm1, %zmm8, %zmm5
4659 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm1 = zmm24[0,0,0,1,4,4,4,5]
4660 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm9 = zmm28[0,0,0,1,4,4,4,5]
4661 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm1, %zmm8, %zmm9
4662 ; AVX512F-FAST-NEXT: vextracti64x4 $1, %zmm5, %ymm1
4663 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm1, %ymm0, %ymm3
4664 ; AVX512F-FAST-NEXT: vextracti64x4 $1, %zmm9, %ymm1
4665 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm1, %ymm0, %ymm4
4666 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm5, %ymm10, %ymm7
4667 ; AVX512F-FAST-NEXT: vpternlogq $184, %ymm9, %ymm10, %ymm12
4668 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
4669 ; AVX512F-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm7[0,1,2,3],zmm0[4,5,6,7]
4670 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm1 = zmm27[0,0,0,1,4,4,4,5]
4671 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm3 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
4672 ; AVX512F-FAST-NEXT: vpternlogq $184, %zmm0, %zmm3, %zmm1
4673 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
4674 ; AVX512F-FAST-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm12[0,1,2,3],zmm0[4,5,6,7]
4675 ; AVX512F-FAST-NEXT: vpermq {{.*#+}} zmm4 = zmm29[0,0,0,1,4,4,4,5]
4676 ; AVX512F-FAST-NEXT: vpternlogq $184, %zmm0, %zmm3, %zmm4
4677 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm4, (%rax)
4678 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, 192(%rax)
4679 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm2, 128(%rax)
4680 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm6, 320(%rax)
4681 ; AVX512F-FAST-NEXT: addq $360, %rsp # imm = 0x168
4682 ; AVX512F-FAST-NEXT: vzeroupper
4683 ; AVX512F-FAST-NEXT: retq
4685 ; AVX512BW-ONLY-SLOW-LABEL: store_i8_stride6_vf64:
4686 ; AVX512BW-ONLY-SLOW: # %bb.0:
4687 ; AVX512BW-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
4688 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %zmm14
4689 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %zmm12
4690 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rsi), %ymm16
4691 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdi), %ymm17
4692 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm17[0],ymm16[0],ymm17[1],ymm16[1],ymm17[2],ymm16[2],ymm17[3],ymm16[3],ymm17[4],ymm16[4],ymm17[5],ymm16[5],ymm17[6],ymm16[6],ymm17[7],ymm16[7],ymm17[16],ymm16[16],ymm17[17],ymm16[17],ymm17[18],ymm16[18],ymm17[19],ymm16[19],ymm17[20],ymm16[20],ymm17[21],ymm16[21],ymm17[22],ymm16[22],ymm17[23],ymm16[23]
4693 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm1
4694 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 32(%rsi), %xmm9
4695 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm3
4696 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 32(%rdi), %xmm11
4697 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
4698 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
4699 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
4700 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %zmm0, %zmm7, %zmm0
4701 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rcx), %xmm2
4702 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 32(%rcx), %xmm8
4703 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm4
4704 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 32(%rdx), %xmm10
4705 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm2[8],xmm4[9],xmm2[9],xmm4[10],xmm2[10],xmm4[11],xmm2[11],xmm4[12],xmm2[12],xmm4[13],xmm2[13],xmm4[14],xmm2[14],xmm4[15],xmm2[15]
4706 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm20 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7]
4707 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm5, %ymm20, %ymm5
4708 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rcx), %ymm18
4709 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%rdx), %ymm19
4710 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm19[0],ymm18[0],ymm19[1],ymm18[1],ymm19[2],ymm18[2],ymm19[3],ymm18[3],ymm19[4],ymm18[4],ymm19[5],ymm18[5],ymm19[6],ymm18[6],ymm19[7],ymm18[7],ymm19[16],ymm18[16],ymm19[17],ymm18[17],ymm19[18],ymm18[18],ymm19[19],ymm18[19],ymm19[20],ymm18[20],ymm19[21],ymm18[21],ymm19[22],ymm18[22],ymm19[23],ymm18[23]
4711 ; AVX512BW-ONLY-SLOW-NEXT: vprold $16, %ymm6, %ymm6
4712 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
4713 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm5, %zmm5
4714 ; AVX512BW-ONLY-SLOW-NEXT: movl $613566756, %r10d # imm = 0x24924924
4715 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k1
4716 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu16 %zmm5, %zmm0 {%k1}
4717 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r8), %xmm5
4718 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 32(%r8), %xmm13
4719 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm5[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4720 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm23 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7]
4721 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm6, %ymm23, %ymm6
4722 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r8), %ymm21
4723 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm24 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
4724 ; AVX512BW-ONLY-SLOW-NEXT: # ymm24 = mem[0,1,2,3,0,1,2,3]
4725 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm24, %ymm21, %ymm15
4726 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
4727 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm6, %zmm6
4728 ; AVX512BW-ONLY-SLOW-NEXT: movl $1227133513, %r10d # imm = 0x49249249
4729 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %r10d, %k2
4730 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu16 %zmm6, %zmm0 {%k2}
4731 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa (%r9), %xmm6
4732 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa 32(%r9), %xmm15
4733 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm22 = xmm6[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4734 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm22, %ymm23, %ymm25
4735 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 (%r9), %ymm22
4736 ; AVX512BW-ONLY-SLOW-NEXT: vbroadcasti32x4 {{.*#+}} ymm26 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
4737 ; AVX512BW-ONLY-SLOW-NEXT: # ymm26 = mem[0,1,2,3,0,1,2,3]
4738 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm26, %ymm22, %ymm27
4739 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm27 = ymm27[2,2,2,3]
4740 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm27, %zmm25, %zmm25
4741 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $2342443691899625602, %r10 # imm = 0x2082082082082082
4742 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %r10, %k3
4743 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm25, %zmm0 {%k3}
4744 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rsi), %ymm25
4745 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rdi), %ymm27
4746 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm28 = ymm27[0],ymm25[0],ymm27[1],ymm25[1],ymm27[2],ymm25[2],ymm27[3],ymm25[3],ymm27[4],ymm25[4],ymm27[5],ymm25[5],ymm27[6],ymm25[6],ymm27[7],ymm25[7],ymm27[16],ymm25[16],ymm27[17],ymm25[17],ymm27[18],ymm25[18],ymm27[19],ymm25[19],ymm27[20],ymm25[20],ymm27[21],ymm25[21],ymm27[22],ymm25[22],ymm27[23],ymm25[23]
4747 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm29 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
4748 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm28, %zmm29, %zmm28
4749 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %zmm28, %zmm7, %zmm7
4750 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm28 = xmm10[8],xmm8[8],xmm10[9],xmm8[9],xmm10[10],xmm8[10],xmm10[11],xmm8[11],xmm10[12],xmm8[12],xmm10[13],xmm8[13],xmm10[14],xmm8[14],xmm10[15],xmm8[15]
4751 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm28, %ymm20, %ymm20
4752 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rcx), %ymm28
4753 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%rdx), %ymm29
4754 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm30 = ymm29[0],ymm28[0],ymm29[1],ymm28[1],ymm29[2],ymm28[2],ymm29[3],ymm28[3],ymm29[4],ymm28[4],ymm29[5],ymm28[5],ymm29[6],ymm28[6],ymm29[7],ymm28[7],ymm29[16],ymm28[16],ymm29[17],ymm28[17],ymm29[18],ymm28[18],ymm29[19],ymm28[19],ymm29[20],ymm28[20],ymm29[21],ymm28[21],ymm29[22],ymm28[22],ymm29[23],ymm28[23]
4755 ; AVX512BW-ONLY-SLOW-NEXT: vprold $16, %ymm30, %ymm30
4756 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm30 = ymm30[2,2,2,3]
4757 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm30, %zmm20, %zmm20
4758 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu16 %zmm20, %zmm7 {%k1}
4759 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm13[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4760 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm20, %ymm23, %ymm20
4761 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%r8), %ymm30
4762 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm24, %ymm30, %ymm24
4763 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
4764 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm24, %zmm20, %zmm20
4765 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu16 %zmm20, %zmm7 {%k2}
4766 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm15[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
4767 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm20, %ymm23, %ymm20
4768 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 32(%r9), %ymm23
4769 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm26, %ymm23, %ymm24
4770 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
4771 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm24, %zmm20, %zmm20
4772 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm20, %zmm7 {%k3}
4773 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm24 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
4774 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm24, %ymm25, %ymm20
4775 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm24, %ymm27, %ymm26
4776 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm20 = ymm26[0],ymm20[0],ymm26[1],ymm20[1],ymm26[2],ymm20[2],ymm26[3],ymm20[3],ymm26[4],ymm20[4],ymm26[5],ymm20[5],ymm26[6],ymm20[6],ymm26[7],ymm20[7],ymm26[16],ymm20[16],ymm26[17],ymm20[17],ymm26[18],ymm20[18],ymm26[19],ymm20[19],ymm26[20],ymm20[20],ymm26[21],ymm20[21],ymm26[22],ymm20[22],ymm26[23],ymm20[23]
4777 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
4778 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm25 = ymm27[8],ymm25[8],ymm27[9],ymm25[9],ymm27[10],ymm25[10],ymm27[11],ymm25[11],ymm27[12],ymm25[12],ymm27[13],ymm25[13],ymm27[14],ymm25[14],ymm27[15],ymm25[15],ymm27[24],ymm25[24],ymm27[25],ymm25[25],ymm27[26],ymm25[26],ymm27[27],ymm25[27],ymm27[28],ymm25[28],ymm27[29],ymm25[29],ymm27[30],ymm25[30],ymm27[31],ymm25[31]
4779 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm26 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
4780 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm25, %ymm26, %ymm25
4781 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm25, %zmm20, %zmm25
4782 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm27 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
4783 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm27, %ymm28, %ymm20
4784 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm27, %ymm29, %ymm31
4785 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm20 = ymm31[0],ymm20[0],ymm31[1],ymm20[1],ymm31[2],ymm20[2],ymm31[3],ymm20[3],ymm31[4],ymm20[4],ymm31[5],ymm20[5],ymm31[6],ymm20[6],ymm31[7],ymm20[7],ymm31[16],ymm20[16],ymm31[17],ymm20[17],ymm31[18],ymm20[18],ymm31[19],ymm20[19],ymm31[20],ymm20[20],ymm31[21],ymm20[21],ymm31[22],ymm20[22],ymm31[23],ymm20[23]
4786 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
4787 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm28 = ymm29[8],ymm28[8],ymm29[9],ymm28[9],ymm29[10],ymm28[10],ymm29[11],ymm28[11],ymm29[12],ymm28[12],ymm29[13],ymm28[13],ymm29[14],ymm28[14],ymm29[15],ymm28[15],ymm29[24],ymm28[24],ymm29[25],ymm28[25],ymm29[26],ymm28[26],ymm29[27],ymm28[27],ymm29[28],ymm28[28],ymm29[29],ymm28[29],ymm29[30],ymm28[30],ymm29[31],ymm28[31]
4788 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm29 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
4789 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm28, %ymm29, %ymm28
4790 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm28, %zmm20, %zmm20
4791 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu16 %zmm25, %zmm20 {%k1}
4792 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm25 = zmm30[0,1,2,3],zmm14[4,5,6,7]
4793 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15,u>
4794 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %zmm28, %zmm25, %zmm25
4795 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm25 = zmm25[2,2,2,3,6,6,6,7]
4796 ; AVX512BW-ONLY-SLOW-NEXT: movl $-1840700270, %ecx # imm = 0x92492492
4797 ; AVX512BW-ONLY-SLOW-NEXT: kmovd %ecx, %k2
4798 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu16 %zmm25, %zmm20 {%k2}
4799 ; AVX512BW-ONLY-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm23[0,1,2,3],zmm12[4,5,6,7]
4800 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm25 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15>
4801 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %zmm25, %zmm23, %zmm23
4802 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm23 = zmm23[2,2,2,3,6,6,6,7]
4803 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $-9076969306111049208, %rcx # imm = 0x8208208208208208
4804 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rcx, %k3
4805 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm23, %zmm20 {%k3}
4806 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm24, %ymm16, %ymm23
4807 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm24, %ymm17, %ymm24
4808 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm23 = ymm24[0],ymm23[0],ymm24[1],ymm23[1],ymm24[2],ymm23[2],ymm24[3],ymm23[3],ymm24[4],ymm23[4],ymm24[5],ymm23[5],ymm24[6],ymm23[6],ymm24[7],ymm23[7],ymm24[16],ymm23[16],ymm24[17],ymm23[17],ymm24[18],ymm23[18],ymm24[19],ymm23[19],ymm24[20],ymm23[20],ymm24[21],ymm23[21],ymm24[22],ymm23[22],ymm24[23],ymm23[23]
4809 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm23 = ymm23[2,2,2,3]
4810 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm16 = ymm17[8],ymm16[8],ymm17[9],ymm16[9],ymm17[10],ymm16[10],ymm17[11],ymm16[11],ymm17[12],ymm16[12],ymm17[13],ymm16[13],ymm17[14],ymm16[14],ymm17[15],ymm16[15],ymm17[24],ymm16[24],ymm17[25],ymm16[25],ymm17[26],ymm16[26],ymm17[27],ymm16[27],ymm17[28],ymm16[28],ymm17[29],ymm16[29],ymm17[30],ymm16[30],ymm17[31],ymm16[31]
4811 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm16, %ymm26, %ymm16
4812 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm23, %zmm17
4813 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm27, %ymm18, %ymm16
4814 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %ymm27, %ymm19, %ymm23
4815 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm16 = ymm23[0],ymm16[0],ymm23[1],ymm16[1],ymm23[2],ymm16[2],ymm23[3],ymm16[3],ymm23[4],ymm16[4],ymm23[5],ymm16[5],ymm23[6],ymm16[6],ymm23[7],ymm16[7],ymm23[16],ymm16[16],ymm23[17],ymm16[17],ymm23[18],ymm16[18],ymm23[19],ymm16[19],ymm23[20],ymm16[20],ymm23[21],ymm16[21],ymm23[22],ymm16[22],ymm23[23],ymm16[23]
4816 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm16 = ymm16[2,2,2,3]
4817 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm18 = ymm19[8],ymm18[8],ymm19[9],ymm18[9],ymm19[10],ymm18[10],ymm19[11],ymm18[11],ymm19[12],ymm18[12],ymm19[13],ymm18[13],ymm19[14],ymm18[14],ymm19[15],ymm18[15],ymm19[24],ymm18[24],ymm19[25],ymm18[25],ymm19[26],ymm18[26],ymm19[27],ymm18[27],ymm19[28],ymm18[28],ymm19[29],ymm18[29],ymm19[30],ymm18[30],ymm19[31],ymm18[31]
4818 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm18, %ymm29, %ymm18
4819 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm18, %zmm16, %zmm16
4820 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu16 %zmm17, %zmm16 {%k1}
4821 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm21, %zmm14, %zmm14
4822 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %zmm28, %zmm14, %zmm14
4823 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm14 = zmm14[2,2,2,3,6,6,6,7]
4824 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu16 %zmm14, %zmm16 {%k2}
4825 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm22, %zmm12, %zmm12
4826 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %zmm25, %zmm12, %zmm12
4827 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm12 = zmm12[2,2,2,3,6,6,6,7]
4828 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm12, %zmm16 {%k3}
4829 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm12 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
4830 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm12, %xmm9, %xmm14
4831 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm12, %xmm11, %xmm17
4832 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm17[8],xmm14[8],xmm17[9],xmm14[9],xmm17[10],xmm14[10],xmm17[11],xmm14[11],xmm17[12],xmm14[12],xmm17[13],xmm14[13],xmm17[14],xmm14[14],xmm17[15],xmm14[15]
4833 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
4834 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3],xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
4835 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
4836 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm9, %ymm11, %ymm9
4837 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm9, %zmm9
4838 ; AVX512BW-ONLY-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm14 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
4839 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm14, %xmm8, %xmm17
4840 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm14, %xmm10, %xmm18
4841 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm17 = xmm18[0],xmm17[0],xmm18[1],xmm17[1],xmm18[2],xmm17[2],xmm18[3],xmm17[3],xmm18[4],xmm17[4],xmm18[5],xmm17[5],xmm18[6],xmm17[6],xmm18[7],xmm17[7]
4842 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3],xmm10[4],xmm8[4],xmm10[5],xmm8[5],xmm10[6],xmm8[6],xmm10[7],xmm8[7]
4843 ; AVX512BW-ONLY-SLOW-NEXT: vprold $16, %xmm8, %xmm8
4844 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm17, %zmm8, %zmm8
4845 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm8 = zmm8[0,0,0,1,4,4,4,5]
4846 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu16 %zmm8, %zmm9 {%k2}
4847 ; AVX512BW-ONLY-SLOW-NEXT: vpmovzxbw {{.*#+}} xmm8 = xmm13[0],zero,xmm13[1],zero,xmm13[2],zero,xmm13[3],zero,xmm13[4],zero,xmm13[5],zero,xmm13[6],zero,xmm13[7],zero
4848 ; AVX512BW-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,1,2,3]
4849 ; AVX512BW-ONLY-SLOW-NEXT: vpmovzxbw {{.*#+}} xmm10 = xmm10[0],zero,xmm10[1],zero,xmm10[2],zero,xmm10[3],zero,xmm10[4],zero,xmm10[5],zero,xmm10[6],zero,xmm10[7],zero
4850 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm10, %zmm8, %zmm8
4851 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4,22,21,16,23,22,21,16,23,22,21,16,23,17,17,17,17]
4852 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %zmm8, %zmm10, %zmm9 {%k1}
4853 ; AVX512BW-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm15[2,1,2,3]
4854 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm8[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4855 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm15[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4856 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm8, %zmm13, %zmm8
4857 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %zmm8, %zmm10, %zmm8
4858 ; AVX512BW-ONLY-SLOW-NEXT: movabsq $585610922974906400, %rcx # imm = 0x820820820820820
4859 ; AVX512BW-ONLY-SLOW-NEXT: kmovq %rcx, %k3
4860 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm8, %zmm9 {%k3}
4861 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm12, %xmm1, %xmm8
4862 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm12, %xmm3, %xmm12
4863 ; AVX512BW-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm12[8],xmm8[8],xmm12[9],xmm8[9],xmm12[10],xmm8[10],xmm12[11],xmm8[11],xmm12[12],xmm8[12],xmm12[13],xmm8[13],xmm12[14],xmm8[14],xmm12[15],xmm8[15]
4864 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,0,1]
4865 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
4866 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %ymm1, %ymm11, %ymm1
4867 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm1, %zmm1
4868 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm14, %xmm2, %xmm3
4869 ; AVX512BW-ONLY-SLOW-NEXT: vpshufb %xmm14, %xmm4, %xmm8
4870 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3],xmm8[4],xmm3[4],xmm8[5],xmm3[5],xmm8[6],xmm3[6],xmm8[7],xmm3[7]
4871 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3],xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
4872 ; AVX512BW-ONLY-SLOW-NEXT: vprold $16, %xmm2, %xmm2
4873 ; AVX512BW-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
4874 ; AVX512BW-ONLY-SLOW-NEXT: vpermq {{.*#+}} zmm2 = zmm2[0,0,0,1,4,4,4,5]
4875 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu16 %zmm2, %zmm1 {%k2}
4876 ; AVX512BW-ONLY-SLOW-NEXT: vpmovzxbw {{.*#+}} xmm2 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero,xmm5[4],zero,xmm5[5],zero,xmm5[6],zero,xmm5[7],zero
4877 ; AVX512BW-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[2,1,2,3]
4878 ; AVX512BW-ONLY-SLOW-NEXT: vpmovzxbw {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
4879 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm3, %zmm2, %zmm2
4880 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %zmm2, %zmm10, %zmm1 {%k1}
4881 ; AVX512BW-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm6[2,1,2,3]
4882 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4883 ; AVX512BW-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm6[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
4884 ; AVX512BW-ONLY-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm3, %zmm2
4885 ; AVX512BW-ONLY-SLOW-NEXT: vpermw %zmm2, %zmm10, %zmm2
4886 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqu8 %zmm2, %zmm1 {%k3}
4887 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm1, (%rax)
4888 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm9, 192(%rax)
4889 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm16, 128(%rax)
4890 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm20, 320(%rax)
4891 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm7, 256(%rax)
4892 ; AVX512BW-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 64(%rax)
4893 ; AVX512BW-ONLY-SLOW-NEXT: vzeroupper
4894 ; AVX512BW-ONLY-SLOW-NEXT: retq
4896 ; AVX512BW-FAST-LABEL: store_i8_stride6_vf64:
4897 ; AVX512BW-FAST: # %bb.0:
4898 ; AVX512BW-FAST-NEXT: vmovdqa64 (%r8), %zmm8
4899 ; AVX512BW-FAST-NEXT: vmovdqa64 (%r9), %zmm10
4900 ; AVX512BW-FAST-NEXT: vmovdqa 32(%rsi), %ymm3
4901 ; AVX512BW-FAST-NEXT: vpbroadcastq {{.*#+}} ymm6 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
4902 ; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm3, %ymm0
4903 ; AVX512BW-FAST-NEXT: vmovdqa 32(%rdi), %ymm4
4904 ; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm4, %ymm1
4905 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
4906 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
4907 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11],ymm4[12],ymm3[12],ymm4[13],ymm3[13],ymm4[14],ymm3[14],ymm4[15],ymm3[15],ymm4[24],ymm3[24],ymm4[25],ymm3[25],ymm4[26],ymm3[26],ymm4[27],ymm3[27],ymm4[28],ymm3[28],ymm4[29],ymm3[29],ymm4[30],ymm3[30],ymm4[31],ymm3[31]
4908 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
4909 ; AVX512BW-FAST-NEXT: vpermw %ymm1, %ymm9, %ymm1
4910 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
4911 ; AVX512BW-FAST-NEXT: vmovdqa 32(%rcx), %ymm5
4912 ; AVX512BW-FAST-NEXT: vpbroadcastq {{.*#+}} ymm12 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
4913 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm5, %ymm0
4914 ; AVX512BW-FAST-NEXT: vmovdqa 32(%rdx), %ymm7
4915 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm7, %ymm2
4916 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm2[0],ymm0[0],ymm2[1],ymm0[1],ymm2[2],ymm0[2],ymm2[3],ymm0[3],ymm2[4],ymm0[4],ymm2[5],ymm0[5],ymm2[6],ymm0[6],ymm2[7],ymm0[7],ymm2[16],ymm0[16],ymm2[17],ymm0[17],ymm2[18],ymm0[18],ymm2[19],ymm0[19],ymm2[20],ymm0[20],ymm2[21],ymm0[21],ymm2[22],ymm0[22],ymm2[23],ymm0[23]
4917 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
4918 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} ymm2 = ymm7[8],ymm5[8],ymm7[9],ymm5[9],ymm7[10],ymm5[10],ymm7[11],ymm5[11],ymm7[12],ymm5[12],ymm7[13],ymm5[13],ymm7[14],ymm5[14],ymm7[15],ymm5[15],ymm7[24],ymm5[24],ymm7[25],ymm5[25],ymm7[26],ymm5[26],ymm7[27],ymm5[27],ymm7[28],ymm5[28],ymm7[29],ymm5[29],ymm7[30],ymm5[30],ymm7[31],ymm5[31]
4919 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
4920 ; AVX512BW-FAST-NEXT: vpermw %ymm2, %ymm13, %ymm2
4921 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
4922 ; AVX512BW-FAST-NEXT: movl $613566756, %eax # imm = 0x24924924
4923 ; AVX512BW-FAST-NEXT: kmovd %eax, %k1
4924 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm1, %zmm0 {%k1}
4925 ; AVX512BW-FAST-NEXT: vmovdqa 32(%r8), %ymm2
4926 ; AVX512BW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm2[0,1,2,3],zmm8[4,5,6,7]
4927 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm15 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15,u>
4928 ; AVX512BW-FAST-NEXT: vpshufb %zmm15, %zmm1, %zmm1
4929 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm1 = zmm1[2,2,2,3,6,6,6,7]
4930 ; AVX512BW-FAST-NEXT: movl $-1840700270, %eax # imm = 0x92492492
4931 ; AVX512BW-FAST-NEXT: kmovd %eax, %k2
4932 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm1, %zmm0 {%k2}
4933 ; AVX512BW-FAST-NEXT: vmovdqa 32(%r9), %ymm1
4934 ; AVX512BW-FAST-NEXT: vshufi64x2 {{.*#+}} zmm11 = zmm1[0,1,2,3],zmm10[4,5,6,7]
4935 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm16 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15>
4936 ; AVX512BW-FAST-NEXT: vpshufb %zmm16, %zmm11, %zmm11
4937 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm11 = zmm11[2,2,2,3,6,6,6,7]
4938 ; AVX512BW-FAST-NEXT: movabsq $-9076969306111049208, %rax # imm = 0x8208208208208208
4939 ; AVX512BW-FAST-NEXT: kmovq %rax, %k3
4940 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm11, %zmm0 {%k3}
4941 ; AVX512BW-FAST-NEXT: vmovdqa (%rsi), %ymm11
4942 ; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm11, %ymm17
4943 ; AVX512BW-FAST-NEXT: vmovdqa (%rdi), %ymm14
4944 ; AVX512BW-FAST-NEXT: vpshufb %ymm6, %ymm14, %ymm6
4945 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm6[0],ymm17[0],ymm6[1],ymm17[1],ymm6[2],ymm17[2],ymm6[3],ymm17[3],ymm6[4],ymm17[4],ymm6[5],ymm17[5],ymm6[6],ymm17[6],ymm6[7],ymm17[7],ymm6[16],ymm17[16],ymm6[17],ymm17[17],ymm6[18],ymm17[18],ymm6[19],ymm17[19],ymm6[20],ymm17[20],ymm6[21],ymm17[21],ymm6[22],ymm17[22],ymm6[23],ymm17[23]
4946 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
4947 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} ymm17 = ymm14[8],ymm11[8],ymm14[9],ymm11[9],ymm14[10],ymm11[10],ymm14[11],ymm11[11],ymm14[12],ymm11[12],ymm14[13],ymm11[13],ymm14[14],ymm11[14],ymm14[15],ymm11[15],ymm14[24],ymm11[24],ymm14[25],ymm11[25],ymm14[26],ymm11[26],ymm14[27],ymm11[27],ymm14[28],ymm11[28],ymm14[29],ymm11[29],ymm14[30],ymm11[30],ymm14[31],ymm11[31]
4948 ; AVX512BW-FAST-NEXT: vpermw %ymm17, %ymm9, %ymm9
4949 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm6, %zmm9
4950 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rcx), %ymm18
4951 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm18, %ymm6
4952 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rdx), %ymm19
4953 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm19, %ymm12
4954 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm12[0],ymm6[0],ymm12[1],ymm6[1],ymm12[2],ymm6[2],ymm12[3],ymm6[3],ymm12[4],ymm6[4],ymm12[5],ymm6[5],ymm12[6],ymm6[6],ymm12[7],ymm6[7],ymm12[16],ymm6[16],ymm12[17],ymm6[17],ymm12[18],ymm6[18],ymm12[19],ymm6[19],ymm12[20],ymm6[20],ymm12[21],ymm6[21],ymm12[22],ymm6[22],ymm12[23],ymm6[23]
4955 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
4956 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} ymm12 = ymm19[8],ymm18[8],ymm19[9],ymm18[9],ymm19[10],ymm18[10],ymm19[11],ymm18[11],ymm19[12],ymm18[12],ymm19[13],ymm18[13],ymm19[14],ymm18[14],ymm19[15],ymm18[15],ymm19[24],ymm18[24],ymm19[25],ymm18[25],ymm19[26],ymm18[26],ymm19[27],ymm18[27],ymm19[28],ymm18[28],ymm19[29],ymm18[29],ymm19[30],ymm18[30],ymm19[31],ymm18[31]
4957 ; AVX512BW-FAST-NEXT: vpermw %ymm12, %ymm13, %ymm12
4958 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm12, %zmm6, %zmm6
4959 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm9, %zmm6 {%k1}
4960 ; AVX512BW-FAST-NEXT: vmovdqa (%r8), %ymm9
4961 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm8, %zmm8
4962 ; AVX512BW-FAST-NEXT: vpshufb %zmm15, %zmm8, %zmm8
4963 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm8 = zmm8[2,2,2,3,6,6,6,7]
4964 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm8, %zmm6 {%k2}
4965 ; AVX512BW-FAST-NEXT: vmovdqa (%r9), %ymm8
4966 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm10, %zmm10
4967 ; AVX512BW-FAST-NEXT: vpshufb %zmm16, %zmm10, %zmm10
4968 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} zmm10 = zmm10[2,2,2,3,6,6,6,7]
4969 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm10, %zmm6 {%k3}
4970 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rcx), %xmm22
4971 ; AVX512BW-FAST-NEXT: vmovdqa 32(%rcx), %xmm12
4972 ; AVX512BW-FAST-NEXT: vpbroadcastq {{.*#+}} xmm21 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
4973 ; AVX512BW-FAST-NEXT: vpshufb %xmm21, %xmm12, %xmm10
4974 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rdx), %xmm23
4975 ; AVX512BW-FAST-NEXT: vmovdqa 32(%rdx), %xmm15
4976 ; AVX512BW-FAST-NEXT: vpshufb %xmm21, %xmm15, %xmm13
4977 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm13[0],xmm10[0],xmm13[1],xmm10[1],xmm13[2],xmm10[2],xmm13[3],xmm10[3],xmm13[4],xmm10[4],xmm13[5],xmm10[5],xmm13[6],xmm10[6],xmm13[7],xmm10[7]
4978 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,0,1]
4979 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm15[0],xmm12[0],xmm15[1],xmm12[1],xmm15[2],xmm12[2],xmm15[3],xmm12[3],xmm15[4],xmm12[4],xmm15[5],xmm12[5],xmm15[6],xmm12[6],xmm15[7],xmm12[7]
4980 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} ymm24 = [1,0,3,2,1,0,3,2,1,0,3,2,5,4,7,6]
4981 ; AVX512BW-FAST-NEXT: vpermw %ymm13, %ymm24, %ymm13
4982 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm10, %zmm13, %zmm13
4983 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rsi), %xmm17
4984 ; AVX512BW-FAST-NEXT: vpbroadcastq {{.*#+}} xmm25 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
4985 ; AVX512BW-FAST-NEXT: vpshufb %xmm25, %xmm17, %xmm10
4986 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rdi), %xmm20
4987 ; AVX512BW-FAST-NEXT: vpshufb %xmm25, %xmm20, %xmm16
4988 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm16[8],xmm10[8],xmm16[9],xmm10[9],xmm16[10],xmm10[10],xmm16[11],xmm10[11],xmm16[12],xmm10[12],xmm16[13],xmm10[13],xmm16[14],xmm10[14],xmm16[15],xmm10[15]
4989 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,0,1]
4990 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm16 = xmm20[0],xmm17[0],xmm20[1],xmm17[1],xmm20[2],xmm17[2],xmm20[3],xmm17[3],xmm20[4],xmm17[4],xmm20[5],xmm17[5],xmm20[6],xmm17[6],xmm20[7],xmm17[7]
4991 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} ymm26 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
4992 ; AVX512BW-FAST-NEXT: vpermw %ymm16, %ymm26, %ymm16
4993 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm10, %zmm16, %zmm10
4994 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm13, %zmm10 {%k2}
4995 ; AVX512BW-FAST-NEXT: vmovdqa 32(%r8), %xmm13
4996 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} xmm27 = <8,u,9,u,u,u,u,u,u,u,5,u,6,u,7,u>
4997 ; AVX512BW-FAST-NEXT: vpshufb %xmm27, %xmm13, %xmm16
4998 ; AVX512BW-FAST-NEXT: vpmovzxbw {{.*#+}} xmm28 = xmm13[0],zero,xmm13[1],zero,xmm13[2],zero,xmm13[3],zero,xmm13[4],zero,xmm13[5],zero,xmm13[6],zero,xmm13[7],zero
4999 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm16, %zmm28, %zmm16
5000 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm28 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4,22,21,16,23,22,21,16,23,22,21,16,23,17,17,17,17]
5001 ; AVX512BW-FAST-NEXT: vpermw %zmm16, %zmm28, %zmm10 {%k1}
5002 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%r9), %xmm16
5003 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} xmm29 = <u,8,u,9,u,10,u,11,u,4,u,5,u,6,u,7>
5004 ; AVX512BW-FAST-NEXT: vpshufb %xmm29, %xmm16, %xmm30
5005 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm31 = xmm16[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5006 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm30, %zmm31, %zmm30
5007 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rsi), %xmm31
5008 ; AVX512BW-FAST-NEXT: vpermw %zmm30, %zmm28, %zmm30
5009 ; AVX512BW-FAST-NEXT: movabsq $585610922974906400, %rax # imm = 0x820820820820820
5010 ; AVX512BW-FAST-NEXT: kmovq %rax, %k3
5011 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm30, %zmm10 {%k3}
5012 ; AVX512BW-FAST-NEXT: vpshufb %xmm21, %xmm22, %xmm30
5013 ; AVX512BW-FAST-NEXT: vpshufb %xmm21, %xmm23, %xmm21
5014 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm21 = xmm21[0],xmm30[0],xmm21[1],xmm30[1],xmm21[2],xmm30[2],xmm21[3],xmm30[3],xmm21[4],xmm30[4],xmm21[5],xmm30[5],xmm21[6],xmm30[6],xmm21[7],xmm30[7]
5015 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm30 = xmm23[0],xmm22[0],xmm23[1],xmm22[1],xmm23[2],xmm22[2],xmm23[3],xmm22[3],xmm23[4],xmm22[4],xmm23[5],xmm22[5],xmm23[6],xmm22[6],xmm23[7],xmm22[7]
5016 ; AVX512BW-FAST-NEXT: vpermw %ymm30, %ymm24, %ymm24
5017 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rdi), %xmm30
5018 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm21 = ymm21[0,0,0,1]
5019 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm21, %zmm24, %zmm24
5020 ; AVX512BW-FAST-NEXT: vpshufb %xmm25, %xmm31, %xmm21
5021 ; AVX512BW-FAST-NEXT: vpshufb %xmm25, %xmm30, %xmm25
5022 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm21 = xmm25[8],xmm21[8],xmm25[9],xmm21[9],xmm25[10],xmm21[10],xmm25[11],xmm21[11],xmm25[12],xmm21[12],xmm25[13],xmm21[13],xmm25[14],xmm21[14],xmm25[15],xmm21[15]
5023 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm25 = xmm30[0],xmm31[0],xmm30[1],xmm31[1],xmm30[2],xmm31[2],xmm30[3],xmm31[3],xmm30[4],xmm31[4],xmm30[5],xmm31[5],xmm30[6],xmm31[6],xmm30[7],xmm31[7]
5024 ; AVX512BW-FAST-NEXT: vpermw %ymm25, %ymm26, %ymm25
5025 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm21 = ymm21[0,0,0,1]
5026 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm21, %zmm25, %zmm21
5027 ; AVX512BW-FAST-NEXT: vmovdqa64 (%r8), %xmm25
5028 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm24, %zmm21 {%k2}
5029 ; AVX512BW-FAST-NEXT: vpshufb %xmm27, %xmm25, %xmm24
5030 ; AVX512BW-FAST-NEXT: vpmovzxbw {{.*#+}} xmm26 = xmm25[0],zero,xmm25[1],zero,xmm25[2],zero,xmm25[3],zero,xmm25[4],zero,xmm25[5],zero,xmm25[6],zero,xmm25[7],zero
5031 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm24, %zmm26, %zmm26
5032 ; AVX512BW-FAST-NEXT: vmovdqa64 (%r9), %xmm24
5033 ; AVX512BW-FAST-NEXT: vpermw %zmm26, %zmm28, %zmm21 {%k1}
5034 ; AVX512BW-FAST-NEXT: vpshufb %xmm29, %xmm24, %xmm26
5035 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm27 = xmm24[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5036 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm26, %zmm27, %zmm26
5037 ; AVX512BW-FAST-NEXT: vpermw %zmm26, %zmm28, %zmm26
5038 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm26, %zmm21 {%k3}
5039 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm18 = ymm19[0],ymm18[0],ymm19[1],ymm18[1],ymm19[2],ymm18[2],ymm19[3],ymm18[3],ymm19[4],ymm18[4],ymm19[5],ymm18[5],ymm19[6],ymm18[6],ymm19[7],ymm18[7],ymm19[16],ymm18[16],ymm19[17],ymm18[17],ymm19[18],ymm18[18],ymm19[19],ymm18[19],ymm19[20],ymm18[20],ymm19[21],ymm18[21],ymm19[22],ymm18[22],ymm19[23],ymm18[23]
5040 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm19 = xmm23[8],xmm22[8],xmm23[9],xmm22[9],xmm23[10],xmm22[10],xmm23[11],xmm22[11],xmm23[12],xmm22[12],xmm23[13],xmm22[13],xmm23[14],xmm22[14],xmm23[15],xmm22[15]
5041 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm18, %zmm19, %zmm18
5042 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm11 = ymm14[0],ymm11[0],ymm14[1],ymm11[1],ymm14[2],ymm11[2],ymm14[3],ymm11[3],ymm14[4],ymm11[4],ymm14[5],ymm11[5],ymm14[6],ymm11[6],ymm14[7],ymm11[7],ymm14[16],ymm11[16],ymm14[17],ymm11[17],ymm14[18],ymm11[18],ymm14[19],ymm11[19],ymm14[20],ymm11[20],ymm14[21],ymm11[21],ymm14[22],ymm11[22],ymm14[23],ymm11[23]
5043 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm30[8],xmm31[8],xmm30[9],xmm31[9],xmm30[10],xmm31[10],xmm30[11],xmm31[11],xmm30[12],xmm31[12],xmm30[13],xmm31[13],xmm30[14],xmm31[14],xmm30[15],xmm31[15]
5044 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm14, %zmm11
5045 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm14 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
5046 ; AVX512BW-FAST-NEXT: vpermw %zmm11, %zmm14, %zmm11
5047 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm19 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7,25,24,27,26,25,24,27,26,25,24,27,26,29,28,31,30]
5048 ; AVX512BW-FAST-NEXT: vpermw %zmm18, %zmm19, %zmm11 {%k1}
5049 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm7[0],ymm5[0],ymm7[1],ymm5[1],ymm7[2],ymm5[2],ymm7[3],ymm5[3],ymm7[4],ymm5[4],ymm7[5],ymm5[5],ymm7[6],ymm5[6],ymm7[7],ymm5[7],ymm7[16],ymm5[16],ymm7[17],ymm5[17],ymm7[18],ymm5[18],ymm7[19],ymm5[19],ymm7[20],ymm5[20],ymm7[21],ymm5[21],ymm7[22],ymm5[22],ymm7[23],ymm5[23]
5050 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm15[8],xmm12[8],xmm15[9],xmm12[9],xmm15[10],xmm12[10],xmm15[11],xmm12[11],xmm15[12],xmm12[12],xmm15[13],xmm12[13],xmm15[14],xmm12[14],xmm15[15],xmm12[15]
5051 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm7, %zmm5
5052 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[16],ymm3[16],ymm4[17],ymm3[17],ymm4[18],ymm3[18],ymm4[19],ymm3[19],ymm4[20],ymm3[20],ymm4[21],ymm3[21],ymm4[22],ymm3[22],ymm4[23],ymm3[23]
5053 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm20[8],xmm17[8],xmm20[9],xmm17[9],xmm20[10],xmm17[10],xmm20[11],xmm17[11],xmm20[12],xmm17[12],xmm20[13],xmm17[13],xmm20[14],xmm17[14],xmm20[15],xmm17[15]
5054 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
5055 ; AVX512BW-FAST-NEXT: vpermw %zmm3, %zmm14, %zmm3
5056 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm25[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
5057 ; AVX512BW-FAST-NEXT: vpermw %zmm5, %zmm19, %zmm3 {%k1}
5058 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7]
5059 ; AVX512BW-FAST-NEXT: vpermw %ymm4, %ymm5, %ymm4
5060 ; AVX512BW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
5061 ; AVX512BW-FAST-NEXT: # ymm7 = mem[0,1,0,1]
5062 ; AVX512BW-FAST-NEXT: vpshufb %ymm7, %ymm9, %ymm9
5063 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
5064 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm9, %zmm4, %zmm4
5065 ; AVX512BW-FAST-NEXT: movl $1227133513, %eax # imm = 0x49249249
5066 ; AVX512BW-FAST-NEXT: kmovd %eax, %k1
5067 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm4, %zmm11 {%k1}
5068 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm24[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
5069 ; AVX512BW-FAST-NEXT: vpermw %ymm4, %ymm5, %ymm4
5070 ; AVX512BW-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
5071 ; AVX512BW-FAST-NEXT: # ymm9 = mem[0,1,0,1]
5072 ; AVX512BW-FAST-NEXT: vpshufb %ymm9, %ymm8, %ymm8
5073 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
5074 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm4, %zmm4
5075 ; AVX512BW-FAST-NEXT: movabsq $2342443691899625602, %rax # imm = 0x2082082082082082
5076 ; AVX512BW-FAST-NEXT: kmovq %rax, %k2
5077 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm4, %zmm11 {%k2}
5078 ; AVX512BW-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm2
5079 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm13[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
5080 ; AVX512BW-FAST-NEXT: vpermw %ymm4, %ymm5, %ymm4
5081 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
5082 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm4, %zmm2
5083 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm2, %zmm3 {%k1}
5084 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm16[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
5085 ; AVX512BW-FAST-NEXT: vpermw %ymm2, %ymm5, %ymm2
5086 ; AVX512BW-FAST-NEXT: vpshufb %ymm9, %ymm1, %ymm1
5087 ; AVX512BW-FAST-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
5088 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
5089 ; AVX512BW-FAST-NEXT: vmovdqu8 %zmm1, %zmm3 {%k2}
5090 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
5091 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm3, 256(%rax)
5092 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm11, 64(%rax)
5093 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm21, (%rax)
5094 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm10, 192(%rax)
5095 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm6, 128(%rax)
5096 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm0, 320(%rax)
5097 ; AVX512BW-FAST-NEXT: vzeroupper
5098 ; AVX512BW-FAST-NEXT: retq
5099 ;
5100 ; AVX512DQBW-SLOW-LABEL: store_i8_stride6_vf64:
5101 ; AVX512DQBW-SLOW: # %bb.0:
5102 ; AVX512DQBW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
5103 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %zmm14
5104 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r9), %zmm12
5105 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rsi), %ymm16
5106 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdi), %ymm17
5107 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm17[0],ymm16[0],ymm17[1],ymm16[1],ymm17[2],ymm16[2],ymm17[3],ymm16[3],ymm17[4],ymm16[4],ymm17[5],ymm16[5],ymm17[6],ymm16[6],ymm17[7],ymm16[7],ymm17[16],ymm16[16],ymm17[17],ymm16[17],ymm17[18],ymm16[18],ymm17[19],ymm16[19],ymm17[20],ymm16[20],ymm17[21],ymm16[21],ymm17[22],ymm16[22],ymm17[23],ymm16[23]
5108 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rsi), %xmm1
5109 ; AVX512DQBW-SLOW-NEXT: vmovdqa 32(%rsi), %xmm9
5110 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdi), %xmm3
5111 ; AVX512DQBW-SLOW-NEXT: vmovdqa 32(%rdi), %xmm11
5112 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
5113 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
5114 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm7 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
5115 ; AVX512DQBW-SLOW-NEXT: vpermw %zmm0, %zmm7, %zmm0
5116 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rcx), %xmm2
5117 ; AVX512DQBW-SLOW-NEXT: vmovdqa 32(%rcx), %xmm8
5118 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%rdx), %xmm4
5119 ; AVX512DQBW-SLOW-NEXT: vmovdqa 32(%rdx), %xmm10
5120 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm2[8],xmm4[9],xmm2[9],xmm4[10],xmm2[10],xmm4[11],xmm2[11],xmm4[12],xmm2[12],xmm4[13],xmm2[13],xmm4[14],xmm2[14],xmm4[15],xmm2[15]
5121 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm20 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7]
5122 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm5, %ymm20, %ymm5
5123 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rcx), %ymm18
5124 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%rdx), %ymm19
5125 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm19[0],ymm18[0],ymm19[1],ymm18[1],ymm19[2],ymm18[2],ymm19[3],ymm18[3],ymm19[4],ymm18[4],ymm19[5],ymm18[5],ymm19[6],ymm18[6],ymm19[7],ymm18[7],ymm19[16],ymm18[16],ymm19[17],ymm18[17],ymm19[18],ymm18[18],ymm19[19],ymm18[19],ymm19[20],ymm18[20],ymm19[21],ymm18[21],ymm19[22],ymm18[22],ymm19[23],ymm18[23]
5126 ; AVX512DQBW-SLOW-NEXT: vprold $16, %ymm6, %ymm6
5127 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
5128 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm5, %zmm5
5129 ; AVX512DQBW-SLOW-NEXT: movl $613566756, %r10d # imm = 0x24924924
5130 ; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k1
5131 ; AVX512DQBW-SLOW-NEXT: vmovdqu16 %zmm5, %zmm0 {%k1}
5132 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r8), %xmm5
5133 ; AVX512DQBW-SLOW-NEXT: vmovdqa 32(%r8), %xmm13
5134 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm5[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
5135 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm23 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7]
5136 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm6, %ymm23, %ymm6
5137 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r8), %ymm21
5138 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm24 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
5139 ; AVX512DQBW-SLOW-NEXT: # ymm24 = mem[0,1,0,1]
5140 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm24, %ymm21, %ymm15
5141 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
5142 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm15, %zmm6, %zmm6
5143 ; AVX512DQBW-SLOW-NEXT: movl $1227133513, %r10d # imm = 0x49249249
5144 ; AVX512DQBW-SLOW-NEXT: kmovd %r10d, %k2
5145 ; AVX512DQBW-SLOW-NEXT: vmovdqu16 %zmm6, %zmm0 {%k2}
5146 ; AVX512DQBW-SLOW-NEXT: vmovdqa (%r9), %xmm6
5147 ; AVX512DQBW-SLOW-NEXT: vmovdqa 32(%r9), %xmm15
5148 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm22 = xmm6[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
5149 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm22, %ymm23, %ymm25
5150 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 (%r9), %ymm22
5151 ; AVX512DQBW-SLOW-NEXT: vbroadcasti64x2 {{.*#+}} ymm26 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
5152 ; AVX512DQBW-SLOW-NEXT: # ymm26 = mem[0,1,0,1]
5153 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm26, %ymm22, %ymm27
5154 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm27 = ymm27[2,2,2,3]
5155 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm27, %zmm25, %zmm25
5156 ; AVX512DQBW-SLOW-NEXT: movabsq $2342443691899625602, %r10 # imm = 0x2082082082082082
5157 ; AVX512DQBW-SLOW-NEXT: kmovq %r10, %k3
5158 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm25, %zmm0 {%k3}
5159 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rsi), %ymm25
5160 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rdi), %ymm27
5161 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm28 = ymm27[0],ymm25[0],ymm27[1],ymm25[1],ymm27[2],ymm25[2],ymm27[3],ymm25[3],ymm27[4],ymm25[4],ymm27[5],ymm25[5],ymm27[6],ymm25[6],ymm27[7],ymm25[7],ymm27[16],ymm25[16],ymm27[17],ymm25[17],ymm27[18],ymm25[18],ymm27[19],ymm25[19],ymm27[20],ymm25[20],ymm27[21],ymm25[21],ymm27[22],ymm25[22],ymm27[23],ymm25[23]
5162 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm29 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
5163 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm28, %zmm29, %zmm28
5164 ; AVX512DQBW-SLOW-NEXT: vpermw %zmm28, %zmm7, %zmm7
5165 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm28 = xmm10[8],xmm8[8],xmm10[9],xmm8[9],xmm10[10],xmm8[10],xmm10[11],xmm8[11],xmm10[12],xmm8[12],xmm10[13],xmm8[13],xmm10[14],xmm8[14],xmm10[15],xmm8[15]
5166 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm28, %ymm20, %ymm20
5167 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rcx), %ymm28
5168 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%rdx), %ymm29
5169 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm30 = ymm29[0],ymm28[0],ymm29[1],ymm28[1],ymm29[2],ymm28[2],ymm29[3],ymm28[3],ymm29[4],ymm28[4],ymm29[5],ymm28[5],ymm29[6],ymm28[6],ymm29[7],ymm28[7],ymm29[16],ymm28[16],ymm29[17],ymm28[17],ymm29[18],ymm28[18],ymm29[19],ymm28[19],ymm29[20],ymm28[20],ymm29[21],ymm28[21],ymm29[22],ymm28[22],ymm29[23],ymm28[23]
5170 ; AVX512DQBW-SLOW-NEXT: vprold $16, %ymm30, %ymm30
5171 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm30 = ymm30[2,2,2,3]
5172 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm30, %zmm20, %zmm20
5173 ; AVX512DQBW-SLOW-NEXT: vmovdqu16 %zmm20, %zmm7 {%k1}
5174 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm13[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
5175 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm20, %ymm23, %ymm20
5176 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%r8), %ymm30
5177 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm24, %ymm30, %ymm24
5178 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
5179 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm24, %zmm20, %zmm20
5180 ; AVX512DQBW-SLOW-NEXT: vmovdqu16 %zmm20, %zmm7 {%k2}
5181 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm15[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
5182 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm20, %ymm23, %ymm20
5183 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 32(%r9), %ymm23
5184 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm26, %ymm23, %ymm24
5185 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
5186 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm24, %zmm20, %zmm20
5187 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm20, %zmm7 {%k3}
5188 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm24 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
5189 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm24, %ymm25, %ymm20
5190 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm24, %ymm27, %ymm26
5191 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm20 = ymm26[0],ymm20[0],ymm26[1],ymm20[1],ymm26[2],ymm20[2],ymm26[3],ymm20[3],ymm26[4],ymm20[4],ymm26[5],ymm20[5],ymm26[6],ymm20[6],ymm26[7],ymm20[7],ymm26[16],ymm20[16],ymm26[17],ymm20[17],ymm26[18],ymm20[18],ymm26[19],ymm20[19],ymm26[20],ymm20[20],ymm26[21],ymm20[21],ymm26[22],ymm20[22],ymm26[23],ymm20[23]
5192 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
5193 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm25 = ymm27[8],ymm25[8],ymm27[9],ymm25[9],ymm27[10],ymm25[10],ymm27[11],ymm25[11],ymm27[12],ymm25[12],ymm27[13],ymm25[13],ymm27[14],ymm25[14],ymm27[15],ymm25[15],ymm27[24],ymm25[24],ymm27[25],ymm25[25],ymm27[26],ymm25[26],ymm27[27],ymm25[27],ymm27[28],ymm25[28],ymm27[29],ymm25[29],ymm27[30],ymm25[30],ymm27[31],ymm25[31]
5194 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm26 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
5195 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm25, %ymm26, %ymm25
5196 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm25, %zmm20, %zmm25
5197 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq {{.*#+}} ymm27 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
5198 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm27, %ymm28, %ymm20
5199 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm27, %ymm29, %ymm31
5200 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm20 = ymm31[0],ymm20[0],ymm31[1],ymm20[1],ymm31[2],ymm20[2],ymm31[3],ymm20[3],ymm31[4],ymm20[4],ymm31[5],ymm20[5],ymm31[6],ymm20[6],ymm31[7],ymm20[7],ymm31[16],ymm20[16],ymm31[17],ymm20[17],ymm31[18],ymm20[18],ymm31[19],ymm20[19],ymm31[20],ymm20[20],ymm31[21],ymm20[21],ymm31[22],ymm20[22],ymm31[23],ymm20[23]
5201 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
5202 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm28 = ymm29[8],ymm28[8],ymm29[9],ymm28[9],ymm29[10],ymm28[10],ymm29[11],ymm28[11],ymm29[12],ymm28[12],ymm29[13],ymm28[13],ymm29[14],ymm28[14],ymm29[15],ymm28[15],ymm29[24],ymm28[24],ymm29[25],ymm28[25],ymm29[26],ymm28[26],ymm29[27],ymm28[27],ymm29[28],ymm28[28],ymm29[29],ymm28[29],ymm29[30],ymm28[30],ymm29[31],ymm28[31]
5203 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm29 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
5204 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm28, %ymm29, %ymm28
5205 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm28, %zmm20, %zmm20
5206 ; AVX512DQBW-SLOW-NEXT: vmovdqu16 %zmm25, %zmm20 {%k1}
5207 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm25 = zmm30[0,1,2,3],zmm14[4,5,6,7]
5208 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm28 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15,u>
5209 ; AVX512DQBW-SLOW-NEXT: vpshufb %zmm28, %zmm25, %zmm25
5210 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm25 = zmm25[2,2,2,3,6,6,6,7]
5211 ; AVX512DQBW-SLOW-NEXT: movl $-1840700270, %ecx # imm = 0x92492492
5212 ; AVX512DQBW-SLOW-NEXT: kmovd %ecx, %k2
5213 ; AVX512DQBW-SLOW-NEXT: vmovdqu16 %zmm25, %zmm20 {%k2}
5214 ; AVX512DQBW-SLOW-NEXT: vshufi64x2 {{.*#+}} zmm23 = zmm23[0,1,2,3],zmm12[4,5,6,7]
5215 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm25 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15>
5216 ; AVX512DQBW-SLOW-NEXT: vpshufb %zmm25, %zmm23, %zmm23
5217 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm23 = zmm23[2,2,2,3,6,6,6,7]
5218 ; AVX512DQBW-SLOW-NEXT: movabsq $-9076969306111049208, %rcx # imm = 0x8208208208208208
5219 ; AVX512DQBW-SLOW-NEXT: kmovq %rcx, %k3
5220 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm23, %zmm20 {%k3}
5221 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm24, %ymm16, %ymm23
5222 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm24, %ymm17, %ymm24
5223 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm23 = ymm24[0],ymm23[0],ymm24[1],ymm23[1],ymm24[2],ymm23[2],ymm24[3],ymm23[3],ymm24[4],ymm23[4],ymm24[5],ymm23[5],ymm24[6],ymm23[6],ymm24[7],ymm23[7],ymm24[16],ymm23[16],ymm24[17],ymm23[17],ymm24[18],ymm23[18],ymm24[19],ymm23[19],ymm24[20],ymm23[20],ymm24[21],ymm23[21],ymm24[22],ymm23[22],ymm24[23],ymm23[23]
5224 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm23 = ymm23[2,2,2,3]
5225 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm16 = ymm17[8],ymm16[8],ymm17[9],ymm16[9],ymm17[10],ymm16[10],ymm17[11],ymm16[11],ymm17[12],ymm16[12],ymm17[13],ymm16[13],ymm17[14],ymm16[14],ymm17[15],ymm16[15],ymm17[24],ymm16[24],ymm17[25],ymm16[25],ymm17[26],ymm16[26],ymm17[27],ymm16[27],ymm17[28],ymm16[28],ymm17[29],ymm16[29],ymm17[30],ymm16[30],ymm17[31],ymm16[31]
5226 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm16, %ymm26, %ymm16
5227 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm23, %zmm17
5228 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm27, %ymm18, %ymm16
5229 ; AVX512DQBW-SLOW-NEXT: vpshufb %ymm27, %ymm19, %ymm23
5230 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} ymm16 = ymm23[0],ymm16[0],ymm23[1],ymm16[1],ymm23[2],ymm16[2],ymm23[3],ymm16[3],ymm23[4],ymm16[4],ymm23[5],ymm16[5],ymm23[6],ymm16[6],ymm23[7],ymm16[7],ymm23[16],ymm16[16],ymm23[17],ymm16[17],ymm23[18],ymm16[18],ymm23[19],ymm16[19],ymm23[20],ymm16[20],ymm23[21],ymm16[21],ymm23[22],ymm16[22],ymm23[23],ymm16[23]
5231 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm16 = ymm16[2,2,2,3]
5232 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} ymm18 = ymm19[8],ymm18[8],ymm19[9],ymm18[9],ymm19[10],ymm18[10],ymm19[11],ymm18[11],ymm19[12],ymm18[12],ymm19[13],ymm18[13],ymm19[14],ymm18[14],ymm19[15],ymm18[15],ymm19[24],ymm18[24],ymm19[25],ymm18[25],ymm19[26],ymm18[26],ymm19[27],ymm18[27],ymm19[28],ymm18[28],ymm19[29],ymm18[29],ymm19[30],ymm18[30],ymm19[31],ymm18[31]
5233 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm18, %ymm29, %ymm18
5234 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm18, %zmm16, %zmm16
5235 ; AVX512DQBW-SLOW-NEXT: vmovdqu16 %zmm17, %zmm16 {%k1}
5236 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm21, %zmm14, %zmm14
5237 ; AVX512DQBW-SLOW-NEXT: vpshufb %zmm28, %zmm14, %zmm14
5238 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm14 = zmm14[2,2,2,3,6,6,6,7]
5239 ; AVX512DQBW-SLOW-NEXT: vmovdqu16 %zmm14, %zmm16 {%k2}
5240 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm22, %zmm12, %zmm12
5241 ; AVX512DQBW-SLOW-NEXT: vpshufb %zmm25, %zmm12, %zmm12
5242 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm12 = zmm12[2,2,2,3,6,6,6,7]
5243 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm12, %zmm16 {%k3}
5244 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm12 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
5245 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm12, %xmm9, %xmm14
5246 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm12, %xmm11, %xmm17
5247 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm17[8],xmm14[8],xmm17[9],xmm14[9],xmm17[10],xmm14[10],xmm17[11],xmm14[11],xmm17[12],xmm14[12],xmm17[13],xmm14[13],xmm17[14],xmm14[14],xmm17[15],xmm14[15]
5248 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
5249 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3],xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
5250 ; AVX512DQBW-SLOW-NEXT: vmovdqa {{.*#+}} ymm11 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
5251 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm9, %ymm11, %ymm9
5252 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm14, %zmm9, %zmm9
5253 ; AVX512DQBW-SLOW-NEXT: vpbroadcastq {{.*#+}} xmm14 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
5254 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm14, %xmm8, %xmm17
5255 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm14, %xmm10, %xmm18
5256 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm17 = xmm18[0],xmm17[0],xmm18[1],xmm17[1],xmm18[2],xmm17[2],xmm18[3],xmm17[3],xmm18[4],xmm17[4],xmm18[5],xmm17[5],xmm18[6],xmm17[6],xmm18[7],xmm17[7]
5257 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3],xmm10[4],xmm8[4],xmm10[5],xmm8[5],xmm10[6],xmm8[6],xmm10[7],xmm8[7]
5258 ; AVX512DQBW-SLOW-NEXT: vprold $16, %xmm8, %xmm8
5259 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm17, %zmm8, %zmm8
5260 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm8 = zmm8[0,0,0,1,4,4,4,5]
5261 ; AVX512DQBW-SLOW-NEXT: vmovdqu16 %zmm8, %zmm9 {%k2}
5262 ; AVX512DQBW-SLOW-NEXT: vpmovzxbw {{.*#+}} xmm8 = xmm13[0],zero,xmm13[1],zero,xmm13[2],zero,xmm13[3],zero,xmm13[4],zero,xmm13[5],zero,xmm13[6],zero,xmm13[7],zero
5263 ; AVX512DQBW-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,1,2,3]
5264 ; AVX512DQBW-SLOW-NEXT: vpmovzxbw {{.*#+}} xmm10 = xmm10[0],zero,xmm10[1],zero,xmm10[2],zero,xmm10[3],zero,xmm10[4],zero,xmm10[5],zero,xmm10[6],zero,xmm10[7],zero
5265 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, %xmm10, %zmm8, %zmm8
5266 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4,22,21,16,23,22,21,16,23,22,21,16,23,17,17,17,17]
5267 ; AVX512DQBW-SLOW-NEXT: vpermw %zmm8, %zmm10, %zmm9 {%k1}
5268 ; AVX512DQBW-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm15[2,1,2,3]
5269 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm8[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5270 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm15[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5271 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, %xmm8, %zmm13, %zmm8
5272 ; AVX512DQBW-SLOW-NEXT: vpermw %zmm8, %zmm10, %zmm8
5273 ; AVX512DQBW-SLOW-NEXT: movabsq $585610922974906400, %rcx # imm = 0x820820820820820
5274 ; AVX512DQBW-SLOW-NEXT: kmovq %rcx, %k3
5275 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm8, %zmm9 {%k3}
5276 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm12, %xmm1, %xmm8
5277 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm12, %xmm3, %xmm12
5278 ; AVX512DQBW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm12[8],xmm8[8],xmm12[9],xmm8[9],xmm12[10],xmm8[10],xmm12[11],xmm8[11],xmm12[12],xmm8[12],xmm12[13],xmm8[13],xmm12[14],xmm8[14],xmm12[15],xmm8[15]
5279 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,0,1]
5280 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
5281 ; AVX512DQBW-SLOW-NEXT: vpermw %ymm1, %ymm11, %ymm1
5282 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm1, %zmm1
5283 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm14, %xmm2, %xmm3
5284 ; AVX512DQBW-SLOW-NEXT: vpshufb %xmm14, %xmm4, %xmm8
5285 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3],xmm8[4],xmm3[4],xmm8[5],xmm3[5],xmm8[6],xmm3[6],xmm8[7],xmm3[7]
5286 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3],xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
5287 ; AVX512DQBW-SLOW-NEXT: vprold $16, %xmm2, %xmm2
5288 ; AVX512DQBW-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
5289 ; AVX512DQBW-SLOW-NEXT: vpermq {{.*#+}} zmm2 = zmm2[0,0,0,1,4,4,4,5]
5290 ; AVX512DQBW-SLOW-NEXT: vmovdqu16 %zmm2, %zmm1 {%k2}
5291 ; AVX512DQBW-SLOW-NEXT: vpmovzxbw {{.*#+}} xmm2 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero,xmm5[4],zero,xmm5[5],zero,xmm5[6],zero,xmm5[7],zero
5292 ; AVX512DQBW-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[2,1,2,3]
5293 ; AVX512DQBW-SLOW-NEXT: vpmovzxbw {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
5294 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, %xmm3, %zmm2, %zmm2
5295 ; AVX512DQBW-SLOW-NEXT: vpermw %zmm2, %zmm10, %zmm1 {%k1}
5296 ; AVX512DQBW-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm6[2,1,2,3]
5297 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5298 ; AVX512DQBW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm6[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
5299 ; AVX512DQBW-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm3, %zmm2
5300 ; AVX512DQBW-SLOW-NEXT: vpermw %zmm2, %zmm10, %zmm2
5301 ; AVX512DQBW-SLOW-NEXT: vmovdqu8 %zmm2, %zmm1 {%k3}
5302 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm1, (%rax)
5303 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm9, 192(%rax)
5304 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm16, 128(%rax)
5305 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm20, 320(%rax)
5306 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm7, 256(%rax)
5307 ; AVX512DQBW-SLOW-NEXT: vmovdqa64 %zmm0, 64(%rax)
5308 ; AVX512DQBW-SLOW-NEXT: vzeroupper
5309 ; AVX512DQBW-SLOW-NEXT: retq
5310 %in.vec0 = load <64 x i8>, ptr %in.vecptr0, align 64
5311 %in.vec1 = load <64 x i8>, ptr %in.vecptr1, align 64
5312 %in.vec2 = load <64 x i8>, ptr %in.vecptr2, align 64
5313 %in.vec3 = load <64 x i8>, ptr %in.vecptr3, align 64
5314 %in.vec4 = load <64 x i8>, ptr %in.vecptr4, align 64
5315 %in.vec5 = load <64 x i8>, ptr %in.vecptr5, align 64
5316 %1 = shufflevector <64 x i8> %in.vec0, <64 x i8> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
5317 %2 = shufflevector <64 x i8> %in.vec2, <64 x i8> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
5318 %3 = shufflevector <64 x i8> %in.vec4, <64 x i8> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
5319 %4 = shufflevector <128 x i8> %1, <128 x i8> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
5320 %5 = shufflevector <128 x i8> %3, <128 x i8> poison, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
5321 %6 = shufflevector <256 x i8> %4, <256 x i8> %5, <384 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383>
5322 %interleaved.vec = shufflevector <384 x i8> %6, <384 x i8> poison, <384 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383>
5323 store <384 x i8> %interleaved.vec, ptr %out.vec, align 64
5324 ret void
5325 }
5326 ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
5330 ; AVX512-FAST: {{.*}}
5331 ; AVX512-SLOW: {{.*}}
5332 ; AVX512BW-ONLY-FAST: {{.*}}
5333 ; AVX512DQ-FAST: {{.*}}
5334 ; AVX512DQ-SLOW: {{.*}}
5335 ; AVX512DQBW-FAST: {{.*}}
5336 ; AVX512F-ONLY-FAST: {{.*}}
5337 ; AVX512F-ONLY-SLOW: {{.*}}
5340 ; FALLBACK10: {{.*}}
5341 ; FALLBACK11: {{.*}}
5342 ; FALLBACK12: {{.*}}