; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX512
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX512DQ
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX512BW
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512BW-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX512DQ-BW
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-BW-FCP
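;
; If codegen changes, the CHECK lines in this file are normally refreshed by
; rerunning the update script named in the NOTE above (a standard-workflow
; reminder, not part of the original test; the path placeholder is deliberate):
;   python llvm/utils/update_llc_test_checks.py <path-to-this-test>.ll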
; These patterns are produced by LoopVectorizer for interleaved stores.
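;
; A minimal source-level sketch (an illustration with assumed names, not part
; of this test) of the kind of loop whose vectorization produces a stride-6
; interleaved store, six input streams being merged into one output buffer:
;
;   for (int i = 0; i != n; ++i) {
;     out[6*i+0] = a[i]; out[6*i+1] = b[i]; out[6*i+2] = c[i];
;     out[6*i+3] = d[i]; out[6*i+4] = e[i]; out[6*i+5] = f[i];
;   }
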
define void @store_i8_stride6_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i8_stride6_vf2:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movdqa (%rdi), %xmm0
; SSE-NEXT: movdqa (%rdx), %xmm1
; SSE-NEXT: movdqa (%r8), %xmm2
; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE-NEXT: pxor %xmm1, %xmm1
; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,5,7,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,2]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[0,2,1,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,1]
; SSE-NEXT: packuswb %xmm1, %xmm0
; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,0,65535,65535,0,65535,65535]
; SSE-NEXT: pand %xmm1, %xmm0
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
; SSE-NEXT: pandn %xmm2, %xmm1
; SSE-NEXT: por %xmm0, %xmm1
; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; SSE-NEXT: pand %xmm0, %xmm1
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,0,0,0]
; SSE-NEXT: pandn %xmm2, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: movq %xmm0, (%rax)
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
; SSE-NEXT: movd %xmm0, 8(%rax)
; SSE-NEXT: retq
;
; AVX-LABEL: store_i8_stride6_vf2:
; AVX: # %bb.0:
; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX-NEXT: vmovdqa (%rdi), %xmm0
; AVX-NEXT: vmovdqa (%rdx), %xmm1
; AVX-NEXT: vmovdqa (%r9), %xmm2
; AVX-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX-NEXT: vmovq %xmm0, (%rax)
; AVX-NEXT: retq
;
; AVX2-LABEL: store_i8_stride6_vf2:
; AVX2: # %bb.0:
; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-NEXT: vmovdqa (%r9), %xmm2
; AVX2-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX2-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX2-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX2-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX2-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX2-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX2-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX2-NEXT: vmovq %xmm0, (%rax)
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: store_i8_stride6_vf2:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FP-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-FP-NEXT: vmovdqa (%r9), %xmm2
; AVX2-FP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX2-FP-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX2-FP-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX2-FP-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX2-FP-NEXT: vmovq %xmm0, (%rax)
; AVX2-FP-NEXT: retq
;
; AVX2-FCP-LABEL: store_i8_stride6_vf2:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-FCP-NEXT: vmovdqa (%r9), %xmm2
; AVX2-FCP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX2-FCP-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX2-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX2-FCP-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX2-FCP-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX2-FCP-NEXT: vmovq %xmm0, (%rax)
; AVX2-FCP-NEXT: retq
;
; AVX512-LABEL: store_i8_stride6_vf2:
; AVX512: # %bb.0:
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa (%rdi), %xmm0
; AVX512-NEXT: vmovdqa (%rdx), %xmm1
; AVX512-NEXT: vmovdqa (%r9), %xmm2
; AVX512-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX512-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX512-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX512-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX512-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX512-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX512-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX512-NEXT: vmovq %xmm0, (%rax)
; AVX512-NEXT: retq
;
; AVX512-FCP-LABEL: store_i8_stride6_vf2:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512-FCP-NEXT: vmovdqa (%r9), %xmm2
; AVX512-FCP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX512-FCP-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX512-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX512-FCP-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX512-FCP-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX512-FCP-NEXT: vmovq %xmm0, (%rax)
; AVX512-FCP-NEXT: retq
;
; AVX512DQ-LABEL: store_i8_stride6_vf2:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-NEXT: vmovdqa (%r9), %xmm2
; AVX512DQ-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX512DQ-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX512DQ-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX512DQ-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX512DQ-NEXT: vmovq %xmm0, (%rax)
; AVX512DQ-NEXT: retq
;
; AVX512DQ-FCP-LABEL: store_i8_stride6_vf2:
; AVX512DQ-FCP: # %bb.0:
; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-FCP-NEXT: vmovdqa (%r9), %xmm2
; AVX512DQ-FCP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX512DQ-FCP-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX512DQ-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX512DQ-FCP-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX512DQ-FCP-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX512DQ-FCP-NEXT: vmovq %xmm0, (%rax)
; AVX512DQ-FCP-NEXT: retq
;
; AVX512BW-LABEL: store_i8_stride6_vf2:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512BW-NEXT: vmovdqa (%r9), %xmm2
; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX512BW-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX512BW-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX512BW-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX512BW-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX512BW-NEXT: vmovq %xmm0, (%rax)
; AVX512BW-NEXT: retq
;
; AVX512BW-FCP-LABEL: store_i8_stride6_vf2:
; AVX512BW-FCP: # %bb.0:
; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512BW-FCP-NEXT: vmovdqa (%r9), %xmm2
; AVX512BW-FCP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX512BW-FCP-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX512BW-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX512BW-FCP-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX512BW-FCP-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX512BW-FCP-NEXT: vmovq %xmm0, (%rax)
; AVX512BW-FCP-NEXT: retq
;
; AVX512DQ-BW-LABEL: store_i8_stride6_vf2:
; AVX512DQ-BW: # %bb.0:
; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-BW-NEXT: vmovdqa (%r9), %xmm2
; AVX512DQ-BW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX512DQ-BW-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX512DQ-BW-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX512DQ-BW-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX512DQ-BW-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX512DQ-BW-NEXT: vmovq %xmm0, (%rax)
; AVX512DQ-BW-NEXT: retq
;
; AVX512DQ-BW-FCP-LABEL: store_i8_stride6_vf2:
; AVX512DQ-BW-FCP: # %bb.0:
; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%r9), %xmm2
; AVX512DQ-BW-FCP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
; AVX512DQ-BW-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1],zero,xmm0[2,6,10,14,3],zero,xmm0[u,u,u,u]
; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,zero,xmm2[0],zero,zero,zero,zero,zero,xmm2[1,u,u,u,u]
; AVX512DQ-BW-FCP-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX512DQ-BW-FCP-NEXT: vpextrd $2, %xmm0, 8(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovq %xmm0, (%rax)
; AVX512DQ-BW-FCP-NEXT: retq
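; The IR below concatenates the six <2 x i8> inputs into one <12 x i8> vector
; and then applies the 6-way interleave mask: with inputs a,b,c,d,e,f the
; stored byte order is a0 b0 c0 d0 e0 f0 a1 b1 c1 d1 e1 f1.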
  %in.vec0 = load <2 x i8>, ptr %in.vecptr0, align 64
  %in.vec1 = load <2 x i8>, ptr %in.vecptr1, align 64
  %in.vec2 = load <2 x i8>, ptr %in.vecptr2, align 64
  %in.vec3 = load <2 x i8>, ptr %in.vecptr3, align 64
  %in.vec4 = load <2 x i8>, ptr %in.vecptr4, align 64
  %in.vec5 = load <2 x i8>, ptr %in.vecptr5, align 64
  %1 = shufflevector <2 x i8> %in.vec0, <2 x i8> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %2 = shufflevector <2 x i8> %in.vec2, <2 x i8> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %3 = shufflevector <2 x i8> %in.vec4, <2 x i8> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %4 = shufflevector <4 x i8> %1, <4 x i8> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %5 = shufflevector <4 x i8> %3, <4 x i8> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <8 x i8> %4, <8 x i8> %5, <12 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11>
  %interleaved.vec = shufflevector <12 x i8> %6, <12 x i8> poison, <12 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11>
  store <12 x i8> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i8_stride6_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i8_stride6_vf4:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movdqa (%rdi), %xmm1
; SSE-NEXT: movdqa (%rdx), %xmm2
; SSE-NEXT: movdqa (%r8), %xmm0
; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
; SSE-NEXT: pxor %xmm3, %xmm3
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3],xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,1,0,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,2,1,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm5[1,3,2,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,4,6]
; SSE-NEXT: packuswb %xmm5, %xmm4
; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,0,65535,65535,0,65535,65535,0]
; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3],xmm1[4],xmm3[4],xmm1[5],xmm3[5],xmm1[6],xmm3[6],xmm1[7],xmm3[7]
; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm1[0,1,1,3]
; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm1[0,2,2,0]
; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm6[0,2,2,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,5,7,5]
; SSE-NEXT: packuswb %xmm3, %xmm6
; SSE-NEXT: pand %xmm5, %xmm6
; SSE-NEXT: pandn %xmm4, %xmm5
; SSE-NEXT: por %xmm6, %xmm5
; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,65535,0,65535,65535,0,65535,65535]
; SSE-NEXT: pand %xmm3, %xmm5
; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[0,0,0,0]
; SSE-NEXT: pandn %xmm4, %xmm3
; SSE-NEXT: por %xmm5, %xmm3
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,5,7,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,3,1,4,5,6,7]
; SSE-NEXT: packuswb %xmm2, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,3,2,3]
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,0,0,65535,65535,65535,65535,65535]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
; SSE-NEXT: pand %xmm2, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: por %xmm0, %xmm2
; SSE-NEXT: movq %xmm2, 16(%rax)
; SSE-NEXT: movdqa %xmm3, (%rax)
; SSE-NEXT: retq
;
; AVX-LABEL: store_i8_stride6_vf4:
; AVX: # %bb.0:
; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX-NEXT: vmovdqa (%rdi), %xmm0
; AVX-NEXT: vmovdqa (%rdx), %xmm1
; AVX-NEXT: vmovdqa (%r8), %xmm2
; AVX-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,4,8,12],zero,zero,xmm0[1,5,9,13],zero,zero,xmm0[2,6,10,14]
; AVX-NEXT: vpshufb {{.*#+}} xmm3 = zero,zero,zero,zero,xmm1[0,4],zero,zero,zero,zero,xmm1[1,5],zero,zero,zero,zero
; AVX-NEXT: vpor %xmm3, %xmm2, %xmm2
; AVX-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[3,7,11,15],zero,zero,xmm0[u,u,u,u,u,u,u,u]
; AVX-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[2,6],zero,zero,zero,zero,xmm1[3,7,u,u,u,u,u,u,u,u]
; AVX-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX-NEXT: vmovq %xmm0, 16(%rax)
; AVX-NEXT: vmovdqa %xmm2, (%rax)
; AVX-NEXT: retq
;
; AVX2-LABEL: store_i8_stride6_vf4:
; AVX2: # %bb.0:
; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-NEXT: vmovdqa (%r8), %xmm2
; AVX2-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX2-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX2-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX2-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX2-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX2-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,ymm0[1,5,9,13],zero,zero,ymm0[2,6,10,14,18,22],zero,zero,zero,zero,ymm0[19,23],zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX2-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4],zero,zero,zero,zero,ymm0[1,5],zero,zero,zero,zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-NEXT: vmovq %xmm1, 16(%rax)
; AVX2-NEXT: vmovdqa %xmm0, (%rax)
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: store_i8_stride6_vf4:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FP-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-FP-NEXT: vmovdqa (%r8), %xmm2
; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX2-FP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX2-FP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,ymm0[1,5,9,13],zero,zero,ymm0[2,6,10,14,18,22],zero,zero,zero,zero,ymm0[19,23],zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4],zero,zero,zero,zero,ymm0[1,5],zero,zero,zero,zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-FP-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX2-FP-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-FP-NEXT: vmovq %xmm1, 16(%rax)
; AVX2-FP-NEXT: vmovdqa %xmm0, (%rax)
; AVX2-FP-NEXT: vzeroupper
; AVX2-FP-NEXT: retq
;
; AVX2-FCP-LABEL: store_i8_stride6_vf4:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX2-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX2-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX2-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX2-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX2-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,ymm0[1,5,9,13],zero,zero,ymm0[2,6,10,14,18,22],zero,zero,zero,zero,ymm0[19,23],zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4],zero,zero,zero,zero,ymm0[1,5],zero,zero,zero,zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX2-FCP-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX2-FCP-NEXT: vmovq %xmm1, 16(%rax)
; AVX2-FCP-NEXT: vmovdqa %xmm0, (%rax)
; AVX2-FCP-NEXT: vzeroupper
; AVX2-FCP-NEXT: retq
;
; AVX512-LABEL: store_i8_stride6_vf4:
; AVX512: # %bb.0:
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa (%rdi), %xmm0
; AVX512-NEXT: vmovdqa (%rdx), %xmm1
; AVX512-NEXT: vmovdqa (%r8), %xmm2
; AVX512-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,ymm0[1,5,9,13],zero,zero,ymm0[2,6,10,14,18,22],zero,zero,zero,zero,ymm0[19,23,u,u,u,u,u,u,u,u]
; AVX512-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX512-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4],zero,zero,zero,zero,ymm0[1,5],zero,zero,zero,zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX512-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512-NEXT: vmovq %xmm1, 16(%rax)
; AVX512-NEXT: vmovdqa %xmm0, (%rax)
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
;
; AVX512-FCP-LABEL: store_i8_stride6_vf4:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX512-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,ymm0[1,5,9,13],zero,zero,ymm0[2,6,10,14,18,22],zero,zero,zero,zero,ymm0[19,23,u,u,u,u,u,u,u,u]
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4],zero,zero,zero,zero,ymm0[1,5],zero,zero,zero,zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX512-FCP-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX512-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512-FCP-NEXT: vmovq %xmm1, 16(%rax)
; AVX512-FCP-NEXT: vmovdqa %xmm0, (%rax)
; AVX512-FCP-NEXT: vzeroupper
; AVX512-FCP-NEXT: retq
;
; AVX512DQ-LABEL: store_i8_stride6_vf4:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-NEXT: vmovdqa (%r8), %xmm2
; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,ymm0[1,5,9,13],zero,zero,ymm0[2,6,10,14,18,22],zero,zero,zero,zero,ymm0[19,23,u,u,u,u,u,u,u,u]
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4],zero,zero,zero,zero,ymm0[1,5],zero,zero,zero,zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX512DQ-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512DQ-NEXT: vmovq %xmm1, 16(%rax)
; AVX512DQ-NEXT: vmovdqa %xmm0, (%rax)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512DQ-FCP-LABEL: store_i8_stride6_vf4:
; AVX512DQ-FCP: # %bb.0:
; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX512DQ-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512DQ-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512DQ-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12],zero,zero,ymm0[1,5,9,13],zero,zero,ymm0[2,6,10,14,18,22],zero,zero,zero,zero,ymm0[19,23,u,u,u,u,u,u,u,u]
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm0[0,4],zero,zero,zero,zero,ymm0[1,5],zero,zero,zero,zero,zero,zero,ymm0[19,23,27,31],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
; AVX512DQ-FCP-NEXT: vpor %ymm0, %ymm1, %ymm0
; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512DQ-FCP-NEXT: vmovq %xmm1, 16(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa %xmm0, (%rax)
; AVX512DQ-FCP-NEXT: vzeroupper
; AVX512DQ-FCP-NEXT: retq
;
; AVX512BW-LABEL: store_i8_stride6_vf4:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512BW-NEXT: vmovdqa (%r8), %xmm2
; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512BW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512BW-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12,u,u,1,5,9,13,u,u,2,6,10,14,18,22,u,u,u,u,19,23,u,u,u,u,u,u,u,u]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX512BW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,0,4,u,u,u,u,1,5,u,u,u,u,u,u,19,23,27,31,u,u,u,u,u,u,u,u,u,u]
; AVX512BW-NEXT: movw $1572, %cx # imm = 0x624
; AVX512BW-NEXT: kmovd %ecx, %k1
; AVX512BW-NEXT: vmovdqu16 %ymm0, %ymm1 {%k1}
; AVX512BW-NEXT: vextracti128 $1, %ymm1, %xmm0
; AVX512BW-NEXT: vmovq %xmm0, 16(%rax)
; AVX512BW-NEXT: vmovdqa %xmm1, (%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
;
; AVX512BW-FCP-LABEL: store_i8_stride6_vf4:
; AVX512BW-FCP: # %bb.0:
; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512BW-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX512BW-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512BW-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512BW-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12,u,u,1,5,9,13,u,u,2,6,10,14,18,22,u,u,u,u,19,23,u,u,u,u,u,u,u,u]
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,0,4,u,u,u,u,1,5,u,u,u,u,u,u,19,23,27,31,u,u,u,u,u,u,u,u,u,u]
; AVX512BW-FCP-NEXT: movw $1572, %cx # imm = 0x624
; AVX512BW-FCP-NEXT: kmovd %ecx, %k1
; AVX512BW-FCP-NEXT: vmovdqu16 %ymm0, %ymm1 {%k1}
; AVX512BW-FCP-NEXT: vextracti128 $1, %ymm1, %xmm0
; AVX512BW-FCP-NEXT: vmovq %xmm0, 16(%rax)
; AVX512BW-FCP-NEXT: vmovdqa %xmm1, (%rax)
; AVX512BW-FCP-NEXT: vzeroupper
; AVX512BW-FCP-NEXT: retq
;
; AVX512DQ-BW-LABEL: store_i8_stride6_vf4:
; AVX512DQ-BW: # %bb.0:
; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-BW-NEXT: vmovdqa (%r8), %xmm2
; AVX512DQ-BW-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512DQ-BW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512DQ-BW-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12,u,u,1,5,9,13,u,u,2,6,10,14,18,22,u,u,u,u,19,23,u,u,u,u,u,u,u,u]
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,0,4,u,u,u,u,1,5,u,u,u,u,u,u,19,23,27,31,u,u,u,u,u,u,u,u,u,u]
; AVX512DQ-BW-NEXT: movw $1572, %cx # imm = 0x624
; AVX512DQ-BW-NEXT: kmovd %ecx, %k1
; AVX512DQ-BW-NEXT: vmovdqu16 %ymm0, %ymm1 {%k1}
; AVX512DQ-BW-NEXT: vextracti128 $1, %ymm1, %xmm0
; AVX512DQ-BW-NEXT: vmovq %xmm0, 16(%rax)
; AVX512DQ-BW-NEXT: vmovdqa %xmm1, (%rax)
; AVX512DQ-BW-NEXT: vzeroupper
; AVX512DQ-BW-NEXT: retq
;
; AVX512DQ-BW-FCP-LABEL: store_i8_stride6_vf4:
; AVX512DQ-BW-FCP: # %bb.0:
; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX512DQ-BW-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
; AVX512DQ-BW-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX512DQ-BW-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,4,8,12,u,u,1,5,9,13,u,u,2,6,10,14,18,22,u,u,u,u,19,23,u,u,u,u,u,u,u,u]
; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,0,4,u,u,u,u,1,5,u,u,u,u,u,u,19,23,27,31,u,u,u,u,u,u,u,u,u,u]
; AVX512DQ-BW-FCP-NEXT: movw $1572, %cx # imm = 0x624
; AVX512DQ-BW-FCP-NEXT: kmovd %ecx, %k1
; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %ymm0, %ymm1 {%k1}
; AVX512DQ-BW-FCP-NEXT: vextracti128 $1, %ymm1, %xmm0
; AVX512DQ-BW-FCP-NEXT: vmovq %xmm0, 16(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa %xmm1, (%rax)
; AVX512DQ-BW-FCP-NEXT: vzeroupper
; AVX512DQ-BW-FCP-NEXT: retq
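; Same construction as above for <4 x i8> inputs: the six streams are
; concatenated into one <24 x i8> vector, and the final mask
; <0,4,8,12,16,20,1,5,...,23> stores the elements in the interleaved order
; a0 b0 c0 d0 e0 f0 a1 b1 ... a3 b3 c3 d3 e3 f3.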
  %in.vec0 = load <4 x i8>, ptr %in.vecptr0, align 64
  %in.vec1 = load <4 x i8>, ptr %in.vecptr1, align 64
  %in.vec2 = load <4 x i8>, ptr %in.vecptr2, align 64
  %in.vec3 = load <4 x i8>, ptr %in.vecptr3, align 64
  %in.vec4 = load <4 x i8>, ptr %in.vecptr4, align 64
  %in.vec5 = load <4 x i8>, ptr %in.vecptr5, align 64
  %1 = shufflevector <4 x i8> %in.vec0, <4 x i8> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %2 = shufflevector <4 x i8> %in.vec2, <4 x i8> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %3 = shufflevector <4 x i8> %in.vec4, <4 x i8> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %4 = shufflevector <8 x i8> %1, <8 x i8> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %5 = shufflevector <8 x i8> %3, <8 x i8> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <16 x i8> %4, <16 x i8> %5, <24 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23>
  %interleaved.vec = shufflevector <24 x i8> %6, <24 x i8> poison, <24 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23>
  store <24 x i8> %interleaved.vec, ptr %out.vec, align 64
  ret void
}

define void @store_i8_stride6_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i8_stride6_vf8:
; SSE: # %bb.0:
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movq {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
; SSE-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
; SSE-NEXT: movq {{.*#+}} xmm7 = mem[0],zero
; SSE-NEXT: pxor %xmm4, %xmm4
; SSE-NEXT: movdqa %xmm1, %xmm3
; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3],xmm1[4],xmm7[4],xmm1[5],xmm7[5],xmm1[6],xmm7[6],xmm1[7],xmm7[7]
; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3],xmm7[4],xmm4[4],xmm7[5],xmm4[5],xmm7[6],xmm4[6],xmm7[7],xmm4[7]
; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
; SSE-NEXT: movdqa %xmm3, %xmm4
; SSE-NEXT: punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm7[0],xmm4[1],xmm7[1],xmm4[2],xmm7[2],xmm4[3],xmm7[3]
; SSE-NEXT: packuswb %xmm4, %xmm4
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,3]
; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,65535,0,65535,65535,0,65535,65535]
; SSE-NEXT: movdqa %xmm4, %xmm8
; SSE-NEXT: pandn %xmm5, %xmm8
; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm0[0,0,1,1]
; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,0,65535,65535,0,65535,65535,0]
; SSE-NEXT: pand %xmm5, %xmm9
; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm2[1,0,2,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm6[0,1,0,1]
; SSE-NEXT: movdqa %xmm5, %xmm6
; SSE-NEXT: pandn %xmm10, %xmm6
; SSE-NEXT: por %xmm9, %xmm6
; SSE-NEXT: pand %xmm4, %xmm6
; SSE-NEXT: por %xmm8, %xmm6
; SSE-NEXT: punpckhwd {{.*#+}} xmm3 = xmm3[4],xmm7[4],xmm3[5],xmm7[5],xmm3[6],xmm7[6],xmm3[7],xmm7[7]
; SSE-NEXT: packuswb %xmm3, %xmm3
; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,1,3,3]
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[3,3,3,3]
; SSE-NEXT: movdqa %xmm4, %xmm8
; SSE-NEXT: pandn %xmm7, %xmm8
; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm2[0,1,2,3,5,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,2,2,3]
; SSE-NEXT: pand %xmm4, %xmm7
; SSE-NEXT: por %xmm8, %xmm7
; SSE-NEXT: pand %xmm5, %xmm7
; SSE-NEXT: pandn %xmm3, %xmm5
; SSE-NEXT: por %xmm7, %xmm5
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,2]
; SSE-NEXT: pand %xmm4, %xmm0
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[3,3,3,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
; SSE-NEXT: pandn %xmm2, %xmm4
; SSE-NEXT: por %xmm0, %xmm4
; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
; SSE-NEXT: pand %xmm0, %xmm4
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,2,2]
; SSE-NEXT: pandn %xmm1, %xmm0
; SSE-NEXT: por %xmm4, %xmm0
; SSE-NEXT: movdqa %xmm5, 32(%rax)
; SSE-NEXT: movdqa %xmm0, 16(%rax)
; SSE-NEXT: movdqa %xmm6, (%rax)
; SSE-NEXT: retq
;
; AVX-LABEL: store_i8_stride6_vf8:
; AVX: # %bb.0:
; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX-NEXT: vpshufb {{.*#+}} xmm3 = xmm1[u,u],zero,zero,xmm1[3,11,u,u],zero,zero,xmm1[4,12,u,u],zero,zero
; AVX-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[u,u,3,11],zero,zero,xmm0[u,u,4,12],zero,zero,xmm0[u,u,5,13]
; AVX-NEXT: vpor %xmm3, %xmm4, %xmm3
; AVX-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u]
; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm4[0],xmm3[1,2],xmm4[3],xmm3[4,5],xmm4[6],xmm3[7]
; AVX-NEXT: vpshufb {{.*#+}} xmm4 = zero,zero,xmm1[0,8,u,u],zero,zero,xmm1[1,9,u,u],zero,zero,xmm1[2,10]
; AVX-NEXT: vpshufb {{.*#+}} xmm5 = xmm0[0,8],zero,zero,xmm0[u,u,1,9],zero,zero,xmm0[u,u,2,10],zero,zero
; AVX-NEXT: vpor %xmm4, %xmm5, %xmm4
; AVX-NEXT: vpshufb {{.*#+}} xmm5 = xmm2[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u]
; AVX-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1],xmm5[2],xmm4[3,4],xmm5[5],xmm4[6,7]
; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
; AVX-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX-NEXT: vmovaps %ymm3, (%rax)
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
; AVX2-LABEL: store_i8_stride6_vf8:
; AVX2: # %bb.0:
; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX2-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX2-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX2-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX2-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm2
; AVX2-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
; AVX2-NEXT: vmovdqa {{.*#+}} ymm4 = [0,8,0,8,u,u,1,9,1,9,u,u,2,10,2,10,u,u,3,11,3,11,u,u,4,12,4,12,u,u,5,13]
; AVX2-NEXT: vpshufb %ymm4, %ymm2, %ymm5
; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,0,1]
; AVX2-NEXT: vpshufb %ymm4, %ymm2, %ymm2
; AVX2-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0],ymm2[1],ymm5[2,3],ymm2[4],ymm5[5,6],ymm2[7],ymm5[8],ymm2[9],ymm5[10,11],ymm2[12],ymm5[13,14],ymm2[15]
; AVX2-NEXT: vpshufb {{.*#+}} xmm4 = xmm3[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm5 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX2-NEXT: vpblendvb %ymm5, %ymm2, %ymm4, %ymm2
; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX2-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX2-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX2-NEXT: vpshufb {{.*#+}} xmm1 = xmm3[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX2-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX2-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX2-NEXT: vmovdqa %ymm2, (%rax)
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: store_i8_stride6_vf8:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX2-FP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-FP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX2-FP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-FP-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX2-FP-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX2-FP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm2
; AVX2-FP-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-FP-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-FP-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm4 = [0,8,0,8,u,u,1,9,1,9,u,u,2,10,2,10,u,u,3,11,3,11,u,u,4,12,4,12,u,u,5,13]
; AVX2-FP-NEXT: vpshufb %ymm4, %ymm2, %ymm5
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,0,1]
; AVX2-FP-NEXT: vpshufb %ymm4, %ymm2, %ymm2
; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0],ymm2[1],ymm5[2,3],ymm2[4],ymm5[5,6],ymm2[7],ymm5[8],ymm2[9],ymm5[10,11],ymm2[12],ymm5[13,14],ymm2[15]
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm4 = xmm3[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm5 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX2-FP-NEXT: vpblendvb %ymm5, %ymm2, %ymm4, %ymm2
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX2-FP-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm1 = xmm3[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX2-FP-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX2-FP-NEXT: vmovdqa %ymm2, (%rax)
; AVX2-FP-NEXT: vzeroupper
; AVX2-FP-NEXT: retq
;
; AVX2-FCP-LABEL: store_i8_stride6_vf8:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FCP-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX2-FCP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX2-FCP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX2-FCP-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX2-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX2-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm2
; AVX2-FCP-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX2-FCP-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
; AVX2-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm4 = [0,8,0,8,u,u,1,9,1,9,u,u,2,10,2,10,u,u,3,11,3,11,u,u,4,12,4,12,u,u,5,13]
; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm2, %ymm5
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,3,0,1]
; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm2, %ymm2
; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0],ymm2[1],ymm5[2,3],ymm2[4],ymm5[5,6],ymm2[7],ymm5[8],ymm2[9],ymm5[10,11],ymm2[12],ymm5[13,14],ymm2[15]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm3[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm5 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX2-FCP-NEXT: vpblendvb %ymm5, %ymm2, %ymm4, %ymm2
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX2-FCP-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm3[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX2-FCP-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX2-FCP-NEXT: vmovdqa %ymm2, (%rax)
; AVX2-FCP-NEXT: vzeroupper
; AVX2-FCP-NEXT: retq
;
; AVX512-LABEL: store_i8_stride6_vf8:
; AVX512: # %bb.0:
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
; AVX512-NEXT: vmovdqa {{.*#+}} ymm4 = [0,8,0,8,u,u,1,9,1,9,u,u,2,10,2,10,u,u,3,11,3,11,u,u,4,12,4,12,u,u,5,13]
; AVX512-NEXT: vpshufb %ymm4, %ymm3, %ymm5
; AVX512-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,0,1]
; AVX512-NEXT: vpshufb %ymm4, %ymm3, %ymm3
; AVX512-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6],ymm3[7],ymm5[8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13,14],ymm3[15]
; AVX512-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX512-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX512-NEXT: vpternlogq {{.*#+}} ymm4 = ymm4 ^ (mem & (ymm4 ^ ymm3))
; AVX512-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX512-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX512-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX512-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX512-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX512-NEXT: vinserti32x4 $2, %xmm0, %zmm4, %zmm1
; AVX512-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512-NEXT: vmovdqa %ymm1, (%rax)
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
;
; AVX512-FCP-LABEL: store_i8_stride6_vf8:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-FCP-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512-FCP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512-FCP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512-FCP-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512-FCP-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512-FCP-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm4 = [0,8,0,8,u,u,1,9,1,9,u,u,2,10,2,10,u,u,3,11,3,11,u,u,4,12,4,12,u,u,5,13]
; AVX512-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm5
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,0,1]
; AVX512-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm3
; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6],ymm3[7],ymm5[8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13,14],ymm3[15]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} ymm4 = ymm4 ^ (mem & (ymm4 ^ ymm3))
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX512-FCP-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX512-FCP-NEXT: vinserti32x4 $2, %xmm0, %zmm4, %zmm1
; AVX512-FCP-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512-FCP-NEXT: vmovdqa %ymm1, (%rax)
; AVX512-FCP-NEXT: vzeroupper
; AVX512-FCP-NEXT: retq
;
; AVX512DQ-LABEL: store_i8_stride6_vf8:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512DQ-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512DQ-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512DQ-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512DQ-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512DQ-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512DQ-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm4 = [0,8,0,8,u,u,1,9,1,9,u,u,2,10,2,10,u,u,3,11,3,11,u,u,4,12,4,12,u,u,5,13]
; AVX512DQ-NEXT: vpshufb %ymm4, %ymm3, %ymm5
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,0,1]
; AVX512DQ-NEXT: vpshufb %ymm4, %ymm3, %ymm3
; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6],ymm3[7],ymm5[8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13,14],ymm3[15]
; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX512DQ-NEXT: vpternlogq {{.*#+}} ymm4 = ymm4 ^ (mem & (ymm4 ^ ymm3))
; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX512DQ-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX512DQ-NEXT: vinserti32x4 $2, %xmm0, %zmm4, %zmm1
; AVX512DQ-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512DQ-NEXT: vmovdqa %ymm1, (%rax)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512DQ-FCP-LABEL: store_i8_stride6_vf8:
; AVX512DQ-FCP: # %bb.0:
; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FCP-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512DQ-FCP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512DQ-FCP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512DQ-FCP-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512DQ-FCP-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512DQ-FCP-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512DQ-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm4 = [0,8,0,8,u,u,1,9,1,9,u,u,2,10,2,10,u,u,3,11,3,11,u,u,4,12,4,12,u,u,5,13]
; AVX512DQ-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm5
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,0,1]
; AVX512DQ-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm3
; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6],ymm3[7],ymm5[8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13,14],ymm3[15]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} ymm4 = ymm4 ^ (mem & (ymm4 ^ ymm3))
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX512DQ-FCP-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX512DQ-FCP-NEXT: vinserti32x4 $2, %xmm0, %zmm4, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa %ymm1, (%rax)
; AVX512DQ-FCP-NEXT: vzeroupper
; AVX512DQ-FCP-NEXT: retq
;
; AVX512BW-LABEL: store_i8_stride6_vf8:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512BW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512BW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512BW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm4 = [0,8,0,8,u,u,1,9,1,9,u,u,2,10,2,10,u,u,3,11,3,11,u,u,4,12,4,12,u,u,5,13]
; AVX512BW-NEXT: vpshufb %ymm4, %ymm3, %ymm5
; AVX512BW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,0,1]
; AVX512BW-NEXT: vpshufb %ymm4, %ymm3, %ymm3
; AVX512BW-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6],ymm3[7],ymm5[8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13,14],ymm3[15]
; AVX512BW-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX512BW-NEXT: movw $18724, %cx # imm = 0x4924
; AVX512BW-NEXT: kmovd %ecx, %k1
; AVX512BW-NEXT: vmovdqu16 %ymm4, %ymm3 {%k1}
; AVX512BW-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX512BW-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX512BW-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX512BW-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX512BW-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX512BW-NEXT: vinserti32x4 $2, %xmm0, %zmm3, %zmm1
; AVX512BW-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512BW-NEXT: vmovdqa %ymm1, (%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
;
; AVX512BW-FCP-LABEL: store_i8_stride6_vf8:
; AVX512BW-FCP: # %bb.0:
; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FCP-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512BW-FCP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512BW-FCP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512BW-FCP-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512BW-FCP-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512BW-FCP-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512BW-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
; AVX512BW-FCP-NEXT: vmovdqa {{.*#+}} ymm4 = [0,8,0,8,u,u,1,9,1,9,u,u,2,10,2,10,u,u,3,11,3,11,u,u,4,12,4,12,u,u,5,13]
; AVX512BW-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm5
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,0,1]
; AVX512BW-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm3
; AVX512BW-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6],ymm3[7],ymm5[8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13,14],ymm3[15]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX512BW-FCP-NEXT: movw $18724, %cx # imm = 0x4924
; AVX512BW-FCP-NEXT: kmovd %ecx, %k1
; AVX512BW-FCP-NEXT: vmovdqu16 %ymm4, %ymm3 {%k1}
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX512BW-FCP-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX512BW-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX512BW-FCP-NEXT: vinserti32x4 $2, %xmm0, %zmm3, %zmm1
; AVX512BW-FCP-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512BW-FCP-NEXT: vmovdqa %ymm1, (%rax)
; AVX512BW-FCP-NEXT: vzeroupper
; AVX512BW-FCP-NEXT: retq
;
; AVX512DQ-BW-LABEL: store_i8_stride6_vf8:
; AVX512DQ-BW: # %bb.0:
; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512DQ-BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512DQ-BW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512DQ-BW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512DQ-BW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512DQ-BW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512DQ-BW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512DQ-BW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
; AVX512DQ-BW-NEXT: vmovdqa {{.*#+}} ymm4 = [0,8,0,8,u,u,1,9,1,9,u,u,2,10,2,10,u,u,3,11,3,11,u,u,4,12,4,12,u,u,5,13]
; AVX512DQ-BW-NEXT: vpshufb %ymm4, %ymm3, %ymm5
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,0,1]
; AVX512DQ-BW-NEXT: vpshufb %ymm4, %ymm3, %ymm3
; AVX512DQ-BW-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6],ymm3[7],ymm5[8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13,14],ymm3[15]
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX512DQ-BW-NEXT: movw $18724, %cx # imm = 0x4924
; AVX512DQ-BW-NEXT: kmovd %ecx, %k1
; AVX512DQ-BW-NEXT: vmovdqu16 %ymm4, %ymm3 {%k1}
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX512DQ-BW-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX512DQ-BW-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX512DQ-BW-NEXT: vinserti32x4 $2, %xmm0, %zmm3, %zmm1
; AVX512DQ-BW-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512DQ-BW-NEXT: vmovdqa %ymm1, (%rax)
; AVX512DQ-BW-NEXT: vzeroupper
; AVX512DQ-BW-NEXT: retq
;
; AVX512DQ-BW-FCP-LABEL: store_i8_stride6_vf8:
; AVX512DQ-BW-FCP: # %bb.0:
; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-FCP-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
; AVX512DQ-BW-FCP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX512DQ-BW-FCP-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
; AVX512DQ-BW-FCP-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX512DQ-BW-FCP-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
; AVX512DQ-BW-FCP-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
; AVX512DQ-BW-FCP-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm3
; AVX512DQ-BW-FCP-NEXT: vmovdqa {{.*#+}} ymm4 = [0,8,0,8,u,u,1,9,1,9,u,u,2,10,2,10,u,u,3,11,3,11,u,u,4,12,4,12,u,u,5,13]
; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm5
; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,3,0,1]
; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm3
; AVX512DQ-BW-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6],ymm3[7],ymm5[8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13,14],ymm3[15]
; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[2,10,1,9,0,8,3,11,u,u,u,u,4,12,u,u]
; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX512DQ-BW-FCP-NEXT: movw $18724, %cx # imm = 0x4924
; AVX512DQ-BW-FCP-NEXT: kmovd %ecx, %k1
; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %ymm4, %ymm3 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,xmm0[u,u,6,14],zero,zero,xmm0[u,u,7,15],zero,zero,xmm0[u,u]
; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[5,13,u,u],zero,zero,xmm1[6,14,u,u],zero,zero,xmm1[7,15,u,u]
; AVX512DQ-BW-FCP-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm2[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15]
; AVX512DQ-BW-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, %xmm0, %zmm3, %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm1, (%rax)
; AVX512DQ-BW-FCP-NEXT: vzeroupper
; AVX512DQ-BW-FCP-NEXT: retq
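; Annotation (not autogenerated): in the AVX512BW-based vf8 lowerings above,
; the blend is done with a masked vmovdqu16 instead of vpblendw. The k-mask
; 0x4924 (0b0100100100100100) sets every third word lane, {2,5,8,11,14},
; which is the word-granularity form of the period-3 byte-pair pattern used
; by the vpblendw immediates in the other prefixes.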
%in.vec0 = load <8 x i8>, ptr %in.vecptr0, align 64
%in.vec1 = load <8 x i8>, ptr %in.vecptr1, align 64
%in.vec2 = load <8 x i8>, ptr %in.vecptr2, align 64
%in.vec3 = load <8 x i8>, ptr %in.vecptr3, align 64
%in.vec4 = load <8 x i8>, ptr %in.vecptr4, align 64
%in.vec5 = load <8 x i8>, ptr %in.vecptr5, align 64
%1 = shufflevector <8 x i8> %in.vec0, <8 x i8> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
%2 = shufflevector <8 x i8> %in.vec2, <8 x i8> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
%3 = shufflevector <8 x i8> %in.vec4, <8 x i8> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
%4 = shufflevector <16 x i8> %1, <16 x i8> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
%5 = shufflevector <16 x i8> %3, <16 x i8> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
%6 = shufflevector <32 x i8> %4, <32 x i8> %5, <48 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47>
%interleaved.vec = shufflevector <48 x i8> %6, <48 x i8> poison, <48 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47>
store <48 x i8> %interleaved.vec, ptr %out.vec, align 64
ret void
}
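; Annotation (not autogenerated): each @store_i8_stride6_vfN body builds the
; interleave as a shufflevector chain ending in a <6*N x i32> mask that reads
; output byte 6*i+j from element i of input vector j, so the wide store is
; laid out {a[0],b[0],c[0],d[0],e[0],f[0], a[1],b[1],...} for inputs a..f.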
define void @store_i8_stride6_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
; SSE-LABEL: store_i8_stride6_vf16:
; SSE: # %bb.0:
; SSE-NEXT: movdqa (%rdi), %xmm12
; SSE-NEXT: movdqa (%rsi), %xmm8
; SSE-NEXT: movdqa (%rdx), %xmm13
; SSE-NEXT: movdqa (%rcx), %xmm2
; SSE-NEXT: movdqa (%r8), %xmm11
; SSE-NEXT: movdqa (%r9), %xmm10
; SSE-NEXT: movdqa %xmm12, %xmm0
; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm8[0],xmm0[1],xmm8[1],xmm0[2],xmm8[2],xmm0[3],xmm8[3],xmm0[4],xmm8[4],xmm0[5],xmm8[5],xmm0[6],xmm8[6],xmm0[7],xmm8[7]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,0,65535,65535,0,65535,65535,0]
; SSE-NEXT: pand %xmm4, %xmm0
; SSE-NEXT: movdqa %xmm13, %xmm7
; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm2[0],xmm7[1],xmm2[1],xmm7[2],xmm2[2],xmm7[3],xmm2[3],xmm7[4],xmm2[4],xmm7[5],xmm2[5],xmm7[6],xmm2[6],xmm7[7],xmm2[7]
; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm7[1,0,2,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
; SSE-NEXT: movdqa %xmm4, %xmm3
; SSE-NEXT: pandn %xmm1, %xmm3
; SSE-NEXT: por %xmm0, %xmm3
; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,0,65535,65535,0,65535,65535]
; SSE-NEXT: pand %xmm1, %xmm3
; SSE-NEXT: movdqa %xmm11, %xmm6
; SSE-NEXT: punpcklbw {{.*#+}} xmm6 = xmm6[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,0,0,0]
; SSE-NEXT: movdqa %xmm1, %xmm9
; SSE-NEXT: pandn %xmm0, %xmm9
; SSE-NEXT: por %xmm3, %xmm9
; SSE-NEXT: movdqa {{.*#+}} xmm3 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; SSE-NEXT: pand %xmm3, %xmm9
; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm10[0],xmm0[1],xmm10[1],xmm0[2],xmm10[2],xmm0[3],xmm10[3],xmm0[4],xmm10[4],xmm0[5],xmm10[5],xmm0[6],xmm10[6],xmm0[7],xmm10[7]
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm0[0,0,0,0]
; SSE-NEXT: movdqa %xmm3, %xmm0
; SSE-NEXT: pandn %xmm14, %xmm0
; SSE-NEXT: por %xmm9, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpckhbw {{.*#+}} xmm12 = xmm12[8],xmm8[8],xmm12[9],xmm8[9],xmm12[10],xmm8[10],xmm12[11],xmm8[11],xmm12[12],xmm8[12],xmm12[13],xmm8[13],xmm12[14],xmm8[14],xmm12[15],xmm8[15]
; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm12[1,1,2,2]
; SSE-NEXT: pand %xmm1, %xmm8
; SSE-NEXT: punpckhbw {{.*#+}} xmm13 = xmm13[8],xmm2[8],xmm13[9],xmm2[9],xmm13[10],xmm2[10],xmm13[11],xmm2[11],xmm13[12],xmm2[12],xmm13[13],xmm2[13],xmm13[14],xmm2[14],xmm13[15],xmm2[15]
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm13[3,3,3,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
; SSE-NEXT: movdqa %xmm1, %xmm9
; SSE-NEXT: pandn %xmm2, %xmm9
; SSE-NEXT: por %xmm8, %xmm9
; SSE-NEXT: movdqa {{.*#+}} xmm8 = [0,65535,65535,0,65535,65535,0,65535]
; SSE-NEXT: pand %xmm8, %xmm9
; SSE-NEXT: punpckhbw {{.*#+}} xmm11 = xmm11[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm11[1,1,2,2]
; SSE-NEXT: movdqa %xmm8, %xmm14
; SSE-NEXT: pandn %xmm2, %xmm14
; SSE-NEXT: por %xmm9, %xmm14
; SSE-NEXT: movdqa {{.*#+}} xmm9 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm10[8],xmm2[9],xmm10[9],xmm2[10],xmm10[10],xmm2[11],xmm10[11],xmm2[12],xmm10[12],xmm2[13],xmm10[13],xmm2[14],xmm10[14],xmm2[15],xmm10[15]
; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm2[1,1,2,2]
; SSE-NEXT: movdqa %xmm9, %xmm10
; SSE-NEXT: pandn %xmm15, %xmm10
; SSE-NEXT: pand %xmm9, %xmm14
; SSE-NEXT: por %xmm14, %xmm10
; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm12[3,3,3,3]
; SSE-NEXT: movdqa %xmm1, %xmm15
; SSE-NEXT: pandn %xmm14, %xmm15
; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm13[0,1,2,3,5,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[2,2,2,3]
; SSE-NEXT: pand %xmm1, %xmm14
; SSE-NEXT: por %xmm15, %xmm14
; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm11[2,2,3,3]
; SSE-NEXT: movdqa %xmm4, %xmm5
; SSE-NEXT: pandn %xmm15, %xmm5
; SSE-NEXT: pand %xmm4, %xmm14
; SSE-NEXT: por %xmm14, %xmm5
; SSE-NEXT: movdqa {{.*#+}} xmm14 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[2,2,3,3]
; SSE-NEXT: movdqa %xmm14, %xmm15
; SSE-NEXT: pandn %xmm0, %xmm15
; SSE-NEXT: pand %xmm14, %xmm5
; SSE-NEXT: por %xmm5, %xmm15
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[1,0,2,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
; SSE-NEXT: movdqa %xmm4, %xmm5
; SSE-NEXT: pandn %xmm0, %xmm5
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm12[0,0,1,1]
; SSE-NEXT: pand %xmm4, %xmm0
; SSE-NEXT: por %xmm0, %xmm5
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[0,0,0,0]
; SSE-NEXT: movdqa %xmm1, %xmm11
; SSE-NEXT: pandn %xmm0, %xmm11
; SSE-NEXT: pand %xmm1, %xmm5
; SSE-NEXT: por %xmm5, %xmm11
; SSE-NEXT: pand %xmm3, %xmm11
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,0,0,0]
; SSE-NEXT: pandn %xmm0, %xmm3
; SSE-NEXT: por %xmm11, %xmm3
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[3,3,3,3]
; SSE-NEXT: movdqa %xmm1, %xmm2
; SSE-NEXT: pandn %xmm0, %xmm2
; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm7[0,1,2,3,5,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
; SSE-NEXT: pand %xmm1, %xmm0
; SSE-NEXT: por %xmm2, %xmm0
; SSE-NEXT: pand %xmm4, %xmm0
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm6[2,2,3,3]
; SSE-NEXT: pandn %xmm2, %xmm4
; SSE-NEXT: por %xmm0, %xmm4
; SSE-NEXT: pand %xmm14, %xmm4
; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[2,2,3,3]
; SSE-NEXT: pandn %xmm0, %xmm14
; SSE-NEXT: por %xmm4, %xmm14
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[1,1,2,2]
; SSE-NEXT: pand %xmm1, %xmm0
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm7[3,3,3,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
; SSE-NEXT: pandn %xmm2, %xmm1
; SSE-NEXT: por %xmm0, %xmm1
; SSE-NEXT: pand %xmm8, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[1,1,2,2]
; SSE-NEXT: pandn %xmm0, %xmm8
; SSE-NEXT: por %xmm1, %xmm8
; SSE-NEXT: pand %xmm9, %xmm8
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm11[1,1,2,2]
; SSE-NEXT: pandn %xmm0, %xmm9
; SSE-NEXT: por %xmm8, %xmm9
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movdqa %xmm9, 16(%rax)
; SSE-NEXT: movdqa %xmm14, 32(%rax)
; SSE-NEXT: movdqa %xmm3, 48(%rax)
; SSE-NEXT: movdqa %xmm15, 80(%rax)
; SSE-NEXT: movdqa %xmm10, 64(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, (%rax)
; SSE-NEXT: retq
;
; AVX-LABEL: store_i8_stride6_vf16:
; AVX: # %bb.0:
; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX-NEXT: vmovdqa (%rdi), %xmm1
; AVX-NEXT: vmovdqa (%rsi), %xmm2
; AVX-NEXT: vmovdqa (%rdx), %xmm3
; AVX-NEXT: vmovdqa (%rcx), %xmm4
; AVX-NEXT: vmovdqa (%r8), %xmm5
; AVX-NEXT: vmovdqa (%r9), %xmm6
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm7[1,1,2,2]
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
; AVX-NEXT: vpshuflw {{.*#+}} xmm9 = xmm8[3,3,3,3,4,5,6,7]
; AVX-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,4,4,4]
; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm9[2],xmm0[3,4],xmm9[5],xmm0[6,7]
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3],xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
; AVX-NEXT: vpshufd {{.*#+}} xmm10 = xmm9[1,1,2,2]
; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm10[0],xmm0[1,2],xmm10[3],xmm0[4,5],xmm10[6],xmm0[7]
; AVX-NEXT: vpshufd {{.*#+}} xmm10 = xmm7[0,0,1,1]
; AVX-NEXT: vpshuflw {{.*#+}} xmm11 = xmm8[1,0,2,2,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[0,1,0,1]
; AVX-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0],xmm11[1],xmm10[2,3],xmm11[4],xmm10[5,6],xmm11[7]
; AVX-NEXT: vpshufd {{.*#+}} xmm11 = xmm9[0,0,0,0]
; AVX-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1],xmm11[2],xmm10[3,4],xmm11[5],xmm10[6,7]
; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm10, %ymm0
; AVX-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm2[8],xmm1[9],xmm2[9],xmm1[10],xmm2[10],xmm1[11],xmm2[11],xmm1[12],xmm2[12],xmm1[13],xmm2[13],xmm1[14],xmm2[14],xmm1[15],xmm2[15]
; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
; AVX-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm4[8],xmm3[9],xmm4[9],xmm3[10],xmm4[10],xmm3[11],xmm4[11],xmm3[12],xmm4[12],xmm3[13],xmm4[13],xmm3[14],xmm4[14],xmm3[15],xmm4[15]
; AVX-NEXT: vpshuflw {{.*#+}} xmm4 = xmm3[1,0,2,2,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[0,1,0,1]
; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm4[1],xmm2[2,3],xmm4[4],xmm2[5,6],xmm4[7]
; AVX-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm4[0,0,0,0]
; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm5[2],xmm2[3,4],xmm5[5],xmm2[6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm7[3,3,3,3]
; AVX-NEXT: vpshufhw {{.*#+}} xmm6 = xmm8[0,1,2,3,5,6,7,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,2,3]
; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2],xmm6[3,4],xmm5[5],xmm6[6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm9[2,2,3,3]
; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0],xmm6[1],xmm5[2,3],xmm6[4],xmm5[5,6],xmm6[7]
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm5, %ymm2
; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm1[3,3,3,3]
; AVX-NEXT: vpshufhw {{.*#+}} xmm6 = xmm3[0,1,2,3,5,6,7,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,2,3]
; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2],xmm6[3,4],xmm5[5],xmm6[6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[2,2,3,3]
; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0],xmm6[1],xmm5[2,3],xmm6[4],xmm5[5,6],xmm6[7]
; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,1,2,2]
; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[3,3,3,3,4,5,6,7]
; AVX-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm3[2],xmm1[3,4],xmm3[5],xmm1[6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm4[1,1,2,2]
; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0],xmm1[1,2],xmm3[3],xmm1[4,5],xmm3[6],xmm1[7]
; AVX-NEXT: vinsertf128 $1, %xmm5, %ymm1, %ymm1
; AVX-NEXT: vmovaps %ymm1, 64(%rax)
; AVX-NEXT: vmovaps %ymm2, 32(%rax)
; AVX-NEXT: vmovaps %ymm0, (%rax)
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
; AVX2-LABEL: store_i8_stride6_vf16:
; AVX2: # %bb.0:
; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-NEXT: vmovdqa (%r8), %xmm2
; AVX2-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX2-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX2-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm2
; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
; AVX2-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm3[0,8],zero,zero,zero,zero,ymm3[1,9],zero,zero,zero,zero,ymm3[2,10],zero,zero,zero,zero,ymm3[19,27],zero,zero,zero,zero,ymm3[20,28],zero,zero,zero,zero
; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX2-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,8],zero,zero,zero,zero,ymm4[1,9],zero,zero,zero,zero,ymm4[2,10],zero,zero,zero,zero,ymm4[19,27],zero,zero,zero,zero,ymm4[20,28],zero,zero,zero,zero,ymm4[21,29]
; AVX2-NEXT: vpor %ymm3, %ymm4, %ymm3
; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,0,2]
; AVX2-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,19,27,u,u,u,u,20,28,u,u]
; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm5 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX2-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
; AVX2-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,ymm4[6,14],zero,zero,zero,zero,ymm4[7,15],zero,zero,zero,zero,ymm4[16,24],zero,zero,zero,zero,ymm4[17,25],zero,zero,zero,zero,ymm4[18,26],zero,zero
; AVX2-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
; AVX2-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[5,13],zero,zero,zero,zero,ymm5[6,14],zero,zero,zero,zero,ymm5[7,15],zero,zero,zero,zero,ymm5[16,24],zero,zero,zero,zero,ymm5[17,25],zero,zero,zero,zero,ymm5[18,26]
; AVX2-NEXT: vpor %ymm4, %ymm5, %ymm4
; AVX2-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX2-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15,u,u,u,u,16,24,u,u,u,u,17,25,u,u,u,u]
; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm6 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX2-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX2-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[3,11],zero,zero,zero,zero,ymm1[4,12],zero,zero,zero,zero,ymm1[21,29],zero,zero,zero,zero,ymm1[22,30],zero,zero,zero,zero,ymm1[23,31],zero,zero
; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX2-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,ymm0[3,11],zero,zero,zero,zero,ymm0[4,12],zero,zero,zero,zero,ymm0[5,13],zero,zero,zero,zero,ymm0[22,30],zero,zero,zero,zero,ymm0[23,31],zero,zero,zero,zero
; AVX2-NEXT: vpor %ymm1, %ymm0, %ymm0
; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX2-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31]
; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm2 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
; AVX2-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-NEXT: vmovdqa %ymm0, 64(%rax)
; AVX2-NEXT: vmovdqa %ymm4, 32(%rax)
; AVX2-NEXT: vmovdqa %ymm3, (%rax)
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: store_i8_stride6_vf16:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FP-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-FP-NEXT: vmovdqa (%r8), %xmm2
; AVX2-FP-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX2-FP-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX2-FP-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm2
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm3[0,8],zero,zero,zero,zero,ymm3[1,9],zero,zero,zero,zero,ymm3[2,10],zero,zero,zero,zero,ymm3[19,27],zero,zero,zero,zero,ymm3[20,28],zero,zero,zero,zero
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,8],zero,zero,zero,zero,ymm4[1,9],zero,zero,zero,zero,ymm4[2,10],zero,zero,zero,zero,ymm4[19,27],zero,zero,zero,zero,ymm4[20,28],zero,zero,zero,zero,ymm4[21,29]
; AVX2-FP-NEXT: vpor %ymm3, %ymm4, %ymm3
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,0,2]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,19,27,u,u,u,u,20,28,u,u]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm5 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX2-FP-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,ymm4[6,14],zero,zero,zero,zero,ymm4[7,15],zero,zero,zero,zero,ymm4[16,24],zero,zero,zero,zero,ymm4[17,25],zero,zero,zero,zero,ymm4[18,26],zero,zero
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[5,13],zero,zero,zero,zero,ymm5[6,14],zero,zero,zero,zero,ymm5[7,15],zero,zero,zero,zero,ymm5[16,24],zero,zero,zero,zero,ymm5[17,25],zero,zero,zero,zero,ymm5[18,26]
; AVX2-FP-NEXT: vpor %ymm4, %ymm5, %ymm4
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15,u,u,u,u,16,24,u,u,u,u,17,25,u,u,u,u]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[3,11],zero,zero,zero,zero,ymm1[4,12],zero,zero,zero,zero,ymm1[21,29],zero,zero,zero,zero,ymm1[22,30],zero,zero,zero,zero,ymm1[23,31],zero,zero
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,ymm0[3,11],zero,zero,zero,zero,ymm0[4,12],zero,zero,zero,zero,ymm0[5,13],zero,zero,zero,zero,ymm0[22,30],zero,zero,zero,zero,ymm0[23,31],zero,zero,zero,zero
; AVX2-FP-NEXT: vpor %ymm1, %ymm0, %ymm0
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
; AVX2-FP-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FP-NEXT: vmovdqa %ymm0, 64(%rax)
; AVX2-FP-NEXT: vmovdqa %ymm4, 32(%rax)
; AVX2-FP-NEXT: vmovdqa %ymm3, (%rax)
; AVX2-FP-NEXT: vzeroupper
; AVX2-FP-NEXT: retq
;
; AVX2-FCP-LABEL: store_i8_stride6_vf16:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX2-FCP-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX2-FCP-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX2-FCP-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm2
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm3[0,8],zero,zero,zero,zero,ymm3[1,9],zero,zero,zero,zero,ymm3[2,10],zero,zero,zero,zero,ymm3[19,27],zero,zero,zero,zero,ymm3[20,28],zero,zero,zero,zero
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,8],zero,zero,zero,zero,ymm4[1,9],zero,zero,zero,zero,ymm4[2,10],zero,zero,zero,zero,ymm4[19,27],zero,zero,zero,zero,ymm4[20,28],zero,zero,zero,zero,ymm4[21,29]
; AVX2-FCP-NEXT: vpor %ymm3, %ymm4, %ymm3
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,0,2]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,19,27,u,u,u,u,20,28,u,u]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm5 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX2-FCP-NEXT: vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,ymm4[6,14],zero,zero,zero,zero,ymm4[7,15],zero,zero,zero,zero,ymm4[16,24],zero,zero,zero,zero,ymm4[17,25],zero,zero,zero,zero,ymm4[18,26],zero,zero
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[5,13],zero,zero,zero,zero,ymm5[6,14],zero,zero,zero,zero,ymm5[7,15],zero,zero,zero,zero,ymm5[16,24],zero,zero,zero,zero,ymm5[17,25],zero,zero,zero,zero,ymm5[18,26]
; AVX2-FCP-NEXT: vpor %ymm4, %ymm5, %ymm4
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[u,u,5,13,u,u,u,u,6,14,u,u,u,u,7,15,u,u,u,u,16,24,u,u,u,u,17,25,u,u,u,u]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[3,11],zero,zero,zero,zero,ymm1[4,12],zero,zero,zero,zero,ymm1[21,29],zero,zero,zero,zero,ymm1[22,30],zero,zero,zero,zero,ymm1[23,31],zero,zero
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,ymm0[3,11],zero,zero,zero,zero,ymm0[4,12],zero,zero,zero,zero,ymm0[5,13],zero,zero,zero,zero,ymm0[22,30],zero,zero,zero,zero,ymm0[23,31],zero,zero,zero,zero
; AVX2-FCP-NEXT: vpor %ymm1, %ymm0, %ymm0
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
; AVX2-FCP-NEXT: vpblendvb %ymm2, %ymm0, %ymm1, %ymm0
; AVX2-FCP-NEXT: vmovdqa %ymm0, 64(%rax)
; AVX2-FCP-NEXT: vmovdqa %ymm4, 32(%rax)
; AVX2-FCP-NEXT: vmovdqa %ymm3, (%rax)
; AVX2-FCP-NEXT: vzeroupper
; AVX2-FCP-NEXT: retq
;
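; Annotation (not autogenerated): the AVX2 vf16 variants above pack each pair
; of 16-byte inputs into one ymm with vinserti128, replicate the even/odd
; 64-bit lanes with vpermq, then combine the three sources via vpshufb and
; vpblendvb, whose 65535/0 word masks repeat with period 3 (one word per
; interleaved byte pair).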
; AVX512-LABEL: store_i8_stride6_vf16:
; AVX512: # %bb.0:
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa (%rdi), %xmm0
; AVX512-NEXT: vmovdqa (%rdx), %xmm1
; AVX512-NEXT: vmovdqa (%r8), %xmm2
; AVX512-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
; AVX512-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
; AVX512-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm3[0,8,u,u],zero,zero,ymm3[1,9,u,u],zero,zero,ymm3[2,10,u,u],zero,zero,ymm3[19,27,u,u],zero,zero,ymm3[20,28,u,u],zero,zero
; AVX512-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
; AVX512-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,ymm4[u,u,6,14],zero,zero,ymm4[u,u,7,15],zero,zero,ymm4[u,u,16,24],zero,zero,ymm4[u,u,17,25],zero,zero,ymm4[u,u,18,26],zero,zero
; AVX512-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
; AVX512-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX512-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,8],zero,zero,ymm4[u,u,1,9],zero,zero,ymm4[u,u,2,10],zero,zero,ymm4[u,u,19,27],zero,zero,ymm4[u,u,20,28],zero,zero,ymm4[u,u,21,29]
; AVX512-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
; AVX512-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[5,13,u,u],zero,zero,ymm5[6,14,u,u],zero,zero,ymm5[7,15,u,u],zero,zero,ymm5[16,24,u,u],zero,zero,ymm5[17,25,u,u],zero,zero,ymm5[18,26]
; AVX512-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
; AVX512-NEXT: vporq %zmm3, %zmm4, %zmm3
; AVX512-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,0,2]
; AVX512-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,ymm4[0,8],zero,zero,zero,zero,ymm4[1,9],zero,zero,zero,zero,ymm4[18,26],zero,zero,zero,zero,ymm4[19,27],zero,zero,zero,zero,ymm4[20,28],zero,zero
; AVX512-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX512-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,ymm5[5,13],zero,zero,zero,zero,ymm5[6,14],zero,zero,zero,zero,ymm5[7,15],zero,zero,zero,zero,ymm5[16,24],zero,zero,zero,zero,ymm5[17,25],zero,zero,zero,zero
; AVX512-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
; AVX512-NEXT: vpternlogq {{.*#+}} zmm4 = zmm4 | (zmm3 & mem)
; AVX512-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX512-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u],zero,zero,ymm1[3,11,u,u],zero,zero,ymm1[4,12,u,u],zero,zero,ymm1[21,29,u,u],zero,zero,ymm1[22,30,u,u],zero,zero,ymm1[23,31,u,u]
; AVX512-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX512-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,3,11],zero,zero,ymm0[u,u,4,12],zero,zero,ymm0[u,u,5,13],zero,zero,ymm0[u,u,22,30],zero,zero,ymm0[u,u,23,31],zero,zero,ymm0[u,u]
; AVX512-NEXT: vpor %ymm1, %ymm0, %ymm0
; AVX512-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX512-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10],zero,zero,zero,zero,ymm1[3,11],zero,zero,zero,zero,ymm1[4,12],zero,zero,zero,zero,ymm1[21,29],zero,zero,zero,zero,ymm1[22,30],zero,zero,zero,zero,ymm1[23,31]
; AVX512-NEXT: vpternlogq {{.*#+}} ymm1 = ymm1 | (ymm0 & mem)
; AVX512-NEXT: vmovdqa %ymm1, 64(%rax)
; AVX512-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
;
; AVX512-FCP-LABEL: store_i8_stride6_vf16:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX512-FCP-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512-FCP-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512-FCP-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm3[0,8,u,u],zero,zero,ymm3[1,9,u,u],zero,zero,ymm3[2,10,u,u],zero,zero,ymm3[19,27,u,u],zero,zero,ymm3[20,28,u,u],zero,zero
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,ymm4[u,u,6,14],zero,zero,ymm4[u,u,7,15],zero,zero,ymm4[u,u,16,24],zero,zero,ymm4[u,u,17,25],zero,zero,ymm4[u,u,18,26],zero,zero
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,8],zero,zero,ymm4[u,u,1,9],zero,zero,ymm4[u,u,2,10],zero,zero,ymm4[u,u,19,27],zero,zero,ymm4[u,u,20,28],zero,zero,ymm4[u,u,21,29]
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[5,13,u,u],zero,zero,ymm5[6,14,u,u],zero,zero,ymm5[7,15,u,u],zero,zero,ymm5[16,24,u,u],zero,zero,ymm5[17,25,u,u],zero,zero,ymm5[18,26]
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
; AVX512-FCP-NEXT: vporq %zmm3, %zmm4, %zmm3
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,0,2]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,ymm4[0,8],zero,zero,zero,zero,ymm4[1,9],zero,zero,zero,zero,ymm4[18,26],zero,zero,zero,zero,ymm4[19,27],zero,zero,zero,zero,ymm4[20,28],zero,zero
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,ymm5[5,13],zero,zero,zero,zero,ymm5[6,14],zero,zero,zero,zero,ymm5[7,15],zero,zero,zero,zero,ymm5[16,24],zero,zero,zero,zero,ymm5[17,25],zero,zero,zero,zero
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm4 = zmm4 | (zmm3 & mem)
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u],zero,zero,ymm1[3,11,u,u],zero,zero,ymm1[4,12,u,u],zero,zero,ymm1[21,29,u,u],zero,zero,ymm1[22,30,u,u],zero,zero,ymm1[23,31,u,u]
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,3,11],zero,zero,ymm0[u,u,4,12],zero,zero,ymm0[u,u,5,13],zero,zero,ymm0[u,u,22,30],zero,zero,ymm0[u,u,23,31],zero,zero,ymm0[u,u]
; AVX512-FCP-NEXT: vpor %ymm1, %ymm0, %ymm0
; AVX512-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10],zero,zero,zero,zero,ymm1[3,11],zero,zero,zero,zero,ymm1[4,12],zero,zero,zero,zero,ymm1[21,29],zero,zero,zero,zero,ymm1[22,30],zero,zero,zero,zero,ymm1[23,31]
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} ymm1 = ymm1 | (ymm0 & mem)
; AVX512-FCP-NEXT: vmovdqa %ymm1, 64(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512-FCP-NEXT: vzeroupper
; AVX512-FCP-NEXT: retq
;
; AVX512DQ-LABEL: store_i8_stride6_vf16:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-NEXT: vmovdqa (%r8), %xmm2
; AVX512DQ-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512DQ-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512DQ-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm3[0,8,u,u],zero,zero,ymm3[1,9,u,u],zero,zero,ymm3[2,10,u,u],zero,zero,ymm3[19,27,u,u],zero,zero,ymm3[20,28,u,u],zero,zero
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,ymm4[u,u,6,14],zero,zero,ymm4[u,u,7,15],zero,zero,ymm4[u,u,16,24],zero,zero,ymm4[u,u,17,25],zero,zero,ymm4[u,u,18,26],zero,zero
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,8],zero,zero,ymm4[u,u,1,9],zero,zero,ymm4[u,u,2,10],zero,zero,ymm4[u,u,19,27],zero,zero,ymm4[u,u,20,28],zero,zero,ymm4[u,u,21,29]
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[5,13,u,u],zero,zero,ymm5[6,14,u,u],zero,zero,ymm5[7,15,u,u],zero,zero,ymm5[16,24,u,u],zero,zero,ymm5[17,25,u,u],zero,zero,ymm5[18,26]
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
; AVX512DQ-NEXT: vporq %zmm3, %zmm4, %zmm3
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,0,2]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,ymm4[0,8],zero,zero,zero,zero,ymm4[1,9],zero,zero,zero,zero,ymm4[18,26],zero,zero,zero,zero,ymm4[19,27],zero,zero,zero,zero,ymm4[20,28],zero,zero
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,ymm5[5,13],zero,zero,zero,zero,ymm5[6,14],zero,zero,zero,zero,ymm5[7,15],zero,zero,zero,zero,ymm5[16,24],zero,zero,zero,zero,ymm5[17,25],zero,zero,zero,zero
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm4 = zmm4 | (zmm3 & mem)
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u],zero,zero,ymm1[3,11,u,u],zero,zero,ymm1[4,12,u,u],zero,zero,ymm1[21,29,u,u],zero,zero,ymm1[22,30,u,u],zero,zero,ymm1[23,31,u,u]
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,3,11],zero,zero,ymm0[u,u,4,12],zero,zero,ymm0[u,u,5,13],zero,zero,ymm0[u,u,22,30],zero,zero,ymm0[u,u,23,31],zero,zero,ymm0[u,u]
; AVX512DQ-NEXT: vpor %ymm1, %ymm0, %ymm0
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10],zero,zero,zero,zero,ymm1[3,11],zero,zero,zero,zero,ymm1[4,12],zero,zero,zero,zero,ymm1[21,29],zero,zero,zero,zero,ymm1[22,30],zero,zero,zero,zero,ymm1[23,31]
; AVX512DQ-NEXT: vpternlogq {{.*#+}} ymm1 = ymm1 | (ymm0 & mem)
; AVX512DQ-NEXT: vmovdqa %ymm1, 64(%rax)
; AVX512DQ-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512DQ-FCP-LABEL: store_i8_stride6_vf16:
; AVX512DQ-FCP: # %bb.0:
; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX512DQ-FCP-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512DQ-FCP-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512DQ-FCP-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm3[0,8,u,u],zero,zero,ymm3[1,9,u,u],zero,zero,ymm3[2,10,u,u],zero,zero,ymm3[19,27,u,u],zero,zero,ymm3[20,28,u,u],zero,zero
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,ymm4[u,u,6,14],zero,zero,ymm4[u,u,7,15],zero,zero,ymm4[u,u,16,24],zero,zero,ymm4[u,u,17,25],zero,zero,ymm4[u,u,18,26],zero,zero
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[0,8],zero,zero,ymm4[u,u,1,9],zero,zero,ymm4[u,u,2,10],zero,zero,ymm4[u,u,19,27],zero,zero,ymm4[u,u,20,28],zero,zero,ymm4[u,u,21,29]
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[5,13,u,u],zero,zero,ymm5[6,14,u,u],zero,zero,ymm5[7,15,u,u],zero,zero,ymm5[16,24,u,u],zero,zero,ymm5[17,25,u,u],zero,zero,ymm5[18,26]
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
; AVX512DQ-FCP-NEXT: vporq %zmm3, %zmm4, %zmm3
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,2,0,2]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,ymm4[0,8],zero,zero,zero,zero,ymm4[1,9],zero,zero,zero,zero,ymm4[18,26],zero,zero,zero,zero,ymm4[19,27],zero,zero,zero,zero,ymm4[20,28],zero,zero
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm5 = zero,zero,ymm5[5,13],zero,zero,zero,zero,ymm5[6,14],zero,zero,zero,zero,ymm5[7,15],zero,zero,zero,zero,ymm5[16,24],zero,zero,zero,zero,ymm5[17,25],zero,zero,zero,zero
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm4 = zmm4 | (zmm3 & mem)
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u],zero,zero,ymm1[3,11,u,u],zero,zero,ymm1[4,12,u,u],zero,zero,ymm1[21,29,u,u],zero,zero,ymm1[22,30,u,u],zero,zero,ymm1[23,31,u,u]
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,3,11],zero,zero,ymm0[u,u,4,12],zero,zero,ymm0[u,u,5,13],zero,zero,ymm0[u,u,22,30],zero,zero,ymm0[u,u,23,31],zero,zero,ymm0[u,u]
; AVX512DQ-FCP-NEXT: vpor %ymm1, %ymm0, %ymm0
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10],zero,zero,zero,zero,ymm1[3,11],zero,zero,zero,zero,ymm1[4,12],zero,zero,zero,zero,ymm1[21,29],zero,zero,zero,zero,ymm1[22,30],zero,zero,zero,zero,ymm1[23,31]
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} ymm1 = ymm1 | (ymm0 & mem)
; AVX512DQ-FCP-NEXT: vmovdqa %ymm1, 64(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512DQ-FCP-NEXT: vzeroupper
; AVX512DQ-FCP-NEXT: retq
;
; AVX512BW-LABEL: store_i8_stride6_vf16:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512BW-NEXT: vmovdqa (%r8), %xmm2
; AVX512BW-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512BW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512BW-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
; AVX512BW-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
; AVX512BW-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[u,u,0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58,u,u]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
; AVX512BW-NEXT: vpshufb {{.*#+}} zmm4 = zmm4[0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,21,29,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58]
; AVX512BW-NEXT: movl $1227105426, %ecx # imm = 0x49242492
; AVX512BW-NEXT: kmovd %ecx, %k1
; AVX512BW-NEXT: vmovdqu16 %zmm3, %zmm4 {%k1}
; AVX512BW-NEXT: vpermq {{.*#+}} ymm3 = ymm2[0,2,0,2]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm5, %zmm3, %zmm3
; AVX512BW-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u]
; AVX512BW-NEXT: movl $613566756, %ecx # imm = 0x24924924
; AVX512BW-NEXT: kmovd %ecx, %k1
; AVX512BW-NEXT: vmovdqu16 %zmm3, %zmm4 {%k1}
; AVX512BW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX512BW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31,u,u]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX512BW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,3,11,u,u,u,u,4,12,u,u,u,u,5,13,u,u,u,u,22,30,u,u,u,u,23,31,u,u,u,u]
; AVX512BW-NEXT: movw $18724, %cx # imm = 0x4924
; AVX512BW-NEXT: kmovd %ecx, %k1
; AVX512BW-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
; AVX512BW-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX512BW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31]
; AVX512BW-NEXT: movw $-28087, %cx # imm = 0x9249
; AVX512BW-NEXT: kmovd %ecx, %k1
; AVX512BW-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
; AVX512BW-NEXT: vmovdqa %ymm0, 64(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
;
; AVX512BW-FCP-LABEL: store_i8_stride6_vf16:
; AVX512BW-FCP: # %bb.0:
; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX512BW-FCP-NEXT: vmovdqa (%r8), %xmm2
; AVX512BW-FCP-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512BW-FCP-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm3
; AVX512BW-FCP-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,2,0,2,8,10,9,11]
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm5
; AVX512BW-FCP-NEXT: vpermt2q %zmm3, %zmm4, %zmm5
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} zmm5 = zmm5[u,u,0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58,u,u]
; AVX512BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,21,29,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58]
; AVX512BW-FCP-NEXT: movl $1227105426, %ecx # imm = 0x49242492
; AVX512BW-FCP-NEXT: kmovd %ecx, %k1
; AVX512BW-FCP-NEXT: vmovdqu16 %zmm5, %zmm3 {%k1}
; AVX512BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,2,0,2,0,2,1,3]
; AVX512BW-FCP-NEXT: vpermq %zmm2, %zmm4, %zmm4
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} zmm4 = zmm4[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u]
; AVX512BW-FCP-NEXT: movl $613566756, %ecx # imm = 0x24924924
; AVX512BW-FCP-NEXT: kmovd %ecx, %k1
; AVX512BW-FCP-NEXT: vmovdqu16 %zmm4, %zmm3 {%k1}
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31,u,u]
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,3,11,u,u,u,u,4,12,u,u,u,u,5,13,u,u,u,u,22,30,u,u,u,u,23,31,u,u,u,u]
; AVX512BW-FCP-NEXT: movw $18724, %cx # imm = 0x4924
; AVX512BW-FCP-NEXT: kmovd %ecx, %k1
; AVX512BW-FCP-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31]
; AVX512BW-FCP-NEXT: movw $-28087, %cx # imm = 0x9249
; AVX512BW-FCP-NEXT: kmovd %ecx, %k1
; AVX512BW-FCP-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa %ymm0, 64(%rax)
; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
; AVX512BW-FCP-NEXT: vzeroupper
; AVX512BW-FCP-NEXT: retq
;
; AVX512DQ-BW-LABEL: store_i8_stride6_vf16:
; AVX512DQ-BW: # %bb.0:
; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %xmm1
; AVX512DQ-BW-NEXT: vmovdqa (%r8), %xmm2
; AVX512DQ-BW-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
; AVX512DQ-BW-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
; AVX512DQ-BW-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm3 = ymm1[0,2,0,2]
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,1,3]
; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[u,u,0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58,u,u]
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm4 = ymm0[0,2,0,2]
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm5 = ymm1[0,2,1,3]
; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} zmm4 = zmm4[0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,21,29,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58]
; AVX512DQ-BW-NEXT: movl $1227105426, %ecx # imm = 0x49242492
; AVX512DQ-BW-NEXT: kmovd %ecx, %k1
; AVX512DQ-BW-NEXT: vmovdqu16 %zmm3, %zmm4 {%k1}
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm3 = ymm2[0,2,0,2]
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm5 = ymm2[0,2,1,3]
; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm5, %zmm3, %zmm3
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u]
; AVX512DQ-BW-NEXT: movl $613566756, %ecx # imm = 0x24924924
; AVX512DQ-BW-NEXT: kmovd %ecx, %k1
; AVX512DQ-BW-NEXT: vmovdqu16 %zmm3, %zmm4 {%k1}
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31,u,u]
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,3,11,u,u,u,u,4,12,u,u,u,u,5,13,u,u,u,u,22,30,u,u,u,u,23,31,u,u,u,u]
; AVX512DQ-BW-NEXT: movw $18724, %cx # imm = 0x4924
; AVX512DQ-BW-NEXT: kmovd %ecx, %k1
; AVX512DQ-BW-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31]
; AVX512DQ-BW-NEXT: movw $-28087, %cx # imm = 0x9249
; AVX512DQ-BW-NEXT: kmovd %ecx, %k1
; AVX512DQ-BW-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
; AVX512DQ-BW-NEXT: vmovdqa %ymm0, 64(%rax)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm4, (%rax)
; AVX512DQ-BW-NEXT: vzeroupper
; AVX512DQ-BW-NEXT: retq
;
1678 ; AVX512DQ-BW-FCP-LABEL: store_i8_stride6_vf16:
1679 ; AVX512DQ-BW-FCP: # %bb.0:
1680 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
1681 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %xmm0
1682 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %xmm1
1683 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%r8), %xmm2
1684 ; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
1685 ; AVX512DQ-BW-FCP-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
1686 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm3
1687 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $1, (%r9), %zmm2, %zmm2
1688 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,2,0,2,8,10,9,11]
1689 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm5
1690 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm3, %zmm4, %zmm5
1691 ; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} zmm5 = zmm5[u,u,0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58,u,u]
1692 ; AVX512DQ-BW-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
1693 ; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} zmm3 = zmm3[0,8,u,u,u,u,1,9,u,u,u,u,2,10,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,21,29,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u,50,58]
1694 ; AVX512DQ-BW-FCP-NEXT: movl $1227105426, %ecx # imm = 0x49242492
1695 ; AVX512DQ-BW-FCP-NEXT: kmovd %ecx, %k1
1696 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm5, %zmm3 {%k1}
1697 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,2,0,2,0,2,1,3]
1698 ; AVX512DQ-BW-FCP-NEXT: vpermq %zmm2, %zmm4, %zmm4
1699 ; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} zmm4 = zmm4[u,u,u,u,0,8,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,19,27,u,u,u,u,20,28,u,u,u,u,37,45,u,u,u,u,38,46,u,u,u,u,39,47,u,u,u,u,48,56,u,u,u,u,49,57,u,u,u,u]
1700 ; AVX512DQ-BW-FCP-NEXT: movl $613566756, %ecx # imm = 0x24924924
1701 ; AVX512DQ-BW-FCP-NEXT: kmovd %ecx, %k1
1702 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm4, %zmm3 {%k1}
1703 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
1704 ; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31,u,u]
1705 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
1706 ; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,3,11,u,u,u,u,4,12,u,u,u,u,5,13,u,u,u,u,22,30,u,u,u,u,23,31,u,u,u,u]
1707 ; AVX512DQ-BW-FCP-NEXT: movw $18724, %cx # imm = 0x4924
1708 ; AVX512DQ-BW-FCP-NEXT: kmovd %ecx, %k1
1709 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
1710 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm2[1,3,1,3]
1711 ; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[2,10,u,u,u,u,3,11,u,u,u,u,4,12,u,u,u,u,21,29,u,u,u,u,22,30,u,u,u,u,23,31]
1712 ; AVX512DQ-BW-FCP-NEXT: movw $-28087, %cx # imm = 0x9249
1713 ; AVX512DQ-BW-FCP-NEXT: kmovd %ecx, %k1
1714 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %ymm1, %ymm0 {%k1}
1715 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm0, 64(%rax)
1716 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, (%rax)
1717 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
1718 ; AVX512DQ-BW-FCP-NEXT: retq
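; Note on the mask constants above (a manual annotation, not autogenerated):
; 0x24924924, 0x4924 and 0x9249 each set every third bit (starting at bit 2,
; bit 2 and bit 0 respectively), selecting every third 16-bit lane. A
; stride-6 group of bytes occupies three words, so the AVX-512BW lowering
; merges the three byte-pair streams with stride-3 word masks via
; vmovdqu16 {%k1}.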
1719 %in.vec0 = load <16 x i8>, ptr %in.vecptr0, align 64
1720 %in.vec1 = load <16 x i8>, ptr %in.vecptr1, align 64
1721 %in.vec2 = load <16 x i8>, ptr %in.vecptr2, align 64
1722 %in.vec3 = load <16 x i8>, ptr %in.vecptr3, align 64
1723 %in.vec4 = load <16 x i8>, ptr %in.vecptr4, align 64
1724 %in.vec5 = load <16 x i8>, ptr %in.vecptr5, align 64
1725 %1 = shufflevector <16 x i8> %in.vec0, <16 x i8> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1726 %2 = shufflevector <16 x i8> %in.vec2, <16 x i8> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1727 %3 = shufflevector <16 x i8> %in.vec4, <16 x i8> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1728 %4 = shufflevector <32 x i8> %1, <32 x i8> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1729 %5 = shufflevector <32 x i8> %3, <32 x i8> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
1730 %6 = shufflevector <64 x i8> %4, <64 x i8> %5, <96 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95>
1731 %interleaved.vec = shufflevector <96 x i8> %6, <96 x i8> poison, <96 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95>
1732 store <96 x i8> %interleaved.vec, ptr %out.vec, align 64
1733 ret void
1734 }
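; Illustrative scalar view (an added sketch, not part of the autogenerated
; checks): with the inputs named vec0..vec5, the shufflevector chain above
; concatenates them into a 96 x i8 value whose lane 16*j+i is vecj[i], and
; the final mask transposes that into the interleaved layout
;   for (i = 0; i < 16; ++i)
;     for (j = 0; j < 6; ++j)
;       out[6*i + j] = vecj[i];
; which is the stride-6 interleaved store being lowered.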
1736 define void @store_i8_stride6_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
1737 ; SSE-LABEL: store_i8_stride6_vf32:
1738 ; SSE: # %bb.0:
1739 ; SSE-NEXT: subq $200, %rsp
1740 ; SSE-NEXT: movdqa 16(%rdi), %xmm8
1741 ; SSE-NEXT: movdqa 16(%rsi), %xmm5
1742 ; SSE-NEXT: movdqa 16(%rdx), %xmm12
1743 ; SSE-NEXT: movdqa 16(%rcx), %xmm4
1744 ; SSE-NEXT: movdqa 16(%r8), %xmm11
1745 ; SSE-NEXT: movdqa 16(%r9), %xmm0
1746 ; SSE-NEXT: movdqa %xmm8, %xmm1
1747 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
1748 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1749 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
1750 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [65535,0,65535,65535,0,65535,65535,0]
1751 ; SSE-NEXT: pand %xmm9, %xmm2
1752 ; SSE-NEXT: movdqa %xmm12, %xmm1
1753 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm4[8],xmm1[9],xmm4[9],xmm1[10],xmm4[10],xmm1[11],xmm4[11],xmm1[12],xmm4[12],xmm1[13],xmm4[13],xmm1[14],xmm4[14],xmm1[15],xmm4[15]
1754 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1755 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm1[1,0,2,2,4,5,6,7]
1756 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
1757 ; SSE-NEXT: movdqa %xmm9, %xmm6
1758 ; SSE-NEXT: pandn %xmm3, %xmm6
1759 ; SSE-NEXT: por %xmm2, %xmm6
1760 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,65535,0,65535,65535,0,65535,65535]
1761 ; SSE-NEXT: pand %xmm3, %xmm6
1762 ; SSE-NEXT: movdqa %xmm11, %xmm1
1763 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
1764 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1765 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,0,0]
1766 ; SSE-NEXT: movdqa %xmm3, %xmm7
1767 ; SSE-NEXT: pandn %xmm2, %xmm7
1768 ; SSE-NEXT: por %xmm6, %xmm7
1769 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
1770 ; SSE-NEXT: pand %xmm2, %xmm7
1771 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
1772 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1773 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm1[0,0,0,0]
1774 ; SSE-NEXT: movdqa %xmm2, %xmm1
1775 ; SSE-NEXT: pandn %xmm6, %xmm1
1776 ; SSE-NEXT: por %xmm7, %xmm1
1777 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1778 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm5[0],xmm8[1],xmm5[1],xmm8[2],xmm5[2],xmm8[3],xmm5[3],xmm8[4],xmm5[4],xmm8[5],xmm5[5],xmm8[6],xmm5[6],xmm8[7],xmm5[7]
1779 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1780 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm8[3,3,3,3]
1781 ; SSE-NEXT: movdqa %xmm3, %xmm6
1782 ; SSE-NEXT: pandn %xmm5, %xmm6
1783 ; SSE-NEXT: punpcklbw {{.*#+}} xmm12 = xmm12[0],xmm4[0],xmm12[1],xmm4[1],xmm12[2],xmm4[2],xmm12[3],xmm4[3],xmm12[4],xmm4[4],xmm12[5],xmm4[5],xmm12[6],xmm4[6],xmm12[7],xmm4[7]
1784 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1785 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm12[0,1,2,3,5,6,7,7]
1786 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
1787 ; SSE-NEXT: pand %xmm3, %xmm4
1788 ; SSE-NEXT: por %xmm6, %xmm4
1789 ; SSE-NEXT: pand %xmm9, %xmm4
1790 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1791 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1792 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm11[2,2,3,3]
1793 ; SSE-NEXT: movdqa %xmm9, %xmm6
1794 ; SSE-NEXT: pandn %xmm5, %xmm6
1795 ; SSE-NEXT: por %xmm4, %xmm6
1796 ; SSE-NEXT: movdqa {{.*#+}} xmm10 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
1797 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1798 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1799 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
1800 ; SSE-NEXT: movdqa %xmm10, %xmm1
1801 ; SSE-NEXT: pandn %xmm0, %xmm1
1802 ; SSE-NEXT: movdqa (%rdi), %xmm0
1803 ; SSE-NEXT: pand %xmm10, %xmm6
1804 ; SSE-NEXT: por %xmm6, %xmm1
1805 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1806 ; SSE-NEXT: movdqa (%rsi), %xmm14
1807 ; SSE-NEXT: movdqa %xmm0, %xmm1
1808 ; SSE-NEXT: movdqa %xmm0, %xmm8
1809 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm14[8],xmm1[9],xmm14[9],xmm1[10],xmm14[10],xmm1[11],xmm14[11],xmm1[12],xmm14[12],xmm1[13],xmm14[13],xmm1[14],xmm14[14],xmm1[15],xmm14[15]
1810 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[3,3,3,3]
1811 ; SSE-NEXT: movdqa %xmm1, %xmm2
1812 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1813 ; SSE-NEXT: movdqa %xmm3, %xmm6
1814 ; SSE-NEXT: pandn %xmm0, %xmm6
1815 ; SSE-NEXT: movdqa (%rdx), %xmm11
1816 ; SSE-NEXT: movdqa (%rcx), %xmm12
1817 ; SSE-NEXT: movdqa %xmm11, %xmm1
1818 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm12[8],xmm1[9],xmm12[9],xmm1[10],xmm12[10],xmm1[11],xmm12[11],xmm1[12],xmm12[12],xmm1[13],xmm12[13],xmm1[14],xmm12[14],xmm1[15],xmm12[15]
1819 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,5,6,7,7]
1820 ; SSE-NEXT: movdqa %xmm1, %xmm5
1821 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1822 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
1823 ; SSE-NEXT: pand %xmm3, %xmm0
1824 ; SSE-NEXT: por %xmm6, %xmm0
1825 ; SSE-NEXT: movdqa (%r8), %xmm13
1826 ; SSE-NEXT: movdqa %xmm13, %xmm7
1827 ; SSE-NEXT: punpckhbw {{.*#+}} xmm7 = xmm7[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
1828 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm7[2,2,3,3]
1829 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1830 ; SSE-NEXT: movdqa %xmm9, %xmm15
1831 ; SSE-NEXT: pandn %xmm6, %xmm15
1832 ; SSE-NEXT: pand %xmm9, %xmm0
1833 ; SSE-NEXT: por %xmm0, %xmm15
1834 ; SSE-NEXT: movdqa (%r9), %xmm6
1835 ; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm6[8],xmm4[9],xmm6[9],xmm4[10],xmm6[10],xmm4[11],xmm6[11],xmm4[12],xmm6[12],xmm4[13],xmm6[13],xmm4[14],xmm6[14],xmm4[15],xmm6[15]
1836 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm4[2,2,3,3]
1837 ; SSE-NEXT: movdqa %xmm4, (%rsp) # 16-byte Spill
1838 ; SSE-NEXT: movdqa %xmm10, %xmm1
1839 ; SSE-NEXT: pandn %xmm0, %xmm1
1840 ; SSE-NEXT: pand %xmm10, %xmm15
1841 ; SSE-NEXT: por %xmm15, %xmm1
1842 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1843 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm5[3,3,3,3,4,5,6,7]
1844 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
1845 ; SSE-NEXT: movdqa %xmm3, %xmm15
1846 ; SSE-NEXT: pandn %xmm0, %xmm15
1847 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,2,2]
1848 ; SSE-NEXT: pand %xmm3, %xmm0
1849 ; SSE-NEXT: por %xmm0, %xmm15
1850 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [0,65535,65535,0,65535,65535,0,65535]
1851 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[1,1,2,2]
1852 ; SSE-NEXT: movdqa %xmm1, %xmm0
1853 ; SSE-NEXT: pandn %xmm2, %xmm0
1854 ; SSE-NEXT: pand %xmm1, %xmm15
1855 ; SSE-NEXT: por %xmm15, %xmm0
1856 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
1857 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[1,1,2,2]
1858 ; SSE-NEXT: movdqa %xmm5, %xmm15
1859 ; SSE-NEXT: pandn %xmm2, %xmm15
1860 ; SSE-NEXT: pand %xmm5, %xmm0
1861 ; SSE-NEXT: por %xmm0, %xmm15
1862 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1863 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3],xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
1864 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3],xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
1865 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[3,3,3,3,4,5,6,7]
1866 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1867 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
1868 ; SSE-NEXT: movdqa %xmm3, %xmm2
1869 ; SSE-NEXT: pandn %xmm0, %xmm2
1870 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm8[1,1,2,2]
1871 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1872 ; SSE-NEXT: pand %xmm3, %xmm0
1873 ; SSE-NEXT: por %xmm0, %xmm2
1874 ; SSE-NEXT: punpcklbw {{.*#+}} xmm13 = xmm13[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1875 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm13[1,1,2,2]
1876 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1877 ; SSE-NEXT: movdqa %xmm1, %xmm4
1878 ; SSE-NEXT: pandn %xmm0, %xmm4
1879 ; SSE-NEXT: pand %xmm1, %xmm2
1880 ; SSE-NEXT: por %xmm2, %xmm4
1881 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1],xmm2[2],xmm6[2],xmm2[3],xmm6[3],xmm2[4],xmm6[4],xmm2[5],xmm6[5],xmm2[6],xmm6[6],xmm2[7],xmm6[7]
1882 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[1,1,2,2]
1883 ; SSE-NEXT: movdqa %xmm2, %xmm6
1884 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1885 ; SSE-NEXT: movdqa %xmm5, %xmm12
1886 ; SSE-NEXT: pandn %xmm0, %xmm12
1887 ; SSE-NEXT: pand %xmm5, %xmm4
1888 ; SSE-NEXT: por %xmm4, %xmm12
1889 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm11[1,0,2,2,4,5,6,7]
1890 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
1891 ; SSE-NEXT: movdqa %xmm9, %xmm2
1892 ; SSE-NEXT: pandn %xmm0, %xmm2
1893 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm8[0,0,1,1]
1894 ; SSE-NEXT: pand %xmm9, %xmm0
1895 ; SSE-NEXT: por %xmm0, %xmm2
1896 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm13[0,0,0,0]
1897 ; SSE-NEXT: movdqa %xmm3, %xmm4
1898 ; SSE-NEXT: pandn %xmm0, %xmm4
1899 ; SSE-NEXT: pand %xmm3, %xmm2
1900 ; SSE-NEXT: por %xmm2, %xmm4
1901 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,0,0,0]
1902 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
1903 ; SSE-NEXT: movdqa %xmm7, %xmm11
1904 ; SSE-NEXT: pandn %xmm0, %xmm11
1905 ; SSE-NEXT: pand %xmm7, %xmm4
1906 ; SSE-NEXT: por %xmm4, %xmm11
1907 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
1908 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm4[3,3,3,3]
1909 ; SSE-NEXT: movdqa %xmm3, %xmm2
1910 ; SSE-NEXT: pandn %xmm0, %xmm2
1911 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
1912 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm13[0,1,2,3,5,6,7,7]
1913 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
1914 ; SSE-NEXT: pand %xmm3, %xmm0
1915 ; SSE-NEXT: por %xmm2, %xmm0
1916 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
1917 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm14[2,2,3,3]
1918 ; SSE-NEXT: movdqa %xmm9, %xmm8
1919 ; SSE-NEXT: pandn %xmm2, %xmm8
1920 ; SSE-NEXT: pand %xmm9, %xmm0
1921 ; SSE-NEXT: por %xmm0, %xmm8
1922 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
1923 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm15[2,2,3,3]
1924 ; SSE-NEXT: movdqa %xmm10, %xmm6
1925 ; SSE-NEXT: pandn %xmm0, %xmm6
1926 ; SSE-NEXT: pand %xmm10, %xmm8
1927 ; SSE-NEXT: por %xmm8, %xmm6
1928 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm13[3,3,3,3,4,5,6,7]
1929 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
1930 ; SSE-NEXT: movdqa %xmm3, %xmm2
1931 ; SSE-NEXT: pandn %xmm0, %xmm2
1932 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm4[1,1,2,2]
1933 ; SSE-NEXT: pand %xmm3, %xmm0
1934 ; SSE-NEXT: por %xmm0, %xmm2
1935 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[1,1,2,2]
1936 ; SSE-NEXT: movdqa %xmm1, %xmm8
1937 ; SSE-NEXT: pandn %xmm0, %xmm8
1938 ; SSE-NEXT: pand %xmm1, %xmm2
1939 ; SSE-NEXT: por %xmm2, %xmm8
1940 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm15[1,1,2,2]
1941 ; SSE-NEXT: movdqa %xmm5, %xmm15
1942 ; SSE-NEXT: pandn %xmm0, %xmm15
1943 ; SSE-NEXT: pand %xmm5, %xmm8
1944 ; SSE-NEXT: por %xmm8, %xmm15
1945 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1946 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[3,3,3,3,4,5,6,7]
1947 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
1948 ; SSE-NEXT: movdqa %xmm3, %xmm8
1949 ; SSE-NEXT: pandn %xmm0, %xmm8
1950 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
1951 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm13[1,1,2,2]
1952 ; SSE-NEXT: pand %xmm3, %xmm0
1953 ; SSE-NEXT: por %xmm0, %xmm8
1954 ; SSE-NEXT: pand %xmm1, %xmm8
1955 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
1956 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[1,1,2,2]
1957 ; SSE-NEXT: pandn %xmm0, %xmm1
1958 ; SSE-NEXT: por %xmm8, %xmm1
1959 ; SSE-NEXT: pand %xmm5, %xmm1
1960 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
1961 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm4[1,1,2,2]
1962 ; SSE-NEXT: pandn %xmm0, %xmm5
1963 ; SSE-NEXT: por %xmm1, %xmm5
1964 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[1,0,2,2,4,5,6,7]
1965 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
1966 ; SSE-NEXT: movdqa %xmm9, %xmm1
1967 ; SSE-NEXT: pandn %xmm0, %xmm1
1968 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm13[0,0,1,1]
1969 ; SSE-NEXT: pand %xmm9, %xmm0
1970 ; SSE-NEXT: por %xmm0, %xmm1
1971 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[0,0,0,0]
1972 ; SSE-NEXT: movdqa %xmm3, %xmm8
1973 ; SSE-NEXT: pandn %xmm0, %xmm8
1974 ; SSE-NEXT: pand %xmm3, %xmm1
1975 ; SSE-NEXT: por %xmm1, %xmm8
1976 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[0,0,0,0]
1977 ; SSE-NEXT: movdqa %xmm7, %xmm0
1978 ; SSE-NEXT: pandn %xmm1, %xmm0
1979 ; SSE-NEXT: pand %xmm7, %xmm8
1980 ; SSE-NEXT: por %xmm8, %xmm0
1981 ; SSE-NEXT: pshuflw $161, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1982 ; SSE-NEXT: # xmm1 = mem[1,0,2,2,4,5,6,7]
1983 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
1984 ; SSE-NEXT: movdqa %xmm9, %xmm8
1985 ; SSE-NEXT: pandn %xmm1, %xmm8
1986 ; SSE-NEXT: pshufd $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1987 ; SSE-NEXT: # xmm1 = mem[0,0,1,1]
1988 ; SSE-NEXT: pand %xmm9, %xmm1
1989 ; SSE-NEXT: por %xmm1, %xmm8
1990 ; SSE-NEXT: pshufd $0, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
1991 ; SSE-NEXT: # xmm1 = mem[0,0,0,0]
1992 ; SSE-NEXT: movdqa %xmm3, %xmm13
1993 ; SSE-NEXT: pandn %xmm1, %xmm13
1994 ; SSE-NEXT: pand %xmm3, %xmm8
1995 ; SSE-NEXT: por %xmm8, %xmm13
1996 ; SSE-NEXT: pand %xmm7, %xmm13
1997 ; SSE-NEXT: pshufd $0, (%rsp), %xmm1 # 16-byte Folded Reload
1998 ; SSE-NEXT: # xmm1 = mem[0,0,0,0]
1999 ; SSE-NEXT: pandn %xmm1, %xmm7
2000 ; SSE-NEXT: por %xmm13, %xmm7
2001 ; SSE-NEXT: pshufd $255, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2002 ; SSE-NEXT: # xmm1 = mem[3,3,3,3]
2003 ; SSE-NEXT: pshufhw $249, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
2004 ; SSE-NEXT: # xmm8 = mem[0,1,2,3,5,6,7,7]
2005 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[2,2,2,3]
2006 ; SSE-NEXT: pand %xmm3, %xmm8
2007 ; SSE-NEXT: pandn %xmm1, %xmm3
2008 ; SSE-NEXT: por %xmm8, %xmm3
2009 ; SSE-NEXT: pand %xmm9, %xmm3
2010 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2011 ; SSE-NEXT: # xmm1 = mem[2,2,3,3]
2012 ; SSE-NEXT: pandn %xmm1, %xmm9
2013 ; SSE-NEXT: por %xmm3, %xmm9
2014 ; SSE-NEXT: pand %xmm10, %xmm9
2015 ; SSE-NEXT: pshufd $250, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2016 ; SSE-NEXT: # xmm1 = mem[2,2,3,3]
2017 ; SSE-NEXT: pandn %xmm1, %xmm10
2018 ; SSE-NEXT: por %xmm9, %xmm10
2019 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2020 ; SSE-NEXT: movdqa %xmm10, 32(%rax)
2021 ; SSE-NEXT: movdqa %xmm7, 48(%rax)
2022 ; SSE-NEXT: movdqa %xmm0, 96(%rax)
2023 ; SSE-NEXT: movdqa %xmm5, 112(%rax)
2024 ; SSE-NEXT: movdqa %xmm15, 160(%rax)
2025 ; SSE-NEXT: movdqa %xmm6, 176(%rax)
2026 ; SSE-NEXT: movdqa %xmm11, (%rax)
2027 ; SSE-NEXT: movdqa %xmm12, 16(%rax)
2028 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2029 ; SSE-NEXT: movaps %xmm0, 64(%rax)
2030 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2031 ; SSE-NEXT: movaps %xmm0, 80(%rax)
2032 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2033 ; SSE-NEXT: movaps %xmm0, 128(%rax)
2034 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2035 ; SSE-NEXT: movaps %xmm0, 144(%rax)
2036 ; SSE-NEXT: addq $200, %rsp
2037 ; SSE-NEXT: retq
2039 ; AVX-LABEL: store_i8_stride6_vf32:
2040 ; AVX: # %bb.0:
2041 ; AVX-NEXT: vmovdqa 16(%rsi), %xmm0
2042 ; AVX-NEXT: vmovdqa 16(%rdi), %xmm1
2043 ; AVX-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
2044 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm11[3,3,3,3]
2045 ; AVX-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
2046 ; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
2047 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
2048 ; AVX-NEXT: vmovaps {{.*#+}} ymm10 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
2049 ; AVX-NEXT: vandnps %ymm1, %ymm10, %ymm2
2050 ; AVX-NEXT: vmovdqa 16(%rcx), %xmm1
2051 ; AVX-NEXT: vmovdqa 16(%rdx), %xmm3
2052 ; AVX-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
2053 ; AVX-NEXT: vpshufhw {{.*#+}} xmm4 = xmm14[0,1,2,3,5,6,7,7]
2054 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
2055 ; AVX-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
2056 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm1[1,0,2,2,4,5,6,7]
2057 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
2058 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
2059 ; AVX-NEXT: vandps %ymm3, %ymm10, %ymm3
2060 ; AVX-NEXT: vorps %ymm2, %ymm3, %ymm2
2061 ; AVX-NEXT: vextractf128 $1, %ymm2, %xmm3
2062 ; AVX-NEXT: vmovdqa 16(%r8), %xmm12
2063 ; AVX-NEXT: vpshufb {{.*#+}} xmm4 = zero,zero,zero,zero,xmm12[8,u],zero,zero,zero,zero,xmm12[9,u],zero,zero,zero,zero
2064 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm4[2],xmm3[3,4],xmm4[5],xmm3[6,7]
2065 ; AVX-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,4],zero,xmm3[6,7,8,9,10],zero,xmm3[12,13,14,15]
2066 ; AVX-NEXT: vmovdqa 16(%r9), %xmm13
2067 ; AVX-NEXT: vpshufb {{.*#+}} xmm4 = zero,zero,zero,zero,zero,xmm13[8],zero,zero,zero,zero,zero,xmm13[9],zero,zero,zero,zero
2068 ; AVX-NEXT: vpor %xmm4, %xmm3, %xmm3
2069 ; AVX-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2070 ; AVX-NEXT: vpshufb {{.*#+}} xmm3 = zero,zero,xmm12[5,u],zero,zero,zero,zero,xmm12[6,u],zero,zero,zero,zero,xmm12[7,u]
2071 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1],xmm2[2,3],xmm3[4],xmm2[5,6],xmm3[7]
2072 ; AVX-NEXT: vmovdqa {{.*#+}} xmm9 = [0,1,2,128,4,5,6,7,8,128,10,11,12,13,14,128]
2073 ; AVX-NEXT: vpshufb %xmm9, %xmm2, %xmm2
2074 ; AVX-NEXT: vpshufb {{.*#+}} xmm3 = zero,zero,zero,xmm13[5],zero,zero,zero,zero,zero,xmm13[6],zero,zero,zero,zero,zero,xmm13[7]
2075 ; AVX-NEXT: vpor %xmm3, %xmm2, %xmm2
2076 ; AVX-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2077 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[1,1,2,2]
2078 ; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
2079 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
2080 ; AVX-NEXT: vandps %ymm0, %ymm10, %ymm0
2081 ; AVX-NEXT: vpshuflw {{.*#+}} xmm2 = xmm1[3,3,3,3,4,5,6,7]
2082 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
2083 ; AVX-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,7,7]
2084 ; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,2,3]
2085 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm1
2086 ; AVX-NEXT: vandnps %ymm1, %ymm10, %ymm1
2087 ; AVX-NEXT: vorps %ymm1, %ymm0, %ymm0
2088 ; AVX-NEXT: vpshufb {{.*#+}} xmm1 = xmm12[10,u],zero,zero,zero,zero,xmm12[11,u],zero,zero,zero,zero,xmm12[12,u],zero,zero
2089 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm0[1,2],xmm1[3],xmm0[4,5],xmm1[6],xmm0[7]
2090 ; AVX-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0],zero,xmm1[2,3,4,5,6],zero,xmm1[8,9,10,11,12],zero,xmm1[14,15]
2091 ; AVX-NEXT: vpshufb {{.*#+}} xmm3 = zero,xmm13[10],zero,zero,zero,zero,zero,xmm13[11],zero,zero,zero,zero,zero,xmm13[12],zero,zero
2092 ; AVX-NEXT: vpor %xmm3, %xmm1, %xmm1
2093 ; AVX-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2094 ; AVX-NEXT: vextractf128 $1, %ymm0, %xmm0
2095 ; AVX-NEXT: vmovdqa {{.*#+}} xmm2 = [128,128,13,u,128,128,128,128,14,u,128,128,128,128,15,u]
2096 ; AVX-NEXT: vpshufb %xmm2, %xmm12, %xmm1
2097 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
2098 ; AVX-NEXT: vpshufb %xmm9, %xmm0, %xmm0
2099 ; AVX-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,xmm13[13],zero,zero,zero,zero,zero,xmm13[14],zero,zero,zero,zero,zero,xmm13[15]
2100 ; AVX-NEXT: vpor %xmm1, %xmm0, %xmm0
2101 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2102 ; AVX-NEXT: vmovdqa (%rsi), %xmm8
2103 ; AVX-NEXT: vmovdqa (%rdi), %xmm6
2104 ; AVX-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm6[8],xmm8[8],xmm6[9],xmm8[9],xmm6[10],xmm8[10],xmm6[11],xmm8[11],xmm6[12],xmm8[12],xmm6[13],xmm8[13],xmm6[14],xmm8[14],xmm6[15],xmm8[15]
2105 ; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm5[1,1,2,2]
2106 ; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm5[3,3,3,3]
2107 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
2108 ; AVX-NEXT: vmovdqa (%rcx), %xmm7
2109 ; AVX-NEXT: vmovdqa (%rdx), %xmm4
2110 ; AVX-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm4[8],xmm7[8],xmm4[9],xmm7[9],xmm4[10],xmm7[10],xmm4[11],xmm7[11],xmm4[12],xmm7[12],xmm4[13],xmm7[13],xmm4[14],xmm7[14],xmm4[15],xmm7[15]
2111 ; AVX-NEXT: vpshuflw {{.*#+}} xmm1 = xmm3[3,3,3,3,4,5,6,7]
2112 ; AVX-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
2113 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm3[0,1,2,3,5,6,7,7]
2114 ; AVX-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[2,2,2,3]
2115 ; AVX-NEXT: vinsertf128 $1, %xmm15, %ymm1, %ymm1
2116 ; AVX-NEXT: vandps %ymm0, %ymm10, %ymm0
2117 ; AVX-NEXT: vandnps %ymm1, %ymm10, %ymm1
2118 ; AVX-NEXT: vorps %ymm1, %ymm0, %ymm15
2119 ; AVX-NEXT: vmovdqa (%r8), %xmm1
2120 ; AVX-NEXT: vpshufb %xmm2, %xmm1, %xmm0
2121 ; AVX-NEXT: vextractf128 $1, %ymm15, %xmm2
2122 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0],xmm0[1],xmm2[2,3],xmm0[4],xmm2[5,6],xmm0[7]
2123 ; AVX-NEXT: vpshufb %xmm9, %xmm0, %xmm2
2124 ; AVX-NEXT: vmovdqa (%r9), %xmm0
2125 ; AVX-NEXT: vpshufb {{.*#+}} xmm9 = zero,zero,zero,xmm0[13],zero,zero,zero,zero,zero,xmm0[14],zero,zero,zero,zero,zero,xmm0[15]
2126 ; AVX-NEXT: vpor %xmm2, %xmm9, %xmm2
2127 ; AVX-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2128 ; AVX-NEXT: vpshufb {{.*#+}} xmm2 = xmm1[10,u],zero,zero,zero,zero,xmm1[11,u],zero,zero,zero,zero,xmm1[12,u],zero,zero
2129 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm15[1,2],xmm2[3],xmm15[4,5],xmm2[6],xmm15[7]
2130 ; AVX-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0],zero,xmm2[2,3,4,5,6],zero,xmm2[8,9,10,11,12],zero,xmm2[14,15]
2131 ; AVX-NEXT: vpshufb {{.*#+}} xmm9 = zero,xmm0[10],zero,zero,zero,zero,zero,xmm0[11],zero,zero,zero,zero,zero,xmm0[12],zero,zero
2132 ; AVX-NEXT: vpor %xmm2, %xmm9, %xmm2
2133 ; AVX-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2134 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm11[0,0,1,1]
2135 ; AVX-NEXT: vpshufd {{.*#+}} xmm9 = xmm11[1,1,2,2]
2136 ; AVX-NEXT: vinsertf128 $1, %xmm9, %ymm2, %ymm9
2137 ; AVX-NEXT: vpshuflw {{.*#+}} xmm2 = xmm14[1,0,2,2,4,5,6,7]
2138 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,0,1]
2139 ; AVX-NEXT: vpshuflw {{.*#+}} xmm11 = xmm14[3,3,3,3,4,5,6,7]
2140 ; AVX-NEXT: vpshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,4,4,4]
2141 ; AVX-NEXT: vinsertf128 $1, %xmm11, %ymm2, %ymm11
2142 ; AVX-NEXT: vmovaps {{.*#+}} ymm2 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
2143 ; AVX-NEXT: vandps %ymm2, %ymm9, %ymm9
2144 ; AVX-NEXT: vandnps %ymm11, %ymm2, %ymm11
2145 ; AVX-NEXT: vorps %ymm11, %ymm9, %ymm15
2146 ; AVX-NEXT: vpshufb {{.*#+}} xmm11 = zero,zero,zero,zero,xmm12[0,u],zero,zero,zero,zero,xmm12[1,u],zero,zero,zero,zero
2147 ; AVX-NEXT: vpblendw {{.*#+}} xmm11 = xmm15[0,1],xmm11[2],xmm15[3,4],xmm11[5],xmm15[6,7]
2148 ; AVX-NEXT: vmovdqa {{.*#+}} xmm14 = [0,1,2,3,4,128,6,7,8,9,10,128,12,13,14,15]
2149 ; AVX-NEXT: vpshufb %xmm14, %xmm11, %xmm11
2150 ; AVX-NEXT: vpshufb {{.*#+}} xmm10 = zero,zero,zero,zero,zero,xmm13[0],zero,zero,zero,zero,zero,xmm13[1],zero,zero,zero,zero
2151 ; AVX-NEXT: vpor %xmm10, %xmm11, %xmm11
2152 ; AVX-NEXT: vextractf128 $1, %ymm15, %xmm10
2153 ; AVX-NEXT: vmovdqa {{.*#+}} xmm15 = [2,u,128,128,128,128,3,u,128,128,128,128,4,u,128,128]
2154 ; AVX-NEXT: vpshufb %xmm15, %xmm12, %xmm12
2155 ; AVX-NEXT: vpblendw {{.*#+}} xmm10 = xmm12[0],xmm10[1,2],xmm12[3],xmm10[4,5],xmm12[6],xmm10[7]
2156 ; AVX-NEXT: vmovdqa {{.*#+}} xmm9 = [0,128,2,3,4,5,6,128,8,9,10,11,12,128,14,15]
2157 ; AVX-NEXT: vpshufb %xmm9, %xmm10, %xmm10
2158 ; AVX-NEXT: vmovdqa {{.*#+}} xmm12 = [128,2,128,128,128,128,128,3,128,128,128,128,128,4,128,128]
2159 ; AVX-NEXT: vpshufb %xmm12, %xmm13, %xmm13
2160 ; AVX-NEXT: vpor %xmm13, %xmm10, %xmm10
2161 ; AVX-NEXT: vpunpcklbw {{.*#+}} xmm6 = xmm6[0],xmm8[0],xmm6[1],xmm8[1],xmm6[2],xmm8[2],xmm6[3],xmm8[3],xmm6[4],xmm8[4],xmm6[5],xmm8[5],xmm6[6],xmm8[6],xmm6[7],xmm8[7]
2162 ; AVX-NEXT: vpshufd {{.*#+}} xmm8 = xmm6[0,0,1,1]
2163 ; AVX-NEXT: vpshufd {{.*#+}} xmm13 = xmm6[1,1,2,2]
2164 ; AVX-NEXT: vinsertf128 $1, %xmm13, %ymm8, %ymm8
2165 ; AVX-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm4[0],xmm7[0],xmm4[1],xmm7[1],xmm4[2],xmm7[2],xmm4[3],xmm7[3],xmm4[4],xmm7[4],xmm4[5],xmm7[5],xmm4[6],xmm7[6],xmm4[7],xmm7[7]
2166 ; AVX-NEXT: vpshuflw {{.*#+}} xmm7 = xmm4[1,0,2,2,4,5,6,7]
2167 ; AVX-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,1,0,1]
2168 ; AVX-NEXT: vpshuflw {{.*#+}} xmm13 = xmm4[3,3,3,3,4,5,6,7]
2169 ; AVX-NEXT: vpshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,4,4,4]
2170 ; AVX-NEXT: vinsertf128 $1, %xmm13, %ymm7, %ymm7
2171 ; AVX-NEXT: vandps %ymm2, %ymm8, %ymm8
2172 ; AVX-NEXT: vandnps %ymm7, %ymm2, %ymm2
2173 ; AVX-NEXT: vorps %ymm2, %ymm8, %ymm7
2174 ; AVX-NEXT: vpshufb %xmm15, %xmm1, %xmm2
2175 ; AVX-NEXT: vextractf128 $1, %ymm7, %xmm8
2176 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm8[1,2],xmm2[3],xmm8[4,5],xmm2[6],xmm8[7]
2177 ; AVX-NEXT: vpshufb %xmm9, %xmm2, %xmm2
2178 ; AVX-NEXT: vpshufb %xmm12, %xmm0, %xmm8
2179 ; AVX-NEXT: vpor %xmm2, %xmm8, %xmm2
2180 ; AVX-NEXT: vpshufb {{.*#+}} xmm8 = zero,zero,zero,zero,xmm1[0,u],zero,zero,zero,zero,xmm1[1,u],zero,zero,zero,zero
2181 ; AVX-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1],xmm8[2],xmm7[3,4],xmm8[5],xmm7[6,7]
2182 ; AVX-NEXT: vpshufb %xmm14, %xmm7, %xmm7
2183 ; AVX-NEXT: vpshufb {{.*#+}} xmm8 = zero,zero,zero,zero,zero,xmm0[0],zero,zero,zero,zero,zero,xmm0[1],zero,zero,zero,zero
2184 ; AVX-NEXT: vpor %xmm7, %xmm8, %xmm7
2185 ; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[3,3,3,3]
2186 ; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,0,1,1]
2187 ; AVX-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm5
2188 ; AVX-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,7,7]
2189 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
2190 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,0,2,2,4,5,6,7]
2191 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
2192 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
2193 ; AVX-NEXT: vmovaps {{.*#+}} ymm6 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
2194 ; AVX-NEXT: vandnps %ymm5, %ymm6, %ymm4
2195 ; AVX-NEXT: vandps %ymm6, %ymm3, %ymm3
2196 ; AVX-NEXT: vorps %ymm4, %ymm3, %ymm3
2197 ; AVX-NEXT: vpshufb {{.*#+}} xmm4 = zero,zero,xmm1[5,u],zero,zero,zero,zero,xmm1[6,u],zero,zero,zero,zero,xmm1[7,u]
2198 ; AVX-NEXT: vpblendw {{.*#+}} xmm4 = xmm3[0],xmm4[1],xmm3[2,3],xmm4[4],xmm3[5,6],xmm4[7]
2199 ; AVX-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[0,1,2],zero,xmm4[4,5,6,7,8],zero,xmm4[10,11,12,13,14],zero
2200 ; AVX-NEXT: vpshufb {{.*#+}} xmm5 = zero,zero,zero,xmm0[5],zero,zero,zero,zero,zero,xmm0[6],zero,zero,zero,zero,zero,xmm0[7]
2201 ; AVX-NEXT: vpor %xmm5, %xmm4, %xmm4
2202 ; AVX-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,zero,zero,xmm1[8,u],zero,zero,zero,zero,xmm1[9,u],zero,zero,zero,zero
2203 ; AVX-NEXT: vextractf128 $1, %ymm3, %xmm3
2204 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1],xmm1[2],xmm3[3,4],xmm1[5],xmm3[6,7]
2205 ; AVX-NEXT: vpshufb %xmm14, %xmm1, %xmm1
2206 ; AVX-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,zero,zero,zero,xmm0[8],zero,zero,zero,zero,zero,xmm0[9],zero,zero,zero,zero
2207 ; AVX-NEXT: vpor %xmm0, %xmm1, %xmm0
2208 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
2209 ; AVX-NEXT: vmovdqa %xmm0, 48(%rax)
2210 ; AVX-NEXT: vmovdqa %xmm4, 32(%rax)
2211 ; AVX-NEXT: vmovdqa %xmm7, (%rax)
2212 ; AVX-NEXT: vmovdqa %xmm2, 16(%rax)
2213 ; AVX-NEXT: vmovdqa %xmm10, 112(%rax)
2214 ; AVX-NEXT: vmovdqa %xmm11, 96(%rax)
2215 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2216 ; AVX-NEXT: vmovaps %xmm0, 64(%rax)
2217 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2218 ; AVX-NEXT: vmovaps %xmm0, 80(%rax)
2219 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2220 ; AVX-NEXT: vmovaps %xmm0, 176(%rax)
2221 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2222 ; AVX-NEXT: vmovaps %xmm0, 160(%rax)
2223 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2224 ; AVX-NEXT: vmovaps %xmm0, 128(%rax)
2225 ; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2226 ; AVX-NEXT: vmovaps %xmm0, 144(%rax)
2227 ; AVX-NEXT: vzeroupper
2228 ; AVX-NEXT: retq
2230 ; AVX2-LABEL: store_i8_stride6_vf32:
2231 ; AVX2: # %bb.0:
2232 ; AVX2-NEXT: pushq %rax
2233 ; AVX2-NEXT: vmovdqa (%rdi), %ymm1
2234 ; AVX2-NEXT: vmovdqa (%rsi), %ymm3
2235 ; AVX2-NEXT: vmovdqa (%rdx), %ymm0
2236 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2237 ; AVX2-NEXT: vmovdqa (%rcx), %ymm2
2238 ; AVX2-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2239 ; AVX2-NEXT: vmovdqa (%r8), %ymm4
2240 ; AVX2-NEXT: vpbroadcastq {{.*#+}} ymm7 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
2241 ; AVX2-NEXT: vmovdqa (%rcx), %xmm6
2242 ; AVX2-NEXT: vpshufb %xmm7, %xmm6, %xmm5
2243 ; AVX2-NEXT: vmovdqa (%rdx), %xmm8
2244 ; AVX2-NEXT: vpshufb %xmm7, %xmm8, %xmm9
2245 ; AVX2-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm9[0],xmm5[0],xmm9[1],xmm5[1],xmm9[2],xmm5[2],xmm9[3],xmm5[3],xmm9[4],xmm5[4],xmm9[5],xmm5[5],xmm9[6],xmm5[6],xmm9[7],xmm5[7]
2246 ; AVX2-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
2247 ; AVX2-NEXT: vmovdqa (%rsi), %xmm11
2248 ; AVX2-NEXT: vpbroadcastq {{.*#+}} xmm9 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
2249 ; AVX2-NEXT: vpshufb %xmm9, %xmm11, %xmm10
2250 ; AVX2-NEXT: vmovdqa (%rdi), %xmm13
2251 ; AVX2-NEXT: vpshufb %xmm9, %xmm13, %xmm9
2252 ; AVX2-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm9[8],xmm10[8],xmm9[9],xmm10[9],xmm9[10],xmm10[10],xmm9[11],xmm10[11],xmm9[12],xmm10[12],xmm9[13],xmm10[13],xmm9[14],xmm10[14],xmm9[15],xmm10[15]
2253 ; AVX2-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,0,1]
2254 ; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm10 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
2255 ; AVX2-NEXT: vpblendvb %ymm10, %ymm5, %ymm9, %ymm9
2256 ; AVX2-NEXT: vmovdqa (%r8), %xmm5
2257 ; AVX2-NEXT: vpshufb {{.*#+}} xmm12 = xmm5[6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u]
2258 ; AVX2-NEXT: vpermq {{.*#+}} ymm12 = ymm12[0,0,0,1]
2259 ; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm14 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
2260 ; AVX2-NEXT: vpblendvb %ymm14, %ymm9, %ymm12, %ymm9
2261 ; AVX2-NEXT: vpshufb %ymm7, %ymm2, %ymm12
2262 ; AVX2-NEXT: vpshufb %ymm7, %ymm0, %ymm7
2263 ; AVX2-NEXT: vpunpcklbw {{.*#+}} ymm12 = ymm7[0],ymm12[0],ymm7[1],ymm12[1],ymm7[2],ymm12[2],ymm7[3],ymm12[3],ymm7[4],ymm12[4],ymm7[5],ymm12[5],ymm7[6],ymm12[6],ymm7[7],ymm12[7],ymm7[16],ymm12[16],ymm7[17],ymm12[17],ymm7[18],ymm12[18],ymm7[19],ymm12[19],ymm7[20],ymm12[20],ymm7[21],ymm12[21],ymm7[22],ymm12[22],ymm7[23],ymm12[23]
2264 ; AVX2-NEXT: vpbroadcastq {{.*#+}} ymm7 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
2265 ; AVX2-NEXT: vpshufb %ymm7, %ymm3, %ymm15
2266 ; AVX2-NEXT: vmovdqa %ymm1, %ymm2
2267 ; AVX2-NEXT: vpshufb %ymm7, %ymm1, %ymm7
2268 ; AVX2-NEXT: vpunpcklbw {{.*#+}} ymm15 = ymm7[0],ymm15[0],ymm7[1],ymm15[1],ymm7[2],ymm15[2],ymm7[3],ymm15[3],ymm7[4],ymm15[4],ymm7[5],ymm15[5],ymm7[6],ymm15[6],ymm7[7],ymm15[7],ymm7[16],ymm15[16],ymm7[17],ymm15[17],ymm7[18],ymm15[18],ymm7[19],ymm15[19],ymm7[20],ymm15[20],ymm7[21],ymm15[21],ymm7[22],ymm15[22],ymm7[23],ymm15[23]
2269 ; AVX2-NEXT: vmovdqa (%r9), %ymm7
2270 ; AVX2-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,2,2,3]
2271 ; AVX2-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
2272 ; AVX2-NEXT: vpblendvb %ymm10, %ymm12, %ymm15, %ymm12
2273 ; AVX2-NEXT: vmovdqa (%r9), %xmm10
2274 ; AVX2-NEXT: vpshufb {{.*#+}} ymm15 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u]
2275 ; AVX2-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
2276 ; AVX2-NEXT: vpblendvb %ymm14, %ymm12, %ymm15, %ymm12
2277 ; AVX2-NEXT: vpshufb {{.*#+}} xmm14 = xmm10[u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
2278 ; AVX2-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
2279 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
2280 ; AVX2-NEXT: vpblendvb %ymm15, %ymm9, %ymm14, %ymm0
2281 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2282 ; AVX2-NEXT: vpshufb {{.*#+}} ymm14 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u]
2283 ; AVX2-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
2284 ; AVX2-NEXT: vpblendvb %ymm15, %ymm12, %ymm14, %ymm0
2285 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2286 ; AVX2-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3],xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
2287 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm14 = xmm14[0,3,2,1,4,5,6,7]
2288 ; AVX2-NEXT: vpshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,5,6,5]
2289 ; AVX2-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
2290 ; AVX2-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3],xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
2291 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm15 = xmm15[1,0,3,2,4,5,6,7]
2292 ; AVX2-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,4,4,4]
2293 ; AVX2-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,0,1]
2294 ; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm0 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
2295 ; AVX2-NEXT: vpblendvb %ymm0, %ymm14, %ymm15, %ymm14
2296 ; AVX2-NEXT: vpunpcklbw {{.*#+}} ymm15 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[4],ymm3[4],ymm1[5],ymm3[5],ymm1[6],ymm3[6],ymm1[7],ymm3[7],ymm1[16],ymm3[16],ymm1[17],ymm3[17],ymm1[18],ymm3[18],ymm1[19],ymm3[19],ymm1[20],ymm3[20],ymm1[21],ymm3[21],ymm1[22],ymm3[22],ymm1[23],ymm3[23]
2297 ; AVX2-NEXT: vmovdqa %ymm3, %ymm12
2298 ; AVX2-NEXT: vpshuflw {{.*#+}} ymm15 = ymm15[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
2299 ; AVX2-NEXT: vpshufhw {{.*#+}} ymm15 = ymm15[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
2300 ; AVX2-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
2301 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2302 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
2303 ; AVX2-NEXT: vpunpcklbw {{.*#+}} ymm9 = ymm3[0],ymm1[0],ymm3[1],ymm1[1],ymm3[2],ymm1[2],ymm3[3],ymm1[3],ymm3[4],ymm1[4],ymm3[5],ymm1[5],ymm3[6],ymm1[6],ymm3[7],ymm1[7],ymm3[16],ymm1[16],ymm3[17],ymm1[17],ymm3[18],ymm1[18],ymm3[19],ymm1[19],ymm3[20],ymm1[20],ymm3[21],ymm1[21],ymm3[22],ymm1[22],ymm3[23],ymm1[23]
2304 ; AVX2-NEXT: vpshuflw {{.*#+}} ymm9 = ymm9[1,0,3,2,4,5,6,7,9,8,11,10,12,13,14,15]
2305 ; AVX2-NEXT: vpshufhw {{.*#+}} ymm9 = ymm9[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
2306 ; AVX2-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
2307 ; AVX2-NEXT: vpblendvb %ymm0, %ymm15, %ymm9, %ymm0
2308 ; AVX2-NEXT: vpshufb {{.*#+}} xmm9 = xmm5[2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u]
2309 ; AVX2-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,0,1]
2310 ; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm15 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
2311 ; AVX2-NEXT: vpblendvb %ymm15, %ymm14, %ymm9, %ymm9
2312 ; AVX2-NEXT: vpshufb {{.*#+}} ymm14 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u,u]
2313 ; AVX2-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
2314 ; AVX2-NEXT: vpblendvb %ymm15, %ymm0, %ymm14, %ymm0
2315 ; AVX2-NEXT: vpshufb {{.*#+}} xmm14 = xmm10[u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
2316 ; AVX2-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
2317 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
2318 ; AVX2-NEXT: vpblendvb %ymm15, %ymm9, %ymm14, %ymm14
2319 ; AVX2-NEXT: vpshufb {{.*#+}} ymm9 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u]
2320 ; AVX2-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
2321 ; AVX2-NEXT: vpblendvb %ymm15, %ymm0, %ymm9, %ymm15
2322 ; AVX2-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm13[8],xmm11[8],xmm13[9],xmm11[9],xmm13[10],xmm11[10],xmm13[11],xmm11[11],xmm13[12],xmm11[12],xmm13[13],xmm11[13],xmm13[14],xmm11[14],xmm13[15],xmm11[15]
2323 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
2324 ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
2325 ; AVX2-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm8[8],xmm6[8],xmm8[9],xmm6[9],xmm8[10],xmm6[10],xmm8[11],xmm6[11],xmm8[12],xmm6[12],xmm8[13],xmm6[13],xmm8[14],xmm6[14],xmm8[15],xmm6[15]
2326 ; AVX2-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
2327 ; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
2328 ; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm8 = [0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0]
2329 ; AVX2-NEXT: vpblendvb %ymm8, %ymm0, %ymm6, %ymm0
2330 ; AVX2-NEXT: vpunpckhbw {{.*#+}} ymm2 = ymm2[8],ymm12[8],ymm2[9],ymm12[9],ymm2[10],ymm12[10],ymm2[11],ymm12[11],ymm2[12],ymm12[12],ymm2[13],ymm12[13],ymm2[14],ymm12[14],ymm2[15],ymm12[15],ymm2[24],ymm12[24],ymm2[25],ymm12[25],ymm2[26],ymm12[26],ymm2[27],ymm12[27],ymm2[28],ymm12[28],ymm2[29],ymm12[29],ymm2[30],ymm12[30],ymm2[31],ymm12[31]
2331 ; AVX2-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm3[8],ymm1[8],ymm3[9],ymm1[9],ymm3[10],ymm1[10],ymm3[11],ymm1[11],ymm3[12],ymm1[12],ymm3[13],ymm1[13],ymm3[14],ymm1[14],ymm3[15],ymm1[15],ymm3[24],ymm1[24],ymm3[25],ymm1[25],ymm3[26],ymm1[26],ymm3[27],ymm1[27],ymm3[28],ymm1[28],ymm3[29],ymm1[29],ymm3[30],ymm1[30],ymm3[31],ymm1[31]
2332 ; AVX2-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
2333 ; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2334 ; AVX2-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
2335 ; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
2336 ; AVX2-NEXT: vpblendvb %ymm8, %ymm2, %ymm1, %ymm1
2337 ; AVX2-NEXT: vpshufb {{.*#+}} xmm2 = xmm5[10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u]
2338 ; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
2339 ; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm3 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
2340 ; AVX2-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
2341 ; AVX2-NEXT: vpshufb {{.*#+}} ymm2 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31,u]
2342 ; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2343 ; AVX2-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
2344 ; AVX2-NEXT: vpshufb {{.*#+}} xmm2 = xmm10[u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
2345 ; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
2346 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm3 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
2347 ; AVX2-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
2348 ; AVX2-NEXT: vpshufb {{.*#+}} ymm2 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31]
2349 ; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
2350 ; AVX2-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
2351 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
2352 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
2353 ; AVX2-NEXT: vmovaps %ymm2, 128(%rax)
2354 ; AVX2-NEXT: vmovdqa %ymm1, 160(%rax)
2355 ; AVX2-NEXT: vmovdqa %ymm0, 64(%rax)
2356 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2357 ; AVX2-NEXT: vmovaps %ymm0, 32(%rax)
2358 ; AVX2-NEXT: vmovdqa %ymm15, 96(%rax)
2359 ; AVX2-NEXT: vmovdqa %ymm14, (%rax)
2360 ; AVX2-NEXT: popq %rax
2361 ; AVX2-NEXT: vzeroupper
2362 ; AVX2-NEXT: retq
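; Manual annotation (not autogenerated): the vpmovsxbw constants above such
; as [65535,0,0,65535,...] play the same role for AVX2 that the k-masks play
; for AVX-512BW: vpblendvb with an all-ones/all-zeros word pattern merges
; every third 16-bit lane, and the 255/0 byte patterns merge every sixth
; byte, assembling the stride-6 layout without masked moves.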
2364 ; AVX2-FP-LABEL: store_i8_stride6_vf32:
2365 ; AVX2-FP: # %bb.0:
2366 ; AVX2-FP-NEXT: subq $40, %rsp
2367 ; AVX2-FP-NEXT: vmovdqa (%rdi), %ymm2
2368 ; AVX2-FP-NEXT: vmovdqa (%rsi), %ymm6
2369 ; AVX2-FP-NEXT: vmovdqa (%rdx), %ymm3
2370 ; AVX2-FP-NEXT: vmovdqa (%rcx), %ymm4
2371 ; AVX2-FP-NEXT: vmovdqa (%r8), %ymm0
2372 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2373 ; AVX2-FP-NEXT: vpbroadcastq {{.*#+}} ymm9 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
2374 ; AVX2-FP-NEXT: vmovdqa (%rcx), %xmm1
2375 ; AVX2-FP-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2376 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm1, %xmm7
2377 ; AVX2-FP-NEXT: vmovdqa (%rdx), %xmm1
2378 ; AVX2-FP-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2379 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm1, %xmm8
2380 ; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
2381 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,0,1]
2382 ; AVX2-FP-NEXT: vmovdqa (%rsi), %xmm10
2383 ; AVX2-FP-NEXT: vpbroadcastq {{.*#+}} xmm11 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
2384 ; AVX2-FP-NEXT: vpshufb %xmm11, %xmm10, %xmm12
2385 ; AVX2-FP-NEXT: vmovdqa (%rdi), %xmm5
2386 ; AVX2-FP-NEXT: vpshufb %xmm11, %xmm5, %xmm11
2387 ; AVX2-FP-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm11[8],xmm12[8],xmm11[9],xmm12[9],xmm11[10],xmm12[10],xmm11[11],xmm12[11],xmm11[12],xmm12[12],xmm11[13],xmm12[13],xmm11[14],xmm12[14],xmm11[15],xmm12[15]
2388 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
2389 ; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm12 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
2390 ; AVX2-FP-NEXT: vpblendvb %ymm12, %ymm7, %ymm11, %ymm11
2391 ; AVX2-FP-NEXT: vmovdqa (%r8), %xmm7
2392 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm13 = xmm7[6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u]
2393 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,0,0,1]
2394 ; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm14 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
2395 ; AVX2-FP-NEXT: vpblendvb %ymm14, %ymm11, %ymm13, %ymm11
2396 ; AVX2-FP-NEXT: vpshufb %ymm9, %ymm4, %ymm13
2397 ; AVX2-FP-NEXT: vpshufb %ymm9, %ymm3, %ymm9
2398 ; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} ymm13 = ymm9[0],ymm13[0],ymm9[1],ymm13[1],ymm9[2],ymm13[2],ymm9[3],ymm13[3],ymm9[4],ymm13[4],ymm9[5],ymm13[5],ymm9[6],ymm13[6],ymm9[7],ymm13[7],ymm9[16],ymm13[16],ymm9[17],ymm13[17],ymm9[18],ymm13[18],ymm9[19],ymm13[19],ymm9[20],ymm13[20],ymm9[21],ymm13[21],ymm9[22],ymm13[22],ymm9[23],ymm13[23]
2399 ; AVX2-FP-NEXT: vpbroadcastq {{.*#+}} ymm9 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
2400 ; AVX2-FP-NEXT: vpshufb %ymm9, %ymm6, %ymm15
2401 ; AVX2-FP-NEXT: vpshufb %ymm9, %ymm2, %ymm9
2402 ; AVX2-FP-NEXT: vmovdqa %ymm2, %ymm8
2403 ; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} ymm15 = ymm9[0],ymm15[0],ymm9[1],ymm15[1],ymm9[2],ymm15[2],ymm9[3],ymm15[3],ymm9[4],ymm15[4],ymm9[5],ymm15[5],ymm9[6],ymm15[6],ymm9[7],ymm15[7],ymm9[16],ymm15[16],ymm9[17],ymm15[17],ymm9[18],ymm15[18],ymm9[19],ymm15[19],ymm9[20],ymm15[20],ymm9[21],ymm15[21],ymm9[22],ymm15[22],ymm9[23],ymm15[23]
2404 ; AVX2-FP-NEXT: vmovdqa (%r9), %ymm1
2405 ; AVX2-FP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2406 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,2,2,3]
2407 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
2408 ; AVX2-FP-NEXT: vpblendvb %ymm12, %ymm13, %ymm15, %ymm13
2409 ; AVX2-FP-NEXT: vmovdqa (%r9), %xmm12
2410 ; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm15 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u]
2411 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
2412 ; AVX2-FP-NEXT: vpblendvb %ymm14, %ymm13, %ymm15, %ymm13
2413 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm14 = xmm12[u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
2414 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
2415 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
2416 ; AVX2-FP-NEXT: vpblendvb %ymm15, %ymm11, %ymm14, %ymm0
2417 ; AVX2-FP-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
2418 ; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm14 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u]
2419 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
2420 ; AVX2-FP-NEXT: vpblendvb %ymm15, %ymm13, %ymm14, %ymm0
2421 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2422 ; AVX2-FP-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm5[8],xmm10[8],xmm5[9],xmm10[9],xmm5[10],xmm10[10],xmm5[11],xmm10[11],xmm5[12],xmm10[12],xmm5[13],xmm10[13],xmm5[14],xmm10[14],xmm5[15],xmm10[15]
2423 ; AVX2-FP-NEXT: vmovdqa %xmm10, %xmm13
2424 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
2425 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
2426 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
2427 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2428 ; AVX2-FP-NEXT: vpunpckhbw {{.*#+}} xmm15 = xmm1[8],xmm2[8],xmm1[9],xmm2[9],xmm1[10],xmm2[10],xmm1[11],xmm2[11],xmm1[12],xmm2[12],xmm1[13],xmm2[13],xmm1[14],xmm2[14],xmm1[15],xmm2[15]
2429 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm15 = xmm15[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
2430 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,0,1]
2431 ; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm0 = [0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0]
2432 ; AVX2-FP-NEXT: vpblendvb %ymm0, %ymm14, %ymm15, %ymm14
2433 ; AVX2-FP-NEXT: vpunpckhbw {{.*#+}} ymm15 = ymm8[8],ymm6[8],ymm8[9],ymm6[9],ymm8[10],ymm6[10],ymm8[11],ymm6[11],ymm8[12],ymm6[12],ymm8[13],ymm6[13],ymm8[14],ymm6[14],ymm8[15],ymm6[15],ymm8[24],ymm6[24],ymm8[25],ymm6[25],ymm8[26],ymm6[26],ymm8[27],ymm6[27],ymm8[28],ymm6[28],ymm8[29],ymm6[29],ymm8[30],ymm6[30],ymm8[31],ymm6[31]
2434 ; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm15 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
2435 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
2436 ; AVX2-FP-NEXT: vpunpckhbw {{.*#+}} ymm11 = ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15],ymm3[24],ymm4[24],ymm3[25],ymm4[25],ymm3[26],ymm4[26],ymm3[27],ymm4[27],ymm3[28],ymm4[28],ymm3[29],ymm4[29],ymm3[30],ymm4[30],ymm3[31],ymm4[31]
2437 ; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
2438 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
2439 ; AVX2-FP-NEXT: vpblendvb %ymm0, %ymm15, %ymm11, %ymm0
2440 ; AVX2-FP-NEXT: vmovdqa %xmm7, %xmm9
2441 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm11 = xmm7[10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u]
2442 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
2443 ; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm15 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
2444 ; AVX2-FP-NEXT: vpblendvb %ymm15, %ymm14, %ymm11, %ymm11
2445 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
2446 ; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm14 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31,u]
2447 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
2448 ; AVX2-FP-NEXT: vpblendvb %ymm15, %ymm0, %ymm14, %ymm0
2449 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm14 = xmm12[u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
2450 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
2451 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm15 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
2452 ; AVX2-FP-NEXT: vpblendvb %ymm15, %ymm11, %ymm14, %ymm14
2453 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm11 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm15, %ymm0, %ymm11, %ymm15
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm5[0],xmm13[0],xmm5[1],xmm13[1],xmm5[2],xmm13[2],xmm5[3],xmm13[3],xmm5[4],xmm13[4],xmm5[5],xmm13[5],xmm5[6],xmm13[6],xmm5[7],xmm13[7]
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[2,3,0,1,6,7,4,5,8,9,8,9,8,9,8,9]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm11 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
; AVX2-FP-NEXT: vpblendvb %ymm11, %ymm0, %ymm5, %ymm0
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm8[0],ymm6[0],ymm8[1],ymm6[1],ymm8[2],ymm6[2],ymm8[3],ymm6[3],ymm8[4],ymm6[4],ymm8[5],ymm6[5],ymm8[6],ymm6[6],ymm8[7],ymm6[7],ymm8[16],ymm6[16],ymm8[17],ymm6[17],ymm8[18],ymm6[18],ymm8[19],ymm6[19],ymm8[20],ymm6[20],ymm8[21],ymm6[21],ymm8[22],ymm6[22],ymm8[23],ymm6[23]
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} ymm1 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[16],ymm4[16],ymm3[17],ymm4[17],ymm3[18],ymm4[18],ymm3[19],ymm4[19],ymm3[20],ymm4[20],ymm3[21],ymm4[21],ymm3[22],ymm4[22],ymm3[23],ymm4[23]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm11, %ymm2, %ymm1, %ymm1
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm2 = xmm9[2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm3 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX2-FP-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm2 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u,u]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm2 = xmm12[u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; AVX2-FP-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm2 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm2, 128(%rax)
; AVX2-FP-NEXT: vmovdqa %ymm1, 96(%rax)
; AVX2-FP-NEXT: vmovdqa %ymm15, 160(%rax)
; AVX2-FP-NEXT: vmovdqa %ymm14, 64(%rax)
; AVX2-FP-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rax)
; AVX2-FP-NEXT: vmovdqa %ymm0, (%rax)
; AVX2-FP-NEXT: addq $40, %rsp
; AVX2-FP-NEXT: vzeroupper
; AVX2-FP-NEXT: retq

; AVX2-FCP-LABEL: store_i8_stride6_vf32:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: subq $40, %rsp
; AVX2-FCP-NEXT: vmovdqa (%rdi), %ymm2
; AVX2-FCP-NEXT: vmovdqa (%rsi), %ymm6
; AVX2-FCP-NEXT: vmovdqa (%rdx), %ymm3
; AVX2-FCP-NEXT: vmovdqa (%rcx), %ymm4
; AVX2-FCP-NEXT: vmovdqa (%r8), %ymm0
; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm9 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX2-FCP-NEXT: vmovdqa (%rcx), %xmm1
; AVX2-FCP-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vpshufb %xmm9, %xmm1, %xmm7
; AVX2-FCP-NEXT: vmovdqa (%rdx), %xmm1
; AVX2-FCP-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vpshufb %xmm9, %xmm1, %xmm8
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,0,1]
; AVX2-FCP-NEXT: vmovdqa (%rsi), %xmm10
; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} xmm11 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX2-FCP-NEXT: vpshufb %xmm11, %xmm10, %xmm12
; AVX2-FCP-NEXT: vmovdqa (%rdi), %xmm5
; AVX2-FCP-NEXT: vpshufb %xmm11, %xmm5, %xmm11
; AVX2-FCP-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm11[8],xmm12[8],xmm11[9],xmm12[9],xmm11[10],xmm12[10],xmm11[11],xmm12[11],xmm11[12],xmm12[12],xmm11[13],xmm12[13],xmm11[14],xmm12[14],xmm11[15],xmm12[15]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm12 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
; AVX2-FCP-NEXT: vpblendvb %ymm12, %ymm7, %ymm11, %ymm11
; AVX2-FCP-NEXT: vmovdqa (%r8), %xmm7
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm13 = xmm7[6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm13 = ymm13[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm14 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX2-FCP-NEXT: vpblendvb %ymm14, %ymm11, %ymm13, %ymm11
; AVX2-FCP-NEXT: vpshufb %ymm9, %ymm4, %ymm13
; AVX2-FCP-NEXT: vpshufb %ymm9, %ymm3, %ymm9
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} ymm13 = ymm9[0],ymm13[0],ymm9[1],ymm13[1],ymm9[2],ymm13[2],ymm9[3],ymm13[3],ymm9[4],ymm13[4],ymm9[5],ymm13[5],ymm9[6],ymm13[6],ymm9[7],ymm13[7],ymm9[16],ymm13[16],ymm9[17],ymm13[17],ymm9[18],ymm13[18],ymm9[19],ymm13[19],ymm9[20],ymm13[20],ymm9[21],ymm13[21],ymm9[22],ymm13[22],ymm9[23],ymm13[23]
; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm9 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX2-FCP-NEXT: vpshufb %ymm9, %ymm6, %ymm15
; AVX2-FCP-NEXT: vpshufb %ymm9, %ymm2, %ymm9
; AVX2-FCP-NEXT: vmovdqa %ymm2, %ymm8
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} ymm15 = ymm9[0],ymm15[0],ymm9[1],ymm15[1],ymm9[2],ymm15[2],ymm9[3],ymm15[3],ymm9[4],ymm15[4],ymm9[5],ymm15[5],ymm9[6],ymm15[6],ymm9[7],ymm15[7],ymm9[16],ymm15[16],ymm9[17],ymm15[17],ymm9[18],ymm15[18],ymm9[19],ymm15[19],ymm9[20],ymm15[20],ymm9[21],ymm15[21],ymm9[22],ymm15[22],ymm9[23],ymm15[23]
; AVX2-FCP-NEXT: vmovdqa (%r9), %ymm1
; AVX2-FCP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,2,2,3]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm12, %ymm13, %ymm15, %ymm13
; AVX2-FCP-NEXT: vmovdqa (%r9), %xmm12
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm15 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm14, %ymm13, %ymm15, %ymm13
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm14 = xmm12[u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm15 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; AVX2-FCP-NEXT: vpblendvb %ymm15, %ymm11, %ymm14, %ymm0
; AVX2-FCP-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm14 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm15, %ymm13, %ymm14, %ymm0
; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm5[8],xmm10[8],xmm5[9],xmm10[9],xmm5[10],xmm10[10],xmm5[11],xmm10[11],xmm5[12],xmm10[12],xmm5[13],xmm10[13],xmm5[14],xmm10[14],xmm5[15],xmm10[15]
; AVX2-FCP-NEXT: vmovdqa %xmm10, %xmm13
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
; AVX2-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
; AVX2-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; AVX2-FCP-NEXT: vpunpckhbw {{.*#+}} xmm15 = xmm1[8],xmm2[8],xmm1[9],xmm2[9],xmm1[10],xmm2[10],xmm1[11],xmm2[11],xmm1[12],xmm2[12],xmm1[13],xmm2[13],xmm1[14],xmm2[14],xmm1[15],xmm2[15]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm15 = xmm15[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm15 = ymm15[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm0 = [0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0]
; AVX2-FCP-NEXT: vpblendvb %ymm0, %ymm14, %ymm15, %ymm14
; AVX2-FCP-NEXT: vpunpckhbw {{.*#+}} ymm15 = ymm8[8],ymm6[8],ymm8[9],ymm6[9],ymm8[10],ymm6[10],ymm8[11],ymm6[11],ymm8[12],ymm6[12],ymm8[13],ymm6[13],ymm8[14],ymm6[14],ymm8[15],ymm6[15],ymm8[24],ymm6[24],ymm8[25],ymm6[25],ymm8[26],ymm6[26],ymm8[27],ymm6[27],ymm8[28],ymm6[28],ymm8[29],ymm6[29],ymm8[30],ymm6[30],ymm8[31],ymm6[31]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm15 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
; AVX2-FCP-NEXT: vpunpckhbw {{.*#+}} ymm11 = ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15],ymm3[24],ymm4[24],ymm3[25],ymm4[25],ymm3[26],ymm4[26],ymm3[27],ymm4[27],ymm3[28],ymm4[28],ymm3[29],ymm4[29],ymm3[30],ymm4[30],ymm3[31],ymm4[31]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm0, %ymm15, %ymm11, %ymm0
; AVX2-FCP-NEXT: vmovdqa %xmm7, %xmm9
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm11 = xmm7[10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm15 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
; AVX2-FCP-NEXT: vpblendvb %ymm15, %ymm14, %ymm11, %ymm11
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm14 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31,u]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm14 = ymm14[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm15, %ymm0, %ymm14, %ymm0
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm14 = xmm12[u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm15 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
; AVX2-FCP-NEXT: vpblendvb %ymm15, %ymm11, %ymm14, %ymm14
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm11 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,u,29,u,28,u,27,u,30,u,u,u,u,u,31]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm15, %ymm0, %ymm11, %ymm15
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm5[0],xmm13[0],xmm5[1],xmm13[1],xmm5[2],xmm13[2],xmm5[3],xmm13[3],xmm5[4],xmm13[4],xmm5[5],xmm13[5],xmm5[6],xmm13[6],xmm5[7],xmm13[7]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[2,3,0,1,6,7,4,5,8,9,8,9,8,9,8,9]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm11 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
; AVX2-FCP-NEXT: vpblendvb %ymm11, %ymm0, %ymm5, %ymm0
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm8[0],ymm6[0],ymm8[1],ymm6[1],ymm8[2],ymm6[2],ymm8[3],ymm6[3],ymm8[4],ymm6[4],ymm8[5],ymm6[5],ymm8[6],ymm6[6],ymm8[7],ymm6[7],ymm8[16],ymm6[16],ymm8[17],ymm6[17],ymm8[18],ymm6[18],ymm8[19],ymm6[19],ymm8[20],ymm6[20],ymm8[21],ymm6[21],ymm8[22],ymm6[22],ymm8[23],ymm6[23]
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} ymm1 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[4],ymm4[4],ymm3[5],ymm4[5],ymm3[6],ymm4[6],ymm3[7],ymm4[7],ymm3[16],ymm4[16],ymm3[17],ymm4[17],ymm3[18],ymm4[18],ymm3[19],ymm4[19],ymm3[20],ymm4[20],ymm3[21],ymm4[21],ymm3[22],ymm4[22],ymm3[23],ymm4[23]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm11, %ymm2, %ymm1, %ymm1
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm9[2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm3 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX2-FCP-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm2 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u,u]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm12[u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm3 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; AVX2-FCP-NEXT: vpblendvb %ymm3, %ymm0, %ymm2, %ymm0
; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm2 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,u,17,u,16,u,19,u,u,u,u,u,20,u,u]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm3, %ymm1, %ymm2, %ymm1
; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm2, 128(%rax)
; AVX2-FCP-NEXT: vmovdqa %ymm1, 96(%rax)
; AVX2-FCP-NEXT: vmovdqa %ymm15, 160(%rax)
; AVX2-FCP-NEXT: vmovdqa %ymm14, 64(%rax)
; AVX2-FCP-NEXT: vmovups (%rsp), %ymm1 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%rax)
; AVX2-FCP-NEXT: vmovdqa %ymm0, (%rax)
; AVX2-FCP-NEXT: addq $40, %rsp
; AVX2-FCP-NEXT: vzeroupper
; AVX2-FCP-NEXT: retq

; AVX512-LABEL: store_i8_stride6_vf32:
; AVX512: # %bb.0:
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa (%rdi), %ymm3
; AVX512-NEXT: vmovdqa (%rsi), %ymm5
; AVX512-NEXT: vmovdqa (%rdx), %ymm2
; AVX512-NEXT: vmovdqa (%rcx), %ymm4
; AVX512-NEXT: vmovdqa (%r8), %ymm0
; AVX512-NEXT: vmovdqa (%r9), %ymm1
; AVX512-NEXT: vmovdqa (%rcx), %xmm7
; AVX512-NEXT: vmovdqa (%rdx), %xmm8
; AVX512-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
; AVX512-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX512-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm9 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[4],ymm4[4],ymm2[5],ymm4[5],ymm2[6],ymm4[6],ymm2[7],ymm4[7],ymm2[16],ymm4[16],ymm2[17],ymm4[17],ymm2[18],ymm4[18],ymm2[19],ymm4[19],ymm2[20],ymm4[20],ymm2[21],ymm4[21],ymm2[22],ymm4[22],ymm2[23],ymm4[23]
; AVX512-NEXT: vprold $16, %ymm9, %ymm9
; AVX512-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
; AVX512-NEXT: vinserti64x4 $1, %ymm9, %zmm6, %zmm6
; AVX512-NEXT: vmovdqa (%rsi), %xmm9
; AVX512-NEXT: vmovdqa (%rdi), %xmm10
; AVX512-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm10[8],xmm9[8],xmm10[9],xmm9[9],xmm10[10],xmm9[10],xmm10[11],xmm9[11],xmm10[12],xmm9[12],xmm10[13],xmm9[13],xmm10[14],xmm9[14],xmm10[15],xmm9[15]
; AVX512-NEXT: vpshufb {{.*#+}} xmm11 = xmm11[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX512-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm12 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[4],ymm5[4],ymm3[5],ymm5[5],ymm3[6],ymm5[6],ymm3[7],ymm5[7],ymm3[16],ymm5[16],ymm3[17],ymm5[17],ymm3[18],ymm5[18],ymm3[19],ymm5[19],ymm3[20],ymm5[20],ymm3[21],ymm5[21],ymm3[22],ymm5[22],ymm3[23],ymm5[23]
; AVX512-NEXT: vpshuflw {{.*#+}} ymm12 = ymm12[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
; AVX512-NEXT: vpshufhw {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
; AVX512-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,2,2,3]
; AVX512-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm13
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm14 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX512-NEXT: vpternlogd {{.*#+}} zmm13 = zmm6 ^ (zmm14 & (zmm13 ^ zmm6))
; AVX512-NEXT: vmovdqa (%r9), %xmm11
; AVX512-NEXT: vmovdqa (%r8), %xmm12
; AVX512-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm12[8],xmm11[8],xmm12[9],xmm11[9],xmm12[10],xmm11[10],xmm12[11],xmm11[11],xmm12[12],xmm11[12],xmm12[13],xmm11[13],xmm12[14],xmm11[14],xmm12[15],xmm11[15]
; AVX512-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[4,5,10,11,8,9,6,7,12,13,10,11,12,13,14,15]
; AVX512-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm15 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
; AVX512-NEXT: vpshuflw {{.*#+}} ymm15 = ymm15[2,1,0,3,4,5,6,7,10,9,8,11,12,13,14,15]
; AVX512-NEXT: vpshufhw {{.*#+}} ymm15 = ymm15[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
; AVX512-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
; AVX512-NEXT: vinserti64x4 $1, %ymm15, %zmm6, %zmm6
; AVX512-NEXT: vpternlogd {{.*#+}} zmm6 = zmm6 ^ (mem & (zmm6 ^ zmm13))
; AVX512-NEXT: vpbroadcastq {{.*#+}} xmm13 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512-NEXT: vpshufb %xmm13, %xmm9, %xmm15
; AVX512-NEXT: vpshufb %xmm13, %xmm10, %xmm13
; AVX512-NEXT: vpunpckhbw {{.*#+}} xmm13 = xmm13[8],xmm15[8],xmm13[9],xmm15[9],xmm13[10],xmm15[10],xmm13[11],xmm15[11],xmm13[12],xmm15[12],xmm13[13],xmm15[13],xmm13[14],xmm15[14],xmm13[15],xmm15[15]
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
; AVX512-NEXT: vprold $16, %xmm15, %xmm15
; AVX512-NEXT: vinserti64x4 $1, %ymm13, %zmm15, %zmm13
; AVX512-NEXT: vpermq {{.*#+}} zmm13 = zmm13[0,0,0,1,4,4,4,5]
; AVX512-NEXT: vpbroadcastq {{.*#+}} ymm15 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX512-NEXT: vpshufb %xmm15, %xmm7, %xmm7
; AVX512-NEXT: vpshufb %xmm15, %xmm8, %xmm8
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3],xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
; AVX512-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[0,3,2,1,4,5,6,7]
; AVX512-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,6,5]
; AVX512-NEXT: vinserti64x4 $1, %ymm7, %zmm8, %zmm7
; AVX512-NEXT: vpermq {{.*#+}} zmm8 = zmm7[0,0,0,1,4,4,4,5]
; AVX512-NEXT: vpternlogd {{.*#+}} zmm8 = zmm13 ^ (mem & (zmm8 ^ zmm13))
; AVX512-NEXT: vpbroadcastq {{.*#+}} ymm7 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
; AVX512-NEXT: vpshufb %xmm7, %xmm11, %xmm9
; AVX512-NEXT: vpshufb %xmm7, %xmm12, %xmm10
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3],xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
; AVX512-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[2,1,0,3,4,5,6,7]
; AVX512-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,4,4,4]
; AVX512-NEXT: vinserti64x4 $1, %ymm9, %zmm10, %zmm9
; AVX512-NEXT: vpermq {{.*#+}} zmm9 = zmm9[0,0,0,1,4,4,4,5]
; AVX512-NEXT: vpternlogd {{.*#+}} zmm9 = zmm9 ^ (zmm14 & (zmm9 ^ zmm8))
; AVX512-NEXT: vpshufb %ymm15, %ymm4, %ymm8
; AVX512-NEXT: vpshufb %ymm15, %ymm2, %ymm10
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm8 = ymm10[0],ymm8[0],ymm10[1],ymm8[1],ymm10[2],ymm8[2],ymm10[3],ymm8[3],ymm10[4],ymm8[4],ymm10[5],ymm8[5],ymm10[6],ymm8[6],ymm10[7],ymm8[7],ymm10[16],ymm8[16],ymm10[17],ymm8[17],ymm10[18],ymm8[18],ymm10[19],ymm8[19],ymm10[20],ymm8[20],ymm10[21],ymm8[21],ymm10[22],ymm8[22],ymm10[23],ymm8[23]
; AVX512-NEXT: vpunpckhbw {{.*#+}} ymm10 = ymm3[8],ymm5[8],ymm3[9],ymm5[9],ymm3[10],ymm5[10],ymm3[11],ymm5[11],ymm3[12],ymm5[12],ymm3[13],ymm5[13],ymm3[14],ymm5[14],ymm3[15],ymm5[15],ymm3[24],ymm5[24],ymm3[25],ymm5[25],ymm3[26],ymm5[26],ymm3[27],ymm5[27],ymm3[28],ymm5[28],ymm3[29],ymm5[29],ymm3[30],ymm5[30],ymm3[31],ymm5[31]
; AVX512-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
; AVX512-NEXT: vinserti64x4 $1, %ymm10, %zmm8, %zmm8
; AVX512-NEXT: vpermq {{.*#+}} zmm8 = zmm8[2,2,2,3,6,6,6,7]
; AVX512-NEXT: vpbroadcastq {{.*#+}} ymm10 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512-NEXT: vpshufb %ymm10, %ymm5, %ymm5
; AVX512-NEXT: vpshufb %ymm10, %ymm3, %ymm3
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[4],ymm5[4],ymm3[5],ymm5[5],ymm3[6],ymm5[6],ymm3[7],ymm5[7],ymm3[16],ymm5[16],ymm3[17],ymm5[17],ymm3[18],ymm5[18],ymm3[19],ymm5[19],ymm3[20],ymm5[20],ymm3[21],ymm5[21],ymm3[22],ymm5[22],ymm3[23],ymm5[23]
; AVX512-NEXT: vpunpckhbw {{.*#+}} ymm2 = ymm2[8],ymm4[8],ymm2[9],ymm4[9],ymm2[10],ymm4[10],ymm2[11],ymm4[11],ymm2[12],ymm4[12],ymm2[13],ymm4[13],ymm2[14],ymm4[14],ymm2[15],ymm4[15],ymm2[24],ymm4[24],ymm2[25],ymm4[25],ymm2[26],ymm4[26],ymm2[27],ymm4[27],ymm2[28],ymm4[28],ymm2[29],ymm4[29],ymm2[30],ymm4[30],ymm2[31],ymm4[31]
; AVX512-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
; AVX512-NEXT: vpermq {{.*#+}} zmm2 = zmm2[2,2,2,3,6,6,6,7]
; AVX512-NEXT: vpternlogd {{.*#+}} zmm2 = zmm2 ^ (mem & (zmm2 ^ zmm8))
; AVX512-NEXT: vpshufb %ymm7, %ymm1, %ymm3
; AVX512-NEXT: vpshufb %ymm7, %ymm0, %ymm4
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[16],ymm3[16],ymm4[17],ymm3[17],ymm4[18],ymm3[18],ymm4[19],ymm3[19],ymm4[20],ymm3[20],ymm4[21],ymm3[21],ymm4[22],ymm3[22],ymm4[23],ymm3[23]
; AVX512-NEXT: vpunpckhbw {{.*#+}} ymm0 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
; AVX512-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,26,27,24,25,22,23,28,29,26,27,28,29,30,31]
; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
; AVX512-NEXT: vpermq {{.*#+}} zmm0 = zmm0[2,2,2,3,6,6,6,7]
; AVX512-NEXT: vpternlogd {{.*#+}} zmm0 = zmm0 ^ (mem & (zmm0 ^ zmm2))
; AVX512-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512-NEXT: vmovdqa64 %zmm9, (%rax)
; AVX512-NEXT: vmovdqa64 %zmm6, 64(%rax)
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq

; AVX512-FCP-LABEL: store_i8_stride6_vf32:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm2
; AVX512-FCP-NEXT: vmovdqa (%rsi), %ymm3
; AVX512-FCP-NEXT: vmovdqa (%rdx), %ymm4
; AVX512-FCP-NEXT: vmovdqa (%rcx), %ymm5
; AVX512-FCP-NEXT: vmovdqa (%r8), %ymm0
; AVX512-FCP-NEXT: vmovdqa (%r9), %ymm1
; AVX512-FCP-NEXT: vpbroadcastq {{.*#+}} ymm7 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX512-FCP-NEXT: vpshufb %ymm7, %ymm5, %ymm6
; AVX512-FCP-NEXT: vpshufb %ymm7, %ymm4, %ymm8
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm8[0],ymm6[0],ymm8[1],ymm6[1],ymm8[2],ymm6[2],ymm8[3],ymm6[3],ymm8[4],ymm6[4],ymm8[5],ymm6[5],ymm8[6],ymm6[6],ymm8[7],ymm6[7],ymm8[16],ymm6[16],ymm8[17],ymm6[17],ymm8[18],ymm6[18],ymm8[19],ymm6[19],ymm8[20],ymm6[20],ymm8[21],ymm6[21],ymm8[22],ymm6[22],ymm8[23],ymm6[23]
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} ymm8 = ymm2[8],ymm3[8],ymm2[9],ymm3[9],ymm2[10],ymm3[10],ymm2[11],ymm3[11],ymm2[12],ymm3[12],ymm2[13],ymm3[13],ymm2[14],ymm3[14],ymm2[15],ymm3[15],ymm2[24],ymm3[24],ymm2[25],ymm3[25],ymm2[26],ymm3[26],ymm2[27],ymm3[27],ymm2[28],ymm3[28],ymm2[29],ymm3[29],ymm2[30],ymm3[30],ymm2[31],ymm3[31]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm8, %zmm6, %zmm6
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm6 = zmm6[2,2,2,3,6,6,6,7]
; AVX512-FCP-NEXT: vpbroadcastq {{.*#+}} ymm8 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512-FCP-NEXT: vpshufb %ymm8, %ymm3, %ymm9
; AVX512-FCP-NEXT: vpshufb %ymm8, %ymm2, %ymm8
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm8 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[2],ymm9[2],ymm8[3],ymm9[3],ymm8[4],ymm9[4],ymm8[5],ymm9[5],ymm8[6],ymm9[6],ymm8[7],ymm9[7],ymm8[16],ymm9[16],ymm8[17],ymm9[17],ymm8[18],ymm9[18],ymm8[19],ymm9[19],ymm8[20],ymm9[20],ymm8[21],ymm9[21],ymm8[22],ymm9[22],ymm8[23],ymm9[23]
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15],ymm4[24],ymm5[24],ymm4[25],ymm5[25],ymm4[26],ymm5[26],ymm4[27],ymm5[27],ymm4[28],ymm5[28],ymm4[29],ymm5[29],ymm4[30],ymm5[30],ymm4[31],ymm5[31]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm8, %zmm8
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm8 = zmm8[2,2,2,3,6,6,6,7]
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm8 = zmm8 ^ (mem & (zmm8 ^ zmm6))
; AVX512-FCP-NEXT: vpbroadcastq {{.*#+}} ymm10 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
; AVX512-FCP-NEXT: vpshufb %ymm10, %ymm1, %ymm6
; AVX512-FCP-NEXT: vpshufb %ymm10, %ymm0, %ymm9
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm9[0],ymm6[0],ymm9[1],ymm6[1],ymm9[2],ymm6[2],ymm9[3],ymm6[3],ymm9[4],ymm6[4],ymm9[5],ymm6[5],ymm9[6],ymm6[6],ymm9[7],ymm6[7],ymm9[16],ymm6[16],ymm9[17],ymm6[17],ymm9[18],ymm6[18],ymm9[19],ymm6[19],ymm9[20],ymm6[20],ymm9[21],ymm6[21],ymm9[22],ymm6[22],ymm9[23],ymm6[23]
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,26,27,24,25,22,23,28,29,26,27,28,29,30,31]
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm6, %zmm6
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm6 = zmm6[2,2,2,3,6,6,6,7]
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm6 = zmm6 ^ (mem & (zmm6 ^ zmm8))
; AVX512-FCP-NEXT: vmovdqa (%rcx), %xmm9
; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm9, %xmm8
; AVX512-FCP-NEXT: vmovdqa (%rdx), %xmm11
; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm11, %xmm7
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
; AVX512-FCP-NEXT: vmovdqa (%rsi), %xmm7
; AVX512-FCP-NEXT: vmovdqa (%rdi), %xmm8
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
; AVX512-FCP-NEXT: vinserti32x4 $2, %xmm12, %zmm13, %zmm12
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm12 = zmm12[0,0,0,1,4,4,4,5]
; AVX512-FCP-NEXT: vpbroadcastq {{.*#+}} xmm13 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512-FCP-NEXT: vpshufb %xmm13, %xmm7, %xmm14
; AVX512-FCP-NEXT: vpshufb %xmm13, %xmm8, %xmm13
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} xmm13 = xmm13[8],xmm14[8],xmm13[9],xmm14[9],xmm13[10],xmm14[10],xmm13[11],xmm14[11],xmm13[12],xmm14[12],xmm13[13],xmm14[13],xmm13[14],xmm14[14],xmm13[15],xmm14[15]
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3],xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
; AVX512-FCP-NEXT: vprold $16, %xmm14, %xmm14
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm13, %zmm14, %zmm13
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm14 = zmm13[0,0,0,1,4,4,4,5]
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm14 = zmm14 ^ (mem & (zmm14 ^ zmm12))
; AVX512-FCP-NEXT: vmovdqa (%r9), %xmm12
; AVX512-FCP-NEXT: vpshufb %xmm10, %xmm12, %xmm15
; AVX512-FCP-NEXT: vmovdqa (%r8), %xmm13
; AVX512-FCP-NEXT: vpshufb %xmm10, %xmm13, %xmm10
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm10[0],xmm15[0],xmm10[1],xmm15[1],xmm10[2],xmm15[2],xmm10[3],xmm15[3],xmm10[4],xmm15[4],xmm10[5],xmm15[5],xmm10[6],xmm15[6],xmm10[7],xmm15[7]
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3],xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm15 = xmm15[4,5,2,3,0,1,6,7,8,9,8,9,8,9,8,9]
; AVX512-FCP-NEXT: vinserti32x4 $2, %xmm10, %zmm15, %zmm10
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm10 = zmm10[0,0,0,1,4,4,4,5]
; AVX512-FCP-NEXT: vmovdqa64 {{.*#+}} zmm15 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm10 = zmm10 ^ (zmm15 & (zmm10 ^ zmm14))
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[16],ymm5[16],ymm4[17],ymm5[17],ymm4[18],ymm5[18],ymm4[19],ymm5[19],ymm4[20],ymm5[20],ymm4[21],ymm5[21],ymm4[22],ymm5[22],ymm4[23],ymm5[23]
; AVX512-FCP-NEXT: vprold $16, %ymm4, %ymm4
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,0,0,1,10,10,10,11]
; AVX512-FCP-NEXT: vpermt2q %zmm4, %zmm9, %zmm5
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[4],ymm3[4],ymm2[5],ymm3[5],ymm2[6],ymm3[6],ymm2[7],ymm3[7],ymm2[16],ymm3[16],ymm2[17],ymm3[17],ymm2[18],ymm3[18],ymm2[19],ymm3[19],ymm2[20],ymm3[20],ymm2[21],ymm3[21],ymm2[22],ymm3[22],ymm2[23],ymm3[23]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm9, %zmm3
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm3 = zmm5 ^ (zmm15 & (zmm3 ^ zmm5))
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,18,19,16,17,22,23,24,25,24,25,24,25,24,25]
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[4,5,10,11,8,9,6,7,12,13,10,11,12,13,14,15]
; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm1 = zmm1 ^ (mem & (zmm1 ^ zmm3))
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 64(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm10, (%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
; AVX512-FCP-NEXT: vzeroupper
; AVX512-FCP-NEXT: retq

; AVX512DQ-LABEL: store_i8_stride6_vf32:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm3
; AVX512DQ-NEXT: vmovdqa (%rsi), %ymm5
; AVX512DQ-NEXT: vmovdqa (%rdx), %ymm2
; AVX512DQ-NEXT: vmovdqa (%rcx), %ymm4
; AVX512DQ-NEXT: vmovdqa (%r8), %ymm0
; AVX512DQ-NEXT: vmovdqa (%r9), %ymm1
; AVX512DQ-NEXT: vmovdqa (%rcx), %xmm7
; AVX512DQ-NEXT: vmovdqa (%rdx), %xmm8
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm9 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[4],ymm4[4],ymm2[5],ymm4[5],ymm2[6],ymm4[6],ymm2[7],ymm4[7],ymm2[16],ymm4[16],ymm2[17],ymm4[17],ymm2[18],ymm4[18],ymm2[19],ymm4[19],ymm2[20],ymm4[20],ymm2[21],ymm4[21],ymm2[22],ymm4[22],ymm2[23],ymm4[23]
; AVX512DQ-NEXT: vprold $16, %ymm9, %ymm9
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm9, %zmm6, %zmm6
; AVX512DQ-NEXT: vmovdqa (%rsi), %xmm9
; AVX512DQ-NEXT: vmovdqa (%rdi), %xmm10
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm10[8],xmm9[8],xmm10[9],xmm9[9],xmm10[10],xmm9[10],xmm10[11],xmm9[11],xmm10[12],xmm9[12],xmm10[13],xmm9[13],xmm10[14],xmm9[14],xmm10[15],xmm9[15]
; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm11 = xmm11[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm12 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[4],ymm5[4],ymm3[5],ymm5[5],ymm3[6],ymm5[6],ymm3[7],ymm5[7],ymm3[16],ymm5[16],ymm3[17],ymm5[17],ymm3[18],ymm5[18],ymm3[19],ymm5[19],ymm3[20],ymm5[20],ymm3[21],ymm5[21],ymm3[22],ymm5[22],ymm3[23],ymm5[23]
; AVX512DQ-NEXT: vpshuflw {{.*#+}} ymm12 = ymm12[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
; AVX512DQ-NEXT: vpshufhw {{.*#+}} ymm12 = ymm12[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm12 = ymm12[2,2,2,3]
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm12, %zmm11, %zmm13
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm14 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm13 = zmm6 ^ (zmm14 & (zmm13 ^ zmm6))
; AVX512DQ-NEXT: vmovdqa (%r9), %xmm11
; AVX512DQ-NEXT: vmovdqa (%r8), %xmm12
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm12[8],xmm11[8],xmm12[9],xmm11[9],xmm12[10],xmm11[10],xmm12[11],xmm11[11],xmm12[12],xmm11[12],xmm12[13],xmm11[13],xmm12[14],xmm11[14],xmm12[15],xmm11[15]
; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[4,5,10,11,8,9,6,7,12,13,10,11,12,13,14,15]
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm15 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
; AVX512DQ-NEXT: vpshuflw {{.*#+}} ymm15 = ymm15[2,1,0,3,4,5,6,7,10,9,8,11,12,13,14,15]
; AVX512DQ-NEXT: vpshufhw {{.*#+}} ymm15 = ymm15[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
; AVX512DQ-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm15, %zmm6, %zmm6
; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm6 = zmm6 ^ (mem & (zmm6 ^ zmm13))
; AVX512DQ-NEXT: vpbroadcastq {{.*#+}} xmm13 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512DQ-NEXT: vpshufb %xmm13, %xmm9, %xmm15
; AVX512DQ-NEXT: vpshufb %xmm13, %xmm10, %xmm13
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} xmm13 = xmm13[8],xmm15[8],xmm13[9],xmm15[9],xmm13[10],xmm15[10],xmm13[11],xmm15[11],xmm13[12],xmm15[12],xmm13[13],xmm15[13],xmm13[14],xmm15[14],xmm13[15],xmm15[15]
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
; AVX512DQ-NEXT: vprold $16, %xmm15, %xmm15
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm13, %zmm15, %zmm13
; AVX512DQ-NEXT: vpermq {{.*#+}} zmm13 = zmm13[0,0,0,1,4,4,4,5]
; AVX512DQ-NEXT: vpbroadcastq {{.*#+}} ymm15 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX512DQ-NEXT: vpshufb %xmm15, %xmm7, %xmm7
; AVX512DQ-NEXT: vpshufb %xmm15, %xmm8, %xmm8
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3],xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[0,3,2,1,4,5,6,7]
; AVX512DQ-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,6,5]
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm7, %zmm8, %zmm7
; AVX512DQ-NEXT: vpermq {{.*#+}} zmm8 = zmm7[0,0,0,1,4,4,4,5]
; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm8 = zmm13 ^ (mem & (zmm8 ^ zmm13))
; AVX512DQ-NEXT: vpbroadcastq {{.*#+}} ymm7 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
; AVX512DQ-NEXT: vpshufb %xmm7, %xmm11, %xmm9
; AVX512DQ-NEXT: vpshufb %xmm7, %xmm12, %xmm10
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3],xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[2,1,0,3,4,5,6,7]
; AVX512DQ-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,4,4,4]
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm9, %zmm10, %zmm9
; AVX512DQ-NEXT: vpermq {{.*#+}} zmm9 = zmm9[0,0,0,1,4,4,4,5]
; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm9 = zmm9 ^ (zmm14 & (zmm9 ^ zmm8))
; AVX512DQ-NEXT: vpshufb %ymm15, %ymm4, %ymm8
; AVX512DQ-NEXT: vpshufb %ymm15, %ymm2, %ymm10
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm8 = ymm10[0],ymm8[0],ymm10[1],ymm8[1],ymm10[2],ymm8[2],ymm10[3],ymm8[3],ymm10[4],ymm8[4],ymm10[5],ymm8[5],ymm10[6],ymm8[6],ymm10[7],ymm8[7],ymm10[16],ymm8[16],ymm10[17],ymm8[17],ymm10[18],ymm8[18],ymm10[19],ymm8[19],ymm10[20],ymm8[20],ymm10[21],ymm8[21],ymm10[22],ymm8[22],ymm10[23],ymm8[23]
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} ymm10 = ymm3[8],ymm5[8],ymm3[9],ymm5[9],ymm3[10],ymm5[10],ymm3[11],ymm5[11],ymm3[12],ymm5[12],ymm3[13],ymm5[13],ymm3[14],ymm5[14],ymm3[15],ymm5[15],ymm3[24],ymm5[24],ymm3[25],ymm5[25],ymm3[26],ymm5[26],ymm3[27],ymm5[27],ymm3[28],ymm5[28],ymm3[29],ymm5[29],ymm3[30],ymm5[30],ymm3[31],ymm5[31]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm10, %zmm8, %zmm8
; AVX512DQ-NEXT: vpermq {{.*#+}} zmm8 = zmm8[2,2,2,3,6,6,6,7]
; AVX512DQ-NEXT: vpbroadcastq {{.*#+}} ymm10 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512DQ-NEXT: vpshufb %ymm10, %ymm5, %ymm5
; AVX512DQ-NEXT: vpshufb %ymm10, %ymm3, %ymm3
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[4],ymm5[4],ymm3[5],ymm5[5],ymm3[6],ymm5[6],ymm3[7],ymm5[7],ymm3[16],ymm5[16],ymm3[17],ymm5[17],ymm3[18],ymm5[18],ymm3[19],ymm5[19],ymm3[20],ymm5[20],ymm3[21],ymm5[21],ymm3[22],ymm5[22],ymm3[23],ymm5[23]
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} ymm2 = ymm2[8],ymm4[8],ymm2[9],ymm4[9],ymm2[10],ymm4[10],ymm2[11],ymm4[11],ymm2[12],ymm4[12],ymm2[13],ymm4[13],ymm2[14],ymm4[14],ymm2[15],ymm4[15],ymm2[24],ymm4[24],ymm2[25],ymm4[25],ymm2[26],ymm4[26],ymm2[27],ymm4[27],ymm2[28],ymm4[28],ymm2[29],ymm4[29],ymm2[30],ymm4[30],ymm2[31],ymm4[31]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
; AVX512DQ-NEXT: vpermq {{.*#+}} zmm2 = zmm2[2,2,2,3,6,6,6,7]
; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm2 = zmm2 ^ (mem & (zmm2 ^ zmm8))
; AVX512DQ-NEXT: vpshufb %ymm7, %ymm1, %ymm3
; AVX512DQ-NEXT: vpshufb %ymm7, %ymm0, %ymm4
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[16],ymm3[16],ymm4[17],ymm3[17],ymm4[18],ymm3[18],ymm4[19],ymm3[19],ymm4[20],ymm3[20],ymm4[21],ymm3[21],ymm4[22],ymm3[22],ymm4[23],ymm3[23]
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} ymm0 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,26,27,24,25,22,23,28,29,26,27,28,29,30,31]
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
; AVX512DQ-NEXT: vpermq {{.*#+}} zmm0 = zmm0[2,2,2,3,6,6,6,7]
; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm0 = zmm0 ^ (mem & (zmm0 ^ zmm2))
; AVX512DQ-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512DQ-NEXT: vmovdqa64 %zmm9, (%rax)
; AVX512DQ-NEXT: vmovdqa64 %zmm6, 64(%rax)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq

; AVX512DQ-FCP-LABEL: store_i8_stride6_vf32:
; AVX512DQ-FCP: # %bb.0:
; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm2
; AVX512DQ-FCP-NEXT: vmovdqa (%rsi), %ymm3
; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %ymm4
; AVX512DQ-FCP-NEXT: vmovdqa (%rcx), %ymm5
; AVX512DQ-FCP-NEXT: vmovdqa (%r8), %ymm0
; AVX512DQ-FCP-NEXT: vmovdqa (%r9), %ymm1
; AVX512DQ-FCP-NEXT: vpbroadcastq {{.*#+}} ymm7 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX512DQ-FCP-NEXT: vpshufb %ymm7, %ymm5, %ymm6
; AVX512DQ-FCP-NEXT: vpshufb %ymm7, %ymm4, %ymm8
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm8[0],ymm6[0],ymm8[1],ymm6[1],ymm8[2],ymm6[2],ymm8[3],ymm6[3],ymm8[4],ymm6[4],ymm8[5],ymm6[5],ymm8[6],ymm6[6],ymm8[7],ymm6[7],ymm8[16],ymm6[16],ymm8[17],ymm6[17],ymm8[18],ymm6[18],ymm8[19],ymm6[19],ymm8[20],ymm6[20],ymm8[21],ymm6[21],ymm8[22],ymm6[22],ymm8[23],ymm6[23]
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} ymm8 = ymm2[8],ymm3[8],ymm2[9],ymm3[9],ymm2[10],ymm3[10],ymm2[11],ymm3[11],ymm2[12],ymm3[12],ymm2[13],ymm3[13],ymm2[14],ymm3[14],ymm2[15],ymm3[15],ymm2[24],ymm3[24],ymm2[25],ymm3[25],ymm2[26],ymm3[26],ymm2[27],ymm3[27],ymm2[28],ymm3[28],ymm2[29],ymm3[29],ymm2[30],ymm3[30],ymm2[31],ymm3[31]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm8, %zmm6, %zmm6
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm6 = zmm6[2,2,2,3,6,6,6,7]
; AVX512DQ-FCP-NEXT: vpbroadcastq {{.*#+}} ymm8 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512DQ-FCP-NEXT: vpshufb %ymm8, %ymm3, %ymm9
; AVX512DQ-FCP-NEXT: vpshufb %ymm8, %ymm2, %ymm8
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm8 = ymm8[0],ymm9[0],ymm8[1],ymm9[1],ymm8[2],ymm9[2],ymm8[3],ymm9[3],ymm8[4],ymm9[4],ymm8[5],ymm9[5],ymm8[6],ymm9[6],ymm8[7],ymm9[7],ymm8[16],ymm9[16],ymm8[17],ymm9[17],ymm8[18],ymm9[18],ymm8[19],ymm9[19],ymm8[20],ymm9[20],ymm8[21],ymm9[21],ymm8[22],ymm9[22],ymm8[23],ymm9[23]
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15],ymm4[24],ymm5[24],ymm4[25],ymm5[25],ymm4[26],ymm5[26],ymm4[27],ymm5[27],ymm4[28],ymm5[28],ymm4[29],ymm5[29],ymm4[30],ymm5[30],ymm4[31],ymm5[31]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm8, %zmm8
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm8 = zmm8[2,2,2,3,6,6,6,7]
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm8 = zmm8 ^ (mem & (zmm8 ^ zmm6))
; AVX512DQ-FCP-NEXT: vpbroadcastq {{.*#+}} ymm10 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
; AVX512DQ-FCP-NEXT: vpshufb %ymm10, %ymm1, %ymm6
; AVX512DQ-FCP-NEXT: vpshufb %ymm10, %ymm0, %ymm9
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm9[0],ymm6[0],ymm9[1],ymm6[1],ymm9[2],ymm6[2],ymm9[3],ymm6[3],ymm9[4],ymm6[4],ymm9[5],ymm6[5],ymm9[6],ymm6[6],ymm9[7],ymm6[7],ymm9[16],ymm6[16],ymm9[17],ymm6[17],ymm9[18],ymm6[18],ymm9[19],ymm6[19],ymm9[20],ymm6[20],ymm9[21],ymm6[21],ymm9[22],ymm6[22],ymm9[23],ymm6[23]
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,26,27,24,25,22,23,28,29,26,27,28,29,30,31]
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm6, %zmm6
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm6 = zmm6[2,2,2,3,6,6,6,7]
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm6 = zmm6 ^ (mem & (zmm6 ^ zmm8))
; AVX512DQ-FCP-NEXT: vmovdqa (%rcx), %xmm9
; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm9, %xmm8
; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %xmm11
; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm11, %xmm7
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
; AVX512DQ-FCP-NEXT: vmovdqa (%rsi), %xmm7
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %xmm8
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
; AVX512DQ-FCP-NEXT: vinserti32x4 $2, %xmm12, %zmm13, %zmm12
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm12 = zmm12[0,0,0,1,4,4,4,5]
; AVX512DQ-FCP-NEXT: vpbroadcastq {{.*#+}} xmm13 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512DQ-FCP-NEXT: vpshufb %xmm13, %xmm7, %xmm14
; AVX512DQ-FCP-NEXT: vpshufb %xmm13, %xmm8, %xmm13
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} xmm13 = xmm13[8],xmm14[8],xmm13[9],xmm14[9],xmm13[10],xmm14[10],xmm13[11],xmm14[11],xmm13[12],xmm14[12],xmm13[13],xmm14[13],xmm13[14],xmm14[14],xmm13[15],xmm14[15]
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3],xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
; AVX512DQ-FCP-NEXT: vprold $16, %xmm14, %xmm14
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm13, %zmm14, %zmm13
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm14 = zmm13[0,0,0,1,4,4,4,5]
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm14 = zmm14 ^ (mem & (zmm14 ^ zmm12))
; AVX512DQ-FCP-NEXT: vmovdqa (%r9), %xmm12
; AVX512DQ-FCP-NEXT: vpshufb %xmm10, %xmm12, %xmm15
; AVX512DQ-FCP-NEXT: vmovdqa (%r8), %xmm13
; AVX512DQ-FCP-NEXT: vpshufb %xmm10, %xmm13, %xmm10
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm10[0],xmm15[0],xmm10[1],xmm15[1],xmm10[2],xmm15[2],xmm10[3],xmm15[3],xmm10[4],xmm15[4],xmm10[5],xmm15[5],xmm10[6],xmm15[6],xmm10[7],xmm15[7]
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3],xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm15 = xmm15[4,5,2,3,0,1,6,7,8,9,8,9,8,9,8,9]
; AVX512DQ-FCP-NEXT: vinserti32x4 $2, %xmm10, %zmm15, %zmm10
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm10 = zmm10[0,0,0,1,4,4,4,5]
; AVX512DQ-FCP-NEXT: vmovdqa64 {{.*#+}} zmm15 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm10 = zmm10 ^ (zmm15 & (zmm10 ^ zmm14))
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm5[0],ymm4[1],ymm5[1],ymm4[2],ymm5[2],ymm4[3],ymm5[3],ymm4[4],ymm5[4],ymm4[5],ymm5[5],ymm4[6],ymm5[6],ymm4[7],ymm5[7],ymm4[16],ymm5[16],ymm4[17],ymm5[17],ymm4[18],ymm5[18],ymm4[19],ymm5[19],ymm4[20],ymm5[20],ymm4[21],ymm5[21],ymm4[22],ymm5[22],ymm4[23],ymm5[23]
; AVX512DQ-FCP-NEXT: vprold $16, %ymm4, %ymm4
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm9 = [0,0,0,1,10,10,10,11]
; AVX512DQ-FCP-NEXT: vpermt2q %zmm4, %zmm9, %zmm5
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[4],ymm3[4],ymm2[5],ymm3[5],ymm2[6],ymm3[6],ymm2[7],ymm3[7],ymm2[16],ymm3[16],ymm2[17],ymm3[17],ymm2[18],ymm3[18],ymm2[19],ymm3[19],ymm2[20],ymm3[20],ymm2[21],ymm3[21],ymm2[22],ymm3[22],ymm2[23],ymm3[23]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm9, %zmm3
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm3 = zmm5 ^ (zmm15 & (zmm3 ^ zmm5))
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20,21,18,19,16,17,22,23,24,25,24,25,24,25,24,25]
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[4,5,10,11,8,9,6,7,12,13,10,11,12,13,14,15]
; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm9, %zmm1
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm1 = zmm1 ^ (mem & (zmm1 ^ zmm3))
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 64(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm10, (%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, 128(%rax)
; AVX512DQ-FCP-NEXT: vzeroupper
; AVX512DQ-FCP-NEXT: retq

; AVX512BW-LABEL: store_i8_stride6_vf32:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa (%rdi), %ymm9
; AVX512BW-NEXT: vmovdqa (%rsi), %ymm10
; AVX512BW-NEXT: vmovdqa (%rdx), %ymm11
; AVX512BW-NEXT: vmovdqa (%rcx), %ymm12
; AVX512BW-NEXT: vmovdqa (%r8), %ymm7
; AVX512BW-NEXT: vmovdqa (%r9), %ymm8
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm9[0],ymm10[0],ymm9[1],ymm10[1],ymm9[2],ymm10[2],ymm9[3],ymm10[3],ymm9[4],ymm10[4],ymm9[5],ymm10[5],ymm9[6],ymm10[6],ymm9[7],ymm10[7],ymm9[16],ymm10[16],ymm9[17],ymm10[17],ymm9[18],ymm10[18],ymm9[19],ymm10[19],ymm9[20],ymm10[20],ymm9[21],ymm10[21],ymm9[22],ymm10[22],ymm9[23],ymm10[23]
; AVX512BW-NEXT: vmovdqa (%rsi), %xmm1
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm2
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm3 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
; AVX512BW-NEXT: vpermw %zmm0, %zmm3, %zmm0
; AVX512BW-NEXT: vmovdqa (%rcx), %xmm3
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm4
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm6 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7]
; AVX512BW-NEXT: vpermw %ymm5, %ymm6, %ymm5
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm11[0],ymm12[0],ymm11[1],ymm12[1],ymm11[2],ymm12[2],ymm11[3],ymm12[3],ymm11[4],ymm12[4],ymm11[5],ymm12[5],ymm11[6],ymm12[6],ymm11[7],ymm12[7],ymm11[16],ymm12[16],ymm11[17],ymm12[17],ymm11[18],ymm12[18],ymm11[19],ymm12[19],ymm11[20],ymm12[20],ymm11[21],ymm12[21],ymm11[22],ymm12[22],ymm11[23],ymm12[23]
; AVX512BW-NEXT: vprold $16, %ymm6, %ymm6
; AVX512BW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm6, %zmm5, %zmm5
; AVX512BW-NEXT: movl $613566756, %ecx # imm = 0x24924924
; AVX512BW-NEXT: kmovd %ecx, %k1
; AVX512BW-NEXT: vmovdqu16 %zmm5, %zmm0 {%k1}
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm13 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[4],ymm8[4],ymm7[5],ymm8[5],ymm7[6],ymm8[6],ymm7[7],ymm8[7],ymm7[16],ymm8[16],ymm7[17],ymm8[17],ymm7[18],ymm8[18],ymm7[19],ymm8[19],ymm7[20],ymm8[20],ymm7[21],ymm8[21],ymm7[22],ymm8[22],ymm7[23],ymm8[23]
; AVX512BW-NEXT: vmovdqa (%r9), %xmm5
; AVX512BW-NEXT: vmovdqa (%r8), %xmm6
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm6[8],xmm5[8],xmm6[9],xmm5[9],xmm6[10],xmm5[10],xmm6[11],xmm5[11],xmm6[12],xmm5[12],xmm6[13],xmm5[13],xmm6[14],xmm5[14],xmm6[15],xmm5[15]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm13, %zmm14, %zmm13
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm14 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7,26,25,24,27,26,25,24,27,26,25,24,27,28,28,28,28]
; AVX512BW-NEXT: movl $1227133513, %ecx # imm = 0x49249249
; AVX512BW-NEXT: kmovd %ecx, %k2
; AVX512BW-NEXT: vpermw %zmm13, %zmm14, %zmm0 {%k2}
; AVX512BW-NEXT: vpbroadcastq {{.*#+}} ymm13 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512BW-NEXT: vpshufb %ymm13, %ymm10, %ymm14
; AVX512BW-NEXT: vpshufb %ymm13, %ymm9, %ymm13
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm13 = ymm13[0],ymm14[0],ymm13[1],ymm14[1],ymm13[2],ymm14[2],ymm13[3],ymm14[3],ymm13[4],ymm14[4],ymm13[5],ymm14[5],ymm13[6],ymm14[6],ymm13[7],ymm14[7],ymm13[16],ymm14[16],ymm13[17],ymm14[17],ymm13[18],ymm14[18],ymm13[19],ymm14[19],ymm13[20],ymm14[20],ymm13[21],ymm14[21],ymm13[22],ymm14[22],ymm13[23],ymm14[23]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,2,2,3]
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} ymm14 = ymm11[8],ymm12[8],ymm11[9],ymm12[9],ymm11[10],ymm12[10],ymm11[11],ymm12[11],ymm11[12],ymm12[12],ymm11[13],ymm12[13],ymm11[14],ymm12[14],ymm11[15],ymm12[15],ymm11[24],ymm12[24],ymm11[25],ymm12[25],ymm11[26],ymm12[26],ymm11[27],ymm12[27],ymm11[28],ymm12[28],ymm11[29],ymm12[29],ymm11[30],ymm12[30],ymm11[31],ymm12[31]
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm15 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
; AVX512BW-NEXT: vpermw %ymm14, %ymm15, %ymm14
; AVX512BW-NEXT: vinserti64x4 $1, %ymm14, %zmm13, %zmm13
; AVX512BW-NEXT: vpbroadcastq {{.*#+}} ymm14 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX512BW-NEXT: vpshufb %ymm14, %ymm12, %ymm12
; AVX512BW-NEXT: vpshufb %ymm14, %ymm11, %ymm11
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm11 = ymm11[0],ymm12[0],ymm11[1],ymm12[1],ymm11[2],ymm12[2],ymm11[3],ymm12[3],ymm11[4],ymm12[4],ymm11[5],ymm12[5],ymm11[6],ymm12[6],ymm11[7],ymm12[7],ymm11[16],ymm12[16],ymm11[17],ymm12[17],ymm11[18],ymm12[18],ymm11[19],ymm12[19],ymm11[20],ymm12[20],ymm11[21],ymm12[21],ymm11[22],ymm12[22],ymm11[23],ymm12[23]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm9[8],ymm10[8],ymm9[9],ymm10[9],ymm9[10],ymm10[10],ymm9[11],ymm10[11],ymm9[12],ymm10[12],ymm9[13],ymm10[13],ymm9[14],ymm10[14],ymm9[15],ymm10[15],ymm9[24],ymm10[24],ymm9[25],ymm10[25],ymm9[26],ymm10[26],ymm9[27],ymm10[27],ymm9[28],ymm10[28],ymm9[29],ymm10[29],ymm9[30],ymm10[30],ymm9[31],ymm10[31]
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm10 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
; AVX512BW-NEXT: vpermw %ymm9, %ymm10, %ymm9
; AVX512BW-NEXT: vinserti64x4 $1, %ymm9, %zmm11, %zmm9
; AVX512BW-NEXT: movl $1227114788, %ecx # imm = 0x49244924
; AVX512BW-NEXT: kmovd %ecx, %k2
; AVX512BW-NEXT: vmovdqu16 %zmm13, %zmm9 {%k2}
; AVX512BW-NEXT: vpbroadcastq {{.*#+}} ymm10 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
; AVX512BW-NEXT: vpshufb %ymm10, %ymm8, %ymm11
; AVX512BW-NEXT: vpshufb %ymm10, %ymm7, %ymm12
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm11 = ymm12[0],ymm11[0],ymm12[1],ymm11[1],ymm12[2],ymm11[2],ymm12[3],ymm11[3],ymm12[4],ymm11[4],ymm12[5],ymm11[5],ymm12[6],ymm11[6],ymm12[7],ymm11[7],ymm12[16],ymm11[16],ymm12[17],ymm11[17],ymm12[18],ymm11[18],ymm12[19],ymm11[19],ymm12[20],ymm11[20],ymm12[21],ymm11[21],ymm12[22],ymm11[22],ymm12[23],ymm11[23]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} ymm7 = ymm7[8],ymm8[8],ymm7[9],ymm8[9],ymm7[10],ymm8[10],ymm7[11],ymm8[11],ymm7[12],ymm8[12],ymm7[13],ymm8[13],ymm7[14],ymm8[14],ymm7[15],ymm8[15],ymm7[24],ymm8[24],ymm7[25],ymm8[25],ymm7[26],ymm8[26],ymm7[27],ymm8[27],ymm7[28],ymm8[28],ymm7[29],ymm8[29],ymm7[30],ymm8[30],ymm7[31],ymm8[31]
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm8 = [10,13,12,11,10,13,12,11,10,13,12,11,14,13,14,15]
; AVX512BW-NEXT: vpermw %ymm7, %ymm8, %ymm7
; AVX512BW-NEXT: vinserti64x4 $1, %ymm7, %zmm11, %zmm7
; AVX512BW-NEXT: movl $-1840700270, %ecx # imm = 0x92492492
; AVX512BW-NEXT: kmovd %ecx, %k2
; AVX512BW-NEXT: vmovdqu16 %zmm7, %zmm9 {%k2}
; AVX512BW-NEXT: vpshufb %xmm14, %xmm3, %xmm7
; AVX512BW-NEXT: vpshufb %xmm14, %xmm4, %xmm8
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,0,1]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm11 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
; AVX512BW-NEXT: vpermw %ymm8, %ymm11, %ymm8
; AVX512BW-NEXT: vinserti64x4 $1, %ymm7, %zmm8, %zmm7
; AVX512BW-NEXT: vpbroadcastq {{.*#+}} xmm8 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512BW-NEXT: vpshufb %xmm8, %xmm1, %xmm1
; AVX512BW-NEXT: vpshufb %xmm8, %xmm2, %xmm2
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3],xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
; AVX512BW-NEXT: vprold $16, %xmm2, %xmm2
; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
; AVX512BW-NEXT: vpermq {{.*#+}} zmm1 = zmm1[0,0,0,1,4,4,4,5]
; AVX512BW-NEXT: movl $1227105426, %ecx # imm = 0x49242492
; AVX512BW-NEXT: kmovd %ecx, %k2
; AVX512BW-NEXT: vmovdqu16 %zmm1, %zmm7 {%k2}
; AVX512BW-NEXT: vpshufb %xmm10, %xmm5, %xmm1
; AVX512BW-NEXT: vpshufb %xmm10, %xmm6, %xmm2
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3],xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
3106 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm3 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4]
3107 ; AVX512BW-NEXT: vpermw %ymm2, %ymm3, %ymm2
3108 ; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
3109 ; AVX512BW-NEXT: vmovdqu16 %zmm1, %zmm7 {%k1}
3110 ; AVX512BW-NEXT: vmovdqa64 %zmm7, (%rax)
3111 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 128(%rax)
3112 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 64(%rax)
3113 ; AVX512BW-NEXT: vzeroupper
3114 ; AVX512BW-NEXT: retq
3115 ;
3116 ; AVX512BW-FCP-LABEL: store_i8_stride6_vf32:
3117 ; AVX512BW-FCP: # %bb.0:
3118 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3119 ; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %ymm2
3120 ; AVX512BW-FCP-NEXT: vmovdqa (%rsi), %ymm3
3121 ; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %ymm5
3122 ; AVX512BW-FCP-NEXT: vmovdqa (%rcx), %ymm6
3123 ; AVX512BW-FCP-NEXT: vmovdqa (%r8), %ymm0
3124 ; AVX512BW-FCP-NEXT: vmovdqa (%r9), %ymm1
3125 ; AVX512BW-FCP-NEXT: vpbroadcastq {{.*#+}} ymm4 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3126 ; AVX512BW-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm7
3127 ; AVX512BW-FCP-NEXT: vpshufb %ymm4, %ymm2, %ymm4
3128 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[1],ymm7[1],ymm4[2],ymm7[2],ymm4[3],ymm7[3],ymm4[4],ymm7[4],ymm4[5],ymm7[5],ymm4[6],ymm7[6],ymm4[7],ymm7[7],ymm4[16],ymm7[16],ymm4[17],ymm7[17],ymm4[18],ymm7[18],ymm4[19],ymm7[19],ymm4[20],ymm7[20],ymm4[21],ymm7[21],ymm4[22],ymm7[22],ymm4[23],ymm7[23]
3129 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3130 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm7 = ymm5[8],ymm6[8],ymm5[9],ymm6[9],ymm5[10],ymm6[10],ymm5[11],ymm6[11],ymm5[12],ymm6[12],ymm5[13],ymm6[13],ymm5[14],ymm6[14],ymm5[15],ymm6[15],ymm5[24],ymm6[24],ymm5[25],ymm6[25],ymm5[26],ymm6[26],ymm5[27],ymm6[27],ymm5[28],ymm6[28],ymm5[29],ymm6[29],ymm5[30],ymm6[30],ymm5[31],ymm6[31]
3131 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm8 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
3132 ; AVX512BW-FCP-NEXT: vpermw %ymm7, %ymm8, %ymm7
3133 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm7, %zmm4, %zmm7
3134 ; AVX512BW-FCP-NEXT: vpbroadcastq {{.*#+}} ymm8 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
3135 ; AVX512BW-FCP-NEXT: vpshufb %ymm8, %ymm6, %ymm4
3136 ; AVX512BW-FCP-NEXT: vpshufb %ymm8, %ymm5, %ymm9
3137 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm9[0],ymm4[0],ymm9[1],ymm4[1],ymm9[2],ymm4[2],ymm9[3],ymm4[3],ymm9[4],ymm4[4],ymm9[5],ymm4[5],ymm9[6],ymm4[6],ymm9[7],ymm4[7],ymm9[16],ymm4[16],ymm9[17],ymm4[17],ymm9[18],ymm4[18],ymm9[19],ymm4[19],ymm9[20],ymm4[20],ymm9[21],ymm4[21],ymm9[22],ymm4[22],ymm9[23],ymm4[23]
3138 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3139 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm2[8],ymm3[8],ymm2[9],ymm3[9],ymm2[10],ymm3[10],ymm2[11],ymm3[11],ymm2[12],ymm3[12],ymm2[13],ymm3[13],ymm2[14],ymm3[14],ymm2[15],ymm3[15],ymm2[24],ymm3[24],ymm2[25],ymm3[25],ymm2[26],ymm3[26],ymm2[27],ymm3[27],ymm2[28],ymm3[28],ymm2[29],ymm3[29],ymm2[30],ymm3[30],ymm2[31],ymm3[31]
3140 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm10 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
3141 ; AVX512BW-FCP-NEXT: vpermw %ymm9, %ymm10, %ymm9
3142 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm4, %zmm4
3143 ; AVX512BW-FCP-NEXT: movl $1227114788, %r10d # imm = 0x49244924
3144 ; AVX512BW-FCP-NEXT: kmovd %r10d, %k1
3145 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm7, %zmm4 {%k1}
3146 ; AVX512BW-FCP-NEXT: vpbroadcastq {{.*#+}} ymm7 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
3147 ; AVX512BW-FCP-NEXT: vpshufb %ymm7, %ymm1, %ymm9
3148 ; AVX512BW-FCP-NEXT: vpshufb %ymm7, %ymm0, %ymm10
3149 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm9 = ymm10[0],ymm9[0],ymm10[1],ymm9[1],ymm10[2],ymm9[2],ymm10[3],ymm9[3],ymm10[4],ymm9[4],ymm10[5],ymm9[5],ymm10[6],ymm9[6],ymm10[7],ymm9[7],ymm10[16],ymm9[16],ymm10[17],ymm9[17],ymm10[18],ymm9[18],ymm10[19],ymm9[19],ymm10[20],ymm9[20],ymm10[21],ymm9[21],ymm10[22],ymm9[22],ymm10[23],ymm9[23]
3150 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
3151 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm10 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
3152 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm11 = [10,13,12,11,10,13,12,11,10,13,12,11,14,13,14,15]
3153 ; AVX512BW-FCP-NEXT: vpermw %ymm10, %ymm11, %ymm10
3154 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm10, %zmm9, %zmm9
3155 ; AVX512BW-FCP-NEXT: movl $-1840700270, %r10d # imm = 0x92492492
3156 ; AVX512BW-FCP-NEXT: kmovd %r10d, %k1
3157 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm9, %zmm4 {%k1}
3158 ; AVX512BW-FCP-NEXT: vmovdqa (%rsi), %xmm9
3159 ; AVX512BW-FCP-NEXT: vpbroadcastq {{.*#+}} xmm10 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3160 ; AVX512BW-FCP-NEXT: vpshufb %xmm10, %xmm9, %xmm11
3161 ; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %xmm12
3162 ; AVX512BW-FCP-NEXT: vpshufb %xmm10, %xmm12, %xmm10
3163 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm10[8],xmm11[8],xmm10[9],xmm11[9],xmm10[10],xmm11[10],xmm10[11],xmm11[11],xmm10[12],xmm11[12],xmm10[13],xmm11[13],xmm10[14],xmm11[14],xmm10[15],xmm11[15]
3164 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,0,1]
3165 ; AVX512BW-FCP-NEXT: vmovdqa (%rcx), %xmm11
3166 ; AVX512BW-FCP-NEXT: vmovdqa (%rdx), %xmm13
3167 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3],xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
3168 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm15 = [1,0,3,2,1,0,3,2,1,0,3,2,5,4,7,6]
3169 ; AVX512BW-FCP-NEXT: vpermw %ymm14, %ymm15, %ymm14
3170 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm10, %zmm14, %zmm10
3171 ; AVX512BW-FCP-NEXT: vpshufb %xmm8, %xmm11, %xmm14
3172 ; AVX512BW-FCP-NEXT: vpshufb %xmm8, %xmm13, %xmm8
3173 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3],xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
3174 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,0,1]
3175 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm12[0],xmm9[0],xmm12[1],xmm9[1],xmm12[2],xmm9[2],xmm12[3],xmm9[3],xmm12[4],xmm9[4],xmm12[5],xmm9[5],xmm12[6],xmm9[6],xmm12[7],xmm9[7]
3176 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm15 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
3177 ; AVX512BW-FCP-NEXT: vpermw %ymm14, %ymm15, %ymm14
3178 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm8, %zmm14, %zmm8
3179 ; AVX512BW-FCP-NEXT: movl $1227105426, %ecx # imm = 0x49242492
3180 ; AVX512BW-FCP-NEXT: kmovd %ecx, %k1
3181 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm10, %zmm8 {%k1}
3182 ; AVX512BW-FCP-NEXT: vmovdqa (%r9), %xmm10
3183 ; AVX512BW-FCP-NEXT: vpshufb %xmm7, %xmm10, %xmm14
3184 ; AVX512BW-FCP-NEXT: vmovdqa (%r8), %xmm15
3185 ; AVX512BW-FCP-NEXT: vpshufb %xmm7, %xmm15, %xmm7
3186 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm7[0],xmm14[0],xmm7[1],xmm14[1],xmm7[2],xmm14[2],xmm7[3],xmm14[3],xmm7[4],xmm14[4],xmm7[5],xmm14[5],xmm7[6],xmm14[6],xmm7[7],xmm14[7]
3187 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,0,1]
3188 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm15[0],xmm10[0],xmm15[1],xmm10[1],xmm15[2],xmm10[2],xmm15[3],xmm10[3],xmm15[4],xmm10[4],xmm15[5],xmm10[5],xmm15[6],xmm10[6],xmm15[7],xmm10[7]
3189 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm16 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4]
3190 ; AVX512BW-FCP-NEXT: vpermw %ymm14, %ymm16, %ymm14
3191 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm7, %zmm14, %zmm7
3192 ; AVX512BW-FCP-NEXT: movl $613566756, %ecx # imm = 0x24924924
3193 ; AVX512BW-FCP-NEXT: kmovd %ecx, %k1
3194 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm7, %zmm8 {%k1}
3195 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[4],ymm6[4],ymm5[5],ymm6[5],ymm5[6],ymm6[6],ymm5[7],ymm6[7],ymm5[16],ymm6[16],ymm5[17],ymm6[17],ymm5[18],ymm6[18],ymm5[19],ymm6[19],ymm5[20],ymm6[20],ymm5[21],ymm6[21],ymm5[22],ymm6[22],ymm5[23],ymm6[23]
3196 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm13[8],xmm11[8],xmm13[9],xmm11[9],xmm13[10],xmm11[10],xmm13[11],xmm11[11],xmm13[12],xmm11[12],xmm13[13],xmm11[13],xmm13[14],xmm11[14],xmm13[15],xmm11[15]
3197 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm5, %zmm6, %zmm5
3198 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[4],ymm3[4],ymm2[5],ymm3[5],ymm2[6],ymm3[6],ymm2[7],ymm3[7],ymm2[16],ymm3[16],ymm2[17],ymm3[17],ymm2[18],ymm3[18],ymm2[19],ymm3[19],ymm2[20],ymm3[20],ymm2[21],ymm3[21],ymm2[22],ymm3[22],ymm2[23],ymm3[23]
3199 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm12[8],xmm9[8],xmm12[9],xmm9[9],xmm12[10],xmm9[10],xmm12[11],xmm9[11],xmm12[12],xmm9[12],xmm12[13],xmm9[13],xmm12[14],xmm9[14],xmm12[15],xmm9[15]
3200 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
3201 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm3 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
3202 ; AVX512BW-FCP-NEXT: vpermw %zmm2, %zmm3, %zmm2
3203 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm3 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7,25,24,27,26,25,24,27,26,25,24,27,26,29,28,31,30]
3204 ; AVX512BW-FCP-NEXT: vpermw %zmm5, %zmm3, %zmm2 {%k1}
3205 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
3206 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm15[8],xmm10[8],xmm15[9],xmm10[9],xmm15[10],xmm10[10],xmm15[11],xmm10[11],xmm15[12],xmm10[12],xmm15[13],xmm10[13],xmm15[14],xmm10[14],xmm15[15],xmm10[15]
3207 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
3208 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm1 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7,26,25,24,27,26,25,24,27,26,25,24,27,28,28,28,28]
3209 ; AVX512BW-FCP-NEXT: movl $1227133513, %ecx # imm = 0x49249249
3210 ; AVX512BW-FCP-NEXT: kmovd %ecx, %k1
3211 ; AVX512BW-FCP-NEXT: vpermw %zmm0, %zmm1, %zmm2 {%k1}
3212 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, 64(%rax)
3213 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, (%rax)
3214 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm4, 128(%rax)
3215 ; AVX512BW-FCP-NEXT: vzeroupper
3216 ; AVX512BW-FCP-NEXT: retq
3217 ;
3218 ; AVX512DQ-BW-LABEL: store_i8_stride6_vf32:
3219 ; AVX512DQ-BW: # %bb.0:
3220 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3221 ; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %ymm9
3222 ; AVX512DQ-BW-NEXT: vmovdqa (%rsi), %ymm10
3223 ; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %ymm11
3224 ; AVX512DQ-BW-NEXT: vmovdqa (%rcx), %ymm12
3225 ; AVX512DQ-BW-NEXT: vmovdqa (%r8), %ymm7
3226 ; AVX512DQ-BW-NEXT: vmovdqa (%r9), %ymm8
3227 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm9[0],ymm10[0],ymm9[1],ymm10[1],ymm9[2],ymm10[2],ymm9[3],ymm10[3],ymm9[4],ymm10[4],ymm9[5],ymm10[5],ymm9[6],ymm10[6],ymm9[7],ymm10[7],ymm9[16],ymm10[16],ymm9[17],ymm10[17],ymm9[18],ymm10[18],ymm9[19],ymm10[19],ymm9[20],ymm10[20],ymm9[21],ymm10[21],ymm9[22],ymm10[22],ymm9[23],ymm10[23]
3228 ; AVX512DQ-BW-NEXT: vmovdqa (%rsi), %xmm1
3229 ; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %xmm2
3230 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
3231 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
3232 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm3 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
3233 ; AVX512DQ-BW-NEXT: vpermw %zmm0, %zmm3, %zmm0
3234 ; AVX512DQ-BW-NEXT: vmovdqa (%rcx), %xmm3
3235 ; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %xmm4
3236 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
3237 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm6 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7]
3238 ; AVX512DQ-BW-NEXT: vpermw %ymm5, %ymm6, %ymm5
3239 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm11[0],ymm12[0],ymm11[1],ymm12[1],ymm11[2],ymm12[2],ymm11[3],ymm12[3],ymm11[4],ymm12[4],ymm11[5],ymm12[5],ymm11[6],ymm12[6],ymm11[7],ymm12[7],ymm11[16],ymm12[16],ymm11[17],ymm12[17],ymm11[18],ymm12[18],ymm11[19],ymm12[19],ymm11[20],ymm12[20],ymm11[21],ymm12[21],ymm11[22],ymm12[22],ymm11[23],ymm12[23]
3240 ; AVX512DQ-BW-NEXT: vprold $16, %ymm6, %ymm6
3241 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
3242 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm6, %zmm5, %zmm5
3243 ; AVX512DQ-BW-NEXT: movl $613566756, %ecx # imm = 0x24924924
3244 ; AVX512DQ-BW-NEXT: kmovd %ecx, %k1
3245 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm5, %zmm0 {%k1}
3246 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm13 = ymm7[0],ymm8[0],ymm7[1],ymm8[1],ymm7[2],ymm8[2],ymm7[3],ymm8[3],ymm7[4],ymm8[4],ymm7[5],ymm8[5],ymm7[6],ymm8[6],ymm7[7],ymm8[7],ymm7[16],ymm8[16],ymm7[17],ymm8[17],ymm7[18],ymm8[18],ymm7[19],ymm8[19],ymm7[20],ymm8[20],ymm7[21],ymm8[21],ymm7[22],ymm8[22],ymm7[23],ymm8[23]
3247 ; AVX512DQ-BW-NEXT: vmovdqa (%r9), %xmm5
3248 ; AVX512DQ-BW-NEXT: vmovdqa (%r8), %xmm6
3249 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm6[8],xmm5[8],xmm6[9],xmm5[9],xmm6[10],xmm5[10],xmm6[11],xmm5[11],xmm6[12],xmm5[12],xmm6[13],xmm5[13],xmm6[14],xmm5[14],xmm6[15],xmm5[15]
3250 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm13, %zmm14, %zmm13
3251 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm14 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7,26,25,24,27,26,25,24,27,26,25,24,27,28,28,28,28]
3252 ; AVX512DQ-BW-NEXT: movl $1227133513, %ecx # imm = 0x49249249
3253 ; AVX512DQ-BW-NEXT: kmovd %ecx, %k2
3254 ; AVX512DQ-BW-NEXT: vpermw %zmm13, %zmm14, %zmm0 {%k2}
3255 ; AVX512DQ-BW-NEXT: vpbroadcastq {{.*#+}} ymm13 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3256 ; AVX512DQ-BW-NEXT: vpshufb %ymm13, %ymm10, %ymm14
3257 ; AVX512DQ-BW-NEXT: vpshufb %ymm13, %ymm9, %ymm13
3258 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm13 = ymm13[0],ymm14[0],ymm13[1],ymm14[1],ymm13[2],ymm14[2],ymm13[3],ymm14[3],ymm13[4],ymm14[4],ymm13[5],ymm14[5],ymm13[6],ymm14[6],ymm13[7],ymm14[7],ymm13[16],ymm14[16],ymm13[17],ymm14[17],ymm13[18],ymm14[18],ymm13[19],ymm14[19],ymm13[20],ymm14[20],ymm13[21],ymm14[21],ymm13[22],ymm14[22],ymm13[23],ymm14[23]
3259 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm13 = ymm13[2,2,2,3]
3260 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} ymm14 = ymm11[8],ymm12[8],ymm11[9],ymm12[9],ymm11[10],ymm12[10],ymm11[11],ymm12[11],ymm11[12],ymm12[12],ymm11[13],ymm12[13],ymm11[14],ymm12[14],ymm11[15],ymm12[15],ymm11[24],ymm12[24],ymm11[25],ymm12[25],ymm11[26],ymm12[26],ymm11[27],ymm12[27],ymm11[28],ymm12[28],ymm11[29],ymm12[29],ymm11[30],ymm12[30],ymm11[31],ymm12[31]
3261 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm15 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
3262 ; AVX512DQ-BW-NEXT: vpermw %ymm14, %ymm15, %ymm14
3263 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm14, %zmm13, %zmm13
3264 ; AVX512DQ-BW-NEXT: vpbroadcastq {{.*#+}} ymm14 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
3265 ; AVX512DQ-BW-NEXT: vpshufb %ymm14, %ymm12, %ymm12
3266 ; AVX512DQ-BW-NEXT: vpshufb %ymm14, %ymm11, %ymm11
3267 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm11 = ymm11[0],ymm12[0],ymm11[1],ymm12[1],ymm11[2],ymm12[2],ymm11[3],ymm12[3],ymm11[4],ymm12[4],ymm11[5],ymm12[5],ymm11[6],ymm12[6],ymm11[7],ymm12[7],ymm11[16],ymm12[16],ymm11[17],ymm12[17],ymm11[18],ymm12[18],ymm11[19],ymm12[19],ymm11[20],ymm12[20],ymm11[21],ymm12[21],ymm11[22],ymm12[22],ymm11[23],ymm12[23]
3268 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
3269 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm9[8],ymm10[8],ymm9[9],ymm10[9],ymm9[10],ymm10[10],ymm9[11],ymm10[11],ymm9[12],ymm10[12],ymm9[13],ymm10[13],ymm9[14],ymm10[14],ymm9[15],ymm10[15],ymm9[24],ymm10[24],ymm9[25],ymm10[25],ymm9[26],ymm10[26],ymm9[27],ymm10[27],ymm9[28],ymm10[28],ymm9[29],ymm10[29],ymm9[30],ymm10[30],ymm9[31],ymm10[31]
3270 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm10 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
3271 ; AVX512DQ-BW-NEXT: vpermw %ymm9, %ymm10, %ymm9
3272 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm9, %zmm11, %zmm9
3273 ; AVX512DQ-BW-NEXT: movl $1227114788, %ecx # imm = 0x49244924
3274 ; AVX512DQ-BW-NEXT: kmovd %ecx, %k2
3275 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm13, %zmm9 {%k2}
3276 ; AVX512DQ-BW-NEXT: vpbroadcastq {{.*#+}} ymm10 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
3277 ; AVX512DQ-BW-NEXT: vpshufb %ymm10, %ymm8, %ymm11
3278 ; AVX512DQ-BW-NEXT: vpshufb %ymm10, %ymm7, %ymm12
3279 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm11 = ymm12[0],ymm11[0],ymm12[1],ymm11[1],ymm12[2],ymm11[2],ymm12[3],ymm11[3],ymm12[4],ymm11[4],ymm12[5],ymm11[5],ymm12[6],ymm11[6],ymm12[7],ymm11[7],ymm12[16],ymm11[16],ymm12[17],ymm11[17],ymm12[18],ymm11[18],ymm12[19],ymm11[19],ymm12[20],ymm11[20],ymm12[21],ymm11[21],ymm12[22],ymm11[22],ymm12[23],ymm11[23]
3280 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
3281 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} ymm7 = ymm7[8],ymm8[8],ymm7[9],ymm8[9],ymm7[10],ymm8[10],ymm7[11],ymm8[11],ymm7[12],ymm8[12],ymm7[13],ymm8[13],ymm7[14],ymm8[14],ymm7[15],ymm8[15],ymm7[24],ymm8[24],ymm7[25],ymm8[25],ymm7[26],ymm8[26],ymm7[27],ymm8[27],ymm7[28],ymm8[28],ymm7[29],ymm8[29],ymm7[30],ymm8[30],ymm7[31],ymm8[31]
3282 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm8 = [10,13,12,11,10,13,12,11,10,13,12,11,14,13,14,15]
3283 ; AVX512DQ-BW-NEXT: vpermw %ymm7, %ymm8, %ymm7
3284 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm7, %zmm11, %zmm7
3285 ; AVX512DQ-BW-NEXT: movl $-1840700270, %ecx # imm = 0x92492492
3286 ; AVX512DQ-BW-NEXT: kmovd %ecx, %k2
3287 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm7, %zmm9 {%k2}
3288 ; AVX512DQ-BW-NEXT: vpshufb %xmm14, %xmm3, %xmm7
3289 ; AVX512DQ-BW-NEXT: vpshufb %xmm14, %xmm4, %xmm8
3290 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
3291 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,0,1]
3292 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
3293 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm11 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
3294 ; AVX512DQ-BW-NEXT: vpermw %ymm8, %ymm11, %ymm8
3295 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm7, %zmm8, %zmm7
3296 ; AVX512DQ-BW-NEXT: vpbroadcastq {{.*#+}} xmm8 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3297 ; AVX512DQ-BW-NEXT: vpshufb %xmm8, %xmm1, %xmm1
3298 ; AVX512DQ-BW-NEXT: vpshufb %xmm8, %xmm2, %xmm2
3299 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
3300 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3],xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
3301 ; AVX512DQ-BW-NEXT: vprold $16, %xmm2, %xmm2
3302 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
3303 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} zmm1 = zmm1[0,0,0,1,4,4,4,5]
3304 ; AVX512DQ-BW-NEXT: movl $1227105426, %ecx # imm = 0x49242492
3305 ; AVX512DQ-BW-NEXT: kmovd %ecx, %k2
3306 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm1, %zmm7 {%k2}
3307 ; AVX512DQ-BW-NEXT: vpshufb %xmm10, %xmm5, %xmm1
3308 ; AVX512DQ-BW-NEXT: vpshufb %xmm10, %xmm6, %xmm2
3309 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
3310 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
3311 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3],xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
3312 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm3 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4]
3313 ; AVX512DQ-BW-NEXT: vpermw %ymm2, %ymm3, %ymm2
3314 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
3315 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm1, %zmm7 {%k1}
3316 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, (%rax)
3317 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, 128(%rax)
3318 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 64(%rax)
3319 ; AVX512DQ-BW-NEXT: vzeroupper
3320 ; AVX512DQ-BW-NEXT: retq
3321 ;
3322 ; AVX512DQ-BW-FCP-LABEL: store_i8_stride6_vf32:
3323 ; AVX512DQ-BW-FCP: # %bb.0:
3324 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
3325 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %ymm2
3326 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rsi), %ymm3
3327 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %ymm5
3328 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rcx), %ymm6
3329 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%r8), %ymm0
3330 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%r9), %ymm1
3331 ; AVX512DQ-BW-FCP-NEXT: vpbroadcastq {{.*#+}} ymm4 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3332 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm7
3333 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm4, %ymm2, %ymm4
3334 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm7[0],ymm4[1],ymm7[1],ymm4[2],ymm7[2],ymm4[3],ymm7[3],ymm4[4],ymm7[4],ymm4[5],ymm7[5],ymm4[6],ymm7[6],ymm4[7],ymm7[7],ymm4[16],ymm7[16],ymm4[17],ymm7[17],ymm4[18],ymm7[18],ymm4[19],ymm7[19],ymm4[20],ymm7[20],ymm4[21],ymm7[21],ymm4[22],ymm7[22],ymm4[23],ymm7[23]
3335 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3336 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm7 = ymm5[8],ymm6[8],ymm5[9],ymm6[9],ymm5[10],ymm6[10],ymm5[11],ymm6[11],ymm5[12],ymm6[12],ymm5[13],ymm6[13],ymm5[14],ymm6[14],ymm5[15],ymm6[15],ymm5[24],ymm6[24],ymm5[25],ymm6[25],ymm5[26],ymm6[26],ymm5[27],ymm6[27],ymm5[28],ymm6[28],ymm5[29],ymm6[29],ymm5[30],ymm6[30],ymm5[31],ymm6[31]
3337 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm8 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
3338 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm7, %ymm8, %ymm7
3339 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm7, %zmm4, %zmm7
3340 ; AVX512DQ-BW-FCP-NEXT: vpbroadcastq {{.*#+}} ymm8 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
3341 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm8, %ymm6, %ymm4
3342 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm8, %ymm5, %ymm9
3343 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm9[0],ymm4[0],ymm9[1],ymm4[1],ymm9[2],ymm4[2],ymm9[3],ymm4[3],ymm9[4],ymm4[4],ymm9[5],ymm4[5],ymm9[6],ymm4[6],ymm9[7],ymm4[7],ymm9[16],ymm4[16],ymm9[17],ymm4[17],ymm9[18],ymm4[18],ymm9[19],ymm4[19],ymm9[20],ymm4[20],ymm9[21],ymm4[21],ymm9[22],ymm4[22],ymm9[23],ymm4[23]
3344 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
3345 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm2[8],ymm3[8],ymm2[9],ymm3[9],ymm2[10],ymm3[10],ymm2[11],ymm3[11],ymm2[12],ymm3[12],ymm2[13],ymm3[13],ymm2[14],ymm3[14],ymm2[15],ymm3[15],ymm2[24],ymm3[24],ymm2[25],ymm3[25],ymm2[26],ymm3[26],ymm2[27],ymm3[27],ymm2[28],ymm3[28],ymm2[29],ymm3[29],ymm2[30],ymm3[30],ymm2[31],ymm3[31]
3346 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm10 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
3347 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm9, %ymm10, %ymm9
3348 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm4, %zmm4
3349 ; AVX512DQ-BW-FCP-NEXT: movl $1227114788, %r10d # imm = 0x49244924
3350 ; AVX512DQ-BW-FCP-NEXT: kmovd %r10d, %k1
3351 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm7, %zmm4 {%k1}
3352 ; AVX512DQ-BW-FCP-NEXT: vpbroadcastq {{.*#+}} ymm7 = [6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0,6,5,8,7,0,9,0,0]
3353 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm7, %ymm1, %ymm9
3354 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm7, %ymm0, %ymm10
3355 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm9 = ymm10[0],ymm9[0],ymm10[1],ymm9[1],ymm10[2],ymm9[2],ymm10[3],ymm9[3],ymm10[4],ymm9[4],ymm10[5],ymm9[5],ymm10[6],ymm9[6],ymm10[7],ymm9[7],ymm10[16],ymm9[16],ymm10[17],ymm9[17],ymm10[18],ymm9[18],ymm10[19],ymm9[19],ymm10[20],ymm9[20],ymm10[21],ymm9[21],ymm10[22],ymm9[22],ymm10[23],ymm9[23]
3356 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
3357 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm10 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
3358 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm11 = [10,13,12,11,10,13,12,11,10,13,12,11,14,13,14,15]
3359 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm10, %ymm11, %ymm10
3360 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm10, %zmm9, %zmm9
3361 ; AVX512DQ-BW-FCP-NEXT: movl $-1840700270, %r10d # imm = 0x92492492
3362 ; AVX512DQ-BW-FCP-NEXT: kmovd %r10d, %k1
3363 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm9, %zmm4 {%k1}
3364 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rsi), %xmm9
3365 ; AVX512DQ-BW-FCP-NEXT: vpbroadcastq {{.*#+}} xmm10 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
3366 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm10, %xmm9, %xmm11
3367 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %xmm12
3368 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm10, %xmm12, %xmm10
3369 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm10[8],xmm11[8],xmm10[9],xmm11[9],xmm10[10],xmm11[10],xmm10[11],xmm11[11],xmm10[12],xmm11[12],xmm10[13],xmm11[13],xmm10[14],xmm11[14],xmm10[15],xmm11[15]
3370 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,0,1]
3371 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rcx), %xmm11
3372 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdx), %xmm13
3373 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3],xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
3374 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm15 = [1,0,3,2,1,0,3,2,1,0,3,2,5,4,7,6]
3375 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm14, %ymm15, %ymm14
3376 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm10, %zmm14, %zmm10
3377 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm8, %xmm11, %xmm14
3378 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm8, %xmm13, %xmm8
3379 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3],xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
3380 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,0,1]
3381 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm12[0],xmm9[0],xmm12[1],xmm9[1],xmm12[2],xmm9[2],xmm12[3],xmm9[3],xmm12[4],xmm9[4],xmm12[5],xmm9[5],xmm12[6],xmm9[6],xmm12[7],xmm9[7]
3382 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm15 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
3383 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm14, %ymm15, %ymm14
3384 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm8, %zmm14, %zmm8
3385 ; AVX512DQ-BW-FCP-NEXT: movl $1227105426, %ecx # imm = 0x49242492
3386 ; AVX512DQ-BW-FCP-NEXT: kmovd %ecx, %k1
3387 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm10, %zmm8 {%k1}
3388 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%r9), %xmm10
3389 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm7, %xmm10, %xmm14
3390 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%r8), %xmm15
3391 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm7, %xmm15, %xmm7
3392 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm7[0],xmm14[0],xmm7[1],xmm14[1],xmm7[2],xmm14[2],xmm7[3],xmm14[3],xmm7[4],xmm14[4],xmm7[5],xmm14[5],xmm7[6],xmm14[6],xmm7[7],xmm14[7]
3393 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,0,1]
3394 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm15[0],xmm10[0],xmm15[1],xmm10[1],xmm15[2],xmm10[2],xmm15[3],xmm10[3],xmm15[4],xmm10[4],xmm15[5],xmm10[5],xmm15[6],xmm10[6],xmm15[7],xmm10[7]
3395 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm16 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4]
3396 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm14, %ymm16, %ymm14
3397 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm7, %zmm14, %zmm7
3398 ; AVX512DQ-BW-FCP-NEXT: movl $613566756, %ecx # imm = 0x24924924
3399 ; AVX512DQ-BW-FCP-NEXT: kmovd %ecx, %k1
3400 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm7, %zmm8 {%k1}
3401 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[4],ymm6[4],ymm5[5],ymm6[5],ymm5[6],ymm6[6],ymm5[7],ymm6[7],ymm5[16],ymm6[16],ymm5[17],ymm6[17],ymm5[18],ymm6[18],ymm5[19],ymm6[19],ymm5[20],ymm6[20],ymm5[21],ymm6[21],ymm5[22],ymm6[22],ymm5[23],ymm6[23]
3402 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm13[8],xmm11[8],xmm13[9],xmm11[9],xmm13[10],xmm11[10],xmm13[11],xmm11[11],xmm13[12],xmm11[12],xmm13[13],xmm11[13],xmm13[14],xmm11[14],xmm13[15],xmm11[15]
3403 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm5, %zmm6, %zmm5
3404 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[4],ymm3[4],ymm2[5],ymm3[5],ymm2[6],ymm3[6],ymm2[7],ymm3[7],ymm2[16],ymm3[16],ymm2[17],ymm3[17],ymm2[18],ymm3[18],ymm2[19],ymm3[19],ymm2[20],ymm3[20],ymm2[21],ymm3[21],ymm2[22],ymm3[22],ymm2[23],ymm3[23]
3405 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm12[8],xmm9[8],xmm12[9],xmm9[9],xmm12[10],xmm9[10],xmm12[11],xmm9[11],xmm12[12],xmm9[12],xmm12[13],xmm9[13],xmm12[14],xmm9[14],xmm12[15],xmm9[15]
3406 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
3407 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm3 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
3408 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm2, %zmm3, %zmm2
3409 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm3 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7,25,24,27,26,25,24,27,26,25,24,27,26,29,28,31,30]
3410 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm5, %zmm3, %zmm2 {%k1}
3411 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
3412 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm15[8],xmm10[8],xmm15[9],xmm10[9],xmm15[10],xmm10[10],xmm15[11],xmm10[11],xmm15[12],xmm10[12],xmm15[13],xmm10[13],xmm15[14],xmm10[14],xmm15[15],xmm10[15]
3413 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
3414 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm1 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7,26,25,24,27,26,25,24,27,26,25,24,27,28,28,28,28]
3415 ; AVX512DQ-BW-FCP-NEXT: movl $1227133513, %ecx # imm = 0x49249249
3416 ; AVX512DQ-BW-FCP-NEXT: kmovd %ecx, %k1
3417 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm0, %zmm1, %zmm2 {%k1}
3418 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, 64(%rax)
3419 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, (%rax)
3420 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm4, 128(%rax)
3421 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
3422 ; AVX512DQ-BW-FCP-NEXT: retq
3423 %in.vec0 = load <32 x i8>, ptr %in.vecptr0, align 64
3424 %in.vec1 = load <32 x i8>, ptr %in.vecptr1, align 64
3425 %in.vec2 = load <32 x i8>, ptr %in.vecptr2, align 64
3426 %in.vec3 = load <32 x i8>, ptr %in.vecptr3, align 64
3427 %in.vec4 = load <32 x i8>, ptr %in.vecptr4, align 64
3428 %in.vec5 = load <32 x i8>, ptr %in.vecptr5, align 64
3429 %1 = shufflevector <32 x i8> %in.vec0, <32 x i8> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3430 %2 = shufflevector <32 x i8> %in.vec2, <32 x i8> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3431 %3 = shufflevector <32 x i8> %in.vec4, <32 x i8> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3432 %4 = shufflevector <64 x i8> %1, <64 x i8> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
3433 %5 = shufflevector <64 x i8> %3, <64 x i8> poison, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
3434 %6 = shufflevector <128 x i8> %4, <128 x i8> %5, <192 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191>
3435 %interleaved.vec = shufflevector <192 x i8> %6, <192 x i8> poison, <192 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191>
3436 store <192 x i8> %interleaved.vec, ptr %out.vec, align 64
3437 ret void
3438 }
3440 define void @store_i8_stride6_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %out.vec) nounwind {
3441 ; SSE-LABEL: store_i8_stride6_vf64:
3442 ; SSE: # %bb.0:
3443 ; SSE-NEXT: subq $184, %rsp
3444 ; SSE-NEXT: movdqa (%rdi), %xmm1
3445 ; SSE-NEXT: movdqa (%rsi), %xmm5
3446 ; SSE-NEXT: movdqa (%rdx), %xmm4
3447 ; SSE-NEXT: movdqa (%rcx), %xmm9
3448 ; SSE-NEXT: movdqa (%r8), %xmm6
3449 ; SSE-NEXT: movdqa (%r9), %xmm2
3450 ; SSE-NEXT: movdqa %xmm1, %xmm8
3451 ; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm5[0],xmm8[1],xmm5[1],xmm8[2],xmm5[2],xmm8[3],xmm5[3],xmm8[4],xmm5[4],xmm8[5],xmm5[5],xmm8[6],xmm5[6],xmm8[7],xmm5[7]
3452 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm8[0,0,1,1]
3453 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,0,65535,65535,0,65535,65535,0]
3454 ; SSE-NEXT: pand %xmm12, %xmm3
3455 ; SSE-NEXT: movdqa %xmm4, %xmm10
3456 ; SSE-NEXT: punpcklbw {{.*#+}} xmm10 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3],xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
3457 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm10[1,0,2,2,4,5,6,7]
3458 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,1,0,1]
3459 ; SSE-NEXT: pandn %xmm7, %xmm12
3460 ; SSE-NEXT: por %xmm3, %xmm12
3461 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,65535,0,65535,65535,0,65535,65535]
3462 ; SSE-NEXT: pand %xmm3, %xmm12
3463 ; SSE-NEXT: movdqa %xmm6, %xmm11
3464 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
3465 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm11[0,0,0,0]
3466 ; SSE-NEXT: movdqa %xmm3, %xmm13
3467 ; SSE-NEXT: pandn %xmm7, %xmm13
3468 ; SSE-NEXT: por %xmm12, %xmm13
3469 ; SSE-NEXT: movdqa {{.*#+}} xmm12 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
3470 ; SSE-NEXT: pand %xmm12, %xmm13
3471 ; SSE-NEXT: punpcklbw {{.*#+}} xmm15 = xmm15[0],xmm2[0],xmm15[1],xmm2[1],xmm15[2],xmm2[2],xmm15[3],xmm2[3],xmm15[4],xmm2[4],xmm15[5],xmm2[5],xmm15[6],xmm2[6],xmm15[7],xmm2[7]
3472 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm15[0,0,0,0]
3473 ; SSE-NEXT: movdqa %xmm12, %xmm0
3474 ; SSE-NEXT: pandn %xmm7, %xmm0
3475 ; SSE-NEXT: por %xmm13, %xmm0
3476 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3477 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm10[3,3,3,3,4,5,6,7]
3478 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,4,4,4]
3479 ; SSE-NEXT: movdqa %xmm3, %xmm13
3480 ; SSE-NEXT: pandn %xmm7, %xmm13
3481 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm8[1,1,2,2]
3482 ; SSE-NEXT: pand %xmm3, %xmm7
3483 ; SSE-NEXT: por %xmm7, %xmm13
3484 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
3485 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm11[1,1,2,2]
3486 ; SSE-NEXT: movdqa %xmm0, %xmm14
3487 ; SSE-NEXT: pandn %xmm7, %xmm14
3488 ; SSE-NEXT: pand %xmm0, %xmm13
3489 ; SSE-NEXT: por %xmm13, %xmm14
3490 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
3491 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm15[1,1,2,2]
3492 ; SSE-NEXT: movdqa %xmm13, %xmm0
3493 ; SSE-NEXT: pandn %xmm7, %xmm0
3494 ; SSE-NEXT: pand %xmm13, %xmm14
3495 ; SSE-NEXT: por %xmm14, %xmm0
3496 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3497 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm8[3,3,3,3]
3498 ; SSE-NEXT: movdqa %xmm3, %xmm8
3499 ; SSE-NEXT: pandn %xmm7, %xmm8
3500 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm10[0,1,2,3,5,6,7,7]
3501 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,2,2,3]
3502 ; SSE-NEXT: pand %xmm3, %xmm7
3503 ; SSE-NEXT: por %xmm8, %xmm7
3504 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm11[2,2,3,3]
3505 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,0,65535,65535,0]
3506 ; SSE-NEXT: movdqa %xmm0, %xmm10
3507 ; SSE-NEXT: pandn %xmm8, %xmm10
3508 ; SSE-NEXT: pand %xmm0, %xmm7
3509 ; SSE-NEXT: movdqa %xmm0, %xmm11
3510 ; SSE-NEXT: por %xmm7, %xmm10
3511 ; SSE-NEXT: movdqa {{.*#+}} xmm14 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
3512 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm15[2,2,3,3]
3513 ; SSE-NEXT: movdqa %xmm14, %xmm0
3514 ; SSE-NEXT: pandn %xmm7, %xmm0
3515 ; SSE-NEXT: pand %xmm14, %xmm10
3516 ; SSE-NEXT: por %xmm10, %xmm0
3517 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3518 ; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
3519 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,0,1,1]
3520 ; SSE-NEXT: pand %xmm11, %xmm5
3521 ; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm9[8],xmm4[9],xmm9[9],xmm4[10],xmm9[10],xmm4[11],xmm9[11],xmm4[12],xmm9[12],xmm4[13],xmm9[13],xmm4[14],xmm9[14],xmm4[15],xmm9[15]
3522 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm4[1,0,2,2,4,5,6,7]
3523 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,1,0,1]
3524 ; SSE-NEXT: movdqa %xmm11, %xmm8
3525 ; SSE-NEXT: pandn %xmm7, %xmm8
3526 ; SSE-NEXT: por %xmm5, %xmm8
3527 ; SSE-NEXT: pand %xmm3, %xmm8
3528 ; SSE-NEXT: punpckhbw {{.*#+}} xmm6 = xmm6[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
3529 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm6[0,0,0,0]
3530 ; SSE-NEXT: movdqa %xmm3, %xmm7
3531 ; SSE-NEXT: pandn %xmm5, %xmm7
3532 ; SSE-NEXT: por %xmm8, %xmm7
3533 ; SSE-NEXT: pand %xmm12, %xmm7
3534 ; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
3535 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,0,0,0]
3536 ; SSE-NEXT: movdqa %xmm12, %xmm0
3537 ; SSE-NEXT: pandn %xmm5, %xmm0
3538 ; SSE-NEXT: por %xmm7, %xmm0
3539 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3540 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm4[3,3,3,3,4,5,6,7]
3541 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,4,4,4]
3542 ; SSE-NEXT: movdqa %xmm3, %xmm7
3543 ; SSE-NEXT: pandn %xmm5, %xmm7
3544 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[1,1,2,2]
3545 ; SSE-NEXT: pand %xmm3, %xmm5
3546 ; SSE-NEXT: por %xmm5, %xmm7
3547 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm6[1,1,2,2]
3548 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
3549 ; SSE-NEXT: movdqa %xmm0, %xmm8
3550 ; SSE-NEXT: pandn %xmm5, %xmm8
3551 ; SSE-NEXT: pand %xmm0, %xmm7
3552 ; SSE-NEXT: por %xmm7, %xmm8
3553 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[1,1,2,2]
3554 ; SSE-NEXT: movdqa %xmm13, %xmm0
3555 ; SSE-NEXT: pandn %xmm5, %xmm0
3556 ; SSE-NEXT: pand %xmm13, %xmm8
3557 ; SSE-NEXT: por %xmm8, %xmm0
3558 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3559 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,3,3,3]
3560 ; SSE-NEXT: movdqa %xmm3, %xmm5
3561 ; SSE-NEXT: pandn %xmm1, %xmm5
3562 ; SSE-NEXT: movdqa 16(%rdx), %xmm1
3563 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,7,7]
3564 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
3565 ; SSE-NEXT: pand %xmm3, %xmm4
3566 ; SSE-NEXT: por %xmm5, %xmm4
3567 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm6[2,2,3,3]
3568 ; SSE-NEXT: movdqa %xmm11, %xmm6
3569 ; SSE-NEXT: pandn %xmm5, %xmm6
3570 ; SSE-NEXT: movdqa 16(%rcx), %xmm5
3571 ; SSE-NEXT: pand %xmm11, %xmm4
3572 ; SSE-NEXT: por %xmm4, %xmm6
3573 ; SSE-NEXT: pand %xmm14, %xmm6
3574 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
3575 ; SSE-NEXT: movdqa %xmm14, %xmm0
3576 ; SSE-NEXT: pandn %xmm2, %xmm0
3577 ; SSE-NEXT: por %xmm6, %xmm0
3578 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3579 ; SSE-NEXT: movdqa %xmm1, %xmm9
3580 ; SSE-NEXT: punpcklbw {{.*#+}} xmm9 = xmm9[0],xmm5[0],xmm9[1],xmm5[1],xmm9[2],xmm5[2],xmm9[3],xmm5[3],xmm9[4],xmm5[4],xmm9[5],xmm5[5],xmm9[6],xmm5[6],xmm9[7],xmm5[7]
3581 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm9[1,0,2,2,4,5,6,7]
3582 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,0,1]
3583 ; SSE-NEXT: movdqa %xmm11, %xmm0
3584 ; SSE-NEXT: movdqa %xmm11, %xmm6
3585 ; SSE-NEXT: pandn %xmm2, %xmm6
3586 ; SSE-NEXT: movdqa 16(%rdi), %xmm2
3587 ; SSE-NEXT: movdqa 16(%rsi), %xmm8
3588 ; SSE-NEXT: movdqa %xmm2, %xmm11
3589 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm8[0],xmm11[1],xmm8[1],xmm11[2],xmm8[2],xmm11[3],xmm8[3],xmm11[4],xmm8[4],xmm11[5],xmm8[5],xmm11[6],xmm8[6],xmm11[7],xmm8[7]
3590 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm11[0,0,1,1]
3591 ; SSE-NEXT: pand %xmm0, %xmm4
3592 ; SSE-NEXT: por %xmm4, %xmm6
3593 ; SSE-NEXT: movdqa 16(%r8), %xmm4
3594 ; SSE-NEXT: movdqa %xmm4, %xmm10
3595 ; SSE-NEXT: punpcklbw {{.*#+}} xmm10 = xmm10[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
3596 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm10[0,0,0,0]
3597 ; SSE-NEXT: movdqa %xmm3, %xmm13
3598 ; SSE-NEXT: pandn %xmm7, %xmm13
3599 ; SSE-NEXT: pand %xmm3, %xmm6
3600 ; SSE-NEXT: por %xmm6, %xmm13
3601 ; SSE-NEXT: movdqa 16(%r9), %xmm6
3602 ; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
3603 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm7[0,0,0,0]
3604 ; SSE-NEXT: movdqa %xmm12, %xmm0
3605 ; SSE-NEXT: pandn %xmm15, %xmm0
3606 ; SSE-NEXT: pand %xmm12, %xmm13
3607 ; SSE-NEXT: por %xmm13, %xmm0
3608 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3609 ; SSE-NEXT: pshuflw {{.*#+}} xmm13 = xmm9[3,3,3,3,4,5,6,7]
3610 ; SSE-NEXT: pshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,4,4,4]
3611 ; SSE-NEXT: movdqa %xmm3, %xmm15
3612 ; SSE-NEXT: pandn %xmm13, %xmm15
3613 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm11[1,1,2,2]
3614 ; SSE-NEXT: pand %xmm3, %xmm13
3615 ; SSE-NEXT: por %xmm13, %xmm15
3616 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm10[1,1,2,2]
3617 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
3618 ; SSE-NEXT: movdqa %xmm0, %xmm12
3619 ; SSE-NEXT: pandn %xmm13, %xmm12
3620 ; SSE-NEXT: pand %xmm0, %xmm15
3621 ; SSE-NEXT: por %xmm15, %xmm12
3622 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm7[1,1,2,2]
3623 ; SSE-NEXT: movdqa {{.*#+}} xmm15 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
3624 ; SSE-NEXT: movdqa %xmm15, %xmm0
3625 ; SSE-NEXT: pandn %xmm13, %xmm0
3626 ; SSE-NEXT: pand %xmm15, %xmm12
3627 ; SSE-NEXT: movdqa %xmm15, %xmm13
3628 ; SSE-NEXT: por %xmm12, %xmm0
3629 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3630 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[3,3,3,3]
3631 ; SSE-NEXT: movdqa %xmm3, %xmm12
3632 ; SSE-NEXT: pandn %xmm11, %xmm12
3633 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,5,6,7,7]
3634 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[2,2,2,3]
3635 ; SSE-NEXT: pand %xmm3, %xmm9
3636 ; SSE-NEXT: por %xmm12, %xmm9
3637 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
3638 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,0,65535,65535,0]
3639 ; SSE-NEXT: movdqa %xmm0, %xmm11
3640 ; SSE-NEXT: pandn %xmm10, %xmm11
3641 ; SSE-NEXT: pand %xmm0, %xmm9
3642 ; SSE-NEXT: movdqa %xmm0, %xmm10
3643 ; SSE-NEXT: por %xmm9, %xmm11
3644 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
3645 ; SSE-NEXT: movdqa %xmm14, %xmm0
3646 ; SSE-NEXT: pandn %xmm7, %xmm0
3647 ; SSE-NEXT: pand %xmm14, %xmm11
3648 ; SSE-NEXT: por %xmm11, %xmm0
3649 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm8[8],xmm2[9],xmm8[9],xmm2[10],xmm8[10],xmm2[11],xmm8[11],xmm2[12],xmm8[12],xmm2[13],xmm8[13],xmm2[14],xmm8[14],xmm2[15],xmm8[15]
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm1[1,0,2,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,1,0,1]
; SSE-NEXT: movdqa %xmm10, %xmm7
; SSE-NEXT: pandn %xmm5, %xmm7
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,0,1,1]
; SSE-NEXT: pand %xmm10, %xmm5
; SSE-NEXT: por %xmm5, %xmm7
; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,0,0]
; SSE-NEXT: movdqa %xmm3, %xmm8
; SSE-NEXT: pandn %xmm5, %xmm8
; SSE-NEXT: pand %xmm3, %xmm7
; SSE-NEXT: por %xmm7, %xmm8
; SSE-NEXT: punpckhbw {{.*#+}} xmm5 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,0,0]
; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; SSE-NEXT: movdqa %xmm0, %xmm7
; SSE-NEXT: pandn %xmm6, %xmm7
; SSE-NEXT: pand %xmm0, %xmm8
; SSE-NEXT: movdqa %xmm0, %xmm15
; SSE-NEXT: por %xmm8, %xmm7
; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm1[3,3,3,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,4,4,4]
; SSE-NEXT: movdqa %xmm3, %xmm7
; SSE-NEXT: pandn %xmm6, %xmm7
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm2[1,1,2,2]
; SSE-NEXT: pand %xmm3, %xmm6
; SSE-NEXT: por %xmm6, %xmm7
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm4[1,1,2,2]
; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
; SSE-NEXT: movdqa %xmm0, %xmm8
; SSE-NEXT: pandn %xmm6, %xmm8
; SSE-NEXT: pand %xmm0, %xmm7
; SSE-NEXT: por %xmm7, %xmm8
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[1,1,2,2]
; SSE-NEXT: movdqa %xmm13, %xmm0
; SSE-NEXT: pandn %xmm6, %xmm0
; SSE-NEXT: pand %xmm13, %xmm8
; SSE-NEXT: por %xmm8, %xmm0
; SSE-NEXT: movdqa %xmm0, (%rsp) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[3,3,3,3]
; SSE-NEXT: movdqa %xmm3, %xmm6
; SSE-NEXT: pandn %xmm2, %xmm6
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,3]
; SSE-NEXT: pand %xmm3, %xmm1
; SSE-NEXT: por %xmm6, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[2,2,3,3]
; SSE-NEXT: movdqa %xmm10, %xmm4
; SSE-NEXT: pandn %xmm2, %xmm4
; SSE-NEXT: pand %xmm10, %xmm1
; SSE-NEXT: por %xmm1, %xmm4
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[2,2,3,3]
; SSE-NEXT: movdqa %xmm14, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm0
; SSE-NEXT: pand %xmm14, %xmm4
; SSE-NEXT: por %xmm4, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa 32(%rdx), %xmm1
; SSE-NEXT: movdqa 32(%rcx), %xmm5
; SSE-NEXT: movdqa %xmm1, %xmm9
; SSE-NEXT: punpcklbw {{.*#+}} xmm9 = xmm9[0],xmm5[0],xmm9[1],xmm5[1],xmm9[2],xmm5[2],xmm9[3],xmm5[3],xmm9[4],xmm5[4],xmm9[5],xmm5[5],xmm9[6],xmm5[6],xmm9[7],xmm5[7]
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm9[1,0,2,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,0,1]
; SSE-NEXT: movdqa %xmm10, %xmm6
; SSE-NEXT: pandn %xmm2, %xmm6
; SSE-NEXT: movdqa 32(%rdi), %xmm2
; SSE-NEXT: movdqa 32(%rsi), %xmm8
; SSE-NEXT: movdqa %xmm2, %xmm11
; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm8[0],xmm11[1],xmm8[1],xmm11[2],xmm8[2],xmm11[3],xmm8[3],xmm11[4],xmm8[4],xmm11[5],xmm8[5],xmm11[6],xmm8[6],xmm11[7],xmm8[7]
; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm11[0,0,1,1]
; SSE-NEXT: pand %xmm10, %xmm4
; SSE-NEXT: por %xmm4, %xmm6
; SSE-NEXT: movdqa 32(%r8), %xmm4
; SSE-NEXT: movdqa %xmm4, %xmm10
; SSE-NEXT: punpcklbw {{.*#+}} xmm10 = xmm10[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm10[0,0,0,0]
; SSE-NEXT: movdqa %xmm3, %xmm12
; SSE-NEXT: pandn %xmm7, %xmm12
; SSE-NEXT: pand %xmm3, %xmm6
; SSE-NEXT: por %xmm6, %xmm12
; SSE-NEXT: movdqa 32(%r9), %xmm6
; SSE-NEXT: punpcklbw {{.*#+}} xmm7 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm7[0,0,0,0]
; SSE-NEXT: movdqa %xmm15, %xmm0
; SSE-NEXT: pandn %xmm13, %xmm0
; SSE-NEXT: pand %xmm15, %xmm12
; SSE-NEXT: por %xmm12, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm12 = xmm9[3,3,3,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,4,4,4]
; SSE-NEXT: movdqa %xmm3, %xmm13
; SSE-NEXT: pandn %xmm12, %xmm13
; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm11[1,1,2,2]
; SSE-NEXT: pand %xmm3, %xmm12
; SSE-NEXT: por %xmm12, %xmm13
; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm10[1,1,2,2]
; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
; SSE-NEXT: movdqa %xmm0, %xmm15
; SSE-NEXT: pandn %xmm12, %xmm15
; SSE-NEXT: pand %xmm0, %xmm13
; SSE-NEXT: por %xmm13, %xmm15
; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm7[1,1,2,2]
; SSE-NEXT: movdqa {{.*#+}} xmm13 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; SSE-NEXT: movdqa %xmm13, %xmm0
; SSE-NEXT: pandn %xmm12, %xmm0
; SSE-NEXT: pand %xmm13, %xmm15
; SSE-NEXT: por %xmm15, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[3,3,3,3]
; SSE-NEXT: movdqa %xmm3, %xmm12
; SSE-NEXT: pandn %xmm11, %xmm12
; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,5,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[2,2,2,3]
; SSE-NEXT: pand %xmm3, %xmm9
; SSE-NEXT: por %xmm12, %xmm9
; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,0,65535,65535,0]
; SSE-NEXT: movdqa %xmm0, %xmm11
; SSE-NEXT: pandn %xmm10, %xmm11
; SSE-NEXT: pand %xmm0, %xmm9
; SSE-NEXT: movdqa %xmm0, %xmm10
; SSE-NEXT: por %xmm9, %xmm11
; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
; SSE-NEXT: movdqa %xmm14, %xmm0
; SSE-NEXT: pandn %xmm7, %xmm0
; SSE-NEXT: pand %xmm14, %xmm11
; SSE-NEXT: por %xmm11, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: punpckhbw {{.*#+}} xmm2 = xmm2[8],xmm8[8],xmm2[9],xmm8[9],xmm2[10],xmm8[10],xmm2[11],xmm8[11],xmm2[12],xmm8[12],xmm2[13],xmm8[13],xmm2[14],xmm8[14],xmm2[15],xmm8[15]
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm5[8],xmm1[9],xmm5[9],xmm1[10],xmm5[10],xmm1[11],xmm5[11],xmm1[12],xmm5[12],xmm1[13],xmm5[13],xmm1[14],xmm5[14],xmm1[15],xmm5[15]
; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm1[1,0,2,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,1,0,1]
; SSE-NEXT: movdqa %xmm10, %xmm7
; SSE-NEXT: pandn %xmm5, %xmm7
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,0,1,1]
; SSE-NEXT: pand %xmm10, %xmm5
; SSE-NEXT: por %xmm5, %xmm7
; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,0,0]
; SSE-NEXT: movdqa %xmm3, %xmm8
; SSE-NEXT: pandn %xmm5, %xmm8
; SSE-NEXT: pand %xmm3, %xmm7
; SSE-NEXT: por %xmm7, %xmm8
; SSE-NEXT: punpckhbw {{.*#+}} xmm5 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,0,0]
; SSE-NEXT: movdqa {{.*#+}} xmm0 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; SSE-NEXT: movdqa %xmm0, %xmm7
; SSE-NEXT: pandn %xmm6, %xmm7
; SSE-NEXT: pand %xmm0, %xmm8
; SSE-NEXT: movdqa %xmm0, %xmm15
; SSE-NEXT: por %xmm8, %xmm7
; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm1[3,3,3,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,4,4,4]
; SSE-NEXT: movdqa %xmm3, %xmm7
; SSE-NEXT: pandn %xmm6, %xmm7
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm2[1,1,2,2]
; SSE-NEXT: pand %xmm3, %xmm6
; SSE-NEXT: por %xmm6, %xmm7
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm4[1,1,2,2]
; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,65535,65535,0,65535,65535,0,65535]
; SSE-NEXT: movdqa %xmm0, %xmm8
; SSE-NEXT: pandn %xmm6, %xmm8
; SSE-NEXT: pand %xmm0, %xmm7
; SSE-NEXT: por %xmm7, %xmm8
; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[1,1,2,2]
; SSE-NEXT: movdqa %xmm13, %xmm7
; SSE-NEXT: pandn %xmm6, %xmm7
; SSE-NEXT: pand %xmm13, %xmm8
; SSE-NEXT: por %xmm8, %xmm7
; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[3,3,3,3]
; SSE-NEXT: movdqa %xmm3, %xmm6
; SSE-NEXT: pandn %xmm2, %xmm6
; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,2,2,3]
; SSE-NEXT: pand %xmm3, %xmm1
; SSE-NEXT: por %xmm6, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[2,2,3,3]
; SSE-NEXT: movdqa %xmm10, %xmm4
; SSE-NEXT: pandn %xmm2, %xmm4
; SSE-NEXT: pand %xmm10, %xmm1
; SSE-NEXT: por %xmm1, %xmm4
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[2,2,3,3]
; SSE-NEXT: movdqa %xmm14, %xmm0
; SSE-NEXT: pandn %xmm1, %xmm0
; SSE-NEXT: pand %xmm14, %xmm4
; SSE-NEXT: por %xmm4, %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa 48(%rdx), %xmm9
; SSE-NEXT: movdqa 48(%rcx), %xmm0
; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; SSE-NEXT: movdqa %xmm9, %xmm8
; SSE-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm0[0],xmm8[1],xmm0[1],xmm8[2],xmm0[2],xmm8[3],xmm0[3],xmm8[4],xmm0[4],xmm8[5],xmm0[5],xmm8[6],xmm0[6],xmm8[7],xmm0[7]
; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm8[1,0,2,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
; SSE-NEXT: movdqa %xmm10, %xmm0
; SSE-NEXT: movdqa %xmm10, %xmm4
; SSE-NEXT: pandn %xmm1, %xmm4
; SSE-NEXT: movdqa 48(%rdi), %xmm6
; SSE-NEXT: movdqa 48(%rsi), %xmm10
; SSE-NEXT: movdqa %xmm6, %xmm1
; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm10[0],xmm1[1],xmm10[1],xmm1[2],xmm10[2],xmm1[3],xmm10[3],xmm1[4],xmm10[4],xmm1[5],xmm10[5],xmm1[6],xmm10[6],xmm1[7],xmm10[7]
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
; SSE-NEXT: pand %xmm0, %xmm2
; SSE-NEXT: por %xmm2, %xmm4
; SSE-NEXT: movdqa 48(%r8), %xmm7
; SSE-NEXT: movdqa %xmm7, %xmm2
; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm2[0,0,0,0]
; SSE-NEXT: movdqa %xmm3, %xmm12
; SSE-NEXT: pandn %xmm11, %xmm12
; SSE-NEXT: pand %xmm3, %xmm4
; SSE-NEXT: por %xmm4, %xmm12
; SSE-NEXT: movdqa 48(%r9), %xmm11
; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0],xmm11[0],xmm5[1],xmm11[1],xmm5[2],xmm11[2],xmm5[3],xmm11[3],xmm5[4],xmm11[4],xmm5[5],xmm11[5],xmm5[6],xmm11[6],xmm5[7],xmm11[7]
; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm5[0,0,0,0]
; SSE-NEXT: movdqa %xmm15, %xmm0
; SSE-NEXT: pandn %xmm13, %xmm15
; SSE-NEXT: pand %xmm0, %xmm12
; SSE-NEXT: por %xmm12, %xmm15
; SSE-NEXT: pshuflw {{.*#+}} xmm12 = xmm8[3,3,3,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,4,4,4]
; SSE-NEXT: movdqa %xmm3, %xmm13
; SSE-NEXT: pandn %xmm12, %xmm13
; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm1[1,1,2,2]
; SSE-NEXT: pand %xmm3, %xmm12
; SSE-NEXT: por %xmm12, %xmm13
; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm2[1,1,2,2]
; SSE-NEXT: movdqa {{.*#+}} xmm4 = [0,65535,65535,0,65535,65535,0,65535]
; SSE-NEXT: movdqa %xmm4, %xmm0
; SSE-NEXT: pandn %xmm12, %xmm0
; SSE-NEXT: pand %xmm4, %xmm13
; SSE-NEXT: por %xmm13, %xmm0
; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm5[1,1,2,2]
; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; SSE-NEXT: movdqa %xmm4, %xmm13
; SSE-NEXT: pandn %xmm12, %xmm13
; SSE-NEXT: pand %xmm4, %xmm0
; SSE-NEXT: por %xmm0, %xmm13
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[3,3,3,3]
; SSE-NEXT: movdqa %xmm3, %xmm1
; SSE-NEXT: pandn %xmm0, %xmm1
; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm8[0,1,2,3,5,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,2,2,3]
; SSE-NEXT: pand %xmm3, %xmm0
; SSE-NEXT: por %xmm1, %xmm0
; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[2,2,3,3]
; SSE-NEXT: movdqa {{.*#+}} xmm12 = [65535,0,65535,65535,0,65535,65535,0]
; SSE-NEXT: movdqa %xmm12, %xmm2
; SSE-NEXT: pandn %xmm1, %xmm2
; SSE-NEXT: pand %xmm12, %xmm0
; SSE-NEXT: por %xmm0, %xmm2
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[2,2,3,3]
; SSE-NEXT: movdqa %xmm14, %xmm8
; SSE-NEXT: pandn %xmm0, %xmm8
; SSE-NEXT: pand %xmm14, %xmm2
; SSE-NEXT: por %xmm2, %xmm8
; SSE-NEXT: punpckhbw {{.*#+}} xmm6 = xmm6[8],xmm10[8],xmm6[9],xmm10[9],xmm6[10],xmm10[10],xmm6[11],xmm10[11],xmm6[12],xmm10[12],xmm6[13],xmm10[13],xmm6[14],xmm10[14],xmm6[15],xmm10[15]
; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
; SSE-NEXT: # xmm9 = xmm9[8],mem[8],xmm9[9],mem[9],xmm9[10],mem[10],xmm9[11],mem[11],xmm9[12],mem[12],xmm9[13],mem[13],xmm9[14],mem[14],xmm9[15],mem[15]
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[1,0,2,2,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
; SSE-NEXT: movdqa %xmm12, %xmm1
; SSE-NEXT: pandn %xmm0, %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,0,1,1]
; SSE-NEXT: pand %xmm12, %xmm0
; SSE-NEXT: por %xmm0, %xmm1
; SSE-NEXT: punpckhbw {{.*#+}} xmm7 = xmm7[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[0,0,0,0]
; SSE-NEXT: movdqa %xmm3, %xmm2
; SSE-NEXT: pandn %xmm0, %xmm2
; SSE-NEXT: pand %xmm3, %xmm1
; SSE-NEXT: por %xmm1, %xmm2
; SSE-NEXT: movdqa {{.*#+}} xmm4 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; SSE-NEXT: pand %xmm4, %xmm2
; SSE-NEXT: punpckhbw {{.*#+}} xmm1 = xmm1[8],xmm11[8],xmm1[9],xmm11[9],xmm1[10],xmm11[10],xmm1[11],xmm11[11],xmm1[12],xmm11[12],xmm1[13],xmm11[13],xmm1[14],xmm11[14],xmm1[15],xmm11[15]
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,0,0,0]
; SSE-NEXT: pandn %xmm0, %xmm4
; SSE-NEXT: por %xmm2, %xmm4
; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[3,3,3,3,4,5,6,7]
; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
; SSE-NEXT: movdqa %xmm3, %xmm2
; SSE-NEXT: pandn %xmm0, %xmm2
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[1,1,2,2]
; SSE-NEXT: pand %xmm3, %xmm0
; SSE-NEXT: por %xmm0, %xmm2
; SSE-NEXT: movdqa {{.*#+}} xmm10 = [0,65535,65535,0,65535,65535,0,65535]
; SSE-NEXT: pand %xmm10, %xmm2
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[1,1,2,2]
; SSE-NEXT: pandn %xmm0, %xmm10
; SSE-NEXT: por %xmm2, %xmm10
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; SSE-NEXT: pand %xmm2, %xmm10
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,2]
; SSE-NEXT: pandn %xmm0, %xmm2
; SSE-NEXT: por %xmm10, %xmm2
; SSE-NEXT: movdqa %xmm2, %xmm10
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[3,3,3,3]
; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm9[0,1,2,3,5,6,7,7]
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,2,2,3]
; SSE-NEXT: pand %xmm3, %xmm2
; SSE-NEXT: pandn %xmm0, %xmm3
; SSE-NEXT: por %xmm2, %xmm3
; SSE-NEXT: movdqa %xmm12, %xmm2
; SSE-NEXT: pand %xmm12, %xmm3
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[2,2,3,3]
; SSE-NEXT: pandn %xmm0, %xmm2
; SSE-NEXT: por %xmm3, %xmm2
; SSE-NEXT: pand %xmm14, %xmm2
; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[2,2,3,3]
; SSE-NEXT: pandn %xmm0, %xmm14
; SSE-NEXT: por %xmm2, %xmm14
; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
; SSE-NEXT: movdqa %xmm14, 368(%rax)
; SSE-NEXT: movdqa %xmm10, 352(%rax)
; SSE-NEXT: movdqa %xmm4, 336(%rax)
; SSE-NEXT: movdqa %xmm8, 320(%rax)
; SSE-NEXT: movdqa %xmm13, 304(%rax)
; SSE-NEXT: movdqa %xmm15, 288(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 272(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 256(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 240(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 224(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 208(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 192(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 176(%rax)
; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 160(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 144(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 128(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 112(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 96(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 80(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 64(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 48(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 32(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, 16(%rax)
; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; SSE-NEXT: movaps %xmm0, (%rax)
; SSE-NEXT: addq $184, %rsp
; SSE-NEXT: retq
;
; AVX-LABEL: store_i8_stride6_vf64:
; AVX: # %bb.0:
; AVX-NEXT: subq $200, %rsp
; AVX-NEXT: vmovdqa 48(%rsi), %xmm1
; AVX-NEXT: vmovdqa 48(%rdi), %xmm2
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[3,3,3,3]
; AVX-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[0,0,1,1]
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
; AVX-NEXT: vmovaps {{.*#+}} ymm10 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX-NEXT: vandnps %ymm2, %ymm10, %ymm2
; AVX-NEXT: vmovdqa 48(%rcx), %xmm4
; AVX-NEXT: vmovdqa 48(%rdx), %xmm5
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
; AVX-NEXT: vpshufhw {{.*#+}} xmm6 = xmm3[0,1,2,3,5,6,7,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[2,2,2,3]
; AVX-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm4[1,0,2,2,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,1,0,1]
; AVX-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm5
; AVX-NEXT: vandps %ymm5, %ymm10, %ymm5
; AVX-NEXT: vorps %ymm2, %ymm5, %ymm6
; AVX-NEXT: vextractf128 $1, %ymm6, %xmm5
; AVX-NEXT: vmovdqa 48(%r8), %xmm2
; AVX-NEXT: vpshufb {{.*#+}} xmm7 = zero,zero,zero,zero,xmm2[8,u],zero,zero,zero,zero,xmm2[9,u],zero,zero,zero,zero
; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1],xmm7[2],xmm5[3,4],xmm7[5],xmm5[6,7]
; AVX-NEXT: vpshufb {{.*#+}} xmm7 = xmm5[0,1,2,3,4],zero,xmm5[6,7,8,9,10],zero,xmm5[12,13,14,15]
; AVX-NEXT: vmovdqa 48(%r9), %xmm5
; AVX-NEXT: vpshufb {{.*#+}} xmm8 = zero,zero,zero,zero,zero,xmm5[8],zero,zero,zero,zero,zero,xmm5[9],zero,zero,zero,zero
; AVX-NEXT: vpor %xmm7, %xmm8, %xmm7
; AVX-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufb {{.*#+}} xmm7 = zero,zero,xmm2[5,u],zero,zero,zero,zero,xmm2[6,u],zero,zero,zero,zero,xmm2[7,u]
; AVX-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0],xmm7[1],xmm6[2,3],xmm7[4],xmm6[5,6],xmm7[7]
; AVX-NEXT: vmovdqa {{.*#+}} xmm8 = [0,1,2,128,4,5,6,7,8,128,10,11,12,13,14,128]
; AVX-NEXT: vpshufb %xmm8, %xmm6, %xmm6
; AVX-NEXT: vpshufb {{.*#+}} xmm7 = zero,zero,zero,xmm5[5],zero,zero,zero,zero,zero,xmm5[6],zero,zero,zero,zero,zero,xmm5[7]
; AVX-NEXT: vpor %xmm7, %xmm6, %xmm6
; AVX-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm1[1,1,2,2]
; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[3,3,3,3]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm6, %ymm1
; AVX-NEXT: vandps %ymm1, %ymm10, %ymm1
; AVX-NEXT: vpshuflw {{.*#+}} xmm6 = xmm4[3,3,3,3,4,5,6,7]
; AVX-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,4,4,4]
; AVX-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,7,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
; AVX-NEXT: vinsertf128 $1, %xmm4, %ymm6, %ymm4
; AVX-NEXT: vandnps %ymm4, %ymm10, %ymm4
; AVX-NEXT: vorps %ymm4, %ymm1, %ymm1
; AVX-NEXT: vextractf128 $1, %ymm1, %xmm4
; AVX-NEXT: vmovdqa {{.*#+}} xmm15 = [128,128,13,u,128,128,128,128,14,u,128,128,128,128,15,u]
; AVX-NEXT: vpshufb %xmm15, %xmm2, %xmm6
; AVX-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm6[1],xmm4[2,3],xmm6[4],xmm4[5,6],xmm6[7]
; AVX-NEXT: vpshufb %xmm8, %xmm4, %xmm4
; AVX-NEXT: vpshufb {{.*#+}} xmm6 = zero,zero,zero,xmm5[13],zero,zero,zero,zero,zero,xmm5[14],zero,zero,zero,zero,zero,xmm5[15]
; AVX-NEXT: vpor %xmm6, %xmm4, %xmm4
; AVX-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufb {{.*#+}} xmm4 = xmm2[10,u],zero,zero,zero,zero,xmm2[11,u],zero,zero,zero,zero,xmm2[12,u],zero,zero
; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm4[0],xmm1[1,2],xmm4[3],xmm1[4,5],xmm4[6],xmm1[7]
; AVX-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0],zero,xmm1[2,3,4,5,6],zero,xmm1[8,9,10,11,12],zero,xmm1[14,15]
; AVX-NEXT: vmovdqa {{.*#+}} xmm13 = [128,10,128,128,128,128,128,11,128,128,128,128,128,12,128,128]
; AVX-NEXT: vpshufb %xmm13, %xmm5, %xmm4
; AVX-NEXT: vpor %xmm4, %xmm1, %xmm1
; AVX-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovdqa 32(%rsi), %xmm6
; AVX-NEXT: vmovdqa 32(%rdi), %xmm11
; AVX-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm11[8],xmm6[8],xmm11[9],xmm6[9],xmm11[10],xmm6[10],xmm11[11],xmm6[11],xmm11[12],xmm6[12],xmm11[13],xmm6[13],xmm11[14],xmm6[14],xmm11[15],xmm6[15]
; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm7[1,1,2,2]
; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm7[3,3,3,3]
; AVX-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm1
; AVX-NEXT: vmovdqa 32(%rcx), %xmm12
; AVX-NEXT: vmovdqa 32(%rdx), %xmm14
; AVX-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm14[8],xmm12[8],xmm14[9],xmm12[9],xmm14[10],xmm12[10],xmm14[11],xmm12[11],xmm14[12],xmm12[12],xmm14[13],xmm12[13],xmm14[14],xmm12[14],xmm14[15],xmm12[15]
; AVX-NEXT: vpshuflw {{.*#+}} xmm8 = xmm4[3,3,3,3,4,5,6,7]
; AVX-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,4,4,4]
; AVX-NEXT: vpshufhw {{.*#+}} xmm9 = xmm4[0,1,2,3,5,6,7,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[2,2,2,3]
; AVX-NEXT: vinsertf128 $1, %xmm9, %ymm8, %ymm8
; AVX-NEXT: vandps %ymm1, %ymm10, %ymm1
; AVX-NEXT: vandnps %ymm8, %ymm10, %ymm8
; AVX-NEXT: vorps %ymm1, %ymm8, %ymm9
; AVX-NEXT: vmovdqa 32(%r8), %xmm1
; AVX-NEXT: vextractf128 $1, %ymm9, %xmm8
; AVX-NEXT: vpshufb %xmm15, %xmm1, %xmm10
; AVX-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0],xmm10[1],xmm8[2,3],xmm10[4],xmm8[5,6],xmm10[7]
; AVX-NEXT: vpshufb {{.*#+}} xmm10 = xmm8[0,1,2],zero,xmm8[4,5,6,7,8],zero,xmm8[10,11,12,13,14],zero
; AVX-NEXT: vmovdqa 32(%r9), %xmm8
; AVX-NEXT: vpshufb {{.*#+}} xmm15 = zero,zero,zero,xmm8[13],zero,zero,zero,zero,zero,xmm8[14],zero,zero,zero,zero,zero,xmm8[15]
; AVX-NEXT: vpor %xmm15, %xmm10, %xmm10
; AVX-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufb {{.*#+}} xmm10 = xmm1[10,u],zero,zero,zero,zero,xmm1[11,u],zero,zero,zero,zero,xmm1[12,u],zero,zero
; AVX-NEXT: vpblendw {{.*#+}} xmm9 = xmm10[0],xmm9[1,2],xmm10[3],xmm9[4,5],xmm10[6],xmm9[7]
; AVX-NEXT: vmovdqa {{.*#+}} xmm15 = [0,128,2,3,4,5,6,128,8,9,10,11,12,128,14,15]
; AVX-NEXT: vpshufb %xmm15, %xmm9, %xmm9
; AVX-NEXT: vpshufb %xmm13, %xmm8, %xmm10
; AVX-NEXT: vpor %xmm10, %xmm9, %xmm9
; AVX-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufd {{.*#+}} xmm9 = xmm0[0,0,1,1]
; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,2,2]
; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm9, %ymm0
; AVX-NEXT: vpshuflw {{.*#+}} xmm9 = xmm3[1,0,2,2,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[0,1,0,1]
; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[3,3,3,3,4,5,6,7]
; AVX-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,4,4]
; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm9, %ymm3
; AVX-NEXT: vmovaps {{.*#+}} ymm9 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX-NEXT: vandps %ymm0, %ymm9, %ymm0
; AVX-NEXT: vandnps %ymm3, %ymm9, %ymm3
; AVX-NEXT: vorps %ymm3, %ymm0, %ymm0
; AVX-NEXT: vextractf128 $1, %ymm0, %xmm3
; AVX-NEXT: vpshufb {{.*#+}} xmm9 = xmm2[2,u],zero,zero,zero,zero,xmm2[3,u],zero,zero,zero,zero,xmm2[4,u],zero,zero
; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm9[0],xmm3[1,2],xmm9[3],xmm3[4,5],xmm9[6],xmm3[7]
; AVX-NEXT: vpshufb %xmm15, %xmm3, %xmm3
; AVX-NEXT: vmovdqa {{.*#+}} xmm13 = [128,2,128,128,128,128,128,3,128,128,128,128,128,4,128,128]
; AVX-NEXT: vpshufb %xmm13, %xmm5, %xmm9
; AVX-NEXT: vpor %xmm3, %xmm9, %xmm3
; AVX-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovdqa {{.*#+}} xmm10 = [128,128,128,128,0,u,128,128,128,128,1,u,128,128,128,128]
; AVX-NEXT: vpshufb %xmm10, %xmm2, %xmm2
; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm2[2],xmm0[3,4],xmm2[5],xmm0[6,7]
; AVX-NEXT: vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,128,6,7,8,9,10,128,12,13,14,15]
; AVX-NEXT: vpshufb %xmm9, %xmm0, %xmm0
; AVX-NEXT: vmovdqa {{.*#+}} xmm15 = [128,128,128,128,128,0,128,128,128,128,128,1,128,128,128,128]
; AVX-NEXT: vpshufb %xmm15, %xmm5, %xmm2
; AVX-NEXT: vpor %xmm2, %xmm0, %xmm0
; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm11[0],xmm6[0],xmm11[1],xmm6[1],xmm11[2],xmm6[2],xmm11[3],xmm6[3],xmm11[4],xmm6[4],xmm11[5],xmm6[5],xmm11[6],xmm6[6],xmm11[7],xmm6[7]
; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,0,1,1]
; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[1,1,2,2]
; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3],xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm3[1,0,2,2,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,1,0,1]
; AVX-NEXT: vpshuflw {{.*#+}} xmm6 = xmm3[3,3,3,3,4,5,6,7]
; AVX-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,4,4,4]
; AVX-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
; AVX-NEXT: vmovaps {{.*#+}} ymm6 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX-NEXT: vandps %ymm6, %ymm2, %ymm2
; AVX-NEXT: vandnps %ymm5, %ymm6, %ymm5
; AVX-NEXT: vorps %ymm5, %ymm2, %ymm2
; AVX-NEXT: vextractf128 $1, %ymm2, %xmm5
; AVX-NEXT: vpshufb {{.*#+}} xmm6 = xmm1[2,u],zero,zero,zero,zero,xmm1[3,u],zero,zero,zero,zero,xmm1[4,u],zero,zero
; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0],xmm5[1,2],xmm6[3],xmm5[4,5],xmm6[6],xmm5[7]
; AVX-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[0],zero,xmm5[2,3,4,5,6],zero,xmm5[8,9,10,11,12],zero,xmm5[14,15]
; AVX-NEXT: vpshufb %xmm13, %xmm8, %xmm6
; AVX-NEXT: vpor %xmm6, %xmm5, %xmm5
; AVX-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufb %xmm10, %xmm1, %xmm5
; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm5[2],xmm2[3,4],xmm5[5],xmm2[6,7]
; AVX-NEXT: vpshufb %xmm9, %xmm2, %xmm2
; AVX-NEXT: vpshufb %xmm15, %xmm8, %xmm5
; AVX-NEXT: vpor %xmm5, %xmm2, %xmm2
; AVX-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm7[0,0,1,1]
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,5,6,7,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,2,3]
; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm4[1,0,2,2,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
; AVX-NEXT: vmovaps {{.*#+}} ymm15 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX-NEXT: vandnps %ymm0, %ymm15, %ymm0
; AVX-NEXT: vandps %ymm2, %ymm15, %ymm2
; AVX-NEXT: vorps %ymm0, %ymm2, %ymm0
; AVX-NEXT: vextractf128 $1, %ymm0, %xmm2
; AVX-NEXT: vmovdqa {{.*#+}} xmm5 = [128,128,128,128,8,u,128,128,128,128,9,u,128,128,128,128]
; AVX-NEXT: vpshufb %xmm5, %xmm1, %xmm3
; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm3[2],xmm2[3,4],xmm3[5],xmm2[6,7]
; AVX-NEXT: vpshufb %xmm9, %xmm2, %xmm2
; AVX-NEXT: vmovdqa %xmm9, %xmm14
; AVX-NEXT: vmovdqa {{.*#+}} xmm6 = [128,128,128,128,128,8,128,128,128,128,128,9,128,128,128,128]
; AVX-NEXT: vpshufb %xmm6, %xmm8, %xmm3
; AVX-NEXT: vpor %xmm3, %xmm2, %xmm2
; AVX-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovdqa {{.*#+}} xmm7 = [128,128,5,u,128,128,128,128,6,u,128,128,128,128,7,u]
; AVX-NEXT: vpshufb %xmm7, %xmm1, %xmm1
; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3],xmm1[4],xmm0[5,6],xmm1[7]
; AVX-NEXT: vmovdqa {{.*#+}} xmm13 = [0,1,2,128,4,5,6,7,8,128,10,11,12,13,14,128]
; AVX-NEXT: vpshufb %xmm13, %xmm0, %xmm0
; AVX-NEXT: vmovdqa {{.*#+}} xmm9 = [128,128,128,5,128,128,128,128,128,6,128,128,128,128,128,7]
; AVX-NEXT: vpshufb %xmm9, %xmm8, %xmm1
; AVX-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
; AVX-NEXT: vmovdqa 16(%rsi), %xmm0
; AVX-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
; AVX-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm11[3,3,3,3]
; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,0,1,1]
; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
; AVX-NEXT: vmovdqa 16(%rcx), %xmm2
; AVX-NEXT: vmovdqa 16(%rdx), %xmm3
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
; AVX-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm10[0,1,2,3,5,6,7,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,2,2,3]
; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm4[1,0,2,2,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,1,0,1]
; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
; AVX-NEXT: vandnps %ymm1, %ymm15, %ymm1
; AVX-NEXT: vandps %ymm2, %ymm15, %ymm2
; AVX-NEXT: vorps %ymm1, %ymm2, %ymm1
; AVX-NEXT: vmovdqa 16(%r8), %xmm3
; AVX-NEXT: vextractf128 $1, %ymm1, %xmm2
; AVX-NEXT: vpshufb %xmm5, %xmm3, %xmm5
; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm5[2],xmm2[3,4],xmm5[5],xmm2[6,7]
; AVX-NEXT: vpshufb %xmm14, %xmm2, %xmm5
; AVX-NEXT: vmovdqa 16(%r9), %xmm2
; AVX-NEXT: vpshufb %xmm6, %xmm2, %xmm6
; AVX-NEXT: vpor %xmm6, %xmm5, %xmm5
; AVX-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufb %xmm7, %xmm3, %xmm5
; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm5[1],xmm1[2,3],xmm5[4],xmm1[5,6],xmm5[7]
; AVX-NEXT: vpshufb %xmm13, %xmm1, %xmm1
; AVX-NEXT: vpshufb %xmm9, %xmm2, %xmm5
; AVX-NEXT: vpor %xmm5, %xmm1, %xmm1
; AVX-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[1,1,2,2]
; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
; AVX-NEXT: vpshuflw {{.*#+}} xmm1 = xmm4[3,3,3,3,4,5,6,7]
; AVX-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
; AVX-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,7,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
; AVX-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm1
; AVX-NEXT: vandps %ymm0, %ymm15, %ymm0
; AVX-NEXT: vandnps %ymm1, %ymm15, %ymm1
; AVX-NEXT: vorps %ymm1, %ymm0, %ymm0
; AVX-NEXT: vextractf128 $1, %ymm0, %xmm1
; AVX-NEXT: vmovdqa {{.*#+}} xmm14 = [128,128,13,u,128,128,128,128,14,u,128,128,128,128,15,u]
; AVX-NEXT: vpshufb %xmm14, %xmm3, %xmm4
; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm4[1],xmm1[2,3],xmm4[4],xmm1[5,6],xmm4[7]
; AVX-NEXT: vpshufb %xmm13, %xmm1, %xmm1
; AVX-NEXT: vmovdqa {{.*#+}} xmm15 = [128,128,128,13,128,128,128,128,128,14,128,128,128,128,128,15]
; AVX-NEXT: vpshufb %xmm15, %xmm2, %xmm4
; AVX-NEXT: vpor %xmm4, %xmm1, %xmm1
; AVX-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovdqa {{.*#+}} xmm13 = [10,u,128,128,128,128,11,u,128,128,128,128,12,u,128,128]
; AVX-NEXT: vpshufb %xmm13, %xmm3, %xmm1
; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2],xmm1[3],xmm0[4,5],xmm1[6],xmm0[7]
; AVX-NEXT: vmovdqa {{.*#+}} xmm13 = [0,128,2,3,4,5,6,128,8,9,10,11,12,128,14,15]
; AVX-NEXT: vpshufb %xmm13, %xmm0, %xmm0
; AVX-NEXT: vpshufb {{.*#+}} xmm1 = zero,xmm2[10],zero,zero,zero,zero,zero,xmm2[11],zero,zero,zero,zero,zero,xmm2[12],zero,zero
; AVX-NEXT: vpor %xmm1, %xmm0, %xmm0
; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovdqa (%rsi), %xmm8
; AVX-NEXT: vmovdqa (%rdi), %xmm7
; AVX-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm7[8],xmm8[8],xmm7[9],xmm8[9],xmm7[10],xmm8[10],xmm7[11],xmm8[11],xmm7[12],xmm8[12],xmm7[13],xmm8[13],xmm7[14],xmm8[14],xmm7[15],xmm8[15]
; AVX-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[1,1,2,2]
; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[3,3,3,3]
; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
; AVX-NEXT: vmovdqa (%rcx), %xmm6
; AVX-NEXT: vmovdqa (%rdx), %xmm5
; AVX-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
; AVX-NEXT: vpshuflw {{.*#+}} xmm1 = xmm9[3,3,3,3,4,5,6,7]
; AVX-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,4,4]
; AVX-NEXT: vpshufhw {{.*#+}} xmm12 = xmm9[0,1,2,3,5,6,7,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[2,2,2,3]
; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm1, %ymm1
; AVX-NEXT: vmovaps {{.*#+}} ymm4 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX-NEXT: vandps %ymm4, %ymm0, %ymm0
; AVX-NEXT: vandnps %ymm1, %ymm4, %ymm1
; AVX-NEXT: vorps %ymm1, %ymm0, %ymm12
; AVX-NEXT: vmovdqa (%r8), %xmm1
; AVX-NEXT: vpshufb %xmm14, %xmm1, %xmm0
; AVX-NEXT: vextractf128 $1, %ymm12, %xmm14
; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm14[0],xmm0[1],xmm14[2,3],xmm0[4],xmm14[5,6],xmm0[7]
; AVX-NEXT: vpshufb {{.*#+}} xmm14 = xmm0[0,1,2],zero,xmm0[4,5,6,7,8],zero,xmm0[10,11,12,13,14],zero
; AVX-NEXT: vmovdqa (%r9), %xmm0
; AVX-NEXT: vpshufb %xmm15, %xmm0, %xmm15
; AVX-NEXT: vpor %xmm15, %xmm14, %xmm4
; AVX-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufb {{.*#+}} xmm15 = xmm1[10,u],zero,zero,zero,zero,xmm1[11,u],zero,zero,zero,zero,xmm1[12,u],zero,zero
; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm15[0],xmm12[1,2],xmm15[3],xmm12[4,5],xmm15[6],xmm12[7]
; AVX-NEXT: vmovdqa %xmm13, %xmm4
; AVX-NEXT: vpshufb %xmm13, %xmm12, %xmm12
; AVX-NEXT: vpshufb {{.*#+}} xmm15 = zero,xmm0[10],zero,zero,zero,zero,zero,xmm0[11],zero,zero,zero,zero,zero,xmm0[12],zero,zero
; AVX-NEXT: vpor %xmm15, %xmm12, %xmm12
; AVX-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vpshufd {{.*#+}} xmm15 = xmm11[0,0,1,1]
; AVX-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[1,1,2,2]
; AVX-NEXT: vinsertf128 $1, %xmm11, %ymm15, %ymm11
; AVX-NEXT: vpshuflw {{.*#+}} xmm15 = xmm10[1,0,2,2,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[0,1,0,1]
; AVX-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[3,3,3,3,4,5,6,7]
; AVX-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,4,4,4]
; AVX-NEXT: vinsertf128 $1, %xmm10, %ymm15, %ymm10
; AVX-NEXT: vmovaps {{.*#+}} ymm13 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX-NEXT: vandps %ymm13, %ymm11, %ymm11
; AVX-NEXT: vandnps %ymm10, %ymm13, %ymm10
; AVX-NEXT: vorps %ymm10, %ymm11, %ymm11
; AVX-NEXT: vextractf128 $1, %ymm11, %xmm10
; AVX-NEXT: vmovdqa {{.*#+}} xmm12 = [2,u,128,128,128,128,3,u,128,128,128,128,4,u,128,128]
; AVX-NEXT: vpshufb %xmm12, %xmm3, %xmm15
; AVX-NEXT: vpblendw {{.*#+}} xmm10 = xmm15[0],xmm10[1,2],xmm15[3],xmm10[4,5],xmm15[6],xmm10[7]
; AVX-NEXT: vpshufb %xmm4, %xmm10, %xmm10
; AVX-NEXT: vmovdqa {{.*#+}} xmm14 = [128,2,128,128,128,128,128,3,128,128,128,128,128,4,128,128]
; AVX-NEXT: vpshufb %xmm14, %xmm2, %xmm15
; AVX-NEXT: vpor %xmm15, %xmm10, %xmm4
; AVX-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX-NEXT: vmovdqa {{.*#+}} xmm15 = [128,128,128,128,0,u,128,128,128,128,1,u,128,128,128,128]
; AVX-NEXT: vpshufb %xmm15, %xmm3, %xmm3
; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm11[0,1],xmm3[2],xmm11[3,4],xmm3[5],xmm11[6,7]
; AVX-NEXT: vmovdqa {{.*#+}} xmm11 = [0,1,2,3,4,128,6,7,8,9,10,128,12,13,14,15]
; AVX-NEXT: vpshufb %xmm11, %xmm3, %xmm3
; AVX-NEXT: vmovdqa {{.*#+}} xmm10 = [128,128,128,128,128,0,128,128,128,128,128,1,128,128,128,128]
; AVX-NEXT: vpshufb %xmm10, %xmm2, %xmm2
; AVX-NEXT: vpor %xmm2, %xmm3, %xmm2
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm7[0,0,1,1]
; AVX-NEXT: vpshufd {{.*#+}} xmm8 = xmm7[1,1,2,2]
; AVX-NEXT: vinsertf128 $1, %xmm8, %ymm3, %ymm3
; AVX-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3],xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
; AVX-NEXT: vpshuflw {{.*#+}} xmm6 = xmm4[1,0,2,2,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[0,1,0,1]
; AVX-NEXT: vpshuflw {{.*#+}} xmm8 = xmm4[3,3,3,3,4,5,6,7]
; AVX-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,4,4,4]
; AVX-NEXT: vinsertf128 $1, %xmm8, %ymm6, %ymm6
; AVX-NEXT: vandps %ymm3, %ymm13, %ymm3
; AVX-NEXT: vandnps %ymm6, %ymm13, %ymm6
; AVX-NEXT: vorps %ymm6, %ymm3, %ymm6
; AVX-NEXT: vpshufb %xmm12, %xmm1, %xmm3
; AVX-NEXT: vextractf128 $1, %ymm6, %xmm8
; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0],xmm8[1,2],xmm3[3],xmm8[4,5],xmm3[6],xmm8[7]
; AVX-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[0],zero,xmm3[2,3,4,5,6],zero,xmm3[8,9,10,11,12],zero,xmm3[14,15]
; AVX-NEXT: vpshufb %xmm14, %xmm0, %xmm8
; AVX-NEXT: vpor %xmm3, %xmm8, %xmm3
; AVX-NEXT: vpshufb %xmm15, %xmm1, %xmm8
; AVX-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1],xmm8[2],xmm6[3,4],xmm8[5],xmm6[6,7]
; AVX-NEXT: vpshufb %xmm11, %xmm6, %xmm6
; AVX-NEXT: vpshufb %xmm10, %xmm0, %xmm8
; AVX-NEXT: vpor %xmm6, %xmm8, %xmm6
; AVX-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[3,3,3,3]
; AVX-NEXT: vpermilps $80, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
; AVX-NEXT: # xmm8 = mem[0,0,1,1]
; AVX-NEXT: vinsertf128 $1, %xmm8, %ymm7, %ymm7
; AVX-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,7,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[2,2,2,3]
; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm9[1,0,2,2,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,1,0,1]
; AVX-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
; AVX-NEXT: vmovaps {{.*#+}} ymm8 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX-NEXT: vandnps %ymm7, %ymm8, %ymm5
; AVX-NEXT: vandps %ymm4, %ymm8, %ymm4
; AVX-NEXT: vorps %ymm5, %ymm4, %ymm4
; AVX-NEXT: vpshufb {{.*#+}} xmm5 = zero,zero,zero,zero,xmm1[8,u],zero,zero,zero,zero,xmm1[9,u],zero,zero,zero,zero
; AVX-NEXT: vextractf128 $1, %ymm4, %xmm7
; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm7[0,1],xmm5[2],xmm7[3,4],xmm5[5],xmm7[6,7]
; AVX-NEXT: vpshufb %xmm11, %xmm5, %xmm5
; AVX-NEXT: vpshufb {{.*#+}} xmm7 = zero,zero,zero,zero,zero,xmm0[8],zero,zero,zero,zero,zero,xmm0[9],zero,zero,zero,zero
; AVX-NEXT: vpor %xmm7, %xmm5, %xmm5
; AVX-NEXT: vpshufb {{.*#+}} xmm1 = zero,zero,xmm1[5,u],zero,zero,zero,zero,xmm1[6,u],zero,zero,zero,zero,xmm1[7,u]
; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm4[0],xmm1[1],xmm4[2,3],xmm1[4],xmm4[5,6],xmm1[7]
; AVX-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0,1,2],zero,xmm1[4,5,6,7,8],zero,xmm1[10,11,12,13,14],zero
; AVX-NEXT: vpshufb {{.*#+}} xmm0 = zero,zero,zero,xmm0[5],zero,zero,zero,zero,zero,xmm0[6],zero,zero,zero,zero,zero,xmm0[7]
; AVX-NEXT: vpor %xmm0, %xmm1, %xmm0
; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX-NEXT: vmovdqa %xmm0, 32(%rax)
; AVX-NEXT: vmovdqa %xmm5, 48(%rax)
; AVX-NEXT: vmovdqa %xmm6, (%rax)
; AVX-NEXT: vmovdqa %xmm3, 16(%rax)
; AVX-NEXT: vmovdqa %xmm2, 96(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 112(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 64(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 80(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 160(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 176(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 128(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 144(%rax)
; AVX-NEXT: vmovaps (%rsp), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 224(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 240(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 192(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 208(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 288(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 304(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 256(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 272(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 352(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 368(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 320(%rax)
; AVX-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX-NEXT: vmovaps %xmm0, 336(%rax)
; AVX-NEXT: addq $200, %rsp
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
; AVX2-LABEL: store_i8_stride6_vf64:
; AVX2: # %bb.0:
; AVX2-NEXT: subq $664, %rsp # imm = 0x298
; AVX2-NEXT: vmovdqa 32(%rdx), %ymm6
; AVX2-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovdqa 32(%rcx), %ymm7
; AVX2-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vpbroadcastq {{.*#+}} ymm0 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX2-NEXT: vmovdqa (%rcx), %xmm8
; AVX2-NEXT: vmovdqa 32(%rcx), %xmm5
; AVX2-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vpshufb %xmm0, %xmm8, %xmm1
; AVX2-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovdqa (%rdx), %xmm11
; AVX2-NEXT: vmovdqa 32(%rdx), %xmm9
; AVX2-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vpshufb %xmm0, %xmm11, %xmm2
; AVX2-NEXT: vmovdqa %xmm11, (%rsp) # 16-byte Spill
; AVX2-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-NEXT: vmovdqa (%rsi), %xmm15
; AVX2-NEXT: vmovdqa 32(%rsi), %xmm14
; AVX2-NEXT: vpbroadcastq {{.*#+}} xmm3 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX2-NEXT: vpshufb %xmm3, %xmm15, %xmm2
; AVX2-NEXT: vmovdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovdqa (%rdi), %xmm10
; AVX2-NEXT: vmovdqa 32(%rdi), %xmm13
; AVX2-NEXT: vpshufb %xmm3, %xmm10, %xmm4
; AVX2-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm2[8],xmm4[9],xmm2[9],xmm4[10],xmm2[10],xmm4[11],xmm2[11],xmm4[12],xmm2[12],xmm4[13],xmm2[13],xmm4[14],xmm2[14],xmm4[15],xmm2[15]
; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,0,0,1]
; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm2 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
; AVX2-NEXT: vpblendvb %ymm2, %ymm1, %ymm4, %ymm1
; AVX2-NEXT: vpshufb %xmm0, %xmm5, %xmm4
; AVX2-NEXT: vpshufb %xmm0, %xmm9, %xmm5
; AVX2-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-NEXT: vpshufb %xmm3, %xmm14, %xmm5
; AVX2-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vpshufb %xmm3, %xmm13, %xmm3
; AVX2-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm5[8],xmm3[9],xmm5[9],xmm3[10],xmm5[10],xmm3[11],xmm5[11],xmm3[12],xmm5[12],xmm3[13],xmm5[13],xmm3[14],xmm5[14],xmm3[15],xmm5[15]
; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
; AVX2-NEXT: vpblendvb %ymm2, %ymm4, %ymm3, %ymm3
; AVX2-NEXT: vpshufb %ymm0, %ymm7, %ymm4
; AVX2-NEXT: vpshufb %ymm0, %ymm6, %ymm5
; AVX2-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[2],ymm4[2],ymm5[3],ymm4[3],ymm5[4],ymm4[4],ymm5[5],ymm4[5],ymm5[6],ymm4[6],ymm5[7],ymm4[7],ymm5[16],ymm4[16],ymm5[17],ymm4[17],ymm5[18],ymm4[18],ymm5[19],ymm4[19],ymm5[20],ymm4[20],ymm5[21],ymm4[21],ymm5[22],ymm4[22],ymm5[23],ymm4[23]
; AVX2-NEXT: vmovdqa 32(%rdi), %ymm7
; AVX2-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovdqa 32(%rsi), %ymm6
; AVX2-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vpbroadcastq {{.*#+}} ymm5 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX2-NEXT: vpshufb %ymm5, %ymm6, %ymm6
; AVX2-NEXT: vpshufb %ymm5, %ymm7, %ymm7
; AVX2-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[4],ymm6[4],ymm7[5],ymm6[5],ymm7[6],ymm6[6],ymm7[7],ymm6[7],ymm7[16],ymm6[16],ymm7[17],ymm6[17],ymm7[18],ymm6[18],ymm7[19],ymm6[19],ymm7[20],ymm6[20],ymm7[21],ymm6[21],ymm7[22],ymm6[22],ymm7[23],ymm6[23]
; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm2, %ymm4, %ymm6, %ymm4
; AVX2-NEXT: vmovdqa (%rdx), %ymm7
; AVX2-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovdqa (%rcx), %ymm6
; AVX2-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vpshufb %ymm0, %ymm6, %ymm6
; AVX2-NEXT: vpshufb %ymm0, %ymm7, %ymm0
; AVX2-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[1],ymm6[1],ymm0[2],ymm6[2],ymm0[3],ymm6[3],ymm0[4],ymm6[4],ymm0[5],ymm6[5],ymm0[6],ymm6[6],ymm0[7],ymm6[7],ymm0[16],ymm6[16],ymm0[17],ymm6[17],ymm0[18],ymm6[18],ymm0[19],ymm6[19],ymm0[20],ymm6[20],ymm0[21],ymm6[21],ymm0[22],ymm6[22],ymm0[23],ymm6[23]
; AVX2-NEXT: vmovdqa (%rdi), %ymm7
; AVX2-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovdqa (%rsi), %ymm12
; AVX2-NEXT: vpshufb %ymm5, %ymm12, %ymm6
; AVX2-NEXT: vpshufb %ymm5, %ymm7, %ymm5
; AVX2-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[4],ymm6[4],ymm5[5],ymm6[5],ymm5[6],ymm6[6],ymm5[7],ymm6[7],ymm5[16],ymm6[16],ymm5[17],ymm6[17],ymm5[18],ymm6[18],ymm5[19],ymm6[19],ymm5[20],ymm6[20],ymm5[21],ymm6[21],ymm5[22],ymm6[22],ymm5[23],ymm6[23]
; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
; AVX2-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm2, %ymm0, %ymm5, %ymm0
; AVX2-NEXT: vmovdqa (%r8), %xmm7
; AVX2-NEXT: vpmovsxbw {{.*#+}} xmm2 = [6,5,8,7,9,9,9,9]
; AVX2-NEXT: vpshufb %xmm2, %xmm7, %xmm5
; AVX2-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm6 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX2-NEXT: vpblendvb %ymm6, %ymm1, %ymm5, %ymm1
; AVX2-NEXT: vmovdqa 32(%r8), %xmm5
; AVX2-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vpshufb %xmm2, %xmm5, %xmm2
; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-NEXT: vpblendvb %ymm6, %ymm3, %ymm2, %ymm2
; AVX2-NEXT: vmovdqa 32(%r8), %ymm5
; AVX2-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
; AVX2-NEXT: # ymm3 = mem[0,1,0,1]
; AVX2-NEXT: vpshufb %ymm3, %ymm5, %ymm5
; AVX2-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-NEXT: vmovdqa (%r8), %ymm5
; AVX2-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vpshufb %ymm3, %ymm5, %ymm3
; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm0
; AVX2-NEXT: vmovdqa (%r9), %xmm5
; AVX2-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
; AVX2-NEXT: vpshufb %xmm3, %xmm5, %xmm5
; AVX2-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; AVX2-NEXT: vpblendvb %ymm6, %ymm1, %ymm5, %ymm1
; AVX2-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovdqa 32(%r9), %xmm9
; AVX2-NEXT: vpshufb %xmm3, %xmm9, %xmm1
; AVX2-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-NEXT: vpblendvb %ymm6, %ymm2, %ymm1, %ymm1
; AVX2-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovdqa 32(%r9), %ymm2
; AVX2-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
; AVX2-NEXT: # ymm1 = mem[0,1,0,1]
; AVX2-NEXT: vpshufb %ymm1, %ymm2, %ymm2
; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm6, %ymm4, %ymm2, %ymm2
; AVX2-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovdqa (%r9), %ymm2
; AVX2-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vpshufb %ymm1, %ymm2, %ymm1
; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm6, %ymm0, %ymm1, %ymm0
; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm10[0],xmm15[0],xmm10[1],xmm15[1],xmm10[2],xmm15[2],xmm10[3],xmm15[3],xmm10[4],xmm15[4],xmm10[5],xmm15[5],xmm10[6],xmm15[6],xmm10[7],xmm15[7]
; AVX2-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,3,2,1,4,5,6,7]
; AVX2-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,6,5]
; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm0[0,0,0,1]
; AVX2-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm11[0],xmm8[0],xmm11[1],xmm8[1],xmm11[2],xmm8[2],xmm11[3],xmm8[3],xmm11[4],xmm8[4],xmm11[5],xmm8[5],xmm11[6],xmm8[6],xmm11[7],xmm8[7]
; AVX2-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[1,0,3,2,4,5,6,7]
; AVX2-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,4,4,4]
; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm0[0,0,0,1]
; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm0 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
; AVX2-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm11
; AVX2-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3],xmm13[4],xmm14[4],xmm13[5],xmm14[5],xmm13[6],xmm14[6],xmm13[7],xmm14[7]
; AVX2-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,3,2,1,4,5,6,7]
; AVX2-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,5]
; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
; AVX2-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
; AVX2-NEXT: # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
; AVX2-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[1,0,3,2,4,5,6,7]
; AVX2-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,4,4,4]
; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm6
; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
; AVX2-NEXT: # ymm1 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[4],mem[4],ymm1[5],mem[5],ymm1[6],mem[6],ymm1[7],mem[7],ymm1[16],mem[16],ymm1[17],mem[17],ymm1[18],mem[18],ymm1[19],mem[19],ymm1[20],mem[20],ymm1[21],mem[21],ymm1[22],mem[22],ymm1[23],mem[23]
; AVX2-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
; AVX2-NEXT: vpshufhw {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
; AVX2-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm2 # 32-byte Folded Reload
; AVX2-NEXT: # ymm2 = ymm15[0],mem[0],ymm15[1],mem[1],ymm15[2],mem[2],ymm15[3],mem[3],ymm15[4],mem[4],ymm15[5],mem[5],ymm15[6],mem[6],ymm15[7],mem[7],ymm15[16],mem[16],ymm15[17],mem[17],ymm15[18],mem[18],ymm15[19],mem[19],ymm15[20],mem[20],ymm15[21],mem[21],ymm15[22],mem[22],ymm15[23],mem[23]
; AVX2-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[1,0,3,2,4,5,6,7,9,8,11,10,12,13,14,15]
; AVX2-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm1
; AVX2-NEXT: vmovdqa %ymm12, %ymm13
; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
; AVX2-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm12[0],ymm13[0],ymm12[1],ymm13[1],ymm12[2],ymm13[2],ymm12[3],ymm13[3],ymm12[4],ymm13[4],ymm12[5],ymm13[5],ymm12[6],ymm13[6],ymm12[7],ymm13[7],ymm12[16],ymm13[16],ymm12[17],ymm13[17],ymm12[18],ymm13[18],ymm12[19],ymm13[19],ymm12[20],ymm13[20],ymm12[21],ymm13[21],ymm12[22],ymm13[22],ymm12[23],ymm13[23]
; AVX2-NEXT: vpshuflw {{.*#+}} ymm2 = ymm2[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
; AVX2-NEXT: vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
; AVX2-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
; AVX2-NEXT: # ymm3 = ymm3[0],mem[0],ymm3[1],mem[1],ymm3[2],mem[2],ymm3[3],mem[3],ymm3[4],mem[4],ymm3[5],mem[5],ymm3[6],mem[6],ymm3[7],mem[7],ymm3[16],mem[16],ymm3[17],mem[17],ymm3[18],mem[18],ymm3[19],mem[19],ymm3[20],mem[20],ymm3[21],mem[21],ymm3[22],mem[22],ymm3[23],mem[23]
; AVX2-NEXT: vpshuflw {{.*#+}} ymm3 = ymm3[1,0,3,2,4,5,6,7,9,8,11,10,12,13,14,15]
; AVX2-NEXT: vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,4,4,4,4,8,9,10,11,12,12,12,12]
; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm0
; AVX2-NEXT: vpmovsxbw {{.*#+}} xmm2 = [2,1,0,3,4,4,4,4]
; AVX2-NEXT: vmovdqa %xmm7, %xmm14
; AVX2-NEXT: vpshufb %xmm2, %xmm7, %xmm3
; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm4 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX2-NEXT: vpblendvb %ymm4, %ymm11, %ymm3, %ymm3
; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
; AVX2-NEXT: vpshufb %xmm2, %xmm7, %xmm2
; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-NEXT: vpblendvb %ymm4, %ymm6, %ymm2, %ymm2
; AVX2-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
; AVX2-NEXT: # ymm6 = mem[0,1,0,1]
; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
; AVX2-NEXT: vpshufb %ymm6, %ymm5, %ymm11
; AVX2-NEXT: vpermq {{.*#+}} ymm11 = ymm11[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm4, %ymm1, %ymm11, %ymm1
; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
; AVX2-NEXT: vpshufb %ymm6, %ymm8, %ymm6
; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm4, %ymm0, %ymm6, %ymm5
; AVX2-NEXT: vmovdqa {{.*#+}} xmm4 = [u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
; AVX2-NEXT: vpshufb %xmm4, %xmm11, %xmm6
; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm6[0,0,0,1]
; AVX2-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; AVX2-NEXT: vpblendvb %ymm0, %ymm3, %ymm6, %ymm3
; AVX2-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vpshufb %xmm4, %xmm9, %xmm3
; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
; AVX2-NEXT: vpblendvb %ymm0, %ymm2, %ymm3, %ymm2
; AVX2-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
; AVX2-NEXT: # ymm2 = mem[0,1,0,1]
; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
; AVX2-NEXT: vpshufb %ymm2, %ymm9, %ymm3
; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm0, %ymm1, %ymm3, %ymm1
; AVX2-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
; AVX2-NEXT: vpshufb %ymm2, %ymm10, %ymm1
; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-NEXT: vpblendvb %ymm0, %ymm5, %ymm1, %ymm0
; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX2-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
; AVX2-NEXT: # xmm0 = xmm0[8],mem[8],xmm0[9],mem[9],xmm0[10],mem[10],xmm0[11],mem[11],xmm0[12],mem[12],xmm0[13],mem[13],xmm0[14],mem[14],xmm0[15],mem[15]
; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; AVX2-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
; AVX2-NEXT: # xmm1 = xmm1[8],mem[8],xmm1[9],mem[9],xmm1[10],mem[10],xmm1[11],mem[11],xmm1[12],mem[12],xmm1[13],mem[13],xmm1[14],mem[14],xmm1[15],mem[15]
; AVX2-NEXT: vmovdqa {{.*#+}} xmm5 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX2-NEXT: vpshufb %xmm5, %xmm0, %xmm0
; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm0[0,0,0,1]
; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX2-NEXT: vpshufb %xmm3, %xmm1, %xmm1
; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm1[0,0,0,1]
; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm1 = [0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0]
; AVX2-NEXT: vpblendvb %ymm1, %ymm2, %ymm0, %ymm6
; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX2-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
; AVX2-NEXT: # xmm0 = xmm0[8],mem[8],xmm0[9],mem[9],xmm0[10],mem[10],xmm0[11],mem[11],xmm0[12],mem[12],xmm0[13],mem[13],xmm0[14],mem[14],xmm0[15],mem[15]
; AVX2-NEXT: vpshufb %xmm5, %xmm0, %xmm0
; AVX2-NEXT: vmovdqa (%rsp), %xmm2 # 16-byte Reload
; AVX2-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm5 # 16-byte Folded Reload
; AVX2-NEXT: # xmm5 = xmm2[8],mem[8],xmm2[9],mem[9],xmm2[10],mem[10],xmm2[11],mem[11],xmm2[12],mem[12],xmm2[13],mem[13],xmm2[14],mem[14],xmm2[15],mem[15]
; AVX2-NEXT: vpshufb %xmm3, %xmm5, %xmm3
; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
4658 ; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
4659 ; AVX2-NEXT: vpblendvb %ymm1, %ymm0, %ymm3, %ymm4
4660 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4661 ; AVX2-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm3 # 32-byte Folded Reload
4662 ; AVX2-NEXT: # ymm3 = ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15],ymm0[24],mem[24],ymm0[25],mem[25],ymm0[26],mem[26],ymm0[27],mem[27],ymm0[28],mem[28],ymm0[29],mem[29],ymm0[30],mem[30],ymm0[31],mem[31]
4663 ; AVX2-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm5 # 32-byte Folded Reload
4664 ; AVX2-NEXT: # ymm5 = ymm15[8],mem[8],ymm15[9],mem[9],ymm15[10],mem[10],ymm15[11],mem[11],ymm15[12],mem[12],ymm15[13],mem[13],ymm15[14],mem[14],ymm15[15],mem[15],ymm15[24],mem[24],ymm15[25],mem[25],ymm15[26],mem[26],ymm15[27],mem[27],ymm15[28],mem[28],ymm15[29],mem[29],ymm15[30],mem[30],ymm15[31],mem[31]
4665 ; AVX2-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
4666 ; AVX2-NEXT: # ymm0 = mem[0,1,0,1]
4667 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm3
4668 ; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
4669 ; AVX2-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
4670 ; AVX2-NEXT: # ymm2 = mem[0,1,0,1]
4671 ; AVX2-NEXT: vpshufb %ymm2, %ymm5, %ymm5
4672 ; AVX2-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
4673 ; AVX2-NEXT: vpblendvb %ymm1, %ymm3, %ymm5, %ymm3
4674 ; AVX2-NEXT: vpunpckhbw {{.*#+}} ymm5 = ymm12[8],ymm13[8],ymm12[9],ymm13[9],ymm12[10],ymm13[10],ymm12[11],ymm13[11],ymm12[12],ymm13[12],ymm12[13],ymm13[13],ymm12[14],ymm13[14],ymm12[15],ymm13[15],ymm12[24],ymm13[24],ymm12[25],ymm13[25],ymm12[26],ymm13[26],ymm12[27],ymm13[27],ymm12[28],ymm13[28],ymm12[29],ymm13[29],ymm12[30],ymm13[30],ymm12[31],ymm13[31]
4675 ; AVX2-NEXT: vpshufb %ymm0, %ymm5, %ymm0
4676 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
4677 ; AVX2-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm5 # 32-byte Folded Reload
4678 ; AVX2-NEXT: # ymm5 = ymm5[8],mem[8],ymm5[9],mem[9],ymm5[10],mem[10],ymm5[11],mem[11],ymm5[12],mem[12],ymm5[13],mem[13],ymm5[14],mem[14],ymm5[15],mem[15],ymm5[24],mem[24],ymm5[25],mem[25],ymm5[26],mem[26],ymm5[27],mem[27],ymm5[28],mem[28],ymm5[29],mem[29],ymm5[30],mem[30],ymm5[31],mem[31]
4679 ; AVX2-NEXT: vpshufb %ymm2, %ymm5, %ymm2
4680 ; AVX2-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
4681 ; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
4682 ; AVX2-NEXT: vpblendvb %ymm1, %ymm0, %ymm2, %ymm0
4683 ; AVX2-NEXT: vpmovsxbw {{.*#+}} xmm1 = [10,13,12,11,14,13,14,15]
4684 ; AVX2-NEXT: vpshufb %xmm1, %xmm7, %xmm2
4685 ; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
4686 ; AVX2-NEXT: vpmovsxbw {{.*#+}} ymm5 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
4687 ; AVX2-NEXT: vpblendvb %ymm5, %ymm6, %ymm2, %ymm2
4688 ; AVX2-NEXT: vpshufb %xmm1, %xmm14, %xmm1
4689 ; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
4690 ; AVX2-NEXT: vpblendvb %ymm5, %ymm4, %ymm1, %ymm1
4691 ; AVX2-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
4692 ; AVX2-NEXT: # ymm4 = mem[0,1,0,1]
4693 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
4694 ; AVX2-NEXT: vpshufb %ymm4, %ymm6, %ymm6
4695 ; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
4696 ; AVX2-NEXT: vpblendvb %ymm5, %ymm3, %ymm6, %ymm3
4697 ; AVX2-NEXT: vpshufb %ymm4, %ymm8, %ymm4
4698 ; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
4699 ; AVX2-NEXT: vpblendvb %ymm5, %ymm0, %ymm4, %ymm0
4700 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm4 = [u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
4701 ; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
4702 ; AVX2-NEXT: vpshufb %xmm4, %xmm5, %xmm5
4703 ; AVX2-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
4704 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm6 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
4705 ; AVX2-NEXT: vpblendvb %ymm6, %ymm2, %ymm5, %ymm2
4706 ; AVX2-NEXT: vpshufb %xmm4, %xmm11, %xmm4
4707 ; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
4708 ; AVX2-NEXT: vpblendvb %ymm6, %ymm1, %ymm4, %ymm1
4709 ; AVX2-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
4710 ; AVX2-NEXT: # ymm4 = mem[0,1,0,1]
4711 ; AVX2-NEXT: vpshufb %ymm4, %ymm9, %ymm5
4712 ; AVX2-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
4713 ; AVX2-NEXT: vpblendvb %ymm6, %ymm3, %ymm5, %ymm3
4714 ; AVX2-NEXT: vpshufb %ymm4, %ymm10, %ymm4
4715 ; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
4716 ; AVX2-NEXT: vpblendvb %ymm6, %ymm0, %ymm4, %ymm0
4717 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
4718 ; AVX2-NEXT: vmovdqa %ymm0, 160(%rax)
4719 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4720 ; AVX2-NEXT: vmovaps %ymm0, 128(%rax)
4721 ; AVX2-NEXT: vmovdqa %ymm3, 352(%rax)
4722 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4723 ; AVX2-NEXT: vmovaps %ymm0, 320(%rax)
4724 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4725 ; AVX2-NEXT: vmovaps %ymm0, 96(%rax)
4726 ; AVX2-NEXT: vmovdqa %ymm1, 64(%rax)
4727 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4728 ; AVX2-NEXT: vmovaps %ymm0, 224(%rax)
4729 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4730 ; AVX2-NEXT: vmovaps %ymm0, 288(%rax)
4731 ; AVX2-NEXT: vmovdqa %ymm2, 256(%rax)
4732 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4733 ; AVX2-NEXT: vmovaps %ymm0, 32(%rax)
4734 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4735 ; AVX2-NEXT: vmovaps %ymm0, 192(%rax)
4736 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4737 ; AVX2-NEXT: vmovaps %ymm0, (%rax)
4738 ; AVX2-NEXT: addq $664, %rsp # imm = 0x298
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: store_i8_stride6_vf64:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: subq $664, %rsp # imm = 0x298
; AVX2-FP-NEXT: vmovdqa 32(%rdx), %ymm6
; AVX2-FP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovdqa 32(%rcx), %ymm7
; AVX2-FP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vpbroadcastq {{.*#+}} ymm0 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX2-FP-NEXT: vmovdqa (%rcx), %xmm1
; AVX2-FP-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovdqa 32(%rcx), %xmm14
; AVX2-FP-NEXT: vpshufb %xmm0, %xmm1, %xmm1
; AVX2-FP-NEXT: vmovdqa (%rdx), %xmm2
; AVX2-FP-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovdqa 32(%rdx), %xmm8
; AVX2-FP-NEXT: vpshufb %xmm0, %xmm2, %xmm2
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FP-NEXT: vmovdqa (%rsi), %xmm12
; AVX2-FP-NEXT: vmovdqa 32(%rsi), %xmm9
; AVX2-FP-NEXT: vpbroadcastq {{.*#+}} xmm3 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX2-FP-NEXT: vpshufb %xmm3, %xmm12, %xmm2
; AVX2-FP-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovdqa (%rdi), %xmm13
; AVX2-FP-NEXT: vmovdqa 32(%rdi), %xmm11
; AVX2-FP-NEXT: vpshufb %xmm3, %xmm13, %xmm4
; AVX2-FP-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm2[8],xmm4[9],xmm2[9],xmm4[10],xmm2[10],xmm4[11],xmm2[11],xmm4[12],xmm2[12],xmm4[13],xmm2[13],xmm4[14],xmm2[14],xmm4[15],xmm2[15]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,0,0,1]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
; AVX2-FP-NEXT: vpblendvb %ymm2, %ymm1, %ymm4, %ymm1
; AVX2-FP-NEXT: vpshufb %xmm0, %xmm14, %xmm4
; AVX2-FP-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vpshufb %xmm0, %xmm8, %xmm5
; AVX2-FP-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-FP-NEXT: vpshufb %xmm3, %xmm9, %xmm5
; AVX2-FP-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vpshufb %xmm3, %xmm11, %xmm3
; AVX2-FP-NEXT: vmovdqa %xmm11, (%rsp) # 16-byte Spill
; AVX2-FP-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm5[8],xmm3[9],xmm5[9],xmm3[10],xmm5[10],xmm3[11],xmm5[11],xmm3[12],xmm5[12],xmm3[13],xmm5[13],xmm3[14],xmm5[14],xmm3[15],xmm5[15]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
; AVX2-FP-NEXT: vpblendvb %ymm2, %ymm4, %ymm3, %ymm3
; AVX2-FP-NEXT: vpshufb %ymm0, %ymm7, %ymm4
; AVX2-FP-NEXT: vpshufb %ymm0, %ymm6, %ymm5
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[2],ymm4[2],ymm5[3],ymm4[3],ymm5[4],ymm4[4],ymm5[5],ymm4[5],ymm5[6],ymm4[6],ymm5[7],ymm4[7],ymm5[16],ymm4[16],ymm5[17],ymm4[17],ymm5[18],ymm4[18],ymm5[19],ymm4[19],ymm5[20],ymm4[20],ymm5[21],ymm4[21],ymm5[22],ymm4[22],ymm5[23],ymm4[23]
; AVX2-FP-NEXT: vmovdqa 32(%rdi), %ymm7
; AVX2-FP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovdqa 32(%rsi), %ymm6
; AVX2-FP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vpbroadcastq {{.*#+}} ymm5 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX2-FP-NEXT: vpshufb %ymm5, %ymm6, %ymm6
; AVX2-FP-NEXT: vpshufb %ymm5, %ymm7, %ymm7
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[4],ymm6[4],ymm7[5],ymm6[5],ymm7[6],ymm6[6],ymm7[7],ymm6[7],ymm7[16],ymm6[16],ymm7[17],ymm6[17],ymm7[18],ymm6[18],ymm7[19],ymm6[19],ymm7[20],ymm6[20],ymm7[21],ymm6[21],ymm7[22],ymm6[22],ymm7[23],ymm6[23]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm2, %ymm4, %ymm6, %ymm4
; AVX2-FP-NEXT: vmovdqa (%rdx), %ymm15
; AVX2-FP-NEXT: vmovdqa (%rcx), %ymm6
; AVX2-FP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vpshufb %ymm0, %ymm6, %ymm6
; AVX2-FP-NEXT: vpshufb %ymm0, %ymm15, %ymm0
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[1],ymm6[1],ymm0[2],ymm6[2],ymm0[3],ymm6[3],ymm0[4],ymm6[4],ymm0[5],ymm6[5],ymm0[6],ymm6[6],ymm0[7],ymm6[7],ymm0[16],ymm6[16],ymm0[17],ymm6[17],ymm0[18],ymm6[18],ymm0[19],ymm6[19],ymm0[20],ymm6[20],ymm0[21],ymm6[21],ymm0[22],ymm6[22],ymm0[23],ymm6[23]
; AVX2-FP-NEXT: vmovdqa (%rdi), %ymm10
; AVX2-FP-NEXT: vmovdqa (%rsi), %ymm6
; AVX2-FP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vpshufb %ymm5, %ymm6, %ymm6
; AVX2-FP-NEXT: vpshufb %ymm5, %ymm10, %ymm5
; AVX2-FP-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[4],ymm6[4],ymm5[5],ymm6[5],ymm5[6],ymm6[6],ymm5[7],ymm6[7],ymm5[16],ymm6[16],ymm5[17],ymm6[17],ymm5[18],ymm6[18],ymm5[19],ymm6[19],ymm5[20],ymm6[20],ymm5[21],ymm6[21],ymm5[22],ymm6[22],ymm5[23],ymm6[23]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm2, %ymm0, %ymm5, %ymm0
; AVX2-FP-NEXT: vmovdqa (%r8), %xmm5
; AVX2-FP-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} xmm2 = [6,5,8,7,9,9,9,9]
; AVX2-FP-NEXT: vpshufb %xmm2, %xmm5, %xmm5
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm1, %ymm5, %ymm1
; AVX2-FP-NEXT: vmovdqa 32(%r8), %xmm7
; AVX2-FP-NEXT: vpshufb %xmm2, %xmm7, %xmm2
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm3, %ymm2, %ymm2
; AVX2-FP-NEXT: vmovdqa 32(%r8), %ymm5
; AVX2-FP-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
; AVX2-FP-NEXT: # ymm3 = mem[0,1,0,1]
; AVX2-FP-NEXT: vpshufb %ymm3, %ymm5, %ymm5
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-FP-NEXT: vmovdqa (%r8), %ymm5
; AVX2-FP-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vpshufb %ymm3, %ymm5, %ymm3
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm6
; AVX2-FP-NEXT: vmovdqa (%r9), %xmm0
; AVX2-FP-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm3 = [u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
; AVX2-FP-NEXT: vpshufb %xmm3, %xmm0, %xmm5
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; AVX2-FP-NEXT: vpblendvb %ymm0, %ymm1, %ymm5, %ymm1
; AVX2-FP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovdqa 32(%r9), %xmm1
; AVX2-FP-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FP-NEXT: vpshufb %xmm3, %xmm1, %xmm1
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FP-NEXT: vpblendvb %ymm0, %ymm2, %ymm1, %ymm1
; AVX2-FP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovdqa 32(%r9), %ymm2
; AVX2-FP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
; AVX2-FP-NEXT: # ymm1 = mem[0,1,0,1]
; AVX2-FP-NEXT: vpshufb %ymm1, %ymm2, %ymm2
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm0, %ymm4, %ymm2, %ymm2
; AVX2-FP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovdqa (%r9), %ymm2
; AVX2-FP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vpshufb %ymm1, %ymm2, %ymm1
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm0, %ymm6, %ymm1, %ymm0
; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm2 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX2-FP-NEXT: vpshufb %xmm2, %xmm0, %xmm0
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
; AVX2-FP-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm8[8],xmm14[8],xmm8[9],xmm14[9],xmm8[10],xmm14[10],xmm8[11],xmm14[11],xmm8[12],xmm14[12],xmm8[13],xmm14[13],xmm8[14],xmm14[14],xmm8[15],xmm14[15]
; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm3 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX2-FP-NEXT: vpshufb %xmm3, %xmm1, %xmm1
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm4 = [0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0]
; AVX2-FP-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm5
; AVX2-FP-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
; AVX2-FP-NEXT: vpshufb %xmm2, %xmm0, %xmm0
; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; AVX2-FP-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
; AVX2-FP-NEXT: # xmm1 = xmm1[8],mem[8],xmm1[9],mem[9],xmm1[10],mem[10],xmm1[11],mem[11],xmm1[12],mem[12],xmm1[13],mem[13],xmm1[14],mem[14],xmm1[15],mem[15]
; AVX2-FP-NEXT: vpshufb %xmm3, %xmm1, %xmm1
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FP-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm2
; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm0 = ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15],ymm0[24],mem[24],ymm0[25],mem[25],ymm0[26],mem[26],ymm0[27],mem[27],ymm0[28],mem[28],ymm0[29],mem[29],ymm0[30],mem[30],ymm0[31],mem[31]
; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
; AVX2-FP-NEXT: # ymm1 = mem[0,1,0,1]
; AVX2-FP-NEXT: vpshufb %ymm1, %ymm0, %ymm0
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm3 = ymm0[2,2,2,3]
; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm6 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm6 = ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15],ymm0[24],mem[24],ymm0[25],mem[25],ymm0[26],mem[26],ymm0[27],mem[27],ymm0[28],mem[28],ymm0[29],mem[29],ymm0[30],mem[30],ymm0[31],mem[31]
; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
; AVX2-FP-NEXT: # ymm0 = mem[0,1,0,1]
; AVX2-FP-NEXT: vpshufb %ymm0, %ymm6, %ymm6
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm4, %ymm3, %ymm6, %ymm3
; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
; AVX2-FP-NEXT: vpunpckhbw {{.*#+}} ymm6 = ymm10[8],ymm9[8],ymm10[9],ymm9[9],ymm10[10],ymm9[10],ymm10[11],ymm9[11],ymm10[12],ymm9[12],ymm10[13],ymm9[13],ymm10[14],ymm9[14],ymm10[15],ymm9[15],ymm10[24],ymm9[24],ymm10[25],ymm9[25],ymm10[26],ymm9[26],ymm10[27],ymm9[27],ymm10[28],ymm9[28],ymm10[29],ymm9[29],ymm10[30],ymm9[30],ymm10[31],ymm9[31]
; AVX2-FP-NEXT: vpshufb %ymm1, %ymm6, %ymm1
; AVX2-FP-NEXT: vmovdqa %ymm15, %ymm12
; AVX2-FP-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm6 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm6 = ymm15[8],mem[8],ymm15[9],mem[9],ymm15[10],mem[10],ymm15[11],mem[11],ymm15[12],mem[12],ymm15[13],mem[13],ymm15[14],mem[14],ymm15[15],mem[15],ymm15[24],mem[24],ymm15[25],mem[25],ymm15[26],mem[26],ymm15[27],mem[27],ymm15[28],mem[28],ymm15[29],mem[29],ymm15[30],mem[30],ymm15[31],mem[31]
; AVX2-FP-NEXT: vpshufb %ymm0, %ymm6, %ymm0
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm4, %ymm1, %ymm0, %ymm0
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} xmm1 = [10,13,12,11,14,13,14,15]
; AVX2-FP-NEXT: vmovdqa %xmm7, %xmm15
; AVX2-FP-NEXT: vpshufb %xmm1, %xmm7, %xmm4
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm5, %ymm4, %ymm4
; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
; AVX2-FP-NEXT: vpshufb %xmm1, %xmm7, %xmm1
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm2, %ymm1, %ymm1
; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
; AVX2-FP-NEXT: # ymm2 = mem[0,1,0,1]
; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
; AVX2-FP-NEXT: vpshufb %ymm2, %ymm10, %ymm5
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm3, %ymm5, %ymm5
; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
; AVX2-FP-NEXT: vpshufb %ymm2, %ymm11, %ymm2
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm0, %ymm2, %ymm0
; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm2 = [u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
; AVX2-FP-NEXT: vpshufb %xmm2, %xmm3, %xmm3
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm6 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm4, %ymm3, %ymm3
; AVX2-FP-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
; AVX2-FP-NEXT: vpshufb %xmm2, %xmm8, %xmm2
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm1, %ymm2, %ymm1
; AVX2-FP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
; AVX2-FP-NEXT: # ymm4 = mem[0,1,0,1]
; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
; AVX2-FP-NEXT: vpshufb %ymm4, %ymm13, %ymm1
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm5, %ymm1, %ymm1
; AVX2-FP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
; AVX2-FP-NEXT: vpshufb %ymm4, %ymm14, %ymm4
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm0, %ymm4, %ymm0
; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX2-FP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
; AVX2-FP-NEXT: # xmm4 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX2-FP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
; AVX2-FP-NEXT: # xmm5 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
; AVX2-FP-NEXT: vpshufb %xmm6, %xmm4, %xmm4
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm1 = [2,3,0,1,6,7,4,5,8,9,8,9,8,9,8,9]
; AVX2-FP-NEXT: vpshufb %xmm1, %xmm5, %xmm5
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm0 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
; AVX2-FP-NEXT: vpblendvb %ymm0, %ymm4, %ymm5, %ymm4
; AVX2-FP-NEXT: vmovdqa (%rsp), %xmm2 # 16-byte Reload
; AVX2-FP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm5 # 16-byte Folded Reload
; AVX2-FP-NEXT: # xmm5 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
; AVX2-FP-NEXT: vpshufb %xmm6, %xmm5, %xmm5
; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
; AVX2-FP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm6 # 16-byte Folded Reload
; AVX2-FP-NEXT: # xmm6 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
; AVX2-FP-NEXT: vpshufb %xmm1, %xmm6, %xmm1
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FP-NEXT: vpblendvb %ymm0, %ymm5, %ymm1, %ymm3
; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm5 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm5 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[4],mem[4],ymm1[5],mem[5],ymm1[6],mem[6],ymm1[7],mem[7],ymm1[16],mem[16],ymm1[17],mem[17],ymm1[18],mem[18],ymm1[19],mem[19],ymm1[20],mem[20],ymm1[21],mem[21],ymm1[22],mem[22],ymm1[23],mem[23]
; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm6 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm6 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[4],mem[4],ymm1[5],mem[5],ymm1[6],mem[6],ymm1[7],mem[7],ymm1[16],mem[16],ymm1[17],mem[17],ymm1[18],mem[18],ymm1[19],mem[19],ymm1[20],mem[20],ymm1[21],mem[21],ymm1[22],mem[22],ymm1[23],mem[23]
; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
; AVX2-FP-NEXT: # ymm1 = mem[0,1,0,1]
; AVX2-FP-NEXT: vpshufb %ymm1, %ymm5, %ymm5
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25,18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25]
; AVX2-FP-NEXT: # ymm2 = mem[0,1,0,1]
; AVX2-FP-NEXT: vpshufb %ymm2, %ymm6, %ymm6
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm0, %ymm5, %ymm6, %ymm5
; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
; AVX2-FP-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[4],ymm9[4],ymm6[5],ymm9[5],ymm6[6],ymm9[6],ymm6[7],ymm9[7],ymm6[16],ymm9[16],ymm6[17],ymm9[17],ymm6[18],ymm9[18],ymm6[19],ymm9[19],ymm6[20],ymm9[20],ymm6[21],ymm9[21],ymm6[22],ymm9[22],ymm6[23],ymm9[23]
; AVX2-FP-NEXT: vpshufb %ymm1, %ymm6, %ymm1
; AVX2-FP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm6 # 32-byte Folded Reload
; AVX2-FP-NEXT: # ymm6 = ymm12[0],mem[0],ymm12[1],mem[1],ymm12[2],mem[2],ymm12[3],mem[3],ymm12[4],mem[4],ymm12[5],mem[5],ymm12[6],mem[6],ymm12[7],mem[7],ymm12[16],mem[16],ymm12[17],mem[17],ymm12[18],mem[18],ymm12[19],mem[19],ymm12[20],mem[20],ymm12[21],mem[21],ymm12[22],mem[22],ymm12[23],mem[23]
; AVX2-FP-NEXT: vpshufb %ymm2, %ymm6, %ymm2
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm0
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} xmm1 = [2,1,0,3,4,4,4,4]
; AVX2-FP-NEXT: vpshufb %xmm1, %xmm7, %xmm2
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm4, %ymm2, %ymm2
; AVX2-FP-NEXT: vpshufb %xmm1, %xmm15, %xmm1
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm3, %ymm1, %ymm1
; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
; AVX2-FP-NEXT: # ymm3 = mem[0,1,0,1]
; AVX2-FP-NEXT: vpshufb %ymm3, %ymm10, %ymm4
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm5, %ymm4, %ymm4
; AVX2-FP-NEXT: vpshufb %ymm3, %ymm11, %ymm3
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm0
; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm3 = [u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
; AVX2-FP-NEXT: vpshufb %xmm3, %xmm8, %xmm5
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm2, %ymm5, %ymm2
; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
; AVX2-FP-NEXT: vpshufb %xmm3, %xmm5, %xmm3
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm1, %ymm3, %ymm1
; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
; AVX2-FP-NEXT: # ymm3 = mem[0,1,0,1]
; AVX2-FP-NEXT: vpshufb %ymm3, %ymm13, %ymm5
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-FP-NEXT: vpshufb %ymm3, %ymm14, %ymm3
; AVX2-FP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX2-FP-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm0
; AVX2-FP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FP-NEXT: vmovdqa %ymm0, 96(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 160(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 128(%rax)
; AVX2-FP-NEXT: vmovdqa %ymm4, 288(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 352(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 320(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 64(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 224(%rax)
; AVX2-FP-NEXT: vmovdqa %ymm1, 192(%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 256(%rax)
; AVX2-FP-NEXT: vmovdqa %ymm2, (%rax)
; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FP-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-FP-NEXT: addq $664, %rsp # imm = 0x298
; AVX2-FP-NEXT: vzeroupper
; AVX2-FP-NEXT: retq
;
; AVX2-FCP-LABEL: store_i8_stride6_vf64:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: subq $664, %rsp # imm = 0x298
; AVX2-FCP-NEXT: vmovdqa 32(%rdx), %ymm6
; AVX2-FCP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovdqa 32(%rcx), %ymm7
; AVX2-FCP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm0 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX2-FCP-NEXT: vmovdqa (%rcx), %xmm1
; AVX2-FCP-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vmovdqa 32(%rcx), %xmm14
; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm1, %xmm1
; AVX2-FCP-NEXT: vmovdqa (%rdx), %xmm2
; AVX2-FCP-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vmovdqa 32(%rdx), %xmm8
; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm2, %xmm2
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FCP-NEXT: vmovdqa (%rsi), %xmm12
; AVX2-FCP-NEXT: vmovdqa 32(%rsi), %xmm9
; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} xmm3 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX2-FCP-NEXT: vpshufb %xmm3, %xmm12, %xmm2
; AVX2-FCP-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vmovdqa (%rdi), %xmm13
; AVX2-FCP-NEXT: vmovdqa 32(%rdi), %xmm11
; AVX2-FCP-NEXT: vpshufb %xmm3, %xmm13, %xmm4
; AVX2-FCP-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm2[8],xmm4[9],xmm2[9],xmm4[10],xmm2[10],xmm4[11],xmm2[11],xmm4[12],xmm2[12],xmm4[13],xmm2[13],xmm4[14],xmm2[14],xmm4[15],xmm2[15]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm2[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
; AVX2-FCP-NEXT: vpblendvb %ymm2, %ymm1, %ymm4, %ymm1
; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm14, %xmm4
; AVX2-FCP-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm8, %xmm5
; AVX2-FCP-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-FCP-NEXT: vpshufb %xmm3, %xmm9, %xmm5
; AVX2-FCP-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vpshufb %xmm3, %xmm11, %xmm3
; AVX2-FCP-NEXT: vmovdqa %xmm11, (%rsp) # 16-byte Spill
; AVX2-FCP-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm5[8],xmm3[9],xmm5[9],xmm3[10],xmm5[10],xmm3[11],xmm5[11],xmm3[12],xmm5[12],xmm3[13],xmm5[13],xmm3[14],xmm5[14],xmm3[15],xmm5[15]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
; AVX2-FCP-NEXT: vpblendvb %ymm2, %ymm4, %ymm3, %ymm3
; AVX2-FCP-NEXT: vpshufb %ymm0, %ymm7, %ymm4
; AVX2-FCP-NEXT: vpshufb %ymm0, %ymm6, %ymm5
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[2],ymm4[2],ymm5[3],ymm4[3],ymm5[4],ymm4[4],ymm5[5],ymm4[5],ymm5[6],ymm4[6],ymm5[7],ymm4[7],ymm5[16],ymm4[16],ymm5[17],ymm4[17],ymm5[18],ymm4[18],ymm5[19],ymm4[19],ymm5[20],ymm4[20],ymm5[21],ymm4[21],ymm5[22],ymm4[22],ymm5[23],ymm4[23]
; AVX2-FCP-NEXT: vmovdqa 32(%rdi), %ymm7
; AVX2-FCP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovdqa 32(%rsi), %ymm6
; AVX2-FCP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm5 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX2-FCP-NEXT: vpshufb %ymm5, %ymm6, %ymm6
; AVX2-FCP-NEXT: vpshufb %ymm5, %ymm7, %ymm7
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[4],ymm6[4],ymm7[5],ymm6[5],ymm7[6],ymm6[6],ymm7[7],ymm6[7],ymm7[16],ymm6[16],ymm7[17],ymm6[17],ymm7[18],ymm6[18],ymm7[19],ymm6[19],ymm7[20],ymm6[20],ymm7[21],ymm6[21],ymm7[22],ymm6[22],ymm7[23],ymm6[23]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm2, %ymm4, %ymm6, %ymm4
; AVX2-FCP-NEXT: vmovdqa (%rdx), %ymm15
; AVX2-FCP-NEXT: vmovdqa (%rcx), %ymm6
; AVX2-FCP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vpshufb %ymm0, %ymm6, %ymm6
; AVX2-FCP-NEXT: vpshufb %ymm0, %ymm15, %ymm0
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[1],ymm6[1],ymm0[2],ymm6[2],ymm0[3],ymm6[3],ymm0[4],ymm6[4],ymm0[5],ymm6[5],ymm0[6],ymm6[6],ymm0[7],ymm6[7],ymm0[16],ymm6[16],ymm0[17],ymm6[17],ymm0[18],ymm6[18],ymm0[19],ymm6[19],ymm0[20],ymm6[20],ymm0[21],ymm6[21],ymm0[22],ymm6[22],ymm0[23],ymm6[23]
; AVX2-FCP-NEXT: vmovdqa (%rdi), %ymm10
; AVX2-FCP-NEXT: vmovdqa (%rsi), %ymm6
; AVX2-FCP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vpshufb %ymm5, %ymm6, %ymm6
; AVX2-FCP-NEXT: vpshufb %ymm5, %ymm10, %ymm5
; AVX2-FCP-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm5[0],ymm6[0],ymm5[1],ymm6[1],ymm5[2],ymm6[2],ymm5[3],ymm6[3],ymm5[4],ymm6[4],ymm5[5],ymm6[5],ymm5[6],ymm6[6],ymm5[7],ymm6[7],ymm5[16],ymm6[16],ymm5[17],ymm6[17],ymm5[18],ymm6[18],ymm5[19],ymm6[19],ymm5[20],ymm6[20],ymm5[21],ymm6[21],ymm5[22],ymm6[22],ymm5[23],ymm6[23]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm2, %ymm0, %ymm5, %ymm0
; AVX2-FCP-NEXT: vmovdqa (%r8), %xmm5
; AVX2-FCP-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} xmm2 = [6,5,8,7,9,9,9,9]
; AVX2-FCP-NEXT: vpshufb %xmm2, %xmm5, %xmm5
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm1, %ymm5, %ymm1
; AVX2-FCP-NEXT: vmovdqa 32(%r8), %xmm7
; AVX2-FCP-NEXT: vpshufb %xmm2, %xmm7, %xmm2
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm3, %ymm2, %ymm2
; AVX2-FCP-NEXT: vmovdqa 32(%r8), %ymm5
; AVX2-FCP-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
; AVX2-FCP-NEXT: # ymm3 = mem[0,1,0,1]
; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm5, %ymm5
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-FCP-NEXT: vmovdqa (%r8), %ymm5
; AVX2-FCP-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm5, %ymm3
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm6
; AVX2-FCP-NEXT: vmovdqa (%r9), %xmm0
; AVX2-FCP-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm3 = [u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
; AVX2-FCP-NEXT: vpshufb %xmm3, %xmm0, %xmm5
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm0 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; AVX2-FCP-NEXT: vpblendvb %ymm0, %ymm1, %ymm5, %ymm1
; AVX2-FCP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovdqa 32(%r9), %xmm1
; AVX2-FCP-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; AVX2-FCP-NEXT: vpshufb %xmm3, %xmm1, %xmm1
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FCP-NEXT: vpblendvb %ymm0, %ymm2, %ymm1, %ymm1
; AVX2-FCP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovdqa 32(%r9), %ymm2
; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
; AVX2-FCP-NEXT: # ymm1 = mem[0,1,0,1]
; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm2, %ymm2
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm0, %ymm4, %ymm2, %ymm2
; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovdqa (%r9), %ymm2
; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm2, %ymm1
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm0, %ymm6, %ymm1, %ymm0
; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm2 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX2-FCP-NEXT: vpshufb %xmm2, %xmm0, %xmm0
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
; AVX2-FCP-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm8[8],xmm14[8],xmm8[9],xmm14[9],xmm8[10],xmm14[10],xmm8[11],xmm14[11],xmm8[12],xmm14[12],xmm8[13],xmm14[13],xmm8[14],xmm14[14],xmm8[15],xmm14[15]
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm3 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX2-FCP-NEXT: vpshufb %xmm3, %xmm1, %xmm1
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm4 = [0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0]
; AVX2-FCP-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm5
; AVX2-FCP-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
; AVX2-FCP-NEXT: vpshufb %xmm2, %xmm0, %xmm0
; AVX2-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
; AVX2-FCP-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
; AVX2-FCP-NEXT: # xmm1 = xmm1[8],mem[8],xmm1[9],mem[9],xmm1[10],mem[10],xmm1[11],mem[11],xmm1[12],mem[12],xmm1[13],mem[13],xmm1[14],mem[14],xmm1[15],mem[15]
; AVX2-FCP-NEXT: vpshufb %xmm3, %xmm1, %xmm1
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,0,1]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FCP-NEXT: vpblendvb %ymm4, %ymm0, %ymm1, %ymm2
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
; AVX2-FCP-NEXT: # ymm0 = ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15],ymm0[24],mem[24],ymm0[25],mem[25],ymm0[26],mem[26],ymm0[27],mem[27],ymm0[28],mem[28],ymm0[29],mem[29],ymm0[30],mem[30],ymm0[31],mem[31]
; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
; AVX2-FCP-NEXT: # ymm1 = mem[0,1,0,1]
; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm0, %ymm0
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm0[2,2,2,3]
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm6 # 32-byte Folded Reload
; AVX2-FCP-NEXT: # ymm6 = ymm0[8],mem[8],ymm0[9],mem[9],ymm0[10],mem[10],ymm0[11],mem[11],ymm0[12],mem[12],ymm0[13],mem[13],ymm0[14],mem[14],ymm0[15],mem[15],ymm0[24],mem[24],ymm0[25],mem[25],ymm0[26],mem[26],ymm0[27],mem[27],ymm0[28],mem[28],ymm0[29],mem[29],ymm0[30],mem[30],ymm0[31],mem[31]
; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
; AVX2-FCP-NEXT: # ymm0 = mem[0,1,0,1]
; AVX2-FCP-NEXT: vpshufb %ymm0, %ymm6, %ymm6
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm4, %ymm3, %ymm6, %ymm3
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
; AVX2-FCP-NEXT: vpunpckhbw {{.*#+}} ymm6 = ymm10[8],ymm9[8],ymm10[9],ymm9[9],ymm10[10],ymm9[10],ymm10[11],ymm9[11],ymm10[12],ymm9[12],ymm10[13],ymm9[13],ymm10[14],ymm9[14],ymm10[15],ymm9[15],ymm10[24],ymm9[24],ymm10[25],ymm9[25],ymm10[26],ymm9[26],ymm10[27],ymm9[27],ymm10[28],ymm9[28],ymm10[29],ymm9[29],ymm10[30],ymm9[30],ymm10[31],ymm9[31]
; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm6, %ymm1
; AVX2-FCP-NEXT: vmovdqa %ymm15, %ymm12
; AVX2-FCP-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm6 # 32-byte Folded Reload
; AVX2-FCP-NEXT: # ymm6 = ymm15[8],mem[8],ymm15[9],mem[9],ymm15[10],mem[10],ymm15[11],mem[11],ymm15[12],mem[12],ymm15[13],mem[13],ymm15[14],mem[14],ymm15[15],mem[15],ymm15[24],mem[24],ymm15[25],mem[25],ymm15[26],mem[26],ymm15[27],mem[27],ymm15[28],mem[28],ymm15[29],mem[29],ymm15[30],mem[30],ymm15[31],mem[31]
; AVX2-FCP-NEXT: vpshufb %ymm0, %ymm6, %ymm0
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm4, %ymm1, %ymm0, %ymm0
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} xmm1 = [10,13,12,11,14,13,14,15]
; AVX2-FCP-NEXT: vmovdqa %xmm7, %xmm15
; AVX2-FCP-NEXT: vpshufb %xmm1, %xmm7, %xmm4
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm5, %ymm4, %ymm4
; AVX2-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
; AVX2-FCP-NEXT: vpshufb %xmm1, %xmm7, %xmm1
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm2, %ymm1, %ymm1
; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
; AVX2-FCP-NEXT: # ymm2 = mem[0,1,0,1]
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
; AVX2-FCP-NEXT: vpshufb %ymm2, %ymm10, %ymm5
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm3, %ymm5, %ymm5
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
; AVX2-FCP-NEXT: vpshufb %ymm2, %ymm11, %ymm2
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm0, %ymm2, %ymm0
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm2 = [u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
; AVX2-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
; AVX2-FCP-NEXT: vpshufb %xmm2, %xmm3, %xmm3
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm6 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm4, %ymm3, %ymm3
; AVX2-FCP-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
; AVX2-FCP-NEXT: vpshufb %xmm2, %xmm8, %xmm2
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm1, %ymm2, %ymm1
; AVX2-FCP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
; AVX2-FCP-NEXT: # ymm4 = mem[0,1,0,1]
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm13, %ymm1
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm5, %ymm1, %ymm1
; AVX2-FCP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm14, %ymm4
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm0, %ymm4, %ymm0
; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX2-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX2-FCP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
; AVX2-FCP-NEXT: # xmm4 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX2-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
; AVX2-FCP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
; AVX2-FCP-NEXT: # xmm5 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
; AVX2-FCP-NEXT: vpshufb %xmm6, %xmm4, %xmm4
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,0,0,1]
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm1 = [2,3,0,1,6,7,4,5,8,9,8,9,8,9,8,9]
; AVX2-FCP-NEXT: vpshufb %xmm1, %xmm5, %xmm5
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm0 = [65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535,0,0,65535]
; AVX2-FCP-NEXT: vpblendvb %ymm0, %ymm4, %ymm5, %ymm4
; AVX2-FCP-NEXT: vmovdqa (%rsp), %xmm2 # 16-byte Reload
; AVX2-FCP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm5 # 16-byte Folded Reload
; AVX2-FCP-NEXT: # xmm5 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
; AVX2-FCP-NEXT: vpshufb %xmm6, %xmm5, %xmm5
; AVX2-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
; AVX2-FCP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm6 # 16-byte Folded Reload
; AVX2-FCP-NEXT: # xmm6 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
; AVX2-FCP-NEXT: vpshufb %xmm1, %xmm6, %xmm1
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FCP-NEXT: vpblendvb %ymm0, %ymm5, %ymm1, %ymm3
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FCP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm5 # 32-byte Folded Reload
; AVX2-FCP-NEXT: # ymm5 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[4],mem[4],ymm1[5],mem[5],ymm1[6],mem[6],ymm1[7],mem[7],ymm1[16],mem[16],ymm1[17],mem[17],ymm1[18],mem[18],ymm1[19],mem[19],ymm1[20],mem[20],ymm1[21],mem[21],ymm1[22],mem[22],ymm1[23],mem[23]
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX2-FCP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm6 # 32-byte Folded Reload
; AVX2-FCP-NEXT: # ymm6 = ymm1[0],mem[0],ymm1[1],mem[1],ymm1[2],mem[2],ymm1[3],mem[3],ymm1[4],mem[4],ymm1[5],mem[5],ymm1[6],mem[6],ymm1[7],mem[7],ymm1[16],mem[16],ymm1[17],mem[17],ymm1[18],mem[18],ymm1[19],mem[19],ymm1[20],mem[20],ymm1[21],mem[21],ymm1[22],mem[22],ymm1[23],mem[23]
; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
; AVX2-FCP-NEXT: # ymm1 = mem[0,1,0,1]
; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm5, %ymm5
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25,18,19,16,17,22,23,20,21,24,25,24,25,24,25,24,25]
; AVX2-FCP-NEXT: # ymm2 = mem[0,1,0,1]
; AVX2-FCP-NEXT: vpshufb %ymm2, %ymm6, %ymm6
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm0, %ymm5, %ymm6, %ymm5
; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
; AVX2-FCP-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[4],ymm9[4],ymm6[5],ymm9[5],ymm6[6],ymm9[6],ymm6[7],ymm9[7],ymm6[16],ymm9[16],ymm6[17],ymm9[17],ymm6[18],ymm9[18],ymm6[19],ymm9[19],ymm6[20],ymm9[20],ymm6[21],ymm9[21],ymm6[22],ymm9[22],ymm6[23],ymm9[23]
; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm6, %ymm1
; AVX2-FCP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm6 # 32-byte Folded Reload
; AVX2-FCP-NEXT: # ymm6 = ymm12[0],mem[0],ymm12[1],mem[1],ymm12[2],mem[2],ymm12[3],mem[3],ymm12[4],mem[4],ymm12[5],mem[5],ymm12[6],mem[6],ymm12[7],mem[7],ymm12[16],mem[16],ymm12[17],mem[17],ymm12[18],mem[18],ymm12[19],mem[19],ymm12[20],mem[20],ymm12[21],mem[21],ymm12[22],mem[22],ymm12[23],mem[23]
; AVX2-FCP-NEXT: vpshufb %ymm2, %ymm6, %ymm2
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm0, %ymm1, %ymm2, %ymm0
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} xmm1 = [2,1,0,3,4,4,4,4]
; AVX2-FCP-NEXT: vpshufb %xmm1, %xmm7, %xmm2
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,0,0,1]
; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm4, %ymm2, %ymm2
; AVX2-FCP-NEXT: vpshufb %xmm1, %xmm15, %xmm1
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,0,0,1]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm3, %ymm1, %ymm1
; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
; AVX2-FCP-NEXT: # ymm3 = mem[0,1,0,1]
; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm10, %ymm4
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm5, %ymm4, %ymm4
; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm11, %ymm3
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm0
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm3 = [u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
; AVX2-FCP-NEXT: vpshufb %xmm3, %xmm8, %xmm5
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,0,0,1]
; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm6 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm2, %ymm5, %ymm2
; AVX2-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
; AVX2-FCP-NEXT: vpshufb %xmm3, %xmm5, %xmm3
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[0,0,0,1]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm1, %ymm3, %ymm1
; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
; AVX2-FCP-NEXT: # ymm3 = mem[0,1,0,1]
; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm13, %ymm5
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm4, %ymm5, %ymm4
; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm14, %ymm3
; AVX2-FCP-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX2-FCP-NEXT: vpblendvb %ymm6, %ymm0, %ymm3, %ymm0
; AVX2-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX2-FCP-NEXT: vmovdqa %ymm0, 96(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 160(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 128(%rax)
; AVX2-FCP-NEXT: vmovdqa %ymm4, 288(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 352(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 320(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 64(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 224(%rax)
; AVX2-FCP-NEXT: vmovdqa %ymm1, 192(%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 256(%rax)
; AVX2-FCP-NEXT: vmovdqa %ymm2, (%rax)
; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
; AVX2-FCP-NEXT: vmovaps %ymm0, 32(%rax)
; AVX2-FCP-NEXT: addq $664, %rsp # imm = 0x298
; AVX2-FCP-NEXT: vzeroupper
5380 ; AVX2-FCP-NEXT: retq
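; Editorial sketch (hand-written, not autogenerated; value names are
; illustrative): the vf64 checks above and below interleave six 64-byte
; inputs with stride 6 into one 384-byte store. Assuming the same
; concat-then-shuffle shape as the smaller vf cases in this file, the
; incoming IR looks roughly like:
;   %interleaved.vec = shufflevector <384 x i8> %concat, <384 x i8> poison,
;     <384 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320,
;                  i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, ...>
;   store <384 x i8> %interleaved.vec, ptr %out.vec, align 1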
; AVX512-LABEL: store_i8_stride6_vf64:
; AVX512: # %bb.0:
; AVX512-NEXT: subq $456, %rsp # imm = 0x1C8
; AVX512-NEXT: vmovdqa (%rsi), %xmm6
; AVX512-NEXT: vmovdqa 32(%rsi), %xmm3
; AVX512-NEXT: vmovdqa (%rdi), %xmm7
; AVX512-NEXT: vmovdqa 32(%rdi), %xmm2
; AVX512-NEXT: vmovdqa (%r8), %xmm10
; AVX512-NEXT: vmovdqa 32(%r8), %xmm4
; AVX512-NEXT: vmovdqa (%r9), %xmm12
; AVX512-NEXT: vmovdqa 32(%r9), %xmm8
; AVX512-NEXT: vmovdqa 32(%rcx), %ymm13
; AVX512-NEXT: vpbroadcastq {{.*#+}} xmm9 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512-NEXT: vpshufb %xmm9, %xmm3, %xmm0
; AVX512-NEXT: vpshufb %xmm9, %xmm2, %xmm1
; AVX512-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3],xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
; AVX512-NEXT: vmovdqa64 %xmm3, %xmm30
; AVX512-NEXT: vmovdqa64 %xmm2, %xmm31
; AVX512-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,3,2,1,4,5,6,7]
; AVX512-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,5]
; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa {{.*#+}} xmm0 = [6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u]
; AVX512-NEXT: vpshufb %xmm0, %xmm4, %xmm2
; AVX512-NEXT: vmovdqa {{.*#+}} xmm1 = [2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u]
; AVX512-NEXT: vpshufb %xmm1, %xmm4, %xmm3
; AVX512-NEXT: vmovdqa64 %xmm4, %xmm29
; AVX512-NEXT: vinserti32x4 $2, %xmm2, %zmm3, %zmm2
; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa {{.*#+}} xmm2 = [u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
; AVX512-NEXT: vpshufb %xmm2, %xmm8, %xmm4
; AVX512-NEXT: vmovdqa {{.*#+}} xmm3 = [u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
; AVX512-NEXT: vpshufb %xmm3, %xmm8, %xmm5
; AVX512-NEXT: vmovdqa64 %xmm8, %xmm27
; AVX512-NEXT: vinserti32x4 $2, %xmm4, %zmm5, %zmm4
; AVX512-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa 32(%rdx), %ymm8
; AVX512-NEXT: vpshufb %xmm9, %xmm6, %xmm4
; AVX512-NEXT: vpshufb %xmm9, %xmm7, %xmm5
; AVX512-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
; AVX512-NEXT: vmovdqa64 %xmm7, %xmm18
; AVX512-NEXT: vmovdqa64 %xmm6, %xmm20
; AVX512-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,3,2,1,4,5,6,7]
; AVX512-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,6,5]
; AVX512-NEXT: vinserti64x4 $1, %ymm4, %zmm5, %zmm4
; AVX512-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa 32(%rsi), %ymm4
; AVX512-NEXT: vmovdqa 32(%rdi), %ymm11
; AVX512-NEXT: vpshufb %xmm0, %xmm10, %xmm0
; AVX512-NEXT: vpshufb %xmm1, %xmm10, %xmm1
; AVX512-NEXT: vmovdqa64 %xmm10, %xmm22
; AVX512-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpshufb %xmm2, %xmm12, %xmm0
; AVX512-NEXT: vpshufb %xmm3, %xmm12, %xmm1
; AVX512-NEXT: vmovdqa64 %xmm12, %xmm21
; AVX512-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vpbroadcastq {{.*#+}} ymm15 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512-NEXT: vpshufb %ymm15, %ymm4, %ymm0
; AVX512-NEXT: vpshufb %ymm15, %ymm11, %ymm1
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
; AVX512-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm11[8],ymm4[8],ymm11[9],ymm4[9],ymm11[10],ymm4[10],ymm11[11],ymm4[11],ymm11[12],ymm4[12],ymm11[13],ymm4[13],ymm11[14],ymm4[14],ymm11[15],ymm4[15],ymm11[24],ymm4[24],ymm11[25],ymm4[25],ymm11[26],ymm4[26],ymm11[27],ymm4[27],ymm11[28],ymm4[28],ymm11[29],ymm4[29],ymm11[30],ymm4[30],ymm11[31],ymm4[31]
; AVX512-NEXT: vmovdqa64 %ymm4, %ymm19
; AVX512-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
; AVX512-NEXT: # ymm6 = mem[0,1,0,1]
; AVX512-NEXT: vpshufb %ymm6, %ymm1, %ymm1
; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512-NEXT: vpbroadcastq {{.*#+}} ymm12 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX512-NEXT: vpshufb %ymm12, %ymm13, %ymm0
; AVX512-NEXT: vpshufb %ymm12, %ymm8, %ymm1
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
; AVX512-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm8[8],ymm13[8],ymm8[9],ymm13[9],ymm8[10],ymm13[10],ymm8[11],ymm13[11],ymm8[12],ymm13[12],ymm8[13],ymm13[13],ymm8[14],ymm13[14],ymm8[15],ymm13[15],ymm8[24],ymm13[24],ymm8[25],ymm13[25],ymm8[26],ymm13[26],ymm8[27],ymm13[27],ymm8[28],ymm13[28],ymm8[29],ymm13[29],ymm8[30],ymm13[30],ymm8[31],ymm13[31]
; AVX512-NEXT: vmovdqa64 %ymm13, %ymm17
; AVX512-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
; AVX512-NEXT: # ymm7 = mem[0,1,0,1]
; AVX512-NEXT: vpshufb %ymm7, %ymm1, %ymm1
; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa 32(%r8), %ymm13
; AVX512-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
; AVX512-NEXT: # ymm5 = mem[0,1,0,1]
; AVX512-NEXT: vpshufb %ymm5, %ymm13, %ymm0
; AVX512-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
; AVX512-NEXT: # ymm4 = mem[0,1,0,1]
; AVX512-NEXT: vpshufb %ymm4, %ymm13, %ymm1
; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-NEXT: vmovdqa 32(%r9), %ymm14
; AVX512-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
; AVX512-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512-NEXT: vpshufb %ymm3, %ymm14, %ymm0
; AVX512-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
; AVX512-NEXT: # ymm2 = mem[0,1,0,1]
; AVX512-NEXT: vpshufb %ymm2, %ymm14, %ymm1
; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm28
; AVX512-NEXT: vmovdqa (%rsi), %ymm1
; AVX512-NEXT: vmovdqa (%rdi), %ymm0
; AVX512-NEXT: vpshufb %ymm15, %ymm1, %ymm10
; AVX512-NEXT: vpshufb %ymm15, %ymm0, %ymm15
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm10 = ymm15[0],ymm10[0],ymm15[1],ymm10[1],ymm15[2],ymm10[2],ymm15[3],ymm10[3],ymm15[4],ymm10[4],ymm15[5],ymm10[5],ymm15[6],ymm10[6],ymm15[7],ymm10[7],ymm15[16],ymm10[16],ymm15[17],ymm10[17],ymm15[18],ymm10[18],ymm15[19],ymm10[19],ymm15[20],ymm10[20],ymm15[21],ymm10[21],ymm15[22],ymm10[22],ymm15[23],ymm10[23]
; AVX512-NEXT: vpunpckhbw {{.*#+}} ymm15 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
; AVX512-NEXT: vpshufb %ymm6, %ymm15, %ymm6
; AVX512-NEXT: vinserti64x4 $1, %ymm6, %zmm10, %zmm26
; AVX512-NEXT: vmovdqa (%rcx), %ymm10
; AVX512-NEXT: vmovdqa (%rdx), %ymm15
; AVX512-NEXT: vpshufb %ymm12, %ymm10, %ymm6
; AVX512-NEXT: vpshufb %ymm12, %ymm15, %ymm9
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm9[0],ymm6[0],ymm9[1],ymm6[1],ymm9[2],ymm6[2],ymm9[3],ymm6[3],ymm9[4],ymm6[4],ymm9[5],ymm6[5],ymm9[6],ymm6[6],ymm9[7],ymm6[7],ymm9[16],ymm6[16],ymm9[17],ymm6[17],ymm9[18],ymm6[18],ymm9[19],ymm6[19],ymm9[20],ymm6[20],ymm9[21],ymm6[21],ymm9[22],ymm6[22],ymm9[23],ymm6[23]
; AVX512-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm15[8],ymm10[8],ymm15[9],ymm10[9],ymm15[10],ymm10[10],ymm15[11],ymm10[11],ymm15[12],ymm10[12],ymm15[13],ymm10[13],ymm15[14],ymm10[14],ymm15[15],ymm10[15],ymm15[24],ymm10[24],ymm15[25],ymm10[25],ymm15[26],ymm10[26],ymm15[27],ymm10[27],ymm15[28],ymm10[28],ymm15[29],ymm10[29],ymm15[30],ymm10[30],ymm15[31],ymm10[31]
; AVX512-NEXT: vpshufb %ymm7, %ymm9, %ymm9
; AVX512-NEXT: vinserti64x4 $1, %ymm9, %zmm6, %zmm24
; AVX512-NEXT: vmovdqa (%r8), %ymm6
; AVX512-NEXT: vpshufb %ymm5, %ymm6, %ymm5
; AVX512-NEXT: vpshufb %ymm4, %ymm6, %ymm4
; AVX512-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm23
; AVX512-NEXT: vmovdqa (%r9), %ymm4
; AVX512-NEXT: vpshufb %ymm3, %ymm4, %ymm3
; AVX512-NEXT: vpshufb %ymm2, %ymm4, %ymm2
; AVX512-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm25
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm15[0],ymm10[0],ymm15[1],ymm10[1],ymm15[2],ymm10[2],ymm15[3],ymm10[3],ymm15[4],ymm10[4],ymm15[5],ymm10[5],ymm15[6],ymm10[6],ymm15[7],ymm10[7],ymm15[16],ymm10[16],ymm15[17],ymm10[17],ymm15[18],ymm10[18],ymm15[19],ymm10[19],ymm15[20],ymm10[20],ymm15[21],ymm10[21],ymm15[22],ymm10[22],ymm15[23],ymm10[23]
; AVX512-NEXT: vmovdqa64 %ymm2, %ymm16
; AVX512-NEXT: vmovdqa64 %xmm18, %xmm2
; AVX512-NEXT: vmovdqa64 %xmm20, %xmm3
; AVX512-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm2[8],xmm3[8],xmm2[9],xmm3[9],xmm2[10],xmm3[10],xmm2[11],xmm3[11],xmm2[12],xmm3[12],xmm2[13],xmm3[13],xmm2[14],xmm3[14],xmm2[15],xmm3[15]
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
; AVX512-NEXT: vmovdqa64 %ymm0, %ymm18
; AVX512-NEXT: vmovdqa (%rcx), %xmm15
; AVX512-NEXT: vmovdqa (%rdx), %xmm2
; AVX512-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm15[8],xmm2[9],xmm15[9],xmm2[10],xmm15[10],xmm2[11],xmm15[11],xmm2[12],xmm15[12],xmm2[13],xmm15[13],xmm2[14],xmm15[14],xmm2[15],xmm15[15]
; AVX512-NEXT: vmovdqa {{.*#+}} xmm3 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX512-NEXT: vpshufb %xmm3, %xmm1, %xmm10
; AVX512-NEXT: vmovdqa 32(%rcx), %xmm1
; AVX512-NEXT: vmovdqa 32(%rdx), %xmm0
; AVX512-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
; AVX512-NEXT: vpshufb %xmm3, %xmm9, %xmm3
; AVX512-NEXT: vmovdqa64 %ymm3, %ymm20
; AVX512-NEXT: vmovdqa64 %ymm17, %ymm3
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm8[0],ymm3[0],ymm8[1],ymm3[1],ymm8[2],ymm3[2],ymm8[3],ymm3[3],ymm8[4],ymm3[4],ymm8[5],ymm3[5],ymm8[6],ymm3[6],ymm8[7],ymm3[7],ymm8[16],ymm3[16],ymm8[17],ymm3[17],ymm8[18],ymm3[18],ymm8[19],ymm3[19],ymm8[20],ymm3[20],ymm8[21],ymm3[21],ymm8[22],ymm3[22],ymm8[23],ymm3[23]
; AVX512-NEXT: vmovdqa64 %ymm3, %ymm17
; AVX512-NEXT: vmovdqa64 %xmm30, %xmm3
; AVX512-NEXT: vmovdqa64 %xmm31, %xmm7
; AVX512-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm7[8],xmm3[8],xmm7[9],xmm3[9],xmm7[10],xmm3[10],xmm7[11],xmm3[11],xmm7[12],xmm3[12],xmm7[13],xmm3[13],xmm7[14],xmm3[14],xmm7[15],xmm3[15]
; AVX512-NEXT: vmovdqa {{.*#+}} xmm8 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX512-NEXT: vpshufb %xmm8, %xmm5, %xmm9
; AVX512-NEXT: vpshufb %xmm8, %xmm3, %xmm3
; AVX512-NEXT: vmovdqa64 %ymm3, %ymm31
; AVX512-NEXT: vmovdqa64 %ymm19, %ymm3
; AVX512-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm11[0],ymm3[0],ymm11[1],ymm3[1],ymm11[2],ymm3[2],ymm11[3],ymm3[3],ymm11[4],ymm3[4],ymm11[5],ymm3[5],ymm11[6],ymm3[6],ymm11[7],ymm3[7],ymm11[16],ymm3[16],ymm11[17],ymm3[17],ymm11[18],ymm3[18],ymm11[19],ymm3[19],ymm11[20],ymm3[20],ymm11[21],ymm3[21],ymm11[22],ymm3[22],ymm11[23],ymm3[23]
; AVX512-NEXT: vmovdqa64 %ymm3, %ymm19
; AVX512-NEXT: vmovdqa {{.*#+}} xmm3 = [10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u]
; AVX512-NEXT: vmovdqa64 %xmm22, %xmm5
; AVX512-NEXT: vpshufb %xmm3, %xmm5, %xmm7
; AVX512-NEXT: vmovdqa64 %xmm29, %xmm5
; AVX512-NEXT: vpshufb %xmm3, %xmm5, %xmm3
; AVX512-NEXT: vmovdqa64 %ymm3, %ymm22
; AVX512-NEXT: vbroadcasti128 {{.*#+}} ymm11 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
; AVX512-NEXT: # ymm11 = mem[0,1,0,1]
; AVX512-NEXT: vpshufb %ymm11, %ymm6, %ymm5
; AVX512-NEXT: vpshufb %ymm11, %ymm13, %ymm6
; AVX512-NEXT: vmovdqa {{.*#+}} xmm11 = [u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
; AVX512-NEXT: vmovdqa64 %xmm21, %xmm3
; AVX512-NEXT: vpshufb %xmm11, %xmm3, %xmm13
; AVX512-NEXT: vmovdqa64 %xmm27, %xmm3
; AVX512-NEXT: vpshufb %xmm11, %xmm3, %xmm11
; AVX512-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
; AVX512-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512-NEXT: vpshufb %ymm3, %ymm4, %ymm4
; AVX512-NEXT: vpshufb %ymm3, %ymm14, %ymm3
; AVX512-NEXT: vpshufb %xmm12, %xmm1, %xmm14
; AVX512-NEXT: vpshufb %xmm12, %xmm0, %xmm8
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3],xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
; AVX512-NEXT: vprold $16, %xmm0, %xmm0
; AVX512-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0
; AVX512-NEXT: vpshufb %xmm12, %xmm15, %xmm1
; AVX512-NEXT: vpshufb %xmm12, %xmm2, %xmm8
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm8[0],xmm1[0],xmm8[1],xmm1[1],xmm8[2],xmm1[2],xmm8[3],xmm1[3],xmm8[4],xmm1[4],xmm8[5],xmm1[5],xmm8[6],xmm1[6],xmm8[7],xmm1[7]
; AVX512-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm15[0],xmm2[1],xmm15[1],xmm2[2],xmm15[2],xmm2[3],xmm15[3],xmm2[4],xmm15[4],xmm2[5],xmm15[5],xmm2[6],xmm15[6],xmm2[7],xmm15[7]
; AVX512-NEXT: vprold $16, %xmm2, %xmm2
; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm30
; AVX512-NEXT: vpermq {{.*#+}} ymm2 = ymm10[0,0,0,1]
; AVX512-NEXT: vprold $16, %ymm16, %ymm8
; AVX512-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,0,1]
; AVX512-NEXT: vmovdqa64 %ymm18, %ymm1
; AVX512-NEXT: vpshuflw {{.*#+}} ymm10 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
; AVX512-NEXT: vpshufhw {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
; AVX512-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,2,2,3]
; AVX512-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,0,1]
; AVX512-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX512-NEXT: vpermq {{.*#+}} ymm12 = ymm13[0,0,0,1]
; AVX512-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
; AVX512-NEXT: vpermq {{.*#+}} ymm13 = ymm20[0,0,0,1]
; AVX512-NEXT: vprold $16, %ymm17, %ymm14
; AVX512-NEXT: vpermq {{.*#+}} ymm15 = ymm31[0,0,0,1]
; AVX512-NEXT: vmovdqa64 %ymm19, %ymm1
; AVX512-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
; AVX512-NEXT: vpshufhw {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
; AVX512-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
; AVX512-NEXT: vpermq {{.*#+}} ymm31 = ymm22[0,0,0,1]
; AVX512-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
; AVX512-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
; AVX512-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
; AVX512-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
; AVX512-NEXT: vinserti64x4 $1, %ymm8, %zmm2, %zmm2
; AVX512-NEXT: vinserti64x4 $1, %ymm10, %zmm9, %zmm8
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm9 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX512-NEXT: vpternlogq {{.*#+}} zmm8 = zmm2 ^ (zmm9 & (zmm8 ^ zmm2))
; AVX512-NEXT: vinserti64x4 $1, %ymm5, %zmm7, %zmm2
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX512-NEXT: vpternlogd {{.*#+}} zmm2 = zmm2 ^ (zmm5 & (zmm2 ^ zmm8))
; AVX512-NEXT: vpermq {{.*#+}} ymm7 = ymm14[2,2,2,3]
; AVX512-NEXT: vinserti64x4 $1, %ymm7, %zmm13, %zmm7
; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm15, %zmm1
; AVX512-NEXT: vpternlogq {{.*#+}} zmm1 = zmm7 ^ (zmm9 & (zmm1 ^ zmm7))
; AVX512-NEXT: vinserti64x4 $1, %ymm6, %zmm31, %zmm6
; AVX512-NEXT: vpternlogd {{.*#+}} zmm6 = zmm6 ^ (zmm5 & (zmm6 ^ zmm1))
; AVX512-NEXT: vinserti64x4 $1, %ymm4, %zmm12, %zmm1
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm4 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; AVX512-NEXT: vpternlogd {{.*#+}} zmm1 = zmm1 ^ (zmm4 & (zmm1 ^ zmm2))
; AVX512-NEXT: vinserti64x4 $1, %ymm3, %zmm11, %zmm2
; AVX512-NEXT: vpternlogd {{.*#+}} zmm2 = zmm2 ^ (zmm4 & (zmm2 ^ zmm6))
; AVX512-NEXT: vpermq {{.*#+}} zmm0 = zmm0[0,0,0,1,4,4,4,5]
; AVX512-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Folded Reload
; AVX512-NEXT: # zmm3 = mem[0,0,0,1,4,4,4,5]
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm4 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
; AVX512-NEXT: vpternlogq {{.*#+}} zmm3 = zmm0 ^ (zmm4 & (zmm3 ^ zmm0))
; AVX512-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
; AVX512-NEXT: # zmm0 = mem[0,0,0,1,4,4,4,5]
; AVX512-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm9 & (zmm0 ^ zmm3))
; AVX512-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Folded Reload
; AVX512-NEXT: # zmm3 = mem[0,0,0,1,4,4,4,5]
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm5 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; AVX512-NEXT: vpternlogd {{.*#+}} zmm3 = zmm3 ^ (zmm5 & (zmm3 ^ zmm0))
; AVX512-NEXT: vpermq {{.*#+}} zmm0 = zmm30[0,0,0,1,4,4,4,5]
; AVX512-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Folded Reload
; AVX512-NEXT: # zmm6 = mem[0,0,0,1,4,4,4,5]
; AVX512-NEXT: vpternlogq {{.*#+}} zmm6 = zmm0 ^ (zmm4 & (zmm6 ^ zmm0))
; AVX512-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
; AVX512-NEXT: # zmm0 = mem[0,0,0,1,4,4,4,5]
; AVX512-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm9 & (zmm0 ^ zmm6))
; AVX512-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Folded Reload
; AVX512-NEXT: # zmm6 = mem[0,0,0,1,4,4,4,5]
; AVX512-NEXT: vpternlogd {{.*#+}} zmm6 = zmm6 ^ (zmm5 & (zmm6 ^ zmm0))
; AVX512-NEXT: vpermq $234, (%rsp), %zmm0 # 64-byte Folded Reload
; AVX512-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
; AVX512-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Folded Reload
; AVX512-NEXT: # zmm5 = mem[2,2,2,3,6,6,6,7]
; AVX512-NEXT: vpternlogq {{.*#+}} zmm5 = zmm0 ^ (zmm9 & (zmm5 ^ zmm0))
; AVX512-NEXT: vpermq {{.*#+}} zmm0 = zmm26[2,2,2,3,6,6,6,7]
; AVX512-NEXT: vpermq {{.*#+}} zmm7 = zmm24[2,2,2,3,6,6,6,7]
; AVX512-NEXT: vpternlogq {{.*#+}} zmm7 = zmm0 ^ (zmm9 & (zmm7 ^ zmm0))
; AVX512-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
; AVX512-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
; AVX512-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm4 & (zmm0 ^ zmm5))
; AVX512-NEXT: vpermq {{.*#+}} zmm5 = zmm23[2,2,2,3,6,6,6,7]
; AVX512-NEXT: vpternlogq {{.*#+}} zmm5 = zmm5 ^ (zmm4 & (zmm5 ^ zmm7))
; AVX512-NEXT: vpermq {{.*#+}} zmm4 = zmm28[2,2,2,3,6,6,6,7]
; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm7 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
; AVX512-NEXT: vpternlogd {{.*#+}} zmm4 = zmm4 ^ (zmm7 & (zmm4 ^ zmm0))
; AVX512-NEXT: vpermq {{.*#+}} zmm0 = zmm25[2,2,2,3,6,6,6,7]
; AVX512-NEXT: vpternlogd {{.*#+}} zmm0 = zmm0 ^ (zmm7 & (zmm0 ^ zmm5))
; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-NEXT: vmovdqa64 %zmm0, 128(%rax)
; AVX512-NEXT: vmovdqa64 %zmm4, 320(%rax)
; AVX512-NEXT: vmovdqa64 %zmm6, (%rax)
; AVX512-NEXT: vmovdqa64 %zmm3, 192(%rax)
; AVX512-NEXT: vmovdqa64 %zmm2, 256(%rax)
; AVX512-NEXT: vmovdqa64 %zmm1, 64(%rax)
; AVX512-NEXT: addq $456, %rsp # imm = 0x1C8
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retq
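; Editorial note (not autogenerated): the vpternlogq/vpternlogd checks above
; encode the bit-select identity out = b ^ (m & (a ^ b)), which takes each
; bit of a where the mask m is set and of b where it is clear, replacing the
; pand/pandn/por triple needed on targets without ternary logic. A scalar
; sketch of the same select:
;   t = a ^ b
;   t &= m
;   out = b ^ t  ; == (a & m) | (b & ~m)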
; AVX512-FCP-LABEL: store_i8_stride6_vf64:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: subq $424, %rsp # imm = 0x1A8
; AVX512-FCP-NEXT: vmovdqa 32(%rsi), %ymm4
; AVX512-FCP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512-FCP-NEXT: vpbroadcastq {{.*#+}} ymm0 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512-FCP-NEXT: vpshufb %ymm0, %ymm4, %ymm1
; AVX512-FCP-NEXT: vmovdqa 32(%rdi), %ymm3
; AVX512-FCP-NEXT: vmovdqu %ymm3, (%rsp) # 32-byte Spill
; AVX512-FCP-NEXT: vpshufb %ymm0, %ymm3, %ymm2
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[4],ymm1[4],ymm2[5],ymm1[5],ymm2[6],ymm1[6],ymm2[7],ymm1[7],ymm2[16],ymm1[16],ymm2[17],ymm1[17],ymm2[18],ymm1[18],ymm2[19],ymm1[19],ymm2[20],ymm1[20],ymm2[21],ymm1[21],ymm2[22],ymm1[22],ymm2[23],ymm1[23]
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} ymm3 = ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15],ymm3[24],ymm4[24],ymm3[25],ymm4[25],ymm3[26],ymm4[26],ymm3[27],ymm4[27],ymm3[28],ymm4[28],ymm3[29],ymm4[29],ymm3[30],ymm4[30],ymm3[31],ymm4[31]
; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
; AVX512-FCP-NEXT: # ymm1 = mem[0,1,0,1]
; AVX512-FCP-NEXT: vpshufb %ymm1, %ymm3, %ymm3
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
; AVX512-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa 32(%rcx), %ymm5
; AVX512-FCP-NEXT: vpbroadcastq {{.*#+}} ymm10 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX512-FCP-NEXT: vpshufb %ymm10, %ymm5, %ymm2
; AVX512-FCP-NEXT: vmovdqa 32(%rdx), %ymm4
; AVX512-FCP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512-FCP-NEXT: vpshufb %ymm10, %ymm4, %ymm3
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[4],ymm2[4],ymm3[5],ymm2[5],ymm3[6],ymm2[6],ymm3[7],ymm2[7],ymm3[16],ymm2[16],ymm3[17],ymm2[17],ymm3[18],ymm2[18],ymm3[19],ymm2[19],ymm3[20],ymm2[20],ymm3[21],ymm2[21],ymm3[22],ymm2[22],ymm3[23],ymm2[23]
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} ymm4 = ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15],ymm4[24],ymm5[24],ymm4[25],ymm5[25],ymm4[26],ymm5[26],ymm4[27],ymm5[27],ymm4[28],ymm5[28],ymm4[29],ymm5[29],ymm4[30],ymm5[30],ymm4[31],ymm5[31]
; AVX512-FCP-NEXT: vmovdqa64 %ymm5, %ymm22
; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
; AVX512-FCP-NEXT: # ymm2 = mem[0,1,0,1]
; AVX512-FCP-NEXT: vpshufb %ymm2, %ymm4, %ymm4
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
; AVX512-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa 32(%r8), %ymm6
; AVX512-FCP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
; AVX512-FCP-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512-FCP-NEXT: vpshufb %ymm3, %ymm6, %ymm5
; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
; AVX512-FCP-NEXT: # ymm4 = mem[0,1,0,1]
; AVX512-FCP-NEXT: vpshufb %ymm4, %ymm6, %ymm6
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm5, %zmm6, %zmm5
; AVX512-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa 32(%r9), %ymm7
; AVX512-FCP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
; AVX512-FCP-NEXT: # ymm5 = mem[0,1,0,1]
; AVX512-FCP-NEXT: vpshufb %ymm5, %ymm7, %ymm6
; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
; AVX512-FCP-NEXT: # ymm9 = mem[0,1,0,1]
; AVX512-FCP-NEXT: vpshufb %ymm9, %ymm7, %ymm8
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm8, %zmm6
; AVX512-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa (%rsi), %ymm7
; AVX512-FCP-NEXT: vpshufb %ymm0, %ymm7, %ymm6
; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm8
; AVX512-FCP-NEXT: vpshufb %ymm0, %ymm8, %ymm0
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[1],ymm6[1],ymm0[2],ymm6[2],ymm0[3],ymm6[3],ymm0[4],ymm6[4],ymm0[5],ymm6[5],ymm0[6],ymm6[6],ymm0[7],ymm6[7],ymm0[16],ymm6[16],ymm0[17],ymm6[17],ymm0[18],ymm6[18],ymm0[19],ymm6[19],ymm0[20],ymm6[20],ymm0[21],ymm6[21],ymm0[22],ymm6[22],ymm0[23],ymm6[23]
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} ymm6 = ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11],ymm8[12],ymm7[12],ymm8[13],ymm7[13],ymm8[14],ymm7[14],ymm8[15],ymm7[15],ymm8[24],ymm7[24],ymm8[25],ymm7[25],ymm8[26],ymm7[26],ymm8[27],ymm7[27],ymm8[28],ymm7[28],ymm8[29],ymm7[29],ymm8[30],ymm7[30],ymm8[31],ymm7[31]
; AVX512-FCP-NEXT: vmovdqa64 %ymm8, %ymm18
; AVX512-FCP-NEXT: vmovdqa64 %ymm7, %ymm17
; AVX512-FCP-NEXT: vpshufb %ymm1, %ymm6, %ymm1
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa (%rcx), %ymm15
; AVX512-FCP-NEXT: vpshufb %ymm10, %ymm15, %ymm0
; AVX512-FCP-NEXT: vmovdqa (%rdx), %ymm11
; AVX512-FCP-NEXT: vpshufb %ymm10, %ymm11, %ymm1
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm11[8],ymm15[8],ymm11[9],ymm15[9],ymm11[10],ymm15[10],ymm11[11],ymm15[11],ymm11[12],ymm15[12],ymm11[13],ymm15[13],ymm11[14],ymm15[14],ymm11[15],ymm15[15],ymm11[24],ymm15[24],ymm11[25],ymm15[25],ymm11[26],ymm15[26],ymm11[27],ymm15[27],ymm11[28],ymm15[28],ymm11[29],ymm15[29],ymm11[30],ymm15[30],ymm11[31],ymm15[31]
; AVX512-FCP-NEXT: vpshufb %ymm2, %ymm1, %ymm1
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512-FCP-NEXT: vmovdqa (%r8), %ymm2
; AVX512-FCP-NEXT: vpshufb %ymm3, %ymm2, %ymm0
; AVX512-FCP-NEXT: vpshufb %ymm4, %ymm2, %ymm1
; AVX512-FCP-NEXT: vmovdqa64 %ymm2, %ymm19
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm23
; AVX512-FCP-NEXT: vmovdqa (%r9), %ymm2
; AVX512-FCP-NEXT: vpshufb %ymm5, %ymm2, %ymm0
; AVX512-FCP-NEXT: vpshufb %ymm9, %ymm2, %ymm1
; AVX512-FCP-NEXT: vmovdqa64 %ymm2, %ymm20
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm24
; AVX512-FCP-NEXT: vmovdqa 32(%rsi), %xmm0
; AVX512-FCP-NEXT: vpbroadcastq {{.*#+}} xmm5 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512-FCP-NEXT: vpshufb %xmm5, %xmm0, %xmm1
; AVX512-FCP-NEXT: vmovdqa 32(%rdi), %xmm12
; AVX512-FCP-NEXT: vpshufb %xmm5, %xmm12, %xmm2
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm12[0],xmm0[0],xmm12[1],xmm0[1],xmm12[2],xmm0[2],xmm12[3],xmm0[3],xmm12[4],xmm0[4],xmm12[5],xmm0[5],xmm12[6],xmm0[6],xmm12[7],xmm0[7]
; AVX512-FCP-NEXT: vmovdqa64 %xmm0, %xmm16
; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm8 = [0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
; AVX512-FCP-NEXT: vpshufb %xmm8, %xmm2, %xmm2
; AVX512-FCP-NEXT: vinserti32x4 $2, %xmm1, %zmm2, %zmm26
; AVX512-FCP-NEXT: vmovdqa 32(%r8), %xmm13
; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm9 = [6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u]
; AVX512-FCP-NEXT: vpshufb %xmm9, %xmm13, %xmm1
; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm2 = [2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u]
; AVX512-FCP-NEXT: vpshufb %xmm2, %xmm13, %xmm3
; AVX512-FCP-NEXT: vinserti32x4 $2, %xmm1, %zmm3, %zmm29
; AVX512-FCP-NEXT: vmovdqa 32(%r9), %xmm14
; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm7 = [u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm14, %xmm1
; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm3 = [u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
; AVX512-FCP-NEXT: vpshufb %xmm3, %xmm14, %xmm4
; AVX512-FCP-NEXT: vinserti32x4 $2, %xmm1, %zmm4, %zmm21
; AVX512-FCP-NEXT: vmovdqa (%rsi), %xmm6
; AVX512-FCP-NEXT: vmovdqa (%rdi), %xmm4
; AVX512-FCP-NEXT: vpshufb %xmm5, %xmm6, %xmm0
; AVX512-FCP-NEXT: vpshufb %xmm5, %xmm4, %xmm5
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm5[8],xmm0[8],xmm5[9],xmm0[9],xmm5[10],xmm0[10],xmm5[11],xmm0[11],xmm5[12],xmm0[12],xmm5[13],xmm0[13],xmm5[14],xmm0[14],xmm5[15],xmm0[15]
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xmm6[3],xmm4[4],xmm6[4],xmm4[5],xmm6[5],xmm4[6],xmm6[6],xmm4[7],xmm6[7]
; AVX512-FCP-NEXT: vpshufb %xmm8, %xmm5, %xmm5
; AVX512-FCP-NEXT: vinserti32x4 $2, %xmm0, %zmm5, %zmm25
; AVX512-FCP-NEXT: vmovdqa (%r8), %xmm1
; AVX512-FCP-NEXT: vpshufb %xmm9, %xmm1, %xmm5
; AVX512-FCP-NEXT: vpshufb %xmm2, %xmm1, %xmm2
; AVX512-FCP-NEXT: vinserti32x4 $2, %xmm5, %zmm2, %zmm27
; AVX512-FCP-NEXT: vmovdqa (%r9), %xmm8
; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm8, %xmm5
; AVX512-FCP-NEXT: vpshufb %xmm3, %xmm8, %xmm3
; AVX512-FCP-NEXT: vinserti32x4 $2, %xmm5, %zmm3, %zmm28
; AVX512-FCP-NEXT: vmovdqa 32(%rcx), %xmm5
; AVX512-FCP-NEXT: vpshufb %xmm10, %xmm5, %xmm7
; AVX512-FCP-NEXT: vmovdqa 32(%rdx), %xmm3
; AVX512-FCP-NEXT: vpshufb %xmm10, %xmm3, %xmm9
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3],xmm3[4],xmm5[4],xmm3[5],xmm5[5],xmm3[6],xmm5[6],xmm3[7],xmm5[7]
; AVX512-FCP-NEXT: vprold $16, %xmm9, %xmm9
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm7, %zmm9, %zmm30
; AVX512-FCP-NEXT: vmovdqa (%rcx), %xmm7
; AVX512-FCP-NEXT: vmovdqa (%rdx), %xmm9
; AVX512-FCP-NEXT: vpshufb %xmm10, %xmm7, %xmm0
; AVX512-FCP-NEXT: vpshufb %xmm10, %xmm9, %xmm10
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm10[0],xmm0[0],xmm10[1],xmm0[1],xmm10[2],xmm0[2],xmm10[3],xmm0[3],xmm10[4],xmm0[4],xmm10[5],xmm0[5],xmm10[6],xmm0[6],xmm10[7],xmm0[7]
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
; AVX512-FCP-NEXT: vprold $16, %xmm10, %xmm10
; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm10, %zmm31
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm11[0],ymm15[0],ymm11[1],ymm15[1],ymm11[2],ymm15[2],ymm11[3],ymm15[3],ymm11[4],ymm15[4],ymm11[5],ymm15[5],ymm11[6],ymm15[6],ymm11[7],ymm15[7],ymm11[16],ymm15[16],ymm11[17],ymm15[17],ymm11[18],ymm15[18],ymm11[19],ymm15[19],ymm11[20],ymm15[20],ymm11[21],ymm15[21],ymm11[22],ymm15[22],ymm11[23],ymm15[23]
; AVX512-FCP-NEXT: vprold $16, %ymm0, %ymm0
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm9[8],xmm7[8],xmm9[9],xmm7[9],xmm9[10],xmm7[10],xmm9[11],xmm7[11],xmm9[12],xmm7[12],xmm9[13],xmm7[13],xmm9[14],xmm7[14],xmm9[15],xmm7[15]
; AVX512-FCP-NEXT: vmovdqa64 %ymm18, %ymm2
; AVX512-FCP-NEXT: vmovdqa64 %ymm17, %ymm9
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm9 = ymm2[0],ymm9[0],ymm2[1],ymm9[1],ymm2[2],ymm9[2],ymm2[3],ymm9[3],ymm2[4],ymm9[4],ymm2[5],ymm9[5],ymm2[6],ymm9[6],ymm2[7],ymm9[7],ymm2[16],ymm9[16],ymm2[17],ymm9[17],ymm2[18],ymm9[18],ymm2[19],ymm9[19],ymm2[20],ymm9[20],ymm2[21],ymm9[21],ymm2[22],ymm9[22],ymm2[23],ymm9[23]
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm4[8],xmm6[8],xmm4[9],xmm6[9],xmm4[10],xmm6[10],xmm4[11],xmm6[11],xmm4[12],xmm6[12],xmm4[13],xmm6[13],xmm4[14],xmm6[14],xmm4[15],xmm6[15]
; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
; AVX512-FCP-NEXT: # ymm2 = mem[0,1,0,1]
; AVX512-FCP-NEXT: vpshufb %ymm2, %ymm9, %ymm9
; AVX512-FCP-NEXT: vmovdqa64 %ymm2, %ymm18
; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm2 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX512-FCP-NEXT: vpshufb %xmm2, %xmm4, %xmm15
; AVX512-FCP-NEXT: vmovdqa64 %xmm2, %xmm17
; AVX512-FCP-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,0,0,1,10,10,10,11]
; AVX512-FCP-NEXT: vpermt2q %zmm9, %zmm4, %zmm15
; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm6 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX512-FCP-NEXT: vpshufb %xmm6, %xmm7, %xmm7
; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm4, %zmm7
; AVX512-FCP-NEXT: vmovdqa64 {{.*#+}} zmm9 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm15 = zmm7 ^ (zmm9 & (zmm15 ^ zmm7))
; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
; AVX512-FCP-NEXT: # ymm2 = mem[0,1,0,1]
; AVX512-FCP-NEXT: vmovdqa64 %ymm19, %ymm0
; AVX512-FCP-NEXT: vpshufb %ymm2, %ymm0, %ymm0
; AVX512-FCP-NEXT: vmovdqa64 %ymm2, %ymm19
; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm11 = [10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u]
; AVX512-FCP-NEXT: vpshufb %xmm11, %xmm1, %xmm1
; AVX512-FCP-NEXT: vpermt2q %zmm0, %zmm4, %zmm1
; AVX512-FCP-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm1 = zmm1 ^ (zmm10 & (zmm1 ^ zmm15))
; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
; AVX512-FCP-NEXT: # ymm15 = mem[0,1,0,1]
; AVX512-FCP-NEXT: vmovdqa64 %ymm20, %ymm0
; AVX512-FCP-NEXT: vpshufb %ymm15, %ymm0, %ymm2
; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm7 = [u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm8, %xmm0
; AVX512-FCP-NEXT: vpermt2q %zmm2, %zmm4, %zmm0
; AVX512-FCP-NEXT: vmovdqa64 {{.*#+}} zmm2 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm0 = zmm0 ^ (zmm2 & (zmm0 ^ zmm1))
; AVX512-FCP-NEXT: vmovdqa64 %ymm22, %ymm8
; AVX512-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX512-FCP-NEXT: vpunpcklbw {{.*#+}} ymm1 = ymm1[0],ymm8[0],ymm1[1],ymm8[1],ymm1[2],ymm8[2],ymm1[3],ymm8[3],ymm1[4],ymm8[4],ymm1[5],ymm8[5],ymm1[6],ymm8[6],ymm1[7],ymm8[7],ymm1[16],ymm8[16],ymm1[17],ymm8[17],ymm1[18],ymm8[18],ymm1[19],ymm8[19],ymm1[20],ymm8[20],ymm1[21],ymm8[21],ymm1[22],ymm8[22],ymm1[23],ymm8[23]
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm5[8],xmm3[9],xmm5[9],xmm3[10],xmm5[10],xmm3[11],xmm5[11],xmm3[12],xmm5[12],xmm3[13],xmm5[13],xmm3[14],xmm5[14],xmm3[15],xmm5[15]
; AVX512-FCP-NEXT: vpshufb %xmm6, %xmm3, %xmm3
; AVX512-FCP-NEXT: vmovdqu (%rsp), %ymm5 # 32-byte Reload
; AVX512-FCP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm5 # 32-byte Folded Reload
; AVX512-FCP-NEXT: # ymm5 = ymm5[0],mem[0],ymm5[1],mem[1],ymm5[2],mem[2],ymm5[3],mem[3],ymm5[4],mem[4],ymm5[5],mem[5],ymm5[6],mem[6],ymm5[7],mem[7],ymm5[16],mem[16],ymm5[17],mem[17],ymm5[18],mem[18],ymm5[19],mem[19],ymm5[20],mem[20],ymm5[21],mem[21],ymm5[22],mem[22],ymm5[23],mem[23]
; AVX512-FCP-NEXT: vmovdqa64 %ymm18, %ymm6
; AVX512-FCP-NEXT: vpshufb %ymm6, %ymm5, %ymm5
; AVX512-FCP-NEXT: vmovdqa64 %xmm16, %xmm6
; AVX512-FCP-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm12[8],xmm6[8],xmm12[9],xmm6[9],xmm12[10],xmm6[10],xmm12[11],xmm6[11],xmm12[12],xmm6[12],xmm12[13],xmm6[13],xmm12[14],xmm6[14],xmm12[15],xmm6[15]
; AVX512-FCP-NEXT: vmovdqa64 %xmm17, %xmm8
; AVX512-FCP-NEXT: vpshufb %xmm8, %xmm6, %xmm6
; AVX512-FCP-NEXT: vpermt2q %zmm5, %zmm4, %zmm6
; AVX512-FCP-NEXT: vprold $16, %ymm1, %ymm1
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm6 = zmm3 ^ (zmm9 & (zmm6 ^ zmm3))
; AVX512-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX512-FCP-NEXT: vmovdqa64 %ymm19, %ymm3
; AVX512-FCP-NEXT: vpshufb %ymm3, %ymm1, %ymm1
; AVX512-FCP-NEXT: vpshufb %xmm11, %xmm13, %xmm3
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm3 = zmm3 ^ (zmm10 & (zmm3 ^ zmm6))
; AVX512-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX512-FCP-NEXT: vpshufb %ymm15, %ymm1, %ymm1
; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm14, %xmm5
; AVX512-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm5
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm5 = zmm5 ^ (zmm2 & (zmm5 ^ zmm3))
; AVX512-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512-FCP-NEXT: vmovdqa64 %zmm5, 256(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 64(%rax)
; AVX512-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
; AVX512-FCP-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
; AVX512-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Folded Reload
; AVX512-FCP-NEXT: # zmm1 = mem[2,2,2,3,6,6,6,7]
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm1 = zmm0 ^ (zmm9 & (zmm1 ^ zmm0))
; AVX512-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
; AVX512-FCP-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
; AVX512-FCP-NEXT: vmovdqa64 {{.*#+}} zmm2 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm2 & (zmm0 ^ zmm1))
; AVX512-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Folded Reload
; AVX512-FCP-NEXT: # zmm1 = mem[2,2,2,3,6,6,6,7]
; AVX512-FCP-NEXT: vmovdqa64 {{.*#+}} zmm3 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm1 = zmm1 ^ (zmm3 & (zmm1 ^ zmm0))
; AVX512-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
; AVX512-FCP-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
; AVX512-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Folded Reload
; AVX512-FCP-NEXT: # zmm4 = mem[2,2,2,3,6,6,6,7]
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm4 = zmm0 ^ (zmm9 & (zmm4 ^ zmm0))
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm0 = zmm23[2,2,2,3,6,6,6,7]
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm2 & (zmm0 ^ zmm4))
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm4 = zmm24[2,2,2,3,6,6,6,7]
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm4 = zmm4 ^ (zmm3 & (zmm4 ^ zmm0))
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm0 = zmm30[0,0,0,1,4,4,4,5]
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm3 = zmm26[0,0,0,1,4,4,4,5]
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm3 = zmm0 ^ (zmm2 & (zmm3 ^ zmm0))
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm0 = zmm31[0,0,0,1,4,4,4,5]
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm5 = zmm25[0,0,0,1,4,4,4,5]
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm5 = zmm0 ^ (zmm2 & (zmm5 ^ zmm0))
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm0 = zmm29[0,0,0,1,4,4,4,5]
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm9 & (zmm0 ^ zmm3))
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm2 = zmm27[0,0,0,1,4,4,4,5]
; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm2 = zmm2 ^ (zmm9 & (zmm2 ^ zmm5))
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm3 = zmm21[0,0,0,1,4,4,4,5]
; AVX512-FCP-NEXT: vmovdqa64 {{.*#+}} zmm5 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm3 = zmm3 ^ (zmm5 & (zmm3 ^ zmm0))
; AVX512-FCP-NEXT: vpermq {{.*#+}} zmm0 = zmm28[0,0,0,1,4,4,4,5]
; AVX512-FCP-NEXT: vpternlogd {{.*#+}} zmm0 = zmm0 ^ (zmm5 & (zmm0 ^ zmm2))
; AVX512-FCP-NEXT: vmovdqa64 %zmm0, (%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm3, 192(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm4, 128(%rax)
; AVX512-FCP-NEXT: vmovdqa64 %zmm1, 320(%rax)
; AVX512-FCP-NEXT: addq $424, %rsp # imm = 0x1A8
; AVX512-FCP-NEXT: vzeroupper
; AVX512-FCP-NEXT: retq
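; Editorial note (not autogenerated): the -FCP variant above combines its lane
; permutes through vpermt2q with index vectors such as [0,0,0,1,10,10,10,11];
; indices 0-7 pick qwords from the destination register (the first table) and
; 8-15 from the other source, so, for example,
;   vpermt2q %zmm9, %zmm4, %zmm15 ; zmm15 = zmm15[0,0,0,1] : zmm9[2,2,2,3]
; does in one permute what the plain AVX512 path spells as two vpermq
; shuffles plus a vinserti64x4.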
; AVX512DQ-LABEL: store_i8_stride6_vf64:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: subq $456, %rsp # imm = 0x1C8
; AVX512DQ-NEXT: vmovdqa (%rsi), %xmm6
; AVX512DQ-NEXT: vmovdqa 32(%rsi), %xmm3
; AVX512DQ-NEXT: vmovdqa (%rdi), %xmm7
; AVX512DQ-NEXT: vmovdqa 32(%rdi), %xmm2
; AVX512DQ-NEXT: vmovdqa (%r8), %xmm10
; AVX512DQ-NEXT: vmovdqa 32(%r8), %xmm4
; AVX512DQ-NEXT: vmovdqa (%r9), %xmm12
; AVX512DQ-NEXT: vmovdqa 32(%r9), %xmm8
; AVX512DQ-NEXT: vmovdqa 32(%rcx), %ymm13
; AVX512DQ-NEXT: vpbroadcastq {{.*#+}} xmm9 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512DQ-NEXT: vpshufb %xmm9, %xmm3, %xmm0
; AVX512DQ-NEXT: vpshufb %xmm9, %xmm2, %xmm1
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3],xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
; AVX512DQ-NEXT: vmovdqa64 %xmm3, %xmm30
; AVX512DQ-NEXT: vmovdqa64 %xmm2, %xmm31
; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,3,2,1,4,5,6,7]
; AVX512DQ-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,5]
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm0 = [6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u]
; AVX512DQ-NEXT: vpshufb %xmm0, %xmm4, %xmm2
; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm1 = [2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u]
; AVX512DQ-NEXT: vpshufb %xmm1, %xmm4, %xmm3
; AVX512DQ-NEXT: vmovdqa64 %xmm4, %xmm29
; AVX512DQ-NEXT: vinserti32x4 $2, %xmm2, %zmm3, %zmm2
; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm2 = [u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
; AVX512DQ-NEXT: vpshufb %xmm2, %xmm8, %xmm4
; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm3 = [u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
; AVX512DQ-NEXT: vpshufb %xmm3, %xmm8, %xmm5
; AVX512DQ-NEXT: vmovdqa64 %xmm8, %xmm27
; AVX512DQ-NEXT: vinserti32x4 $2, %xmm4, %zmm5, %zmm4
; AVX512DQ-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa 32(%rdx), %ymm8
; AVX512DQ-NEXT: vpshufb %xmm9, %xmm6, %xmm4
; AVX512DQ-NEXT: vpshufb %xmm9, %xmm7, %xmm5
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
; AVX512DQ-NEXT: vmovdqa64 %xmm7, %xmm18
; AVX512DQ-NEXT: vmovdqa64 %xmm6, %xmm20
; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,3,2,1,4,5,6,7]
; AVX512DQ-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,6,5]
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm4, %zmm5, %zmm4
; AVX512DQ-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa 32(%rsi), %ymm4
; AVX512DQ-NEXT: vmovdqa 32(%rdi), %ymm11
; AVX512DQ-NEXT: vpshufb %xmm0, %xmm10, %xmm0
; AVX512DQ-NEXT: vpshufb %xmm1, %xmm10, %xmm1
; AVX512DQ-NEXT: vmovdqa64 %xmm10, %xmm22
; AVX512DQ-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm0
; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpshufb %xmm2, %xmm12, %xmm0
; AVX512DQ-NEXT: vpshufb %xmm3, %xmm12, %xmm1
; AVX512DQ-NEXT: vmovdqa64 %xmm12, %xmm21
; AVX512DQ-NEXT: vinserti32x4 $2, %xmm0, %zmm1, %zmm0
; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vpbroadcastq {{.*#+}} ymm15 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512DQ-NEXT: vpshufb %ymm15, %ymm4, %ymm0
; AVX512DQ-NEXT: vpshufb %ymm15, %ymm11, %ymm1
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm11[8],ymm4[8],ymm11[9],ymm4[9],ymm11[10],ymm4[10],ymm11[11],ymm4[11],ymm11[12],ymm4[12],ymm11[13],ymm4[13],ymm11[14],ymm4[14],ymm11[15],ymm4[15],ymm11[24],ymm4[24],ymm11[25],ymm4[25],ymm11[26],ymm4[26],ymm11[27],ymm4[27],ymm11[28],ymm4[28],ymm11[29],ymm4[29],ymm11[30],ymm4[30],ymm11[31],ymm4[31]
; AVX512DQ-NEXT: vmovdqa64 %ymm4, %ymm19
; AVX512DQ-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
; AVX512DQ-NEXT: # ymm6 = mem[0,1,0,1]
; AVX512DQ-NEXT: vpshufb %ymm6, %ymm1, %ymm1
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512DQ-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
; AVX512DQ-NEXT: vpbroadcastq {{.*#+}} ymm12 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX512DQ-NEXT: vpshufb %ymm12, %ymm13, %ymm0
; AVX512DQ-NEXT: vpshufb %ymm12, %ymm8, %ymm1
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm8[8],ymm13[8],ymm8[9],ymm13[9],ymm8[10],ymm13[10],ymm8[11],ymm13[11],ymm8[12],ymm13[12],ymm8[13],ymm13[13],ymm8[14],ymm13[14],ymm8[15],ymm13[15],ymm8[24],ymm13[24],ymm8[25],ymm13[25],ymm8[26],ymm13[26],ymm8[27],ymm13[27],ymm8[28],ymm13[28],ymm8[29],ymm13[29],ymm8[30],ymm13[30],ymm8[31],ymm13[31]
; AVX512DQ-NEXT: vmovdqa64 %ymm13, %ymm17
; AVX512DQ-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
; AVX512DQ-NEXT: # ymm7 = mem[0,1,0,1]
; AVX512DQ-NEXT: vpshufb %ymm7, %ymm1, %ymm1
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa 32(%r8), %ymm13
; AVX512DQ-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
; AVX512DQ-NEXT: # ymm5 = mem[0,1,0,1]
; AVX512DQ-NEXT: vpshufb %ymm5, %ymm13, %ymm0
; AVX512DQ-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
; AVX512DQ-NEXT: # ymm4 = mem[0,1,0,1]
; AVX512DQ-NEXT: vpshufb %ymm4, %ymm13, %ymm1
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-NEXT: vmovdqa 32(%r9), %ymm14
; AVX512DQ-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
; AVX512DQ-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512DQ-NEXT: vpshufb %ymm3, %ymm14, %ymm0
; AVX512DQ-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
; AVX512DQ-NEXT: # ymm2 = mem[0,1,0,1]
; AVX512DQ-NEXT: vpshufb %ymm2, %ymm14, %ymm1
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm28
; AVX512DQ-NEXT: vmovdqa (%rsi), %ymm1
; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm0
; AVX512DQ-NEXT: vpshufb %ymm15, %ymm1, %ymm10
; AVX512DQ-NEXT: vpshufb %ymm15, %ymm0, %ymm15
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm10 = ymm15[0],ymm10[0],ymm15[1],ymm10[1],ymm15[2],ymm10[2],ymm15[3],ymm10[3],ymm15[4],ymm10[4],ymm15[5],ymm10[5],ymm15[6],ymm10[6],ymm15[7],ymm10[7],ymm15[16],ymm10[16],ymm15[17],ymm10[17],ymm15[18],ymm10[18],ymm15[19],ymm10[19],ymm15[20],ymm10[20],ymm15[21],ymm10[21],ymm15[22],ymm10[22],ymm15[23],ymm10[23]
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} ymm15 = ymm0[8],ymm1[8],ymm0[9],ymm1[9],ymm0[10],ymm1[10],ymm0[11],ymm1[11],ymm0[12],ymm1[12],ymm0[13],ymm1[13],ymm0[14],ymm1[14],ymm0[15],ymm1[15],ymm0[24],ymm1[24],ymm0[25],ymm1[25],ymm0[26],ymm1[26],ymm0[27],ymm1[27],ymm0[28],ymm1[28],ymm0[29],ymm1[29],ymm0[30],ymm1[30],ymm0[31],ymm1[31]
; AVX512DQ-NEXT: vpshufb %ymm6, %ymm15, %ymm6
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm6, %zmm10, %zmm26
; AVX512DQ-NEXT: vmovdqa (%rcx), %ymm10
; AVX512DQ-NEXT: vmovdqa (%rdx), %ymm15
; AVX512DQ-NEXT: vpshufb %ymm12, %ymm10, %ymm6
; AVX512DQ-NEXT: vpshufb %ymm12, %ymm15, %ymm9
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm9[0],ymm6[0],ymm9[1],ymm6[1],ymm9[2],ymm6[2],ymm9[3],ymm6[3],ymm9[4],ymm6[4],ymm9[5],ymm6[5],ymm9[6],ymm6[6],ymm9[7],ymm6[7],ymm9[16],ymm6[16],ymm9[17],ymm6[17],ymm9[18],ymm6[18],ymm9[19],ymm6[19],ymm9[20],ymm6[20],ymm9[21],ymm6[21],ymm9[22],ymm6[22],ymm9[23],ymm6[23]
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} ymm9 = ymm15[8],ymm10[8],ymm15[9],ymm10[9],ymm15[10],ymm10[10],ymm15[11],ymm10[11],ymm15[12],ymm10[12],ymm15[13],ymm10[13],ymm15[14],ymm10[14],ymm15[15],ymm10[15],ymm15[24],ymm10[24],ymm15[25],ymm10[25],ymm15[26],ymm10[26],ymm15[27],ymm10[27],ymm15[28],ymm10[28],ymm15[29],ymm10[29],ymm15[30],ymm10[30],ymm15[31],ymm10[31]
; AVX512DQ-NEXT: vpshufb %ymm7, %ymm9, %ymm9
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm9, %zmm6, %zmm24
; AVX512DQ-NEXT: vmovdqa (%r8), %ymm6
; AVX512DQ-NEXT: vpshufb %ymm5, %ymm6, %ymm5
; AVX512DQ-NEXT: vpshufb %ymm4, %ymm6, %ymm4
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm23
; AVX512DQ-NEXT: vmovdqa (%r9), %ymm4
; AVX512DQ-NEXT: vpshufb %ymm3, %ymm4, %ymm3
; AVX512DQ-NEXT: vpshufb %ymm2, %ymm4, %ymm2
; AVX512DQ-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm25
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm15[0],ymm10[0],ymm15[1],ymm10[1],ymm15[2],ymm10[2],ymm15[3],ymm10[3],ymm15[4],ymm10[4],ymm15[5],ymm10[5],ymm15[6],ymm10[6],ymm15[7],ymm10[7],ymm15[16],ymm10[16],ymm15[17],ymm10[17],ymm15[18],ymm10[18],ymm15[19],ymm10[19],ymm15[20],ymm10[20],ymm15[21],ymm10[21],ymm15[22],ymm10[22],ymm15[23],ymm10[23]
; AVX512DQ-NEXT: vmovdqa64 %ymm2, %ymm16
; AVX512DQ-NEXT: vmovdqa64 %xmm18, %xmm2
; AVX512DQ-NEXT: vmovdqa64 %xmm20, %xmm3
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm2[8],xmm3[8],xmm2[9],xmm3[9],xmm2[10],xmm3[10],xmm2[11],xmm3[11],xmm2[12],xmm3[12],xmm2[13],xmm3[13],xmm2[14],xmm3[14],xmm2[15],xmm3[15]
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[4],ymm1[4],ymm0[5],ymm1[5],ymm0[6],ymm1[6],ymm0[7],ymm1[7],ymm0[16],ymm1[16],ymm0[17],ymm1[17],ymm0[18],ymm1[18],ymm0[19],ymm1[19],ymm0[20],ymm1[20],ymm0[21],ymm1[21],ymm0[22],ymm1[22],ymm0[23],ymm1[23]
; AVX512DQ-NEXT: vmovdqa64 %ymm0, %ymm18
; AVX512DQ-NEXT: vmovdqa (%rcx), %xmm15
; AVX512DQ-NEXT: vmovdqa (%rdx), %xmm2
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm15[8],xmm2[9],xmm15[9],xmm2[10],xmm15[10],xmm2[11],xmm15[11],xmm2[12],xmm15[12],xmm2[13],xmm15[13],xmm2[14],xmm15[14],xmm2[15],xmm15[15]
; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm3 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX512DQ-NEXT: vpshufb %xmm3, %xmm1, %xmm10
; AVX512DQ-NEXT: vmovdqa 32(%rcx), %xmm1
; AVX512DQ-NEXT: vmovdqa 32(%rdx), %xmm0
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
; AVX512DQ-NEXT: vpshufb %xmm3, %xmm9, %xmm3
; AVX512DQ-NEXT: vmovdqa64 %ymm3, %ymm20
; AVX512DQ-NEXT: vmovdqa64 %ymm17, %ymm3
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm8[0],ymm3[0],ymm8[1],ymm3[1],ymm8[2],ymm3[2],ymm8[3],ymm3[3],ymm8[4],ymm3[4],ymm8[5],ymm3[5],ymm8[6],ymm3[6],ymm8[7],ymm3[7],ymm8[16],ymm3[16],ymm8[17],ymm3[17],ymm8[18],ymm3[18],ymm8[19],ymm3[19],ymm8[20],ymm3[20],ymm8[21],ymm3[21],ymm8[22],ymm3[22],ymm8[23],ymm3[23]
; AVX512DQ-NEXT: vmovdqa64 %ymm3, %ymm17
; AVX512DQ-NEXT: vmovdqa64 %xmm30, %xmm3
; AVX512DQ-NEXT: vmovdqa64 %xmm31, %xmm7
; AVX512DQ-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm7[8],xmm3[8],xmm7[9],xmm3[9],xmm7[10],xmm3[10],xmm7[11],xmm3[11],xmm7[12],xmm3[12],xmm7[13],xmm3[13],xmm7[14],xmm3[14],xmm7[15],xmm3[15]
; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm8 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX512DQ-NEXT: vpshufb %xmm8, %xmm5, %xmm9
; AVX512DQ-NEXT: vpshufb %xmm8, %xmm3, %xmm3
; AVX512DQ-NEXT: vmovdqa64 %ymm3, %ymm31
; AVX512DQ-NEXT: vmovdqa64 %ymm19, %ymm3
; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm11[0],ymm3[0],ymm11[1],ymm3[1],ymm11[2],ymm3[2],ymm11[3],ymm3[3],ymm11[4],ymm3[4],ymm11[5],ymm3[5],ymm11[6],ymm3[6],ymm11[7],ymm3[7],ymm11[16],ymm3[16],ymm11[17],ymm3[17],ymm11[18],ymm3[18],ymm11[19],ymm3[19],ymm11[20],ymm3[20],ymm11[21],ymm3[21],ymm11[22],ymm3[22],ymm11[23],ymm3[23]
; AVX512DQ-NEXT: vmovdqa64 %ymm3, %ymm19
; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm3 = [10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u]
; AVX512DQ-NEXT: vmovdqa64 %xmm22, %xmm5
; AVX512DQ-NEXT: vpshufb %xmm3, %xmm5, %xmm7
; AVX512DQ-NEXT: vmovdqa64 %xmm29, %xmm5
; AVX512DQ-NEXT: vpshufb %xmm3, %xmm5, %xmm3
; AVX512DQ-NEXT: vmovdqa64 %ymm3, %ymm22
; AVX512DQ-NEXT: vbroadcasti128 {{.*#+}} ymm11 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
; AVX512DQ-NEXT: # ymm11 = mem[0,1,0,1]
; AVX512DQ-NEXT: vpshufb %ymm11, %ymm6, %ymm5
; AVX512DQ-NEXT: vpshufb %ymm11, %ymm13, %ymm6
; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm11 = [u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
; AVX512DQ-NEXT: vmovdqa64 %xmm21, %xmm3
6076 ; AVX512DQ-NEXT: vpshufb %xmm11, %xmm3, %xmm13
6077 ; AVX512DQ-NEXT: vmovdqa64 %xmm27, %xmm3
6078 ; AVX512DQ-NEXT: vpshufb %xmm11, %xmm3, %xmm11
6079 ; AVX512DQ-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
6080 ; AVX512DQ-NEXT: # ymm3 = mem[0,1,0,1]
6081 ; AVX512DQ-NEXT: vpshufb %ymm3, %ymm4, %ymm4
6082 ; AVX512DQ-NEXT: vpshufb %ymm3, %ymm14, %ymm3
6083 ; AVX512DQ-NEXT: vpshufb %xmm12, %xmm1, %xmm14
6084 ; AVX512DQ-NEXT: vpshufb %xmm12, %xmm0, %xmm8
6085 ; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3],xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
6086 ; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
6087 ; AVX512DQ-NEXT: vprold $16, %xmm0, %xmm0
6088 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0
6089 ; AVX512DQ-NEXT: vpshufb %xmm12, %xmm15, %xmm1
6090 ; AVX512DQ-NEXT: vpshufb %xmm12, %xmm2, %xmm8
6091 ; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm8[0],xmm1[0],xmm8[1],xmm1[1],xmm8[2],xmm1[2],xmm8[3],xmm1[3],xmm8[4],xmm1[4],xmm8[5],xmm1[5],xmm8[6],xmm1[6],xmm8[7],xmm1[7]
6092 ; AVX512DQ-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm15[0],xmm2[1],xmm15[1],xmm2[2],xmm15[2],xmm2[3],xmm15[3],xmm2[4],xmm15[4],xmm2[5],xmm15[5],xmm2[6],xmm15[6],xmm2[7],xmm15[7]
6093 ; AVX512DQ-NEXT: vprold $16, %xmm2, %xmm2
6094 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm30
6095 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm2 = ymm10[0,0,0,1]
6096 ; AVX512DQ-NEXT: vprold $16, %ymm16, %ymm8
6097 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm9 = ymm9[0,0,0,1]
6098 ; AVX512DQ-NEXT: vmovdqa64 %ymm18, %ymm1
6099 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} ymm10 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
6100 ; AVX512DQ-NEXT: vpshufhw {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
6101 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm10 = ymm10[2,2,2,3]
6102 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm7 = ymm7[0,0,0,1]
6103 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
6104 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm12 = ymm13[0,0,0,1]
6105 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
6106 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm13 = ymm20[0,0,0,1]
6107 ; AVX512DQ-NEXT: vprold $16, %ymm17, %ymm14
6108 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm15 = ymm31[0,0,0,1]
6109 ; AVX512DQ-NEXT: vmovdqa64 %ymm19, %ymm1
6110 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} ymm1 = ymm1[0,3,2,1,4,5,6,7,8,11,10,9,12,13,14,15]
6111 ; AVX512DQ-NEXT: vpshufhw {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5,6,5,8,9,10,11,12,13,14,13]
6112 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
6113 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm31 = ymm22[0,0,0,1]
6114 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
6115 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm11 = ymm11[0,0,0,1]
6116 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
6117 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
6118 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm8, %zmm2, %zmm2
6119 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm10, %zmm9, %zmm8
6120 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm9 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
6121 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm8 = zmm2 ^ (zmm9 & (zmm8 ^ zmm2))
6122 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm5, %zmm7, %zmm2
6123 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
6124 ; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm2 = zmm2 ^ (zmm5 & (zmm2 ^ zmm8))
6125 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm7 = ymm14[2,2,2,3]
6126 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm7, %zmm13, %zmm7
6127 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm15, %zmm1
6128 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm1 = zmm7 ^ (zmm9 & (zmm1 ^ zmm7))
6129 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm6, %zmm31, %zmm6
6130 ; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm6 = zmm6 ^ (zmm5 & (zmm6 ^ zmm1))
6131 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm4, %zmm12, %zmm1
6132 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm4 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
6133 ; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm1 = zmm1 ^ (zmm4 & (zmm1 ^ zmm2))
6134 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm3, %zmm11, %zmm2
6135 ; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm2 = zmm2 ^ (zmm4 & (zmm2 ^ zmm6))
6136 ; AVX512DQ-NEXT: vpermq {{.*#+}} zmm0 = zmm0[0,0,0,1,4,4,4,5]
6137 ; AVX512DQ-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Folded Reload
6138 ; AVX512DQ-NEXT: # zmm3 = mem[0,0,0,1,4,4,4,5]
6139 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm4 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
6140 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm3 = zmm0 ^ (zmm4 & (zmm3 ^ zmm0))
6141 ; AVX512DQ-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
6142 ; AVX512DQ-NEXT: # zmm0 = mem[0,0,0,1,4,4,4,5]
6143 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm9 & (zmm0 ^ zmm3))
6144 ; AVX512DQ-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Folded Reload
6145 ; AVX512DQ-NEXT: # zmm3 = mem[0,0,0,1,4,4,4,5]
6146 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm5 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
6147 ; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm3 = zmm3 ^ (zmm5 & (zmm3 ^ zmm0))
6148 ; AVX512DQ-NEXT: vpermq {{.*#+}} zmm0 = zmm30[0,0,0,1,4,4,4,5]
6149 ; AVX512DQ-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Folded Reload
6150 ; AVX512DQ-NEXT: # zmm6 = mem[0,0,0,1,4,4,4,5]
6151 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm6 = zmm0 ^ (zmm4 & (zmm6 ^ zmm0))
6152 ; AVX512DQ-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
6153 ; AVX512DQ-NEXT: # zmm0 = mem[0,0,0,1,4,4,4,5]
6154 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm9 & (zmm0 ^ zmm6))
6155 ; AVX512DQ-NEXT: vpermq $64, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Folded Reload
6156 ; AVX512DQ-NEXT: # zmm6 = mem[0,0,0,1,4,4,4,5]
6157 ; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm6 = zmm6 ^ (zmm5 & (zmm6 ^ zmm0))
6158 ; AVX512DQ-NEXT: vpermq $234, (%rsp), %zmm0 # 64-byte Folded Reload
6159 ; AVX512DQ-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
6160 ; AVX512DQ-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Folded Reload
6161 ; AVX512DQ-NEXT: # zmm5 = mem[2,2,2,3,6,6,6,7]
6162 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm5 = zmm0 ^ (zmm9 & (zmm5 ^ zmm0))
6163 ; AVX512DQ-NEXT: vpermq {{.*#+}} zmm0 = zmm26[2,2,2,3,6,6,6,7]
6164 ; AVX512DQ-NEXT: vpermq {{.*#+}} zmm7 = zmm24[2,2,2,3,6,6,6,7]
6165 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm7 = zmm0 ^ (zmm9 & (zmm7 ^ zmm0))
6166 ; AVX512DQ-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
6167 ; AVX512DQ-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
6168 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm4 & (zmm0 ^ zmm5))
6169 ; AVX512DQ-NEXT: vpermq {{.*#+}} zmm5 = zmm23[2,2,2,3,6,6,6,7]
6170 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm5 = zmm5 ^ (zmm4 & (zmm5 ^ zmm7))
6171 ; AVX512DQ-NEXT: vpermq {{.*#+}} zmm4 = zmm28[2,2,2,3,6,6,6,7]
6172 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm7 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
6173 ; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm4 = zmm4 ^ (zmm7 & (zmm4 ^ zmm0))
6174 ; AVX512DQ-NEXT: vpermq {{.*#+}} zmm0 = zmm25[2,2,2,3,6,6,6,7]
6175 ; AVX512DQ-NEXT: vpternlogd {{.*#+}} zmm0 = zmm0 ^ (zmm7 & (zmm0 ^ zmm5))
6176 ; AVX512DQ-NEXT: movq {{[0-9]+}}(%rsp), %rax
6177 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 128(%rax)
6178 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 320(%rax)
6179 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, (%rax)
6180 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 192(%rax)
6181 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 256(%rax)
6182 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 64(%rax)
6183 ; AVX512DQ-NEXT: addq $456, %rsp # imm = 0x1C8
6184 ; AVX512DQ-NEXT: vzeroupper
6185 ; AVX512DQ-NEXT: retq
;
; AVX512DQ-FCP-LABEL: store_i8_stride6_vf64:
; AVX512DQ-FCP: # %bb.0:
; AVX512DQ-FCP-NEXT: subq $424, %rsp # imm = 0x1A8
; AVX512DQ-FCP-NEXT: vmovdqa 32(%rsi), %ymm4
; AVX512DQ-FCP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512DQ-FCP-NEXT: vpbroadcastq {{.*#+}} ymm0 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512DQ-FCP-NEXT: vpshufb %ymm0, %ymm4, %ymm1
; AVX512DQ-FCP-NEXT: vmovdqa 32(%rdi), %ymm3
; AVX512DQ-FCP-NEXT: vmovdqu %ymm3, (%rsp) # 32-byte Spill
; AVX512DQ-FCP-NEXT: vpshufb %ymm0, %ymm3, %ymm2
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm2 = ymm2[0],ymm1[0],ymm2[1],ymm1[1],ymm2[2],ymm1[2],ymm2[3],ymm1[3],ymm2[4],ymm1[4],ymm2[5],ymm1[5],ymm2[6],ymm1[6],ymm2[7],ymm1[7],ymm2[16],ymm1[16],ymm2[17],ymm1[17],ymm2[18],ymm1[18],ymm2[19],ymm1[19],ymm2[20],ymm1[20],ymm2[21],ymm1[21],ymm2[22],ymm1[22],ymm2[23],ymm1[23]
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} ymm3 = ymm3[8],ymm4[8],ymm3[9],ymm4[9],ymm3[10],ymm4[10],ymm3[11],ymm4[11],ymm3[12],ymm4[12],ymm3[13],ymm4[13],ymm3[14],ymm4[14],ymm3[15],ymm4[15],ymm3[24],ymm4[24],ymm3[25],ymm4[25],ymm3[26],ymm4[26],ymm3[27],ymm4[27],ymm3[28],ymm4[28],ymm3[29],ymm4[29],ymm3[30],ymm4[30],ymm3[31],ymm4[31]
; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm1 = [24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31,24,25,22,23,28,29,26,27,30,31,30,31,30,31,30,31]
; AVX512DQ-FCP-NEXT: # ymm1 = mem[0,1,0,1]
; AVX512DQ-FCP-NEXT: vpshufb %ymm1, %ymm3, %ymm3
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa 32(%rcx), %ymm5
; AVX512DQ-FCP-NEXT: vpbroadcastq {{.*#+}} ymm10 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX512DQ-FCP-NEXT: vpshufb %ymm10, %ymm5, %ymm2
; AVX512DQ-FCP-NEXT: vmovdqa 32(%rdx), %ymm4
; AVX512DQ-FCP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512DQ-FCP-NEXT: vpshufb %ymm10, %ymm4, %ymm3
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm3[0],ymm2[0],ymm3[1],ymm2[1],ymm3[2],ymm2[2],ymm3[3],ymm2[3],ymm3[4],ymm2[4],ymm3[5],ymm2[5],ymm3[6],ymm2[6],ymm3[7],ymm2[7],ymm3[16],ymm2[16],ymm3[17],ymm2[17],ymm3[18],ymm2[18],ymm3[19],ymm2[19],ymm3[20],ymm2[20],ymm3[21],ymm2[21],ymm3[22],ymm2[22],ymm3[23],ymm2[23]
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} ymm4 = ymm4[8],ymm5[8],ymm4[9],ymm5[9],ymm4[10],ymm5[10],ymm4[11],ymm5[11],ymm4[12],ymm5[12],ymm4[13],ymm5[13],ymm4[14],ymm5[14],ymm4[15],ymm5[15],ymm4[24],ymm5[24],ymm4[25],ymm5[25],ymm4[26],ymm5[26],ymm4[27],ymm5[27],ymm4[28],ymm5[28],ymm4[29],ymm5[29],ymm4[30],ymm5[30],ymm4[31],ymm5[31]
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm5, %ymm22
; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31,26,27,24,25,22,23,28,29,30,31,30,31,30,31,30,31]
; AVX512DQ-FCP-NEXT: # ymm2 = mem[0,1,0,1]
; AVX512DQ-FCP-NEXT: vpshufb %ymm2, %ymm4, %ymm4
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa 32(%r8), %ymm6
; AVX512DQ-FCP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0]
; AVX512DQ-FCP-NEXT: # ymm3 = mem[0,1,0,1]
; AVX512DQ-FCP-NEXT: vpshufb %ymm3, %ymm6, %ymm5
; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0]
; AVX512DQ-FCP-NEXT: # ymm4 = mem[0,1,0,1]
; AVX512DQ-FCP-NEXT: vpshufb %ymm4, %ymm6, %ymm6
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm5, %zmm6, %zmm5
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa 32(%r9), %ymm7
; AVX512DQ-FCP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15,0,10,0,13,0,12,0,11,0,14,0,0,0,0,0,15]
; AVX512DQ-FCP-NEXT: # ymm5 = mem[0,1,0,1]
; AVX512DQ-FCP-NEXT: vpshufb %ymm5, %ymm7, %ymm6
; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0,0,6,0,5,0,8,0,7,0,0,0,9,0,0,0,0]
; AVX512DQ-FCP-NEXT: # ymm9 = mem[0,1,0,1]
; AVX512DQ-FCP-NEXT: vpshufb %ymm9, %ymm7, %ymm8
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm8, %zmm6
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa (%rsi), %ymm7
; AVX512DQ-FCP-NEXT: vpshufb %ymm0, %ymm7, %ymm6
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm8
; AVX512DQ-FCP-NEXT: vpshufb %ymm0, %ymm8, %ymm0
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm0[0],ymm6[0],ymm0[1],ymm6[1],ymm0[2],ymm6[2],ymm0[3],ymm6[3],ymm0[4],ymm6[4],ymm0[5],ymm6[5],ymm0[6],ymm6[6],ymm0[7],ymm6[7],ymm0[16],ymm6[16],ymm0[17],ymm6[17],ymm0[18],ymm6[18],ymm0[19],ymm6[19],ymm0[20],ymm6[20],ymm0[21],ymm6[21],ymm0[22],ymm6[22],ymm0[23],ymm6[23]
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} ymm6 = ymm8[8],ymm7[8],ymm8[9],ymm7[9],ymm8[10],ymm7[10],ymm8[11],ymm7[11],ymm8[12],ymm7[12],ymm8[13],ymm7[13],ymm8[14],ymm7[14],ymm8[15],ymm7[15],ymm8[24],ymm7[24],ymm8[25],ymm7[25],ymm8[26],ymm7[26],ymm8[27],ymm7[27],ymm8[28],ymm7[28],ymm8[29],ymm7[29],ymm8[30],ymm7[30],ymm8[31],ymm7[31]
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm8, %ymm18
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm7, %ymm17
; AVX512DQ-FCP-NEXT: vpshufb %ymm1, %ymm6, %ymm1
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa (%rcx), %ymm15
; AVX512DQ-FCP-NEXT: vpshufb %ymm10, %ymm15, %ymm0
; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %ymm11
; AVX512DQ-FCP-NEXT: vpshufb %ymm10, %ymm11, %ymm1
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm11[8],ymm15[8],ymm11[9],ymm15[9],ymm11[10],ymm15[10],ymm11[11],ymm15[11],ymm11[12],ymm15[12],ymm11[13],ymm15[13],ymm11[14],ymm15[14],ymm11[15],ymm15[15],ymm11[24],ymm15[24],ymm11[25],ymm15[25],ymm11[26],ymm15[26],ymm11[27],ymm15[27],ymm11[28],ymm15[28],ymm11[29],ymm15[29],ymm11[30],ymm15[30],ymm11[31],ymm15[31]
; AVX512DQ-FCP-NEXT: vpshufb %ymm2, %ymm1, %ymm1
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
; AVX512DQ-FCP-NEXT: vmovdqa (%r8), %ymm2
; AVX512DQ-FCP-NEXT: vpshufb %ymm3, %ymm2, %ymm0
; AVX512DQ-FCP-NEXT: vpshufb %ymm4, %ymm2, %ymm1
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm2, %ymm19
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm23
; AVX512DQ-FCP-NEXT: vmovdqa (%r9), %ymm2
; AVX512DQ-FCP-NEXT: vpshufb %ymm5, %ymm2, %ymm0
; AVX512DQ-FCP-NEXT: vpshufb %ymm9, %ymm2, %ymm1
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm2, %ymm20
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm24
; AVX512DQ-FCP-NEXT: vmovdqa 32(%rsi), %xmm0
; AVX512DQ-FCP-NEXT: vpbroadcastq {{.*#+}} xmm5 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512DQ-FCP-NEXT: vpshufb %xmm5, %xmm0, %xmm1
; AVX512DQ-FCP-NEXT: vmovdqa 32(%rdi), %xmm12
; AVX512DQ-FCP-NEXT: vpshufb %xmm5, %xmm12, %xmm2
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm12[0],xmm0[0],xmm12[1],xmm0[1],xmm12[2],xmm0[2],xmm12[3],xmm0[3],xmm12[4],xmm0[4],xmm12[5],xmm0[5],xmm12[6],xmm0[6],xmm12[7],xmm0[7]
; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm0, %xmm16
; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm8 = [0,1,6,7,4,5,2,3,8,9,10,11,12,13,10,11]
; AVX512DQ-FCP-NEXT: vpshufb %xmm8, %xmm2, %xmm2
; AVX512DQ-FCP-NEXT: vinserti32x4 $2, %xmm1, %zmm2, %zmm26
; AVX512DQ-FCP-NEXT: vmovdqa 32(%r8), %xmm13
; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm9 = [6,u,5,u,8,u,7,u,9,u,9,u,9,u,9,u]
; AVX512DQ-FCP-NEXT: vpshufb %xmm9, %xmm13, %xmm1
; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm2 = [2,u,1,u,0,u,3,u,4,u,4,u,4,u,4,u]
; AVX512DQ-FCP-NEXT: vpshufb %xmm2, %xmm13, %xmm3
; AVX512DQ-FCP-NEXT: vinserti32x4 $2, %xmm1, %zmm3, %zmm29
; AVX512DQ-FCP-NEXT: vmovdqa 32(%r9), %xmm14
; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm7 = [u,6,u,5,u,8,u,7,u,9,u,9,u,9,u,9]
; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm14, %xmm1
; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm3 = [u,2,u,1,u,0,u,3,u,4,u,4,u,4,u,4]
; AVX512DQ-FCP-NEXT: vpshufb %xmm3, %xmm14, %xmm4
; AVX512DQ-FCP-NEXT: vinserti32x4 $2, %xmm1, %zmm4, %zmm21
; AVX512DQ-FCP-NEXT: vmovdqa (%rsi), %xmm6
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %xmm4
; AVX512DQ-FCP-NEXT: vpshufb %xmm5, %xmm6, %xmm0
; AVX512DQ-FCP-NEXT: vpshufb %xmm5, %xmm4, %xmm5
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm5[8],xmm0[8],xmm5[9],xmm0[9],xmm5[10],xmm0[10],xmm5[11],xmm0[11],xmm5[12],xmm0[12],xmm5[13],xmm0[13],xmm5[14],xmm0[14],xmm5[15],xmm0[15]
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xmm6[3],xmm4[4],xmm6[4],xmm4[5],xmm6[5],xmm4[6],xmm6[6],xmm4[7],xmm6[7]
; AVX512DQ-FCP-NEXT: vpshufb %xmm8, %xmm5, %xmm5
; AVX512DQ-FCP-NEXT: vinserti32x4 $2, %xmm0, %zmm5, %zmm25
; AVX512DQ-FCP-NEXT: vmovdqa (%r8), %xmm1
; AVX512DQ-FCP-NEXT: vpshufb %xmm9, %xmm1, %xmm5
; AVX512DQ-FCP-NEXT: vpshufb %xmm2, %xmm1, %xmm2
; AVX512DQ-FCP-NEXT: vinserti32x4 $2, %xmm5, %zmm2, %zmm27
; AVX512DQ-FCP-NEXT: vmovdqa (%r9), %xmm8
; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm8, %xmm5
; AVX512DQ-FCP-NEXT: vpshufb %xmm3, %xmm8, %xmm3
; AVX512DQ-FCP-NEXT: vinserti32x4 $2, %xmm5, %zmm3, %zmm28
; AVX512DQ-FCP-NEXT: vmovdqa 32(%rcx), %xmm5
; AVX512DQ-FCP-NEXT: vpshufb %xmm10, %xmm5, %xmm7
; AVX512DQ-FCP-NEXT: vmovdqa 32(%rdx), %xmm3
; AVX512DQ-FCP-NEXT: vpshufb %xmm10, %xmm3, %xmm9
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3],xmm3[4],xmm5[4],xmm3[5],xmm5[5],xmm3[6],xmm5[6],xmm3[7],xmm5[7]
; AVX512DQ-FCP-NEXT: vprold $16, %xmm9, %xmm9
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm7, %zmm9, %zmm30
; AVX512DQ-FCP-NEXT: vmovdqa (%rcx), %xmm7
; AVX512DQ-FCP-NEXT: vmovdqa (%rdx), %xmm9
; AVX512DQ-FCP-NEXT: vpshufb %xmm10, %xmm7, %xmm0
; AVX512DQ-FCP-NEXT: vpshufb %xmm10, %xmm9, %xmm10
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm10[0],xmm0[0],xmm10[1],xmm0[1],xmm10[2],xmm0[2],xmm10[3],xmm0[3],xmm10[4],xmm0[4],xmm10[5],xmm0[5],xmm10[6],xmm0[6],xmm10[7],xmm0[7]
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
; AVX512DQ-FCP-NEXT: vprold $16, %xmm10, %xmm10
; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm10, %zmm31
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm11[0],ymm15[0],ymm11[1],ymm15[1],ymm11[2],ymm15[2],ymm11[3],ymm15[3],ymm11[4],ymm15[4],ymm11[5],ymm15[5],ymm11[6],ymm15[6],ymm11[7],ymm15[7],ymm11[16],ymm15[16],ymm11[17],ymm15[17],ymm11[18],ymm15[18],ymm11[19],ymm15[19],ymm11[20],ymm15[20],ymm11[21],ymm15[21],ymm11[22],ymm15[22],ymm11[23],ymm15[23]
; AVX512DQ-FCP-NEXT: vprold $16, %ymm0, %ymm0
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm9[8],xmm7[8],xmm9[9],xmm7[9],xmm9[10],xmm7[10],xmm9[11],xmm7[11],xmm9[12],xmm7[12],xmm9[13],xmm7[13],xmm9[14],xmm7[14],xmm9[15],xmm7[15]
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm18, %ymm2
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm17, %ymm9
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm9 = ymm2[0],ymm9[0],ymm2[1],ymm9[1],ymm2[2],ymm9[2],ymm2[3],ymm9[3],ymm2[4],ymm9[4],ymm2[5],ymm9[5],ymm2[6],ymm9[6],ymm2[7],ymm9[7],ymm2[16],ymm9[16],ymm2[17],ymm9[17],ymm2[18],ymm9[18],ymm2[19],ymm9[19],ymm2[20],ymm9[20],ymm2[21],ymm9[21],ymm2[22],ymm9[22],ymm2[23],ymm9[23]
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm4[8],xmm6[8],xmm4[9],xmm6[9],xmm4[10],xmm6[10],xmm4[11],xmm6[11],xmm4[12],xmm6[12],xmm4[13],xmm6[13],xmm4[14],xmm6[14],xmm4[15],xmm6[15]
; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27,16,17,22,23,20,21,18,19,24,25,26,27,28,29,26,27]
; AVX512DQ-FCP-NEXT: # ymm2 = mem[0,1,0,1]
; AVX512DQ-FCP-NEXT: vpshufb %ymm2, %ymm9, %ymm9
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm2, %ymm18
; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm2 = [8,9,6,7,12,13,10,11,14,15,14,15,14,15,14,15]
; AVX512DQ-FCP-NEXT: vpshufb %xmm2, %xmm4, %xmm15
; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm2, %xmm17
; AVX512DQ-FCP-NEXT: vpmovsxbq {{.*#+}} zmm4 = [0,0,0,1,10,10,10,11]
; AVX512DQ-FCP-NEXT: vpermt2q %zmm9, %zmm4, %zmm15
; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm6 = [10,11,8,9,6,7,12,13,14,15,14,15,14,15,14,15]
; AVX512DQ-FCP-NEXT: vpshufb %xmm6, %xmm7, %xmm7
; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm4, %zmm7
; AVX512DQ-FCP-NEXT: vmovdqa64 {{.*#+}} zmm9 = [65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535]
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm15 = zmm7 ^ (zmm9 & (zmm15 ^ zmm7))
; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
; AVX512DQ-FCP-NEXT: # ymm2 = mem[0,1,0,1]
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm19, %ymm0
; AVX512DQ-FCP-NEXT: vpshufb %ymm2, %ymm0, %ymm0
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm2, %ymm19
; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm11 = [10,u,13,u,12,u,11,u,14,u,13,u,14,u,15,u]
; AVX512DQ-FCP-NEXT: vpshufb %xmm11, %xmm1, %xmm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm0, %zmm4, %zmm1
; AVX512DQ-FCP-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535]
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm1 = zmm1 ^ (zmm10 & (zmm1 ^ zmm15))
; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
; AVX512DQ-FCP-NEXT: # ymm15 = mem[0,1,0,1]
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm20, %ymm0
; AVX512DQ-FCP-NEXT: vpshufb %ymm15, %ymm0, %ymm2
; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm7 = [u,10,u,13,u,12,u,11,u,14,u,13,u,14,u,15]
; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm8, %xmm0
; AVX512DQ-FCP-NEXT: vpermt2q %zmm2, %zmm4, %zmm0
; AVX512DQ-FCP-NEXT: vmovdqa64 {{.*#+}} zmm2 = [255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255]
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm0 = zmm0 ^ (zmm2 & (zmm0 ^ zmm1))
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm22, %ymm8
; AVX512DQ-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX512DQ-FCP-NEXT: vpunpcklbw {{.*#+}} ymm1 = ymm1[0],ymm8[0],ymm1[1],ymm8[1],ymm1[2],ymm8[2],ymm1[3],ymm8[3],ymm1[4],ymm8[4],ymm1[5],ymm8[5],ymm1[6],ymm8[6],ymm1[7],ymm8[7],ymm1[16],ymm8[16],ymm1[17],ymm8[17],ymm1[18],ymm8[18],ymm1[19],ymm8[19],ymm1[20],ymm8[20],ymm1[21],ymm8[21],ymm1[22],ymm8[22],ymm1[23],ymm8[23]
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm5[8],xmm3[9],xmm5[9],xmm3[10],xmm5[10],xmm3[11],xmm5[11],xmm3[12],xmm5[12],xmm3[13],xmm5[13],xmm3[14],xmm5[14],xmm3[15],xmm5[15]
; AVX512DQ-FCP-NEXT: vpshufb %xmm6, %xmm3, %xmm3
; AVX512DQ-FCP-NEXT: vmovdqu (%rsp), %ymm5 # 32-byte Reload
; AVX512DQ-FCP-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm5 # 32-byte Folded Reload
; AVX512DQ-FCP-NEXT: # ymm5 = ymm5[0],mem[0],ymm5[1],mem[1],ymm5[2],mem[2],ymm5[3],mem[3],ymm5[4],mem[4],ymm5[5],mem[5],ymm5[6],mem[6],ymm5[7],mem[7],ymm5[16],mem[16],ymm5[17],mem[17],ymm5[18],mem[18],ymm5[19],mem[19],ymm5[20],mem[20],ymm5[21],mem[21],ymm5[22],mem[22],ymm5[23],mem[23]
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm18, %ymm6
; AVX512DQ-FCP-NEXT: vpshufb %ymm6, %ymm5, %ymm5
; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm16, %xmm6
; AVX512DQ-FCP-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm12[8],xmm6[8],xmm12[9],xmm6[9],xmm12[10],xmm6[10],xmm12[11],xmm6[11],xmm12[12],xmm6[12],xmm12[13],xmm6[13],xmm12[14],xmm6[14],xmm12[15],xmm6[15]
; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm17, %xmm8
; AVX512DQ-FCP-NEXT: vpshufb %xmm8, %xmm6, %xmm6
; AVX512DQ-FCP-NEXT: vpermt2q %zmm5, %zmm4, %zmm6
; AVX512DQ-FCP-NEXT: vprold $16, %ymm1, %ymm1
; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm6 = zmm3 ^ (zmm9 & (zmm6 ^ zmm3))
; AVX512DQ-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm19, %ymm3
; AVX512DQ-FCP-NEXT: vpshufb %ymm3, %ymm1, %ymm1
; AVX512DQ-FCP-NEXT: vpshufb %xmm11, %xmm13, %xmm3
; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm3
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm3 = zmm3 ^ (zmm10 & (zmm3 ^ zmm6))
; AVX512DQ-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
; AVX512DQ-FCP-NEXT: vpshufb %ymm15, %ymm1, %ymm1
; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm14, %xmm5
; AVX512DQ-FCP-NEXT: vpermt2q %zmm1, %zmm4, %zmm5
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm5 = zmm5 ^ (zmm2 & (zmm5 ^ zmm3))
; AVX512DQ-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm5, 256(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 64(%rax)
; AVX512DQ-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
; AVX512DQ-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm1 = mem[2,2,2,3,6,6,6,7]
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm1 = zmm0 ^ (zmm9 & (zmm1 ^ zmm0))
; AVX512DQ-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
; AVX512DQ-FCP-NEXT: vmovdqa64 {{.*#+}} zmm2 = [65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0,65535,65535,0]
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm2 & (zmm0 ^ zmm1))
; AVX512DQ-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm1 = mem[2,2,2,3,6,6,6,7]
; AVX512DQ-FCP-NEXT: vmovdqa64 {{.*#+}} zmm3 = [255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0]
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm1 = zmm1 ^ (zmm3 & (zmm1 ^ zmm0))
; AVX512DQ-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm0 = mem[2,2,2,3,6,6,6,7]
; AVX512DQ-FCP-NEXT: vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Folded Reload
; AVX512DQ-FCP-NEXT: # zmm4 = mem[2,2,2,3,6,6,6,7]
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm4 = zmm0 ^ (zmm9 & (zmm4 ^ zmm0))
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm0 = zmm23[2,2,2,3,6,6,6,7]
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm2 & (zmm0 ^ zmm4))
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm4 = zmm24[2,2,2,3,6,6,6,7]
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm4 = zmm4 ^ (zmm3 & (zmm4 ^ zmm0))
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm0 = zmm30[0,0,0,1,4,4,4,5]
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm3 = zmm26[0,0,0,1,4,4,4,5]
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm3 = zmm0 ^ (zmm2 & (zmm3 ^ zmm0))
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm0 = zmm31[0,0,0,1,4,4,4,5]
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm5 = zmm25[0,0,0,1,4,4,4,5]
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm5 = zmm0 ^ (zmm2 & (zmm5 ^ zmm0))
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm0 = zmm29[0,0,0,1,4,4,4,5]
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm9 & (zmm0 ^ zmm3))
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm2 = zmm27[0,0,0,1,4,4,4,5]
; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm2 = zmm2 ^ (zmm9 & (zmm2 ^ zmm5))
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm3 = zmm21[0,0,0,1,4,4,4,5]
; AVX512DQ-FCP-NEXT: vmovdqa64 {{.*#+}} zmm5 = [255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255,255,0,255,255,255,255]
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm3 = zmm3 ^ (zmm5 & (zmm3 ^ zmm0))
; AVX512DQ-FCP-NEXT: vpermq {{.*#+}} zmm0 = zmm28[0,0,0,1,4,4,4,5]
; AVX512DQ-FCP-NEXT: vpternlogd {{.*#+}} zmm0 = zmm0 ^ (zmm5 & (zmm0 ^ zmm2))
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, (%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm3, 192(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm4, 128(%rax)
; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, 320(%rax)
; AVX512DQ-FCP-NEXT: addq $424, %rsp # imm = 0x1A8
; AVX512DQ-FCP-NEXT: vzeroupper
; AVX512DQ-FCP-NEXT: retq
;
; AVX512BW-LABEL: store_i8_stride6_vf64:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
; AVX512BW-NEXT: vmovdqa64 (%r8), %zmm14
; AVX512BW-NEXT: vmovdqa64 (%r9), %zmm12
; AVX512BW-NEXT: vmovdqa64 (%rsi), %ymm16
; AVX512BW-NEXT: vmovdqa64 (%rdi), %ymm17
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm17[0],ymm16[0],ymm17[1],ymm16[1],ymm17[2],ymm16[2],ymm17[3],ymm16[3],ymm17[4],ymm16[4],ymm17[5],ymm16[5],ymm17[6],ymm16[6],ymm17[7],ymm16[7],ymm17[16],ymm16[16],ymm17[17],ymm16[17],ymm17[18],ymm16[18],ymm17[19],ymm16[19],ymm17[20],ymm16[20],ymm17[21],ymm16[21],ymm17[22],ymm16[22],ymm17[23],ymm16[23]
; AVX512BW-NEXT: vmovdqa (%rsi), %xmm1
; AVX512BW-NEXT: vmovdqa 32(%rsi), %xmm9
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm3
; AVX512BW-NEXT: vmovdqa 32(%rdi), %xmm11
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm7 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
; AVX512BW-NEXT: vpermw %zmm0, %zmm7, %zmm0
; AVX512BW-NEXT: vmovdqa (%rcx), %xmm2
; AVX512BW-NEXT: vmovdqa 32(%rcx), %xmm8
; AVX512BW-NEXT: vmovdqa (%rdx), %xmm4
; AVX512BW-NEXT: vmovdqa 32(%rdx), %xmm10
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm2[8],xmm4[9],xmm2[9],xmm4[10],xmm2[10],xmm4[11],xmm2[11],xmm4[12],xmm2[12],xmm4[13],xmm2[13],xmm4[14],xmm2[14],xmm4[15],xmm2[15]
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm20 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7]
; AVX512BW-NEXT: vpermw %ymm5, %ymm20, %ymm5
; AVX512BW-NEXT: vmovdqa64 (%rcx), %ymm18
; AVX512BW-NEXT: vmovdqa64 (%rdx), %ymm19
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm19[0],ymm18[0],ymm19[1],ymm18[1],ymm19[2],ymm18[2],ymm19[3],ymm18[3],ymm19[4],ymm18[4],ymm19[5],ymm18[5],ymm19[6],ymm18[6],ymm19[7],ymm18[7],ymm19[16],ymm18[16],ymm19[17],ymm18[17],ymm19[18],ymm18[18],ymm19[19],ymm18[19],ymm19[20],ymm18[20],ymm19[21],ymm18[21],ymm19[22],ymm18[22],ymm19[23],ymm18[23]
; AVX512BW-NEXT: vprold $16, %ymm6, %ymm6
; AVX512BW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm6, %zmm5, %zmm5
; AVX512BW-NEXT: movl $613566756, %r10d # imm = 0x24924924
; AVX512BW-NEXT: kmovd %r10d, %k1
; AVX512BW-NEXT: vmovdqu16 %zmm5, %zmm0 {%k1}
; AVX512BW-NEXT: vmovdqa (%r8), %xmm5
; AVX512BW-NEXT: vmovdqa 32(%r8), %xmm13
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm5[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm23 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7]
; AVX512BW-NEXT: vpermw %ymm6, %ymm23, %ymm6
; AVX512BW-NEXT: vmovdqa64 (%r8), %ymm21
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} ymm24 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
; AVX512BW-NEXT: # ymm24 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpshufb %ymm24, %ymm21, %ymm15
; AVX512BW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm15, %zmm6, %zmm6
; AVX512BW-NEXT: movl $1227133513, %r10d # imm = 0x49249249
; AVX512BW-NEXT: kmovd %r10d, %k2
; AVX512BW-NEXT: vmovdqu16 %zmm6, %zmm0 {%k2}
; AVX512BW-NEXT: vmovdqa (%r9), %xmm6
; AVX512BW-NEXT: vmovdqa 32(%r9), %xmm15
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm22 = xmm6[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512BW-NEXT: vpermw %ymm22, %ymm23, %ymm25
; AVX512BW-NEXT: vmovdqa64 (%r9), %ymm22
; AVX512BW-NEXT: vbroadcasti32x4 {{.*#+}} ymm26 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
; AVX512BW-NEXT: # ymm26 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-NEXT: vpshufb %ymm26, %ymm22, %ymm27
; AVX512BW-NEXT: vpermq {{.*#+}} ymm27 = ymm27[2,2,2,3]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm27, %zmm25, %zmm25
; AVX512BW-NEXT: movabsq $2342443691899625602, %r10 # imm = 0x2082082082082082
; AVX512BW-NEXT: kmovq %r10, %k3
; AVX512BW-NEXT: vmovdqu8 %zmm25, %zmm0 {%k3}
; AVX512BW-NEXT: vmovdqa64 32(%rsi), %ymm25
; AVX512BW-NEXT: vmovdqa64 32(%rdi), %ymm27
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm28 = ymm27[0],ymm25[0],ymm27[1],ymm25[1],ymm27[2],ymm25[2],ymm27[3],ymm25[3],ymm27[4],ymm25[4],ymm27[5],ymm25[5],ymm27[6],ymm25[6],ymm27[7],ymm25[7],ymm27[16],ymm25[16],ymm27[17],ymm25[17],ymm27[18],ymm25[18],ymm27[19],ymm25[19],ymm27[20],ymm25[20],ymm27[21],ymm25[21],ymm27[22],ymm25[22],ymm27[23],ymm25[23]
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm29 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm28, %zmm29, %zmm28
; AVX512BW-NEXT: vpermw %zmm28, %zmm7, %zmm7
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm28 = xmm10[8],xmm8[8],xmm10[9],xmm8[9],xmm10[10],xmm8[10],xmm10[11],xmm8[11],xmm10[12],xmm8[12],xmm10[13],xmm8[13],xmm10[14],xmm8[14],xmm10[15],xmm8[15]
; AVX512BW-NEXT: vpermw %ymm28, %ymm20, %ymm20
; AVX512BW-NEXT: vmovdqa64 32(%rcx), %ymm28
; AVX512BW-NEXT: vmovdqa64 32(%rdx), %ymm29
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm30 = ymm29[0],ymm28[0],ymm29[1],ymm28[1],ymm29[2],ymm28[2],ymm29[3],ymm28[3],ymm29[4],ymm28[4],ymm29[5],ymm28[5],ymm29[6],ymm28[6],ymm29[7],ymm28[7],ymm29[16],ymm28[16],ymm29[17],ymm28[17],ymm29[18],ymm28[18],ymm29[19],ymm28[19],ymm29[20],ymm28[20],ymm29[21],ymm28[21],ymm29[22],ymm28[22],ymm29[23],ymm28[23]
; AVX512BW-NEXT: vprold $16, %ymm30, %ymm30
; AVX512BW-NEXT: vpermq {{.*#+}} ymm30 = ymm30[2,2,2,3]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm30, %zmm20, %zmm20
; AVX512BW-NEXT: vmovdqu16 %zmm20, %zmm7 {%k1}
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm13[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512BW-NEXT: vpermw %ymm20, %ymm23, %ymm20
; AVX512BW-NEXT: vmovdqa64 32(%r8), %ymm30
; AVX512BW-NEXT: vpshufb %ymm24, %ymm30, %ymm24
; AVX512BW-NEXT: vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm24, %zmm20, %zmm20
; AVX512BW-NEXT: vmovdqu16 %zmm20, %zmm7 {%k2}
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm15[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512BW-NEXT: vpermw %ymm20, %ymm23, %ymm20
; AVX512BW-NEXT: vmovdqa64 32(%r9), %ymm24
; AVX512BW-NEXT: vpshufb %ymm26, %ymm24, %ymm23
; AVX512BW-NEXT: vpermq {{.*#+}} ymm23 = ymm23[2,2,2,3]
; AVX512BW-NEXT: vinserti64x4 $1, %ymm23, %zmm20, %zmm20
; AVX512BW-NEXT: vmovdqu8 %zmm20, %zmm7 {%k3}
; AVX512BW-NEXT: vpbroadcastq {{.*#+}} ymm26 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512BW-NEXT: vpshufb %ymm26, %ymm25, %ymm20
; AVX512BW-NEXT: vpshufb %ymm26, %ymm27, %ymm23
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm20 = ymm23[0],ymm20[0],ymm23[1],ymm20[1],ymm23[2],ymm20[2],ymm23[3],ymm20[3],ymm23[4],ymm20[4],ymm23[5],ymm20[5],ymm23[6],ymm20[6],ymm23[7],ymm20[7],ymm23[16],ymm20[16],ymm23[17],ymm20[17],ymm23[18],ymm20[18],ymm23[19],ymm20[19],ymm23[20],ymm20[20],ymm23[21],ymm20[21],ymm23[22],ymm20[22],ymm23[23],ymm20[23]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} ymm23 = ymm27[8],ymm25[8],ymm27[9],ymm25[9],ymm27[10],ymm25[10],ymm27[11],ymm25[11],ymm27[12],ymm25[12],ymm27[13],ymm25[13],ymm27[14],ymm25[14],ymm27[15],ymm25[15],ymm27[24],ymm25[24],ymm27[25],ymm25[25],ymm27[26],ymm25[26],ymm27[27],ymm25[27],ymm27[28],ymm25[28],ymm27[29],ymm25[29],ymm27[30],ymm25[30],ymm27[31],ymm25[31]
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm25 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
; AVX512BW-NEXT: vpermw %ymm23, %ymm25, %ymm23
; AVX512BW-NEXT: vinserti64x4 $1, %ymm23, %zmm20, %zmm27
; AVX512BW-NEXT: vpbroadcastq {{.*#+}} ymm23 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX512BW-NEXT: vpshufb %ymm23, %ymm28, %ymm20
; AVX512BW-NEXT: vpshufb %ymm23, %ymm29, %ymm31
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm20 = ymm31[0],ymm20[0],ymm31[1],ymm20[1],ymm31[2],ymm20[2],ymm31[3],ymm20[3],ymm31[4],ymm20[4],ymm31[5],ymm20[5],ymm31[6],ymm20[6],ymm31[7],ymm20[7],ymm31[16],ymm20[16],ymm31[17],ymm20[17],ymm31[18],ymm20[18],ymm31[19],ymm20[19],ymm31[20],ymm20[20],ymm31[21],ymm20[21],ymm31[22],ymm20[22],ymm31[23],ymm20[23]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} ymm28 = ymm29[8],ymm28[8],ymm29[9],ymm28[9],ymm29[10],ymm28[10],ymm29[11],ymm28[11],ymm29[12],ymm28[12],ymm29[13],ymm28[13],ymm29[14],ymm28[14],ymm29[15],ymm28[15],ymm29[24],ymm28[24],ymm29[25],ymm28[25],ymm29[26],ymm28[26],ymm29[27],ymm28[27],ymm29[28],ymm28[28],ymm29[29],ymm28[29],ymm29[30],ymm28[30],ymm29[31],ymm28[31]
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm29 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
; AVX512BW-NEXT: vpermw %ymm28, %ymm29, %ymm28
; AVX512BW-NEXT: vinserti64x4 $1, %ymm28, %zmm20, %zmm20
; AVX512BW-NEXT: vmovdqu16 %zmm27, %zmm20 {%k1}
; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm27 = zmm30[0,1,2,3],zmm14[4,5,6,7]
; AVX512BW-NEXT: vpshufb {{.*#+}} zmm27 = zmm27[6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,42,u,45,u,44,u,43,u,46,u,u,u,u,u,47,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63,u]
; AVX512BW-NEXT: vpermq {{.*#+}} zmm27 = zmm27[2,2,2,3,6,6,6,7]
; AVX512BW-NEXT: movl $-1840700270, %ecx # imm = 0x92492492
; AVX512BW-NEXT: kmovd %ecx, %k2
; AVX512BW-NEXT: vmovdqu16 %zmm27, %zmm20 {%k2}
; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm24 = zmm24[0,1,2,3],zmm12[4,5,6,7]
; AVX512BW-NEXT: vpshufb {{.*#+}} zmm24 = zmm24[u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,42,u,45,u,44,u,43,u,46,u,u,u,u,u,47,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63]
; AVX512BW-NEXT: vpermq {{.*#+}} zmm24 = zmm24[2,2,2,3,6,6,6,7]
; AVX512BW-NEXT: movabsq $-9076969306111049208, %rcx # imm = 0x8208208208208208
; AVX512BW-NEXT: kmovq %rcx, %k3
; AVX512BW-NEXT: vmovdqu8 %zmm24, %zmm20 {%k3}
; AVX512BW-NEXT: vpshufb %ymm26, %ymm16, %ymm24
; AVX512BW-NEXT: vpshufb %ymm26, %ymm17, %ymm26
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm24 = ymm26[0],ymm24[0],ymm26[1],ymm24[1],ymm26[2],ymm24[2],ymm26[3],ymm24[3],ymm26[4],ymm24[4],ymm26[5],ymm24[5],ymm26[6],ymm24[6],ymm26[7],ymm24[7],ymm26[16],ymm24[16],ymm26[17],ymm24[17],ymm26[18],ymm24[18],ymm26[19],ymm24[19],ymm26[20],ymm24[20],ymm26[21],ymm24[21],ymm26[22],ymm24[22],ymm26[23],ymm24[23]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} ymm16 = ymm17[8],ymm16[8],ymm17[9],ymm16[9],ymm17[10],ymm16[10],ymm17[11],ymm16[11],ymm17[12],ymm16[12],ymm17[13],ymm16[13],ymm17[14],ymm16[14],ymm17[15],ymm16[15],ymm17[24],ymm16[24],ymm17[25],ymm16[25],ymm17[26],ymm16[26],ymm17[27],ymm16[27],ymm17[28],ymm16[28],ymm17[29],ymm16[29],ymm17[30],ymm16[30],ymm17[31],ymm16[31]
; AVX512BW-NEXT: vpermw %ymm16, %ymm25, %ymm16
; AVX512BW-NEXT: vinserti64x4 $1, %ymm16, %zmm24, %zmm17
; AVX512BW-NEXT: vpshufb %ymm23, %ymm18, %ymm16
; AVX512BW-NEXT: vpshufb %ymm23, %ymm19, %ymm24
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} ymm16 = ymm24[0],ymm16[0],ymm24[1],ymm16[1],ymm24[2],ymm16[2],ymm24[3],ymm16[3],ymm24[4],ymm16[4],ymm24[5],ymm16[5],ymm24[6],ymm16[6],ymm24[7],ymm16[7],ymm24[16],ymm16[16],ymm24[17],ymm16[17],ymm24[18],ymm16[18],ymm24[19],ymm16[19],ymm24[20],ymm16[20],ymm24[21],ymm16[21],ymm24[22],ymm16[22],ymm24[23],ymm16[23]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm16 = ymm16[2,2,2,3]
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} ymm18 = ymm19[8],ymm18[8],ymm19[9],ymm18[9],ymm19[10],ymm18[10],ymm19[11],ymm18[11],ymm19[12],ymm18[12],ymm19[13],ymm18[13],ymm19[14],ymm18[14],ymm19[15],ymm18[15],ymm19[24],ymm18[24],ymm19[25],ymm18[25],ymm19[26],ymm18[26],ymm19[27],ymm18[27],ymm19[28],ymm18[28],ymm19[29],ymm18[29],ymm19[30],ymm18[30],ymm19[31],ymm18[31]
; AVX512BW-NEXT: vpermw %ymm18, %ymm29, %ymm18
; AVX512BW-NEXT: vinserti64x4 $1, %ymm18, %zmm16, %zmm16
; AVX512BW-NEXT: vmovdqu16 %zmm17, %zmm16 {%k1}
; AVX512BW-NEXT: vinserti64x4 $1, %ymm21, %zmm14, %zmm14
; AVX512BW-NEXT: vpshufb {{.*#+}} zmm14 = zmm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63,u]
; AVX512BW-NEXT: vpermq {{.*#+}} zmm14 = zmm14[2,2,2,3,6,6,6,7]
; AVX512BW-NEXT: vmovdqu16 %zmm14, %zmm16 {%k2}
; AVX512BW-NEXT: vinserti64x4 $1, %ymm22, %zmm12, %zmm12
; AVX512BW-NEXT: vpshufb {{.*#+}} zmm12 = zmm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63]
; AVX512BW-NEXT: vpermq {{.*#+}} zmm12 = zmm12[2,2,2,3,6,6,6,7]
; AVX512BW-NEXT: vmovdqu8 %zmm12, %zmm16 {%k3}
; AVX512BW-NEXT: vpbroadcastq {{.*#+}} xmm12 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512BW-NEXT: vpshufb %xmm12, %xmm9, %xmm14
; AVX512BW-NEXT: vpshufb %xmm12, %xmm11, %xmm17
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm17[8],xmm14[8],xmm17[9],xmm14[9],xmm17[10],xmm14[10],xmm17[11],xmm14[11],xmm17[12],xmm14[12],xmm17[13],xmm14[13],xmm17[14],xmm14[14],xmm17[15],xmm14[15]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3],xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm11 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
; AVX512BW-NEXT: vpermw %ymm9, %ymm11, %ymm9
; AVX512BW-NEXT: vinserti64x4 $1, %ymm14, %zmm9, %zmm9
; AVX512BW-NEXT: vpshufb %xmm23, %xmm8, %xmm14
; AVX512BW-NEXT: vpshufb %xmm23, %xmm10, %xmm17
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm17[0],xmm14[0],xmm17[1],xmm14[1],xmm17[2],xmm14[2],xmm17[3],xmm14[3],xmm17[4],xmm14[4],xmm17[5],xmm14[5],xmm17[6],xmm14[6],xmm17[7],xmm14[7]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3],xmm10[4],xmm8[4],xmm10[5],xmm8[5],xmm10[6],xmm8[6],xmm10[7],xmm8[7]
; AVX512BW-NEXT: vprold $16, %xmm8, %xmm8
; AVX512BW-NEXT: vinserti64x4 $1, %ymm14, %zmm8, %zmm8
; AVX512BW-NEXT: vpermq {{.*#+}} zmm8 = zmm8[0,0,0,1,4,4,4,5]
; AVX512BW-NEXT: vmovdqu16 %zmm8, %zmm9 {%k2}
; AVX512BW-NEXT: vpmovzxbw {{.*#+}} xmm8 = xmm13[0],zero,xmm13[1],zero,xmm13[2],zero,xmm13[3],zero,xmm13[4],zero,xmm13[5],zero,xmm13[6],zero,xmm13[7],zero
; AVX512BW-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,1,2,3]
; AVX512BW-NEXT: vpmovzxbw {{.*#+}} xmm10 = xmm10[0],zero,xmm10[1],zero,xmm10[2],zero,xmm10[3],zero,xmm10[4],zero,xmm10[5],zero,xmm10[6],zero,xmm10[7],zero
; AVX512BW-NEXT: vinserti32x4 $2, %xmm10, %zmm8, %zmm8
; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm10 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4,22,21,16,23,22,21,16,23,22,21,16,23,17,17,17,17]
; AVX512BW-NEXT: vpermw %zmm8, %zmm10, %zmm9 {%k1}
; AVX512BW-NEXT: vpshufd {{.*#+}} xmm8 = xmm15[2,1,2,3]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm8[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm15[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512BW-NEXT: vinserti32x4 $2, %xmm8, %zmm13, %zmm8
; AVX512BW-NEXT: vpermw %zmm8, %zmm10, %zmm8
; AVX512BW-NEXT: movabsq $585610922974906400, %rcx # imm = 0x820820820820820
; AVX512BW-NEXT: kmovq %rcx, %k3
; AVX512BW-NEXT: vmovdqu8 %zmm8, %zmm9 {%k3}
; AVX512BW-NEXT: vpshufb %xmm12, %xmm1, %xmm8
; AVX512BW-NEXT: vpshufb %xmm12, %xmm3, %xmm12
; AVX512BW-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm12[8],xmm8[8],xmm12[9],xmm8[9],xmm12[10],xmm8[10],xmm12[11],xmm8[11],xmm12[12],xmm8[12],xmm12[13],xmm8[13],xmm12[14],xmm8[14],xmm12[15],xmm8[15]
; AVX512BW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,0,1]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
; AVX512BW-NEXT: vpermw %ymm1, %ymm11, %ymm1
; AVX512BW-NEXT: vinserti64x4 $1, %ymm8, %zmm1, %zmm1
; AVX512BW-NEXT: vpshufb %xmm23, %xmm2, %xmm3
; AVX512BW-NEXT: vpshufb %xmm23, %xmm4, %xmm8
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3],xmm8[4],xmm3[4],xmm8[5],xmm3[5],xmm8[6],xmm3[6],xmm8[7],xmm3[7]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3],xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
; AVX512BW-NEXT: vprold $16, %xmm2, %xmm2
; AVX512BW-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
; AVX512BW-NEXT: vpermq {{.*#+}} zmm2 = zmm2[0,0,0,1,4,4,4,5]
; AVX512BW-NEXT: vmovdqu16 %zmm2, %zmm1 {%k2}
; AVX512BW-NEXT: vpmovzxbw {{.*#+}} xmm2 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero,xmm5[4],zero,xmm5[5],zero,xmm5[6],zero,xmm5[7],zero
; AVX512BW-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[2,1,2,3]
; AVX512BW-NEXT: vpmovzxbw {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
; AVX512BW-NEXT: vinserti32x4 $2, %xmm3, %zmm2, %zmm2
; AVX512BW-NEXT: vpermw %zmm2, %zmm10, %zmm1 {%k1}
; AVX512BW-NEXT: vpshufd {{.*#+}} xmm2 = xmm6[2,1,2,3]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512BW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm6[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512BW-NEXT: vinserti32x4 $2, %xmm2, %zmm3, %zmm2
; AVX512BW-NEXT: vpermw %zmm2, %zmm10, %zmm2
; AVX512BW-NEXT: vmovdqu8 %zmm2, %zmm1 {%k3}
; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm9, 192(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm16, 128(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm20, 320(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm7, 256(%rax)
; AVX512BW-NEXT: vmovdqa64 %zmm0, 64(%rax)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
;
; AVX512BW-FCP-LABEL: store_i8_stride6_vf64:
; AVX512BW-FCP: # %bb.0:
; AVX512BW-FCP-NEXT: vmovdqa64 (%r8), %zmm8
; AVX512BW-FCP-NEXT: vmovdqa64 (%r9), %zmm10
; AVX512BW-FCP-NEXT: vmovdqa 32(%rsi), %ymm3
; AVX512BW-FCP-NEXT: vpbroadcastq {{.*#+}} ymm5 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
; AVX512BW-FCP-NEXT: vpshufb %ymm5, %ymm3, %ymm0
; AVX512BW-FCP-NEXT: vmovdqa 32(%rdi), %ymm4
; AVX512BW-FCP-NEXT: vpshufb %ymm5, %ymm4, %ymm1
; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11],ymm4[12],ymm3[12],ymm4[13],ymm3[13],ymm4[14],ymm3[14],ymm4[15],ymm3[15],ymm4[24],ymm3[24],ymm4[25],ymm3[25],ymm4[26],ymm3[26],ymm4[27],ymm3[27],ymm4[28],ymm3[28],ymm4[29],ymm3[29],ymm4[30],ymm3[30],ymm4[31],ymm3[31]
; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm9 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
; AVX512BW-FCP-NEXT: vpermw %ymm1, %ymm9, %ymm1
; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
; AVX512BW-FCP-NEXT: vmovdqa 32(%rcx), %ymm6
; AVX512BW-FCP-NEXT: vpbroadcastq {{.*#+}} ymm17 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
; AVX512BW-FCP-NEXT: vpshufb %ymm17, %ymm6, %ymm0
; AVX512BW-FCP-NEXT: vmovdqa 32(%rdx), %ymm7
; AVX512BW-FCP-NEXT: vpshufb %ymm17, %ymm7, %ymm2
; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm2[0],ymm0[0],ymm2[1],ymm0[1],ymm2[2],ymm0[2],ymm2[3],ymm0[3],ymm2[4],ymm0[4],ymm2[5],ymm0[5],ymm2[6],ymm0[6],ymm2[7],ymm0[7],ymm2[16],ymm0[16],ymm2[17],ymm0[17],ymm2[18],ymm0[18],ymm2[19],ymm0[19],ymm2[20],ymm0[20],ymm2[21],ymm0[21],ymm2[22],ymm0[22],ymm2[23],ymm0[23]
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm2 = ymm7[8],ymm6[8],ymm7[9],ymm6[9],ymm7[10],ymm6[10],ymm7[11],ymm6[11],ymm7[12],ymm6[12],ymm7[13],ymm6[13],ymm7[14],ymm6[14],ymm7[15],ymm6[15],ymm7[24],ymm6[24],ymm7[25],ymm6[25],ymm7[26],ymm6[26],ymm7[27],ymm6[27],ymm7[28],ymm6[28],ymm7[29],ymm6[29],ymm7[30],ymm6[30],ymm7[31],ymm6[31]
; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm12 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
; AVX512BW-FCP-NEXT: vpermw %ymm2, %ymm12, %ymm2
; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512BW-FCP-NEXT: movl $613566756, %eax # imm = 0x24924924
; AVX512BW-FCP-NEXT: kmovd %eax, %k1
; AVX512BW-FCP-NEXT: vmovdqu16 %zmm1, %zmm0 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa 32(%r8), %ymm2
; AVX512BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm2[0,1,2,3],zmm8[4,5,6,7]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} zmm1 = zmm1[6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,42,u,45,u,44,u,43,u,46,u,u,u,u,u,47,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63,u]
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} zmm1 = zmm1[2,2,2,3,6,6,6,7]
; AVX512BW-FCP-NEXT: movl $-1840700270, %eax # imm = 0x92492492
; AVX512BW-FCP-NEXT: kmovd %eax, %k2
; AVX512BW-FCP-NEXT: vmovdqu16 %zmm1, %zmm0 {%k2}
; AVX512BW-FCP-NEXT: vmovdqa 32(%r9), %ymm1
; AVX512BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm11 = zmm1[0,1,2,3],zmm10[4,5,6,7]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} zmm11 = zmm11[u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,42,u,45,u,44,u,43,u,46,u,u,u,u,u,47,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63]
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} zmm11 = zmm11[2,2,2,3,6,6,6,7]
; AVX512BW-FCP-NEXT: movabsq $-9076969306111049208, %rax # imm = 0x8208208208208208
; AVX512BW-FCP-NEXT: kmovq %rax, %k3
; AVX512BW-FCP-NEXT: vmovdqu8 %zmm11, %zmm0 {%k3}
; AVX512BW-FCP-NEXT: vmovdqa (%rsi), %ymm11
; AVX512BW-FCP-NEXT: vpshufb %ymm5, %ymm11, %ymm13
; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %ymm14
; AVX512BW-FCP-NEXT: vpshufb %ymm5, %ymm14, %ymm5
; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm5[0],ymm13[0],ymm5[1],ymm13[1],ymm5[2],ymm13[2],ymm5[3],ymm13[3],ymm5[4],ymm13[4],ymm5[5],ymm13[5],ymm5[6],ymm13[6],ymm5[7],ymm13[7],ymm5[16],ymm13[16],ymm5[17],ymm13[17],ymm5[18],ymm13[18],ymm5[19],ymm13[19],ymm5[20],ymm13[20],ymm5[21],ymm13[21],ymm5[22],ymm13[22],ymm5[23],ymm13[23]
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm13 = ymm14[8],ymm11[8],ymm14[9],ymm11[9],ymm14[10],ymm11[10],ymm14[11],ymm11[11],ymm14[12],ymm11[12],ymm14[13],ymm11[13],ymm14[14],ymm11[14],ymm14[15],ymm11[15],ymm14[24],ymm11[24],ymm14[25],ymm11[25],ymm14[26],ymm11[26],ymm14[27],ymm11[27],ymm14[28],ymm11[28],ymm14[29],ymm11[29],ymm14[30],ymm11[30],ymm14[31],ymm11[31]
; AVX512BW-FCP-NEXT: vpermw %ymm13, %ymm9, %ymm9
; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm5, %zmm9
; AVX512BW-FCP-NEXT: vmovdqa64 (%rcx), %ymm19
; AVX512BW-FCP-NEXT: vpshufb %ymm17, %ymm19, %ymm5
; AVX512BW-FCP-NEXT: vmovdqa64 (%rdx), %ymm20
; AVX512BW-FCP-NEXT: vpshufb %ymm17, %ymm20, %ymm13
; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm13[0],ymm5[0],ymm13[1],ymm5[1],ymm13[2],ymm5[2],ymm13[3],ymm5[3],ymm13[4],ymm5[4],ymm13[5],ymm5[5],ymm13[6],ymm5[6],ymm13[7],ymm5[7],ymm13[16],ymm5[16],ymm13[17],ymm5[17],ymm13[18],ymm5[18],ymm13[19],ymm5[19],ymm13[20],ymm5[20],ymm13[21],ymm5[21],ymm13[22],ymm5[22],ymm13[23],ymm5[23]
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm13 = ymm20[8],ymm19[8],ymm20[9],ymm19[9],ymm20[10],ymm19[10],ymm20[11],ymm19[11],ymm20[12],ymm19[12],ymm20[13],ymm19[13],ymm20[14],ymm19[14],ymm20[15],ymm19[15],ymm20[24],ymm19[24],ymm20[25],ymm19[25],ymm20[26],ymm19[26],ymm20[27],ymm19[27],ymm20[28],ymm19[28],ymm20[29],ymm19[29],ymm20[30],ymm19[30],ymm20[31],ymm19[31]
; AVX512BW-FCP-NEXT: vpermw %ymm13, %ymm12, %ymm12
; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm12, %zmm5, %zmm5
; AVX512BW-FCP-NEXT: vmovdqu16 %zmm9, %zmm5 {%k1}
; AVX512BW-FCP-NEXT: vmovdqa (%r8), %ymm9
; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm8, %zmm8
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} zmm8 = zmm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63,u]
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} zmm8 = zmm8[2,2,2,3,6,6,6,7]
; AVX512BW-FCP-NEXT: vmovdqu16 %zmm8, %zmm5 {%k2}
; AVX512BW-FCP-NEXT: vmovdqa (%r9), %ymm8
; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm8, %zmm10, %zmm10
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} zmm10 = zmm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63]
; AVX512BW-FCP-NEXT: vpermq {{.*#+}} zmm10 = zmm10[2,2,2,3,6,6,6,7]
; AVX512BW-FCP-NEXT: vmovdqu8 %zmm10, %zmm5 {%k3}
; AVX512BW-FCP-NEXT: vmovdqa64 (%rcx), %xmm22
; AVX512BW-FCP-NEXT: vmovdqa 32(%rcx), %xmm13
; AVX512BW-FCP-NEXT: vpshufb %xmm17, %xmm13, %xmm10
; AVX512BW-FCP-NEXT: vmovdqa64 (%rdx), %xmm23
; AVX512BW-FCP-NEXT: vmovdqa 32(%rdx), %xmm15
; AVX512BW-FCP-NEXT: vpshufb %xmm17, %xmm15, %xmm12
; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm12[0],xmm10[0],xmm12[1],xmm10[1],xmm12[2],xmm10[2],xmm12[3],xmm10[3],xmm12[4],xmm10[4],xmm12[5],xmm10[5],xmm12[6],xmm10[6],xmm12[7],xmm10[7]
6727 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,0,1]
6728 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm15[0],xmm13[0],xmm15[1],xmm13[1],xmm15[2],xmm13[2],xmm15[3],xmm13[3],xmm15[4],xmm13[4],xmm15[5],xmm13[5],xmm15[6],xmm13[6],xmm15[7],xmm13[7]
6729 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm24 = [1,0,3,2,1,0,3,2,1,0,3,2,5,4,7,6]
6730 ; AVX512BW-FCP-NEXT: vpermw %ymm12, %ymm24, %ymm12
6731 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm10, %zmm12, %zmm12
6732 ; AVX512BW-FCP-NEXT: vmovdqa64 32(%rsi), %xmm18
6733 ; AVX512BW-FCP-NEXT: vpbroadcastq {{.*#+}} xmm25 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
6734 ; AVX512BW-FCP-NEXT: vpshufb %xmm25, %xmm18, %xmm10
6735 ; AVX512BW-FCP-NEXT: vmovdqa64 32(%rdi), %xmm21
6736 ; AVX512BW-FCP-NEXT: vpshufb %xmm25, %xmm21, %xmm16
6737 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm16[8],xmm10[8],xmm16[9],xmm10[9],xmm16[10],xmm10[10],xmm16[11],xmm10[11],xmm16[12],xmm10[12],xmm16[13],xmm10[13],xmm16[14],xmm10[14],xmm16[15],xmm10[15]
6738 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,0,1]
6739 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm16 = xmm21[0],xmm18[0],xmm21[1],xmm18[1],xmm21[2],xmm18[2],xmm21[3],xmm18[3],xmm21[4],xmm18[4],xmm21[5],xmm18[5],xmm21[6],xmm18[6],xmm21[7],xmm18[7]
6740 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm26 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
6741 ; AVX512BW-FCP-NEXT: vpermw %ymm16, %ymm26, %ymm16
6742 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm10, %zmm16, %zmm10
6743 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm12, %zmm10 {%k2}
6744 ; AVX512BW-FCP-NEXT: vmovdqa 32(%r8), %xmm12
6745 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm27 = [8,9,0,0,0,5,6,7]
6746 ; AVX512BW-FCP-NEXT: vpshufb %xmm27, %xmm12, %xmm16
6747 ; AVX512BW-FCP-NEXT: vpmovzxbw {{.*#+}} xmm28 = xmm12[0],zero,xmm12[1],zero,xmm12[2],zero,xmm12[3],zero,xmm12[4],zero,xmm12[5],zero,xmm12[6],zero,xmm12[7],zero
6748 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm16, %zmm28, %zmm16
6749 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm28 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4,22,21,16,23,22,21,16,23,22,21,16,23,17,17,17,17]
6750 ; AVX512BW-FCP-NEXT: vpermw %zmm16, %zmm28, %zmm10 {%k1}
6751 ; AVX512BW-FCP-NEXT: vmovdqa64 32(%r9), %xmm16
6752 ; AVX512BW-FCP-NEXT: vmovdqa64 {{.*#+}} xmm29 = [u,8,u,9,u,10,u,11,u,4,u,5,u,6,u,7]
6753 ; AVX512BW-FCP-NEXT: vpshufb %xmm29, %xmm16, %xmm30
6754 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm31 = xmm16[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
6755 ; AVX512BW-FCP-NEXT: vinserti32x4 $2, %xmm30, %zmm31, %zmm30
6756 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rsi), %xmm31
6757 ; AVX512BW-FCP-NEXT: vpermw %zmm30, %zmm28, %zmm30
6758 ; AVX512BW-FCP-NEXT: movabsq $585610922974906400, %rax # imm = 0x820820820820820
6759 ; AVX512BW-FCP-NEXT: kmovq %rax, %k3
6760 ; AVX512BW-FCP-NEXT: vmovdqu8 %zmm30, %zmm10 {%k3}
6761 ; AVX512BW-FCP-NEXT: vpshufb %xmm17, %xmm22, %xmm30
6762 ; AVX512BW-FCP-NEXT: vpshufb %xmm17, %xmm23, %xmm17
6763 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm17 = xmm17[0],xmm30[0],xmm17[1],xmm30[1],xmm17[2],xmm30[2],xmm17[3],xmm30[3],xmm17[4],xmm30[4],xmm17[5],xmm30[5],xmm17[6],xmm30[6],xmm17[7],xmm30[7]
6764 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm30 = xmm23[0],xmm22[0],xmm23[1],xmm22[1],xmm23[2],xmm22[2],xmm23[3],xmm22[3],xmm23[4],xmm22[4],xmm23[5],xmm22[5],xmm23[6],xmm22[6],xmm23[7],xmm22[7]
6765 ; AVX512BW-FCP-NEXT: vpermw %ymm30, %ymm24, %ymm24
6766 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %xmm30
6767 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm17 = ymm17[0,0,0,1]
6768 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm17, %zmm24, %zmm24
6769 ; AVX512BW-FCP-NEXT: vpshufb %xmm25, %xmm31, %xmm17
6770 ; AVX512BW-FCP-NEXT: vpshufb %xmm25, %xmm30, %xmm25
6771 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm17 = xmm25[8],xmm17[8],xmm25[9],xmm17[9],xmm25[10],xmm17[10],xmm25[11],xmm17[11],xmm25[12],xmm17[12],xmm25[13],xmm17[13],xmm25[14],xmm17[14],xmm25[15],xmm17[15]
6772 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm25 = xmm30[0],xmm31[0],xmm30[1],xmm31[1],xmm30[2],xmm31[2],xmm30[3],xmm31[3],xmm30[4],xmm31[4],xmm30[5],xmm31[5],xmm30[6],xmm31[6],xmm30[7],xmm31[7]
6773 ; AVX512BW-FCP-NEXT: vpermw %ymm25, %ymm26, %ymm25
6774 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm17 = ymm17[0,0,0,1]
6775 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm17, %zmm25, %zmm17
6776 ; AVX512BW-FCP-NEXT: vmovdqa64 (%r8), %xmm25
6777 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm24, %zmm17 {%k2}
6778 ; AVX512BW-FCP-NEXT: vpshufb %xmm27, %xmm25, %xmm24
6779 ; AVX512BW-FCP-NEXT: vpmovzxbw {{.*#+}} xmm26 = xmm25[0],zero,xmm25[1],zero,xmm25[2],zero,xmm25[3],zero,xmm25[4],zero,xmm25[5],zero,xmm25[6],zero,xmm25[7],zero
6780 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm24, %zmm26, %zmm26
6781 ; AVX512BW-FCP-NEXT: vmovdqa64 (%r9), %xmm24
6782 ; AVX512BW-FCP-NEXT: vpermw %zmm26, %zmm28, %zmm17 {%k1}
6783 ; AVX512BW-FCP-NEXT: vpshufb %xmm29, %xmm24, %xmm26
6784 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm27 = xmm24[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
6785 ; AVX512BW-FCP-NEXT: vinserti32x4 $2, %xmm26, %zmm27, %zmm26
6786 ; AVX512BW-FCP-NEXT: vpermw %zmm26, %zmm28, %zmm26
6787 ; AVX512BW-FCP-NEXT: vmovdqu8 %zmm26, %zmm17 {%k3}
6788 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm19 = ymm20[0],ymm19[0],ymm20[1],ymm19[1],ymm20[2],ymm19[2],ymm20[3],ymm19[3],ymm20[4],ymm19[4],ymm20[5],ymm19[5],ymm20[6],ymm19[6],ymm20[7],ymm19[7],ymm20[16],ymm19[16],ymm20[17],ymm19[17],ymm20[18],ymm19[18],ymm20[19],ymm19[19],ymm20[20],ymm19[20],ymm20[21],ymm19[21],ymm20[22],ymm19[22],ymm20[23],ymm19[23]
6789 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm23[8],xmm22[8],xmm23[9],xmm22[9],xmm23[10],xmm22[10],xmm23[11],xmm22[11],xmm23[12],xmm22[12],xmm23[13],xmm22[13],xmm23[14],xmm22[14],xmm23[15],xmm22[15]
6790 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm19, %zmm20, %zmm19
6791 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm11 = ymm14[0],ymm11[0],ymm14[1],ymm11[1],ymm14[2],ymm11[2],ymm14[3],ymm11[3],ymm14[4],ymm11[4],ymm14[5],ymm11[5],ymm14[6],ymm11[6],ymm14[7],ymm11[7],ymm14[16],ymm11[16],ymm14[17],ymm11[17],ymm14[18],ymm11[18],ymm14[19],ymm11[19],ymm14[20],ymm11[20],ymm14[21],ymm11[21],ymm14[22],ymm11[22],ymm14[23],ymm11[23]
6792 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm30[8],xmm31[8],xmm30[9],xmm31[9],xmm30[10],xmm31[10],xmm30[11],xmm31[11],xmm30[12],xmm31[12],xmm30[13],xmm31[13],xmm30[14],xmm31[14],xmm30[15],xmm31[15]
6793 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm11, %zmm14, %zmm11
6794 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm14 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
6795 ; AVX512BW-FCP-NEXT: vpermw %zmm11, %zmm14, %zmm11
6796 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm20 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7,25,24,27,26,25,24,27,26,25,24,27,26,29,28,31,30]
6797 ; AVX512BW-FCP-NEXT: vpermw %zmm19, %zmm20, %zmm11 {%k1}
6798 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[4],ymm6[4],ymm7[5],ymm6[5],ymm7[6],ymm6[6],ymm7[7],ymm6[7],ymm7[16],ymm6[16],ymm7[17],ymm6[17],ymm7[18],ymm6[18],ymm7[19],ymm6[19],ymm7[20],ymm6[20],ymm7[21],ymm6[21],ymm7[22],ymm6[22],ymm7[23],ymm6[23]
6799 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm15[8],xmm13[8],xmm15[9],xmm13[9],xmm15[10],xmm13[10],xmm15[11],xmm13[11],xmm15[12],xmm13[12],xmm15[13],xmm13[13],xmm15[14],xmm13[14],xmm15[15],xmm13[15]
6800 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm7, %zmm6
6801 ; AVX512BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[16],ymm3[16],ymm4[17],ymm3[17],ymm4[18],ymm3[18],ymm4[19],ymm3[19],ymm4[20],ymm3[20],ymm4[21],ymm3[21],ymm4[22],ymm3[22],ymm4[23],ymm3[23]
6802 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm21[8],xmm18[8],xmm21[9],xmm18[9],xmm21[10],xmm18[10],xmm21[11],xmm18[11],xmm21[12],xmm18[12],xmm21[13],xmm18[13],xmm21[14],xmm18[14],xmm21[15],xmm18[15]
6803 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
6804 ; AVX512BW-FCP-NEXT: vpermw %zmm3, %zmm14, %zmm3
6805 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm25[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
6806 ; AVX512BW-FCP-NEXT: vpermw %zmm6, %zmm20, %zmm3 {%k1}
6807 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7]
6808 ; AVX512BW-FCP-NEXT: vpermw %ymm4, %ymm6, %ymm4
6809 ; AVX512BW-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
6810 ; AVX512BW-FCP-NEXT: # ymm7 = mem[0,1,0,1]
6811 ; AVX512BW-FCP-NEXT: vpshufb %ymm7, %ymm9, %ymm9
6812 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
6813 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm4, %zmm4
6814 ; AVX512BW-FCP-NEXT: movl $1227133513, %eax # imm = 0x49249249
6815 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
6816 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm4, %zmm11 {%k1}
6817 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm24[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
6818 ; AVX512BW-FCP-NEXT: vpermw %ymm4, %ymm6, %ymm4
6819 ; AVX512BW-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
6820 ; AVX512BW-FCP-NEXT: # ymm9 = mem[0,1,0,1]
6821 ; AVX512BW-FCP-NEXT: vpshufb %ymm9, %ymm8, %ymm8
6822 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
6823 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm8, %zmm4, %zmm4
6824 ; AVX512BW-FCP-NEXT: movabsq $2342443691899625602, %rax # imm = 0x2082082082082082
6825 ; AVX512BW-FCP-NEXT: kmovq %rax, %k2
6826 ; AVX512BW-FCP-NEXT: vmovdqu8 %zmm4, %zmm11 {%k2}
6827 ; AVX512BW-FCP-NEXT: vpshufb %ymm7, %ymm2, %ymm2
6828 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm12[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
6829 ; AVX512BW-FCP-NEXT: vpermw %ymm4, %ymm6, %ymm4
6830 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
6831 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm4, %zmm2
6832 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm2, %zmm3 {%k1}
6833 ; AVX512BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm16[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
6834 ; AVX512BW-FCP-NEXT: vpermw %ymm2, %ymm6, %ymm2
6835 ; AVX512BW-FCP-NEXT: vpshufb %ymm9, %ymm1, %ymm1
6836 ; AVX512BW-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
6837 ; AVX512BW-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
6838 ; AVX512BW-FCP-NEXT: vmovdqu8 %zmm1, %zmm3 {%k2}
6839 ; AVX512BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
6840 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, 256(%rax)
6841 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, 64(%rax)
6842 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm17, (%rax)
6843 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, 192(%rax)
6844 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, 128(%rax)
6845 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 320(%rax)
6846 ; AVX512BW-FCP-NEXT: vzeroupper
6847 ; AVX512BW-FCP-NEXT: retq
6848 ;
6849 ; AVX512DQ-BW-LABEL: store_i8_stride6_vf64:
6850 ; AVX512DQ-BW: # %bb.0:
6851 ; AVX512DQ-BW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6852 ; AVX512DQ-BW-NEXT: vmovdqa64 (%r8), %zmm14
6853 ; AVX512DQ-BW-NEXT: vmovdqa64 (%r9), %zmm12
6854 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rsi), %ymm16
6855 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %ymm17
6856 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm17[0],ymm16[0],ymm17[1],ymm16[1],ymm17[2],ymm16[2],ymm17[3],ymm16[3],ymm17[4],ymm16[4],ymm17[5],ymm16[5],ymm17[6],ymm16[6],ymm17[7],ymm16[7],ymm17[16],ymm16[16],ymm17[17],ymm16[17],ymm17[18],ymm16[18],ymm17[19],ymm16[19],ymm17[20],ymm16[20],ymm17[21],ymm16[21],ymm17[22],ymm16[22],ymm17[23],ymm16[23]
6857 ; AVX512DQ-BW-NEXT: vmovdqa (%rsi), %xmm1
6858 ; AVX512DQ-BW-NEXT: vmovdqa 32(%rsi), %xmm9
6859 ; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %xmm3
6860 ; AVX512DQ-BW-NEXT: vmovdqa 32(%rdi), %xmm11
6861 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
6862 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
6863 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm7 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
6864 ; AVX512DQ-BW-NEXT: vpermw %zmm0, %zmm7, %zmm0
6865 ; AVX512DQ-BW-NEXT: vmovdqa (%rcx), %xmm2
6866 ; AVX512DQ-BW-NEXT: vmovdqa 32(%rcx), %xmm8
6867 ; AVX512DQ-BW-NEXT: vmovdqa (%rdx), %xmm4
6868 ; AVX512DQ-BW-NEXT: vmovdqa 32(%rdx), %xmm10
6869 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm2[8],xmm4[9],xmm2[9],xmm4[10],xmm2[10],xmm4[11],xmm2[11],xmm4[12],xmm2[12],xmm4[13],xmm2[13],xmm4[14],xmm2[14],xmm4[15],xmm2[15]
6870 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm20 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7]
6871 ; AVX512DQ-BW-NEXT: vpermw %ymm5, %ymm20, %ymm5
6872 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rcx), %ymm18
6873 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdx), %ymm19
6874 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm19[0],ymm18[0],ymm19[1],ymm18[1],ymm19[2],ymm18[2],ymm19[3],ymm18[3],ymm19[4],ymm18[4],ymm19[5],ymm18[5],ymm19[6],ymm18[6],ymm19[7],ymm18[7],ymm19[16],ymm18[16],ymm19[17],ymm18[17],ymm19[18],ymm18[18],ymm19[19],ymm18[19],ymm19[20],ymm18[20],ymm19[21],ymm18[21],ymm19[22],ymm18[22],ymm19[23],ymm18[23]
6875 ; AVX512DQ-BW-NEXT: vprold $16, %ymm6, %ymm6
6876 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
6877 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm6, %zmm5, %zmm5
6878 ; AVX512DQ-BW-NEXT: movl $613566756, %r10d # imm = 0x24924924
6879 ; AVX512DQ-BW-NEXT: kmovd %r10d, %k1
6880 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm5, %zmm0 {%k1}
6881 ; AVX512DQ-BW-NEXT: vmovdqa (%r8), %xmm5
6882 ; AVX512DQ-BW-NEXT: vmovdqa 32(%r8), %xmm13
6883 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm5[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
6884 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm23 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7]
6885 ; AVX512DQ-BW-NEXT: vpermw %ymm6, %ymm23, %ymm6
6886 ; AVX512DQ-BW-NEXT: vmovdqa64 (%r8), %ymm21
6887 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} ymm24 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
6888 ; AVX512DQ-BW-NEXT: # ymm24 = mem[0,1,2,3,0,1,2,3]
6889 ; AVX512DQ-BW-NEXT: vpshufb %ymm24, %ymm21, %ymm15
6890 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
6891 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm15, %zmm6, %zmm6
6892 ; AVX512DQ-BW-NEXT: movl $1227133513, %r10d # imm = 0x49249249
6893 ; AVX512DQ-BW-NEXT: kmovd %r10d, %k2
6894 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm6, %zmm0 {%k2}
6895 ; AVX512DQ-BW-NEXT: vmovdqa (%r9), %xmm6
6896 ; AVX512DQ-BW-NEXT: vmovdqa 32(%r9), %xmm15
6897 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm22 = xmm6[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
6898 ; AVX512DQ-BW-NEXT: vpermw %ymm22, %ymm23, %ymm25
6899 ; AVX512DQ-BW-NEXT: vmovdqa64 (%r9), %ymm22
6900 ; AVX512DQ-BW-NEXT: vbroadcasti32x4 {{.*#+}} ymm26 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
6901 ; AVX512DQ-BW-NEXT: # ymm26 = mem[0,1,2,3,0,1,2,3]
6902 ; AVX512DQ-BW-NEXT: vpshufb %ymm26, %ymm22, %ymm27
6903 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm27 = ymm27[2,2,2,3]
6904 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm27, %zmm25, %zmm25
6905 ; AVX512DQ-BW-NEXT: movabsq $2342443691899625602, %r10 # imm = 0x2082082082082082
6906 ; AVX512DQ-BW-NEXT: kmovq %r10, %k3
6907 ; AVX512DQ-BW-NEXT: vmovdqu8 %zmm25, %zmm0 {%k3}
6908 ; AVX512DQ-BW-NEXT: vmovdqa64 32(%rsi), %ymm25
6909 ; AVX512DQ-BW-NEXT: vmovdqa64 32(%rdi), %ymm27
6910 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm28 = ymm27[0],ymm25[0],ymm27[1],ymm25[1],ymm27[2],ymm25[2],ymm27[3],ymm25[3],ymm27[4],ymm25[4],ymm27[5],ymm25[5],ymm27[6],ymm25[6],ymm27[7],ymm25[7],ymm27[16],ymm25[16],ymm27[17],ymm25[17],ymm27[18],ymm25[18],ymm27[19],ymm25[19],ymm27[20],ymm25[20],ymm27[21],ymm25[21],ymm27[22],ymm25[22],ymm27[23],ymm25[23]
6911 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm29 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
6912 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm28, %zmm29, %zmm28
6913 ; AVX512DQ-BW-NEXT: vpermw %zmm28, %zmm7, %zmm7
6914 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm28 = xmm10[8],xmm8[8],xmm10[9],xmm8[9],xmm10[10],xmm8[10],xmm10[11],xmm8[11],xmm10[12],xmm8[12],xmm10[13],xmm8[13],xmm10[14],xmm8[14],xmm10[15],xmm8[15]
6915 ; AVX512DQ-BW-NEXT: vpermw %ymm28, %ymm20, %ymm20
6916 ; AVX512DQ-BW-NEXT: vmovdqa64 32(%rcx), %ymm28
6917 ; AVX512DQ-BW-NEXT: vmovdqa64 32(%rdx), %ymm29
6918 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm30 = ymm29[0],ymm28[0],ymm29[1],ymm28[1],ymm29[2],ymm28[2],ymm29[3],ymm28[3],ymm29[4],ymm28[4],ymm29[5],ymm28[5],ymm29[6],ymm28[6],ymm29[7],ymm28[7],ymm29[16],ymm28[16],ymm29[17],ymm28[17],ymm29[18],ymm28[18],ymm29[19],ymm28[19],ymm29[20],ymm28[20],ymm29[21],ymm28[21],ymm29[22],ymm28[22],ymm29[23],ymm28[23]
6919 ; AVX512DQ-BW-NEXT: vprold $16, %ymm30, %ymm30
6920 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm30 = ymm30[2,2,2,3]
6921 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm30, %zmm20, %zmm20
6922 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm20, %zmm7 {%k1}
6923 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm13[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
6924 ; AVX512DQ-BW-NEXT: vpermw %ymm20, %ymm23, %ymm20
6925 ; AVX512DQ-BW-NEXT: vmovdqa64 32(%r8), %ymm30
6926 ; AVX512DQ-BW-NEXT: vpshufb %ymm24, %ymm30, %ymm24
6927 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
6928 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm24, %zmm20, %zmm20
6929 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm20, %zmm7 {%k2}
6930 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm15[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
6931 ; AVX512DQ-BW-NEXT: vpermw %ymm20, %ymm23, %ymm20
6932 ; AVX512DQ-BW-NEXT: vmovdqa64 32(%r9), %ymm24
6933 ; AVX512DQ-BW-NEXT: vpshufb %ymm26, %ymm24, %ymm23
6934 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm23 = ymm23[2,2,2,3]
6935 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm23, %zmm20, %zmm20
6936 ; AVX512DQ-BW-NEXT: vmovdqu8 %zmm20, %zmm7 {%k3}
6937 ; AVX512DQ-BW-NEXT: vpbroadcastq {{.*#+}} ymm26 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
6938 ; AVX512DQ-BW-NEXT: vpshufb %ymm26, %ymm25, %ymm20
6939 ; AVX512DQ-BW-NEXT: vpshufb %ymm26, %ymm27, %ymm23
6940 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm20 = ymm23[0],ymm20[0],ymm23[1],ymm20[1],ymm23[2],ymm20[2],ymm23[3],ymm20[3],ymm23[4],ymm20[4],ymm23[5],ymm20[5],ymm23[6],ymm20[6],ymm23[7],ymm20[7],ymm23[16],ymm20[16],ymm23[17],ymm20[17],ymm23[18],ymm20[18],ymm23[19],ymm20[19],ymm23[20],ymm20[20],ymm23[21],ymm20[21],ymm23[22],ymm20[22],ymm23[23],ymm20[23]
6941 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
6942 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} ymm23 = ymm27[8],ymm25[8],ymm27[9],ymm25[9],ymm27[10],ymm25[10],ymm27[11],ymm25[11],ymm27[12],ymm25[12],ymm27[13],ymm25[13],ymm27[14],ymm25[14],ymm27[15],ymm25[15],ymm27[24],ymm25[24],ymm27[25],ymm25[25],ymm27[26],ymm25[26],ymm27[27],ymm25[27],ymm27[28],ymm25[28],ymm27[29],ymm25[29],ymm27[30],ymm25[30],ymm27[31],ymm25[31]
6943 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm25 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
6944 ; AVX512DQ-BW-NEXT: vpermw %ymm23, %ymm25, %ymm23
6945 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm23, %zmm20, %zmm27
6946 ; AVX512DQ-BW-NEXT: vpbroadcastq {{.*#+}} ymm23 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
6947 ; AVX512DQ-BW-NEXT: vpshufb %ymm23, %ymm28, %ymm20
6948 ; AVX512DQ-BW-NEXT: vpshufb %ymm23, %ymm29, %ymm31
6949 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm20 = ymm31[0],ymm20[0],ymm31[1],ymm20[1],ymm31[2],ymm20[2],ymm31[3],ymm20[3],ymm31[4],ymm20[4],ymm31[5],ymm20[5],ymm31[6],ymm20[6],ymm31[7],ymm20[7],ymm31[16],ymm20[16],ymm31[17],ymm20[17],ymm31[18],ymm20[18],ymm31[19],ymm20[19],ymm31[20],ymm20[20],ymm31[21],ymm20[21],ymm31[22],ymm20[22],ymm31[23],ymm20[23]
6950 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
6951 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} ymm28 = ymm29[8],ymm28[8],ymm29[9],ymm28[9],ymm29[10],ymm28[10],ymm29[11],ymm28[11],ymm29[12],ymm28[12],ymm29[13],ymm28[13],ymm29[14],ymm28[14],ymm29[15],ymm28[15],ymm29[24],ymm28[24],ymm29[25],ymm28[25],ymm29[26],ymm28[26],ymm29[27],ymm28[27],ymm29[28],ymm28[28],ymm29[29],ymm28[29],ymm29[30],ymm28[30],ymm29[31],ymm28[31]
6952 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm29 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
6953 ; AVX512DQ-BW-NEXT: vpermw %ymm28, %ymm29, %ymm28
6954 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm28, %zmm20, %zmm20
6955 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm27, %zmm20 {%k1}
6956 ; AVX512DQ-BW-NEXT: vshufi64x2 {{.*#+}} zmm27 = zmm30[0,1,2,3],zmm14[4,5,6,7]
6957 ; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} zmm27 = zmm27[6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,42,u,45,u,44,u,43,u,46,u,u,u,u,u,47,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63,u]
6958 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} zmm27 = zmm27[2,2,2,3,6,6,6,7]
6959 ; AVX512DQ-BW-NEXT: movl $-1840700270, %ecx # imm = 0x92492492
6960 ; AVX512DQ-BW-NEXT: kmovd %ecx, %k2
6961 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm27, %zmm20 {%k2}
6962 ; AVX512DQ-BW-NEXT: vshufi64x2 {{.*#+}} zmm24 = zmm24[0,1,2,3],zmm12[4,5,6,7]
6963 ; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} zmm24 = zmm24[u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,42,u,45,u,44,u,43,u,46,u,u,u,u,u,47,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63]
6964 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} zmm24 = zmm24[2,2,2,3,6,6,6,7]
6965 ; AVX512DQ-BW-NEXT: movabsq $-9076969306111049208, %rcx # imm = 0x8208208208208208
6966 ; AVX512DQ-BW-NEXT: kmovq %rcx, %k3
6967 ; AVX512DQ-BW-NEXT: vmovdqu8 %zmm24, %zmm20 {%k3}
6968 ; AVX512DQ-BW-NEXT: vpshufb %ymm26, %ymm16, %ymm24
6969 ; AVX512DQ-BW-NEXT: vpshufb %ymm26, %ymm17, %ymm26
6970 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm24 = ymm26[0],ymm24[0],ymm26[1],ymm24[1],ymm26[2],ymm24[2],ymm26[3],ymm24[3],ymm26[4],ymm24[4],ymm26[5],ymm24[5],ymm26[6],ymm24[6],ymm26[7],ymm24[7],ymm26[16],ymm24[16],ymm26[17],ymm24[17],ymm26[18],ymm24[18],ymm26[19],ymm24[19],ymm26[20],ymm24[20],ymm26[21],ymm24[21],ymm26[22],ymm24[22],ymm26[23],ymm24[23]
6971 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
6972 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} ymm16 = ymm17[8],ymm16[8],ymm17[9],ymm16[9],ymm17[10],ymm16[10],ymm17[11],ymm16[11],ymm17[12],ymm16[12],ymm17[13],ymm16[13],ymm17[14],ymm16[14],ymm17[15],ymm16[15],ymm17[24],ymm16[24],ymm17[25],ymm16[25],ymm17[26],ymm16[26],ymm17[27],ymm16[27],ymm17[28],ymm16[28],ymm17[29],ymm16[29],ymm17[30],ymm16[30],ymm17[31],ymm16[31]
6973 ; AVX512DQ-BW-NEXT: vpermw %ymm16, %ymm25, %ymm16
6974 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm16, %zmm24, %zmm17
6975 ; AVX512DQ-BW-NEXT: vpshufb %ymm23, %ymm18, %ymm16
6976 ; AVX512DQ-BW-NEXT: vpshufb %ymm23, %ymm19, %ymm24
6977 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} ymm16 = ymm24[0],ymm16[0],ymm24[1],ymm16[1],ymm24[2],ymm16[2],ymm24[3],ymm16[3],ymm24[4],ymm16[4],ymm24[5],ymm16[5],ymm24[6],ymm16[6],ymm24[7],ymm16[7],ymm24[16],ymm16[16],ymm24[17],ymm16[17],ymm24[18],ymm16[18],ymm24[19],ymm16[19],ymm24[20],ymm16[20],ymm24[21],ymm16[21],ymm24[22],ymm16[22],ymm24[23],ymm16[23]
6978 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm16 = ymm16[2,2,2,3]
6979 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} ymm18 = ymm19[8],ymm18[8],ymm19[9],ymm18[9],ymm19[10],ymm18[10],ymm19[11],ymm18[11],ymm19[12],ymm18[12],ymm19[13],ymm18[13],ymm19[14],ymm18[14],ymm19[15],ymm18[15],ymm19[24],ymm18[24],ymm19[25],ymm18[25],ymm19[26],ymm18[26],ymm19[27],ymm18[27],ymm19[28],ymm18[28],ymm19[29],ymm18[29],ymm19[30],ymm18[30],ymm19[31],ymm18[31]
6980 ; AVX512DQ-BW-NEXT: vpermw %ymm18, %ymm29, %ymm18
6981 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm18, %zmm16, %zmm16
6982 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm17, %zmm16 {%k1}
6983 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm21, %zmm14, %zmm14
6984 ; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} zmm14 = zmm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63,u]
6985 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} zmm14 = zmm14[2,2,2,3,6,6,6,7]
6986 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm14, %zmm16 {%k2}
6987 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm22, %zmm12, %zmm12
6988 ; AVX512DQ-BW-NEXT: vpshufb {{.*#+}} zmm12 = zmm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63]
6989 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} zmm12 = zmm12[2,2,2,3,6,6,6,7]
6990 ; AVX512DQ-BW-NEXT: vmovdqu8 %zmm12, %zmm16 {%k3}
6991 ; AVX512DQ-BW-NEXT: vpbroadcastq {{.*#+}} xmm12 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
6992 ; AVX512DQ-BW-NEXT: vpshufb %xmm12, %xmm9, %xmm14
6993 ; AVX512DQ-BW-NEXT: vpshufb %xmm12, %xmm11, %xmm17
6994 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm17[8],xmm14[8],xmm17[9],xmm14[9],xmm17[10],xmm14[10],xmm17[11],xmm14[11],xmm17[12],xmm14[12],xmm17[13],xmm14[13],xmm17[14],xmm14[14],xmm17[15],xmm14[15]
6995 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
6996 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3],xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
6997 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm11 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
6998 ; AVX512DQ-BW-NEXT: vpermw %ymm9, %ymm11, %ymm9
6999 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm14, %zmm9, %zmm9
7000 ; AVX512DQ-BW-NEXT: vpshufb %xmm23, %xmm8, %xmm14
7001 ; AVX512DQ-BW-NEXT: vpshufb %xmm23, %xmm10, %xmm17
7002 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm17[0],xmm14[0],xmm17[1],xmm14[1],xmm17[2],xmm14[2],xmm17[3],xmm14[3],xmm17[4],xmm14[4],xmm17[5],xmm14[5],xmm17[6],xmm14[6],xmm17[7],xmm14[7]
7003 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3],xmm10[4],xmm8[4],xmm10[5],xmm8[5],xmm10[6],xmm8[6],xmm10[7],xmm8[7]
7004 ; AVX512DQ-BW-NEXT: vprold $16, %xmm8, %xmm8
7005 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm14, %zmm8, %zmm8
7006 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} zmm8 = zmm8[0,0,0,1,4,4,4,5]
7007 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm8, %zmm9 {%k2}
7008 ; AVX512DQ-BW-NEXT: vpmovzxbw {{.*#+}} xmm8 = xmm13[0],zero,xmm13[1],zero,xmm13[2],zero,xmm13[3],zero,xmm13[4],zero,xmm13[5],zero,xmm13[6],zero,xmm13[7],zero
7009 ; AVX512DQ-BW-NEXT: vpshufd {{.*#+}} xmm10 = xmm13[2,1,2,3]
7010 ; AVX512DQ-BW-NEXT: vpmovzxbw {{.*#+}} xmm10 = xmm10[0],zero,xmm10[1],zero,xmm10[2],zero,xmm10[3],zero,xmm10[4],zero,xmm10[5],zero,xmm10[6],zero,xmm10[7],zero
7011 ; AVX512DQ-BW-NEXT: vinserti32x4 $2, %xmm10, %zmm8, %zmm8
7012 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm10 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4,22,21,16,23,22,21,16,23,22,21,16,23,17,17,17,17]
7013 ; AVX512DQ-BW-NEXT: vpermw %zmm8, %zmm10, %zmm9 {%k1}
7014 ; AVX512DQ-BW-NEXT: vpshufd {{.*#+}} xmm8 = xmm15[2,1,2,3]
7015 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm8[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
7016 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm15[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
7017 ; AVX512DQ-BW-NEXT: vinserti32x4 $2, %xmm8, %zmm13, %zmm8
7018 ; AVX512DQ-BW-NEXT: vpermw %zmm8, %zmm10, %zmm8
7019 ; AVX512DQ-BW-NEXT: movabsq $585610922974906400, %rcx # imm = 0x820820820820820
7020 ; AVX512DQ-BW-NEXT: kmovq %rcx, %k3
7021 ; AVX512DQ-BW-NEXT: vmovdqu8 %zmm8, %zmm9 {%k3}
7022 ; AVX512DQ-BW-NEXT: vpshufb %xmm12, %xmm1, %xmm8
7023 ; AVX512DQ-BW-NEXT: vpshufb %xmm12, %xmm3, %xmm12
7024 ; AVX512DQ-BW-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm12[8],xmm8[8],xmm12[9],xmm8[9],xmm12[10],xmm8[10],xmm12[11],xmm8[11],xmm12[12],xmm8[12],xmm12[13],xmm8[13],xmm12[14],xmm8[14],xmm12[15],xmm8[15]
7025 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} ymm8 = ymm8[0,0,0,1]
7026 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
7027 ; AVX512DQ-BW-NEXT: vpermw %ymm1, %ymm11, %ymm1
7028 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm8, %zmm1, %zmm1
7029 ; AVX512DQ-BW-NEXT: vpshufb %xmm23, %xmm2, %xmm3
7030 ; AVX512DQ-BW-NEXT: vpshufb %xmm23, %xmm4, %xmm8
7031 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3],xmm8[4],xmm3[4],xmm8[5],xmm3[5],xmm8[6],xmm3[6],xmm8[7],xmm3[7]
7032 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3],xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
7033 ; AVX512DQ-BW-NEXT: vprold $16, %xmm2, %xmm2
7034 ; AVX512DQ-BW-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
7035 ; AVX512DQ-BW-NEXT: vpermq {{.*#+}} zmm2 = zmm2[0,0,0,1,4,4,4,5]
7036 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm2, %zmm1 {%k2}
7037 ; AVX512DQ-BW-NEXT: vpmovzxbw {{.*#+}} xmm2 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero,xmm5[4],zero,xmm5[5],zero,xmm5[6],zero,xmm5[7],zero
7038 ; AVX512DQ-BW-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[2,1,2,3]
7039 ; AVX512DQ-BW-NEXT: vpmovzxbw {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
7040 ; AVX512DQ-BW-NEXT: vinserti32x4 $2, %xmm3, %zmm2, %zmm2
7041 ; AVX512DQ-BW-NEXT: vpermw %zmm2, %zmm10, %zmm1 {%k1}
7042 ; AVX512DQ-BW-NEXT: vpshufd {{.*#+}} xmm2 = xmm6[2,1,2,3]
7043 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
7044 ; AVX512DQ-BW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm6[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
7045 ; AVX512DQ-BW-NEXT: vinserti32x4 $2, %xmm2, %zmm3, %zmm2
7046 ; AVX512DQ-BW-NEXT: vpermw %zmm2, %zmm10, %zmm2
7047 ; AVX512DQ-BW-NEXT: vmovdqu8 %zmm2, %zmm1 {%k3}
7048 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, (%rax)
7049 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, 192(%rax)
7050 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm16, 128(%rax)
7051 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm20, 320(%rax)
7052 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, 256(%rax)
7053 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 64(%rax)
7054 ; AVX512DQ-BW-NEXT: vzeroupper
7055 ; AVX512DQ-BW-NEXT: retq
7056 ;
7057 ; AVX512DQ-BW-FCP-LABEL: store_i8_stride6_vf64:
7058 ; AVX512DQ-BW-FCP: # %bb.0:
7059 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r8), %zmm8
7060 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r9), %zmm10
7061 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 32(%rsi), %ymm3
7062 ; AVX512DQ-BW-FCP-NEXT: vpbroadcastq {{.*#+}} ymm5 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
7063 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm5, %ymm3, %ymm0
7064 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 32(%rdi), %ymm4
7065 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm5, %ymm4, %ymm1
7066 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
7067 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
7068 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm1 = ymm4[8],ymm3[8],ymm4[9],ymm3[9],ymm4[10],ymm3[10],ymm4[11],ymm3[11],ymm4[12],ymm3[12],ymm4[13],ymm3[13],ymm4[14],ymm3[14],ymm4[15],ymm3[15],ymm4[24],ymm3[24],ymm4[25],ymm3[25],ymm4[26],ymm3[26],ymm4[27],ymm3[27],ymm4[28],ymm3[28],ymm4[29],ymm3[29],ymm4[30],ymm3[30],ymm4[31],ymm3[31]
7069 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm9 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
7070 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm1, %ymm9, %ymm1
7071 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm1
7072 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 32(%rcx), %ymm6
7073 ; AVX512DQ-BW-FCP-NEXT: vpbroadcastq {{.*#+}} ymm17 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
7074 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm17, %ymm6, %ymm0
7075 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 32(%rdx), %ymm7
7076 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm17, %ymm7, %ymm2
7077 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm0 = ymm2[0],ymm0[0],ymm2[1],ymm0[1],ymm2[2],ymm0[2],ymm2[3],ymm0[3],ymm2[4],ymm0[4],ymm2[5],ymm0[5],ymm2[6],ymm0[6],ymm2[7],ymm0[7],ymm2[16],ymm0[16],ymm2[17],ymm0[17],ymm2[18],ymm0[18],ymm2[19],ymm0[19],ymm2[20],ymm0[20],ymm2[21],ymm0[21],ymm2[22],ymm0[22],ymm2[23],ymm0[23]
7078 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,2,2,3]
7079 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm2 = ymm7[8],ymm6[8],ymm7[9],ymm6[9],ymm7[10],ymm6[10],ymm7[11],ymm6[11],ymm7[12],ymm6[12],ymm7[13],ymm6[13],ymm7[14],ymm6[14],ymm7[15],ymm6[15],ymm7[24],ymm6[24],ymm7[25],ymm6[25],ymm7[26],ymm6[26],ymm7[27],ymm6[27],ymm7[28],ymm6[28],ymm7[29],ymm6[29],ymm7[30],ymm6[30],ymm7[31],ymm6[31]
7080 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm12 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
7081 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm2, %ymm12, %ymm2
7082 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
7083 ; AVX512DQ-BW-FCP-NEXT: movl $613566756, %eax # imm = 0x24924924
7084 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
7085 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm1, %zmm0 {%k1}
7086 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 32(%r8), %ymm2
7087 ; AVX512DQ-BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm2[0,1,2,3],zmm8[4,5,6,7]
7088 ; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} zmm1 = zmm1[6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,42,u,45,u,44,u,43,u,46,u,u,u,u,u,47,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63,u]
7089 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} zmm1 = zmm1[2,2,2,3,6,6,6,7]
7090 ; AVX512DQ-BW-FCP-NEXT: movl $-1840700270, %eax # imm = 0x92492492
7091 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k2
7092 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm1, %zmm0 {%k2}
7093 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 32(%r9), %ymm1
7094 ; AVX512DQ-BW-FCP-NEXT: vshufi64x2 {{.*#+}} zmm11 = zmm1[0,1,2,3],zmm10[4,5,6,7]
7095 ; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} zmm11 = zmm11[u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,42,u,45,u,44,u,43,u,46,u,u,u,u,u,47,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63]
7096 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} zmm11 = zmm11[2,2,2,3,6,6,6,7]
7097 ; AVX512DQ-BW-FCP-NEXT: movabsq $-9076969306111049208, %rax # imm = 0x8208208208208208
7098 ; AVX512DQ-BW-FCP-NEXT: kmovq %rax, %k3
7099 ; AVX512DQ-BW-FCP-NEXT: vmovdqu8 %zmm11, %zmm0 {%k3}
7100 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rsi), %ymm11
7101 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm5, %ymm11, %ymm13
7102 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %ymm14
7103 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm5, %ymm14, %ymm5
7104 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm5[0],ymm13[0],ymm5[1],ymm13[1],ymm5[2],ymm13[2],ymm5[3],ymm13[3],ymm5[4],ymm13[4],ymm5[5],ymm13[5],ymm5[6],ymm13[6],ymm5[7],ymm13[7],ymm5[16],ymm13[16],ymm5[17],ymm13[17],ymm5[18],ymm13[18],ymm5[19],ymm13[19],ymm5[20],ymm13[20],ymm5[21],ymm13[21],ymm5[22],ymm13[22],ymm5[23],ymm13[23]
7105 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
7106 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm13 = ymm14[8],ymm11[8],ymm14[9],ymm11[9],ymm14[10],ymm11[10],ymm14[11],ymm11[11],ymm14[12],ymm11[12],ymm14[13],ymm11[13],ymm14[14],ymm11[14],ymm14[15],ymm11[15],ymm14[24],ymm11[24],ymm14[25],ymm11[25],ymm14[26],ymm11[26],ymm14[27],ymm11[27],ymm14[28],ymm11[28],ymm14[29],ymm11[29],ymm14[30],ymm11[30],ymm14[31],ymm11[31]
7107 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm13, %ymm9, %ymm9
7108 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm5, %zmm9
7109 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rcx), %ymm19
7110 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm17, %ymm19, %ymm5
7111 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdx), %ymm20
7112 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm17, %ymm20, %ymm13
7113 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm13[0],ymm5[0],ymm13[1],ymm5[1],ymm13[2],ymm5[2],ymm13[3],ymm5[3],ymm13[4],ymm5[4],ymm13[5],ymm5[5],ymm13[6],ymm5[6],ymm13[7],ymm5[7],ymm13[16],ymm5[16],ymm13[17],ymm5[17],ymm13[18],ymm5[18],ymm13[19],ymm5[19],ymm13[20],ymm5[20],ymm13[21],ymm5[21],ymm13[22],ymm5[22],ymm13[23],ymm5[23]
7114 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm5 = ymm5[2,2,2,3]
7115 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} ymm13 = ymm20[8],ymm19[8],ymm20[9],ymm19[9],ymm20[10],ymm19[10],ymm20[11],ymm19[11],ymm20[12],ymm19[12],ymm20[13],ymm19[13],ymm20[14],ymm19[14],ymm20[15],ymm19[15],ymm20[24],ymm19[24],ymm20[25],ymm19[25],ymm20[26],ymm19[26],ymm20[27],ymm19[27],ymm20[28],ymm19[28],ymm20[29],ymm19[29],ymm20[30],ymm19[30],ymm20[31],ymm19[31]
7116 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm13, %ymm12, %ymm12
7117 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm12, %zmm5, %zmm5
7118 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm9, %zmm5 {%k1}
7119 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%r8), %ymm9
7120 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm8, %zmm8
7121 ; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} zmm8 = zmm8[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63,u]
7122 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} zmm8 = zmm8[2,2,2,3,6,6,6,7]
7123 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm8, %zmm5 {%k2}
7124 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%r9), %ymm8
7125 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm8, %zmm10, %zmm10
7126 ; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} zmm10 = zmm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,u,21,u,24,u,23,u,u,u,25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,58,u,61,u,60,u,59,u,62,u,u,u,u,u,63]
7127 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} zmm10 = zmm10[2,2,2,3,6,6,6,7]
7128 ; AVX512DQ-BW-FCP-NEXT: vmovdqu8 %zmm10, %zmm5 {%k3}
7129 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rcx), %xmm22
7130 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 32(%rcx), %xmm13
7131 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm17, %xmm13, %xmm10
7132 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdx), %xmm23
7133 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 32(%rdx), %xmm15
7134 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm17, %xmm15, %xmm12
7135 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm12[0],xmm10[0],xmm12[1],xmm10[1],xmm12[2],xmm10[2],xmm12[3],xmm10[3],xmm12[4],xmm10[4],xmm12[5],xmm10[5],xmm12[6],xmm10[6],xmm12[7],xmm10[7]
7136 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,0,1]
7137 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm15[0],xmm13[0],xmm15[1],xmm13[1],xmm15[2],xmm13[2],xmm15[3],xmm13[3],xmm15[4],xmm13[4],xmm15[5],xmm13[5],xmm15[6],xmm13[6],xmm15[7],xmm13[7]
7138 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm24 = [1,0,3,2,1,0,3,2,1,0,3,2,5,4,7,6]
7139 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm12, %ymm24, %ymm12
7140 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm10, %zmm12, %zmm12
7141 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 32(%rsi), %xmm18
7142 ; AVX512DQ-BW-FCP-NEXT: vpbroadcastq {{.*#+}} xmm25 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
7143 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm25, %xmm18, %xmm10
7144 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 32(%rdi), %xmm21
7145 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm25, %xmm21, %xmm16
7146 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm16[8],xmm10[8],xmm16[9],xmm10[9],xmm16[10],xmm10[10],xmm16[11],xmm10[11],xmm16[12],xmm10[12],xmm16[13],xmm10[13],xmm16[14],xmm10[14],xmm16[15],xmm10[15]
7147 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm10 = ymm10[0,0,0,1]
7148 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm16 = xmm21[0],xmm18[0],xmm21[1],xmm18[1],xmm21[2],xmm18[2],xmm21[3],xmm18[3],xmm21[4],xmm18[4],xmm21[5],xmm18[5],xmm21[6],xmm18[6],xmm21[7],xmm18[7]
7149 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm26 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
7150 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm16, %ymm26, %ymm16
7151 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm10, %zmm16, %zmm10
7152 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm12, %zmm10 {%k2}
7153 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 32(%r8), %xmm12
7154 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm27 = [8,9,0,0,0,5,6,7]
7155 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm27, %xmm12, %xmm16
7156 ; AVX512DQ-BW-FCP-NEXT: vpmovzxbw {{.*#+}} xmm28 = xmm12[0],zero,xmm12[1],zero,xmm12[2],zero,xmm12[3],zero,xmm12[4],zero,xmm12[5],zero,xmm12[6],zero,xmm12[7],zero
7157 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm16, %zmm28, %zmm16
7158 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm28 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4,22,21,16,23,22,21,16,23,22,21,16,23,17,17,17,17]
7159 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm16, %zmm28, %zmm10 {%k1}
7160 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 32(%r9), %xmm16
7161 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 {{.*#+}} xmm29 = [u,8,u,9,u,10,u,11,u,4,u,5,u,6,u,7]
7162 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm29, %xmm16, %xmm30
7163 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm31 = xmm16[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
7164 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, %xmm30, %zmm31, %zmm30
7165 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rsi), %xmm31
7166 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm30, %zmm28, %zmm30
7167 ; AVX512DQ-BW-FCP-NEXT: movabsq $585610922974906400, %rax # imm = 0x820820820820820
7168 ; AVX512DQ-BW-FCP-NEXT: kmovq %rax, %k3
7169 ; AVX512DQ-BW-FCP-NEXT: vmovdqu8 %zmm30, %zmm10 {%k3}
7170 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm17, %xmm22, %xmm30
7171 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm17, %xmm23, %xmm17
7172 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm17 = xmm17[0],xmm30[0],xmm17[1],xmm30[1],xmm17[2],xmm30[2],xmm17[3],xmm30[3],xmm17[4],xmm30[4],xmm17[5],xmm30[5],xmm17[6],xmm30[6],xmm17[7],xmm30[7]
7173 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm30 = xmm23[0],xmm22[0],xmm23[1],xmm22[1],xmm23[2],xmm22[2],xmm23[3],xmm22[3],xmm23[4],xmm22[4],xmm23[5],xmm22[5],xmm23[6],xmm22[6],xmm23[7],xmm22[7]
7174 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm30, %ymm24, %ymm24
7175 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %xmm30
7176 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm17 = ymm17[0,0,0,1]
7177 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm17, %zmm24, %zmm24
7178 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm25, %xmm31, %xmm17
7179 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm25, %xmm30, %xmm25
7180 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm17 = xmm25[8],xmm17[8],xmm25[9],xmm17[9],xmm25[10],xmm17[10],xmm25[11],xmm17[11],xmm25[12],xmm17[12],xmm25[13],xmm17[13],xmm25[14],xmm17[14],xmm25[15],xmm17[15]
7181 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm25 = xmm30[0],xmm31[0],xmm30[1],xmm31[1],xmm30[2],xmm31[2],xmm30[3],xmm31[3],xmm30[4],xmm31[4],xmm30[5],xmm31[5],xmm30[6],xmm31[6],xmm30[7],xmm31[7]
7182 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm25, %ymm26, %ymm25
7183 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm17 = ymm17[0,0,0,1]
7184 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm17, %zmm25, %zmm17
7185 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r8), %xmm25
7186 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm24, %zmm17 {%k2}
7187 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm27, %xmm25, %xmm24
7188 ; AVX512DQ-BW-FCP-NEXT: vpmovzxbw {{.*#+}} xmm26 = xmm25[0],zero,xmm25[1],zero,xmm25[2],zero,xmm25[3],zero,xmm25[4],zero,xmm25[5],zero,xmm25[6],zero,xmm25[7],zero
7189 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm24, %zmm26, %zmm26
7190 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%r9), %xmm24
7191 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm26, %zmm28, %zmm17 {%k1}
7192 ; AVX512DQ-BW-FCP-NEXT: vpshufb %xmm29, %xmm24, %xmm26
7193 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} xmm27 = xmm24[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
7194 ; AVX512DQ-BW-FCP-NEXT: vinserti32x4 $2, %xmm26, %zmm27, %zmm26
7195 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm26, %zmm28, %zmm26
7196 ; AVX512DQ-BW-FCP-NEXT: vmovdqu8 %zmm26, %zmm17 {%k3}
7197 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm19 = ymm20[0],ymm19[0],ymm20[1],ymm19[1],ymm20[2],ymm19[2],ymm20[3],ymm19[3],ymm20[4],ymm19[4],ymm20[5],ymm19[5],ymm20[6],ymm19[6],ymm20[7],ymm19[7],ymm20[16],ymm19[16],ymm20[17],ymm19[17],ymm20[18],ymm19[18],ymm20[19],ymm19[19],ymm20[20],ymm19[20],ymm20[21],ymm19[21],ymm20[22],ymm19[22],ymm20[23],ymm19[23]
7198 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm23[8],xmm22[8],xmm23[9],xmm22[9],xmm23[10],xmm22[10],xmm23[11],xmm22[11],xmm23[12],xmm22[12],xmm23[13],xmm22[13],xmm23[14],xmm22[14],xmm23[15],xmm22[15]
7199 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm19, %zmm20, %zmm19
7200 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm11 = ymm14[0],ymm11[0],ymm14[1],ymm11[1],ymm14[2],ymm11[2],ymm14[3],ymm11[3],ymm14[4],ymm11[4],ymm14[5],ymm11[5],ymm14[6],ymm11[6],ymm14[7],ymm11[7],ymm14[16],ymm11[16],ymm14[17],ymm11[17],ymm14[18],ymm11[18],ymm14[19],ymm11[19],ymm14[20],ymm11[20],ymm14[21],ymm11[21],ymm14[22],ymm11[22],ymm14[23],ymm11[23]
7201 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm30[8],xmm31[8],xmm30[9],xmm31[9],xmm30[10],xmm31[10],xmm30[11],xmm31[11],xmm30[12],xmm31[12],xmm30[13],xmm31[13],xmm30[14],xmm31[14],xmm30[15],xmm31[15]
7202 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm11, %zmm14, %zmm11
7203 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm14 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
7204 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm11, %zmm14, %zmm11
7205 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm20 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7,25,24,27,26,25,24,27,26,25,24,27,26,29,28,31,30]
7206 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm19, %zmm20, %zmm11 {%k1}
7207 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm7[0],ymm6[0],ymm7[1],ymm6[1],ymm7[2],ymm6[2],ymm7[3],ymm6[3],ymm7[4],ymm6[4],ymm7[5],ymm6[5],ymm7[6],ymm6[6],ymm7[7],ymm6[7],ymm7[16],ymm6[16],ymm7[17],ymm6[17],ymm7[18],ymm6[18],ymm7[19],ymm6[19],ymm7[20],ymm6[20],ymm7[21],ymm6[21],ymm7[22],ymm6[22],ymm7[23],ymm6[23]
7208 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm15[8],xmm13[8],xmm15[9],xmm13[9],xmm15[10],xmm13[10],xmm15[11],xmm13[11],xmm15[12],xmm13[12],xmm15[13],xmm13[13],xmm15[14],xmm13[14],xmm15[15],xmm13[15]
7209 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm7, %zmm6
7210 ; AVX512DQ-BW-FCP-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm4[0],ymm3[0],ymm4[1],ymm3[1],ymm4[2],ymm3[2],ymm4[3],ymm3[3],ymm4[4],ymm3[4],ymm4[5],ymm3[5],ymm4[6],ymm3[6],ymm4[7],ymm3[7],ymm4[16],ymm3[16],ymm4[17],ymm3[17],ymm4[18],ymm3[18],ymm4[19],ymm3[19],ymm4[20],ymm3[20],ymm4[21],ymm3[21],ymm4[22],ymm3[22],ymm4[23],ymm3[23]
7211 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm21[8],xmm18[8],xmm21[9],xmm18[9],xmm21[10],xmm18[10],xmm21[11],xmm18[11],xmm21[12],xmm18[12],xmm21[13],xmm18[13],xmm21[14],xmm18[14],xmm21[15],xmm18[15]
7212 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
7213 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm3, %zmm14, %zmm3
7214 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm25[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
7215 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm6, %zmm20, %zmm3 {%k1}
7216 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7]
7217 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm4, %ymm6, %ymm4
7218 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
7219 ; AVX512DQ-BW-FCP-NEXT: # ymm7 = mem[0,1,0,1]
7220 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm7, %ymm9, %ymm9
7221 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm9 = ymm9[2,2,2,3]
7222 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm9, %zmm4, %zmm4
7223 ; AVX512DQ-BW-FCP-NEXT: movl $1227133513, %eax # imm = 0x49249249
7224 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
7225 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm4, %zmm11 {%k1}
7226 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm24[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
7227 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm4, %ymm6, %ymm4
7228 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm9 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
7229 ; AVX512DQ-BW-FCP-NEXT: # ymm9 = mem[0,1,0,1]
7230 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm9, %ymm8, %ymm8
7231 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm8 = ymm8[2,2,2,3]
7232 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm8, %zmm4, %zmm4
7233 ; AVX512DQ-BW-FCP-NEXT: movabsq $2342443691899625602, %rax # imm = 0x2082082082082082
7234 ; AVX512DQ-BW-FCP-NEXT: kmovq %rax, %k2
7235 ; AVX512DQ-BW-FCP-NEXT: vmovdqu8 %zmm4, %zmm11 {%k2}
7236 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm7, %ymm2, %ymm2
7237 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm12[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
7238 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm4, %ymm6, %ymm4
7239 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
7240 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm4, %zmm2
7241 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm2, %zmm3 {%k1}
7242 ; AVX512DQ-BW-FCP-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm16[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
7243 ; AVX512DQ-BW-FCP-NEXT: vpermw %ymm2, %ymm6, %ymm2
7244 ; AVX512DQ-BW-FCP-NEXT: vpshufb %ymm9, %ymm1, %ymm1
7245 ; AVX512DQ-BW-FCP-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
7246 ; AVX512DQ-BW-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
7247 ; AVX512DQ-BW-FCP-NEXT: vmovdqu8 %zmm1, %zmm3 {%k2}
7248 ; AVX512DQ-BW-FCP-NEXT: movq {{[0-9]+}}(%rsp), %rax
7249 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, 256(%rax)
7250 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, 64(%rax)
7251 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm17, (%rax)
7252 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, 192(%rax)
7253 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, 128(%rax)
7254 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 320(%rax)
7255 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
7256 ; AVX512DQ-BW-FCP-NEXT: retq
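; The scalar IR below loads the six <64 x i8> inputs and concatenates them
; pairwise (vec0+vec1, vec2+vec3, vec4+vec5), then into a single <384 x i8>
; vector that feeds the stride-6 interleaved store.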
7257 %in.vec0 = load <64 x i8>, ptr %in.vecptr0, align 64
7258 %in.vec1 = load <64 x i8>, ptr %in.vecptr1, align 64
7259 %in.vec2 = load <64 x i8>, ptr %in.vecptr2, align 64
7260 %in.vec3 = load <64 x i8>, ptr %in.vecptr3, align 64
7261 %in.vec4 = load <64 x i8>, ptr %in.vecptr4, align 64
7262 %in.vec5 = load <64 x i8>, ptr %in.vecptr5, align 64
7263 %1 = shufflevector <64 x i8> %in.vec0, <64 x i8> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
7264 %2 = shufflevector <64 x i8> %in.vec2, <64 x i8> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
7265 %3 = shufflevector <64 x i8> %in.vec4, <64 x i8> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
7266 %4 = shufflevector <128 x i8> %1, <128 x i8> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
  %5 = shufflevector <128 x i8> %3, <128 x i8> poison, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  %6 = shufflevector <256 x i8> %4, <256 x i8> %5, <384 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383>
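  ; Stride-6 interleave: output element 6*i+j takes element i of source j (j = 0..5).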
  %interleaved.vec = shufflevector <384 x i8> %6, <384 x i8> poison, <384 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383>
  store <384 x i8> %interleaved.vec, ptr %out.vec, align 64