1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE,FALLBACK0
3 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1,AVX1-ONLY,FALLBACK1
4 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-SLOW,FALLBACK2
5 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST,FALLBACK3
6 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX1,AVX2,AVX2-ONLY,AVX2-FAST-PERLANE,FALLBACK4
7 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512F-ONLY-SLOW,FALLBACK5
8 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512F-ONLY-FAST,FALLBACK6
9 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-SLOW,AVX512F-SLOW,AVX512DQ-SLOW,FALLBACK7
10 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512F,AVX512-FAST,AVX512F-FAST,AVX512DQ-FAST,FALLBACK8
11 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512BW-ONLY-SLOW,FALLBACK9
12 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512BW-ONLY-FAST,FALLBACK10
13 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-SLOW,AVX512BW-SLOW,AVX512DQBW-SLOW,FALLBACK11
14 ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX,AVX2,AVX512,AVX512BW,AVX512-FAST,AVX512BW-FAST,AVX512DQBW-FAST,FALLBACK12
16 ; These patterns are produced by LoopVectorizer for interleaved stores.
18 define void @store_i8_stride8_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
19 ; SSE-LABEL: store_i8_stride8_vf2:
21 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
22 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
23 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r11
24 ; SSE-NEXT: movdqa (%rdi), %xmm0
25 ; SSE-NEXT: movdqa (%rdx), %xmm1
26 ; SSE-NEXT: movdqa (%r8), %xmm2
27 ; SSE-NEXT: movdqa (%r11), %xmm3
28 ; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
29 ; SSE-NEXT: punpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
30 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
31 ; SSE-NEXT: punpcklwd {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
32 ; SSE-NEXT: punpcklwd {{.*#+}} xmm3 = xmm3[0],mem[0],xmm3[1],mem[1],xmm3[2],mem[2],xmm3[3],mem[3]
33 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
34 ; SSE-NEXT: pxor %xmm1, %xmm1
35 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
36 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm2[0,2,2,3,4,5,6,7]
37 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
38 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,1,0,2]
39 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[3,1,2,3,4,5,6,7]
40 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,7,5,6,7]
41 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,2,0]
42 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,7,6,5,4]
43 ; SSE-NEXT: packuswb %xmm2, %xmm3
44 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[1,3,2,3]
45 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
46 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm0[0,2,2,3,4,5,6,7]
47 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
48 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
49 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[3,1,2,3,4,5,6,7]
50 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,7,5,6,7]
51 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
52 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[1,0,3,2,4,5,6,7]
53 ; SSE-NEXT: packuswb %xmm0, %xmm1
54 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
55 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
56 ; SSE-NEXT: movdqa %xmm0, (%rax)
59 ; AVX-LABEL: store_i8_stride8_vf2:
61 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %rax
62 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %r10
63 ; AVX-NEXT: movq {{[0-9]+}}(%rsp), %r11
64 ; AVX-NEXT: vmovdqa (%rdi), %xmm0
65 ; AVX-NEXT: vmovdqa (%rdx), %xmm1
66 ; AVX-NEXT: vmovdqa (%r8), %xmm2
67 ; AVX-NEXT: vmovdqa (%r11), %xmm3
68 ; AVX-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3]
69 ; AVX-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
70 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
71 ; AVX-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
72 ; AVX-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],mem[0],xmm3[1],mem[1],xmm3[2],mem[2],xmm3[3],mem[3]
73 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
74 ; AVX-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
75 ; AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,1,5,9,13,2,6,10,14,3,7,11,15]
76 ; AVX-NEXT: vmovdqa %xmm0, (%rax)
78 %in.vec0 = load <2 x i8>, ptr %in.vecptr0, align 64
79 %in.vec1 = load <2 x i8>, ptr %in.vecptr1, align 64
80 %in.vec2 = load <2 x i8>, ptr %in.vecptr2, align 64
81 %in.vec3 = load <2 x i8>, ptr %in.vecptr3, align 64
82 %in.vec4 = load <2 x i8>, ptr %in.vecptr4, align 64
83 %in.vec5 = load <2 x i8>, ptr %in.vecptr5, align 64
84 %in.vec6 = load <2 x i8>, ptr %in.vecptr6, align 64
85 %in.vec7 = load <2 x i8>, ptr %in.vecptr7, align 64
86 %1 = shufflevector <2 x i8> %in.vec0, <2 x i8> %in.vec1, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
87 %2 = shufflevector <2 x i8> %in.vec2, <2 x i8> %in.vec3, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
88 %3 = shufflevector <2 x i8> %in.vec4, <2 x i8> %in.vec5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
89 %4 = shufflevector <2 x i8> %in.vec6, <2 x i8> %in.vec7, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
90 %5 = shufflevector <4 x i8> %1, <4 x i8> %2, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
91 %6 = shufflevector <4 x i8> %3, <4 x i8> %4, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
92 %7 = shufflevector <8 x i8> %5, <8 x i8> %6, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
93 %interleaved.vec = shufflevector <16 x i8> %7, <16 x i8> poison, <16 x i32> <i32 0, i32 2, i32 4, i32 6, i32 8, i32 10, i32 12, i32 14, i32 1, i32 3, i32 5, i32 7, i32 9, i32 11, i32 13, i32 15>
94 store <16 x i8> %interleaved.vec, ptr %out.vec, align 64
98 define void @store_i8_stride8_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
99 ; SSE-LABEL: store_i8_stride8_vf4:
101 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
102 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
103 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r11
104 ; SSE-NEXT: movdqa (%rdi), %xmm0
105 ; SSE-NEXT: movdqa (%rdx), %xmm1
106 ; SSE-NEXT: movdqa (%r8), %xmm2
107 ; SSE-NEXT: movdqa (%r11), %xmm3
108 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
109 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
110 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
111 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],mem[0],xmm3[1],mem[1]
112 ; SSE-NEXT: pxor %xmm6, %xmm6
113 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3],xmm3[4],xmm6[4],xmm3[5],xmm6[5],xmm3[6],xmm6[6],xmm3[7],xmm6[7]
114 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,1,2,0]
115 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm4[0,1,2,3,4,5,7,5]
116 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm4[0,1,2,3,4,5,6,4]
117 ; SSE-NEXT: packuswb %xmm5, %xmm7
118 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,65535,65535,0,65535,65535,65535,0]
119 ; SSE-NEXT: movdqa %xmm4, %xmm5
120 ; SSE-NEXT: pandn %xmm7, %xmm5
121 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1],xmm2[2],xmm6[2],xmm2[3],xmm6[3],xmm2[4],xmm6[4],xmm2[5],xmm6[5],xmm2[6],xmm6[6],xmm2[7],xmm6[7]
122 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm2[0,1,2,0]
123 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm7[0,1,2,3,7,5,6,7]
124 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,6,4,6,7]
125 ; SSE-NEXT: packuswb %xmm8, %xmm7
126 ; SSE-NEXT: pand %xmm4, %xmm7
127 ; SSE-NEXT: por %xmm5, %xmm7
128 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,3,2,3]
129 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3],xmm1[4],xmm6[4],xmm1[5],xmm6[5],xmm1[6],xmm6[6],xmm1[7],xmm6[7]
130 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm1[0,2,2,3]
131 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm5[0,1,1,3,4,5,6,7]
132 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm5[0,1,0,2,4,5,6,7]
133 ; SSE-NEXT: packuswb %xmm8, %xmm9
134 ; SSE-NEXT: movdqa {{.*#+}} xmm5 = [65535,0,65535,65535,65535,0,65535,65535]
135 ; SSE-NEXT: movdqa %xmm5, %xmm8
136 ; SSE-NEXT: pandn %xmm9, %xmm8
137 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3],xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
138 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm0[0,2,2,3]
139 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm6[1,3,2,3,4,5,6,7]
140 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm6[0,2,2,3,4,5,6,7]
141 ; SSE-NEXT: packuswb %xmm9, %xmm6
142 ; SSE-NEXT: pand %xmm5, %xmm6
143 ; SSE-NEXT: por %xmm8, %xmm6
144 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
145 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1]
146 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,1,1,3]
147 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm3[0,1,2,3,4,5,5,7]
148 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,4,6]
149 ; SSE-NEXT: packuswb %xmm7, %xmm3
150 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
151 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm2[0,1,2,3,5,7,6,7]
152 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,6,6,7]
153 ; SSE-NEXT: packuswb %xmm7, %xmm2
154 ; SSE-NEXT: pand %xmm4, %xmm2
155 ; SSE-NEXT: pandn %xmm3, %xmm4
156 ; SSE-NEXT: por %xmm2, %xmm4
157 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[1,3,2,3]
158 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
159 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm1[0,1,3,1,4,5,6,7]
160 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,2,0,4,5,6,7]
161 ; SSE-NEXT: packuswb %xmm3, %xmm1
162 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,1,2,3]
163 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm0[3,1,2,3,4,5,6,7]
164 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[2,0,2,3,4,5,6,7]
165 ; SSE-NEXT: packuswb %xmm3, %xmm0
166 ; SSE-NEXT: pand %xmm5, %xmm0
167 ; SSE-NEXT: pandn %xmm1, %xmm5
168 ; SSE-NEXT: por %xmm0, %xmm5
169 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[0,2,2,3]
170 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
171 ; SSE-NEXT: movdqa %xmm0, 16(%rax)
172 ; SSE-NEXT: movdqa %xmm6, (%rax)
175 ; AVX1-ONLY-LABEL: store_i8_stride8_vf4:
176 ; AVX1-ONLY: # %bb.0:
177 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
178 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
179 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
180 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm0
181 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm1
182 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm2
183 ; AVX1-ONLY-NEXT: vmovdqa (%r11), %xmm3
184 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
185 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
186 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
187 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],mem[0],xmm2[1],mem[1]
188 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm3[0],mem[0],xmm3[1],mem[1]
189 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm2[0]
190 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm2 = [2,6,10,14,3,7,11,15,2,6,10,14,3,7,11,15]
191 ; AVX1-ONLY-NEXT: # xmm2 = mem[0,0]
192 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm1, %xmm3
193 ; AVX1-ONLY-NEXT: vpshufb %xmm2, %xmm0, %xmm2
194 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
195 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = [0,4,8,12,1,5,9,13,0,4,8,12,1,5,9,13]
196 ; AVX1-ONLY-NEXT: # xmm3 = mem[0,0]
197 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm1, %xmm1
198 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm0
199 ; AVX1-ONLY-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
200 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
201 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
202 ; AVX1-ONLY-NEXT: vzeroupper
203 ; AVX1-ONLY-NEXT: retq
205 ; AVX2-LABEL: store_i8_stride8_vf4:
207 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %rax
208 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %r10
209 ; AVX2-NEXT: movq {{[0-9]+}}(%rsp), %r11
210 ; AVX2-NEXT: vmovdqa (%rdi), %xmm0
211 ; AVX2-NEXT: vmovdqa (%rsi), %xmm1
212 ; AVX2-NEXT: vmovdqa (%rdx), %xmm2
213 ; AVX2-NEXT: vmovdqa (%rcx), %xmm3
214 ; AVX2-NEXT: vinserti128 $1, (%r11), %ymm3, %ymm3
215 ; AVX2-NEXT: vinserti128 $1, (%r10), %ymm2, %ymm2
216 ; AVX2-NEXT: vpunpckldq {{.*#+}} ymm2 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
217 ; AVX2-NEXT: vinserti128 $1, (%r9), %ymm1, %ymm1
218 ; AVX2-NEXT: vinserti128 $1, (%r8), %ymm0, %ymm0
219 ; AVX2-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
220 ; AVX2-NEXT: vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm2[0],ymm0[2],ymm2[2]
221 ; AVX2-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,4,8,12,1,5,9,13,2,6,10,14,3,7,11,15,16,20,24,28,17,21,25,29,18,22,26,30,19,23,27,31]
222 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [0,4,1,5,2,6,3,7]
223 ; AVX2-NEXT: vpermd %ymm0, %ymm1, %ymm0
224 ; AVX2-NEXT: vmovdqa %ymm0, (%rax)
225 ; AVX2-NEXT: vzeroupper
227 %in.vec0 = load <4 x i8>, ptr %in.vecptr0, align 64
228 %in.vec1 = load <4 x i8>, ptr %in.vecptr1, align 64
229 %in.vec2 = load <4 x i8>, ptr %in.vecptr2, align 64
230 %in.vec3 = load <4 x i8>, ptr %in.vecptr3, align 64
231 %in.vec4 = load <4 x i8>, ptr %in.vecptr4, align 64
232 %in.vec5 = load <4 x i8>, ptr %in.vecptr5, align 64
233 %in.vec6 = load <4 x i8>, ptr %in.vecptr6, align 64
234 %in.vec7 = load <4 x i8>, ptr %in.vecptr7, align 64
235 %1 = shufflevector <4 x i8> %in.vec0, <4 x i8> %in.vec1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
236 %2 = shufflevector <4 x i8> %in.vec2, <4 x i8> %in.vec3, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
237 %3 = shufflevector <4 x i8> %in.vec4, <4 x i8> %in.vec5, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
238 %4 = shufflevector <4 x i8> %in.vec6, <4 x i8> %in.vec7, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
239 %5 = shufflevector <8 x i8> %1, <8 x i8> %2, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
240 %6 = shufflevector <8 x i8> %3, <8 x i8> %4, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
241 %7 = shufflevector <16 x i8> %5, <16 x i8> %6, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
242 %interleaved.vec = shufflevector <32 x i8> %7, <32 x i8> poison, <32 x i32> <i32 0, i32 4, i32 8, i32 12, i32 16, i32 20, i32 24, i32 28, i32 1, i32 5, i32 9, i32 13, i32 17, i32 21, i32 25, i32 29, i32 2, i32 6, i32 10, i32 14, i32 18, i32 22, i32 26, i32 30, i32 3, i32 7, i32 11, i32 15, i32 19, i32 23, i32 27, i32 31>
243 store <32 x i8> %interleaved.vec, ptr %out.vec, align 64
247 define void @store_i8_stride8_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
248 ; SSE-LABEL: store_i8_stride8_vf8:
250 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
251 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
252 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r11
253 ; SSE-NEXT: movq {{.*#+}} xmm0 = mem[0],zero
254 ; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
255 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
256 ; SSE-NEXT: movq {{.*#+}} xmm1 = mem[0],zero
257 ; SSE-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
258 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
259 ; SSE-NEXT: movq {{.*#+}} xmm2 = mem[0],zero
260 ; SSE-NEXT: movq {{.*#+}} xmm3 = mem[0],zero
261 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3],xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
262 ; SSE-NEXT: movq {{.*#+}} xmm5 = mem[0],zero
263 ; SSE-NEXT: movq {{.*#+}} xmm3 = mem[0],zero
264 ; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3],xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
265 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm5[0,0,2,1,4,5,6,7]
266 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,1]
267 ; SSE-NEXT: movdqa {{.*#+}} xmm3 = [65535,65535,65535,0,65535,65535,65535,0]
268 ; SSE-NEXT: movdqa %xmm3, %xmm6
269 ; SSE-NEXT: pandn %xmm4, %xmm6
270 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm2[0,1,1,3,4,5,6,7]
271 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,0,2,1]
272 ; SSE-NEXT: pand %xmm3, %xmm4
273 ; SSE-NEXT: por %xmm6, %xmm4
274 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm4[1,3,2,3]
275 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm1[0,0,2,1,4,5,6,7]
276 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm4[0,1,1,3]
277 ; SSE-NEXT: movdqa {{.*#+}} xmm4 = [65535,0,65535,65535,65535,0,65535,65535]
278 ; SSE-NEXT: movdqa %xmm4, %xmm8
279 ; SSE-NEXT: pandn %xmm6, %xmm8
280 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm0[0,0,0,0]
281 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,5,5,5,5]
282 ; SSE-NEXT: pand %xmm4, %xmm6
283 ; SSE-NEXT: por %xmm8, %xmm6
284 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
285 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1]
286 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm5[0,2,2,3,4,5,6,7]
287 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,0,2,1]
288 ; SSE-NEXT: movdqa %xmm3, %xmm8
289 ; SSE-NEXT: pandn %xmm7, %xmm8
290 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm2[2,1,3,3,4,5,6,7]
291 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,0,2,1]
292 ; SSE-NEXT: pand %xmm3, %xmm7
293 ; SSE-NEXT: por %xmm8, %xmm7
294 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm7[1,3,2,3]
295 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm1[0,2,2,3,4,5,6,7]
296 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,1,1,3]
297 ; SSE-NEXT: movdqa %xmm4, %xmm9
298 ; SSE-NEXT: pandn %xmm7, %xmm9
299 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[1,1,1,1]
300 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,5,5,5,5]
301 ; SSE-NEXT: pand %xmm4, %xmm7
302 ; SSE-NEXT: por %xmm9, %xmm7
303 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,2,2,3]
304 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1]
305 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm5[0,1,2,3,4,4,6,5]
306 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,2,2,3]
307 ; SSE-NEXT: movdqa %xmm3, %xmm9
308 ; SSE-NEXT: pandn %xmm8, %xmm9
309 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm2[0,1,2,3,4,5,5,7]
310 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,2,2,3]
311 ; SSE-NEXT: pand %xmm3, %xmm8
312 ; SSE-NEXT: por %xmm9, %xmm8
313 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[1,3,2,3]
314 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm1[0,1,2,3,4,4,6,5]
315 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[2,1,3,3]
316 ; SSE-NEXT: movdqa %xmm4, %xmm10
317 ; SSE-NEXT: pandn %xmm9, %xmm10
318 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm0[2,2,2,2]
319 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,5,5,5,5]
320 ; SSE-NEXT: pand %xmm4, %xmm9
321 ; SSE-NEXT: por %xmm10, %xmm9
322 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[0,2,2,3]
323 ; SSE-NEXT: punpckldq {{.*#+}} xmm9 = xmm9[0],xmm8[0],xmm9[1],xmm8[1]
324 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,6,6,7]
325 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
326 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,5,7,7]
327 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
328 ; SSE-NEXT: pand %xmm3, %xmm2
329 ; SSE-NEXT: pandn %xmm5, %xmm3
330 ; SSE-NEXT: por %xmm2, %xmm3
331 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[1,3,2,3]
332 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
333 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,3]
334 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
335 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,5,5,5,5]
336 ; SSE-NEXT: pand %xmm4, %xmm0
337 ; SSE-NEXT: pandn %xmm1, %xmm4
338 ; SSE-NEXT: por %xmm0, %xmm4
339 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm4[0,2,2,3]
340 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
341 ; SSE-NEXT: movdqa %xmm0, 48(%rax)
342 ; SSE-NEXT: movdqa %xmm9, 32(%rax)
343 ; SSE-NEXT: movdqa %xmm7, 16(%rax)
344 ; SSE-NEXT: movdqa %xmm6, (%rax)
347 ; AVX1-ONLY-LABEL: store_i8_stride8_vf8:
348 ; AVX1-ONLY: # %bb.0:
349 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
350 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
351 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
352 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
353 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
354 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
355 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
356 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
357 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
358 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
359 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
360 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
361 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
362 ; AVX1-ONLY-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
363 ; AVX1-ONLY-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
364 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm4 = [0,0,2,10,0,0,3,11,0,0,2,10,0,0,3,11]
365 ; AVX1-ONLY-NEXT: # xmm4 = mem[0,0]
366 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm3, %xmm5
367 ; AVX1-ONLY-NEXT: vpshufb %xmm4, %xmm2, %xmm4
368 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
369 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm5 = [2,10,0,0,3,11,0,0,2,10,0,0,3,11,0,0]
370 ; AVX1-ONLY-NEXT: # xmm5 = mem[0,0]
371 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm1, %xmm6
372 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm0, %xmm5
373 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
374 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2,3],xmm5[4,5],xmm4[6,7]
375 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm5 = [0,0,0,8,0,0,1,9,0,0,0,8,0,0,1,9]
376 ; AVX1-ONLY-NEXT: # xmm5 = mem[0,0]
377 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm3, %xmm6
378 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm2, %xmm5
379 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
380 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = [0,8,0,0,1,9,0,0,0,8,0,0,1,9,0,0]
381 ; AVX1-ONLY-NEXT: # xmm6 = mem[0,0]
382 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm1, %xmm7
383 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm0, %xmm6
384 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
385 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5],xmm5[6,7]
386 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
387 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm5 = [0,0,6,14,0,0,7,15,0,0,6,14,0,0,7,15]
388 ; AVX1-ONLY-NEXT: # xmm5 = mem[0,0]
389 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm3, %xmm6
390 ; AVX1-ONLY-NEXT: vpshufb %xmm5, %xmm2, %xmm5
391 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
392 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = [6,14,0,0,7,15,0,0,6,14,0,0,7,15,0,0]
393 ; AVX1-ONLY-NEXT: # xmm6 = mem[0,0]
394 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm1, %xmm7
395 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm0, %xmm6
396 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
397 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2,3],xmm6[4,5],xmm5[6,7]
398 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm6 = [0,0,4,12,0,0,5,13,0,0,4,12,0,0,5,13]
399 ; AVX1-ONLY-NEXT: # xmm6 = mem[0,0]
400 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm3, %xmm3
401 ; AVX1-ONLY-NEXT: vpshufb %xmm6, %xmm2, %xmm2
402 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
403 ; AVX1-ONLY-NEXT: vmovddup {{.*#+}} xmm3 = [4,12,0,0,5,13,0,0,4,12,0,0,5,13,0,0]
404 ; AVX1-ONLY-NEXT: # xmm3 = mem[0,0]
405 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm1, %xmm1
406 ; AVX1-ONLY-NEXT: vpshufb %xmm3, %xmm0, %xmm0
407 ; AVX1-ONLY-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
408 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm2[2,3],xmm0[4,5],xmm2[6,7]
409 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm0, %ymm0
410 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
411 ; AVX1-ONLY-NEXT: vmovaps %ymm4, (%rax)
412 ; AVX1-ONLY-NEXT: vzeroupper
413 ; AVX1-ONLY-NEXT: retq
415 ; AVX2-SLOW-LABEL: store_i8_stride8_vf8:
416 ; AVX2-SLOW: # %bb.0:
417 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
418 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
419 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r11
420 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
421 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
422 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
423 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
424 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
425 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
426 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
427 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
428 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
429 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
430 ; AVX2-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
431 ; AVX2-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
432 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
433 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm1
434 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,zero,zero,ymm1[0,8],zero,zero,zero,zero,zero,zero,ymm1[1,9],zero,zero,zero,zero,zero,zero,zero,zero,ymm1[18,26],zero,zero,zero,zero,zero,zero,ymm1[19,27]
435 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm1[2,3,0,1]
436 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,zero,zero,ymm3[0,8],zero,zero,zero,zero,zero,zero,ymm3[1,9],zero,zero,zero,zero,ymm3[18,26],zero,zero,zero,zero,zero,zero,ymm3[19,27],zero,zero
437 ; AVX2-SLOW-NEXT: vpor %ymm4, %ymm2, %ymm2
438 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm0[0,8],zero,zero,zero,zero,zero,zero,ymm0[1,9],zero,zero,zero,zero,zero,zero,zero,zero,ymm0[18,26],zero,zero,zero,zero,zero,zero,ymm0[19,27],zero,zero,zero,zero
439 ; AVX2-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm0[2,3,0,1]
440 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,ymm5[0,8],zero,zero,zero,zero,zero,zero,ymm5[1,9],zero,zero,zero,zero,ymm5[18,26],zero,zero,zero,zero,zero,zero,ymm5[19,27],zero,zero,zero,zero,zero,zero
441 ; AVX2-SLOW-NEXT: vpor %ymm6, %ymm4, %ymm4
442 ; AVX2-SLOW-NEXT: vpor %ymm2, %ymm4, %ymm2
443 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[4,12],zero,zero,zero,zero,zero,zero,ymm1[5,13],zero,zero,zero,zero,zero,zero,zero,zero,ymm1[22,30],zero,zero,zero,zero,zero,zero,ymm1[23,31]
444 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,zero,zero,zero,ymm3[4,12],zero,zero,zero,zero,zero,zero,ymm3[5,13],zero,zero,zero,zero,ymm3[22,30],zero,zero,zero,zero,zero,zero,ymm3[23,31],zero,zero
445 ; AVX2-SLOW-NEXT: vpor %ymm3, %ymm1, %ymm1
446 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[4,12],zero,zero,zero,zero,zero,zero,ymm0[5,13],zero,zero,zero,zero,zero,zero,zero,zero,ymm0[22,30],zero,zero,zero,zero,zero,zero,ymm0[23,31],zero,zero,zero,zero
447 ; AVX2-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm5[4,12],zero,zero,zero,zero,zero,zero,ymm5[5,13],zero,zero,zero,zero,ymm5[22,30],zero,zero,zero,zero,zero,zero,ymm5[23,31],zero,zero,zero,zero,zero,zero
448 ; AVX2-SLOW-NEXT: vpor %ymm3, %ymm0, %ymm0
449 ; AVX2-SLOW-NEXT: vpor %ymm1, %ymm0, %ymm0
450 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 32(%rax)
451 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, (%rax)
452 ; AVX2-SLOW-NEXT: vzeroupper
453 ; AVX2-SLOW-NEXT: retq
455 ; AVX2-FAST-LABEL: store_i8_stride8_vf8:
456 ; AVX2-FAST: # %bb.0:
457 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
458 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
459 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r11
460 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
461 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
462 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
463 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
464 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
465 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
466 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
467 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
468 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
469 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
470 ; AVX2-FAST-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
471 ; AVX2-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
472 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
473 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm1
474 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [0,2,4,6,0,2,4,6]
475 ; AVX2-FAST-NEXT: # ymm2 = mem[0,1,0,1]
476 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm2, %ymm3
477 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,0,4,8,12,u,u,u,u,1,5,9,13,u,u,u,u,2,6,10,14,u,u,u,u,3,7,11,15>
478 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm3, %ymm3
479 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm2
480 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <0,4,8,12,u,u,u,u,1,5,9,13,u,u,u,u,2,6,10,14,u,u,u,u,3,7,11,15,u,u,u,u>
481 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm2
482 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[3],ymm2[4],ymm3[5],ymm2[6],ymm3[7]
483 ; AVX2-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [1,3,5,7,1,3,5,7]
484 ; AVX2-FAST-NEXT: # ymm3 = mem[0,1,0,1]
485 ; AVX2-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm1
486 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm1, %ymm1
487 ; AVX2-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm0
488 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm0, %ymm0
489 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
490 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 32(%rax)
491 ; AVX2-FAST-NEXT: vmovdqa %ymm2, (%rax)
492 ; AVX2-FAST-NEXT: vzeroupper
493 ; AVX2-FAST-NEXT: retq
495 ; AVX2-FAST-PERLANE-LABEL: store_i8_stride8_vf8:
496 ; AVX2-FAST-PERLANE: # %bb.0:
497 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
498 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r10
499 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r11
500 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
501 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
502 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
503 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
504 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
505 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
506 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
507 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
508 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
509 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
510 ; AVX2-FAST-PERLANE-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
511 ; AVX2-FAST-PERLANE-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
512 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
513 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm1
514 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = zero,zero,zero,zero,ymm1[0,8],zero,zero,zero,zero,zero,zero,ymm1[1,9],zero,zero,zero,zero,zero,zero,zero,zero,ymm1[18,26],zero,zero,zero,zero,zero,zero,ymm1[19,27]
515 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm3 = ymm1[2,3,0,1]
516 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,zero,zero,ymm3[0,8],zero,zero,zero,zero,zero,zero,ymm3[1,9],zero,zero,zero,zero,ymm3[18,26],zero,zero,zero,zero,zero,zero,ymm3[19,27],zero,zero
517 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm4, %ymm2, %ymm2
518 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm0[0,8],zero,zero,zero,zero,zero,zero,ymm0[1,9],zero,zero,zero,zero,zero,zero,zero,zero,ymm0[18,26],zero,zero,zero,zero,zero,zero,ymm0[19,27],zero,zero,zero,zero
519 ; AVX2-FAST-PERLANE-NEXT: vpermq {{.*#+}} ymm5 = ymm0[2,3,0,1]
520 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,ymm5[0,8],zero,zero,zero,zero,zero,zero,ymm5[1,9],zero,zero,zero,zero,ymm5[18,26],zero,zero,zero,zero,zero,zero,ymm5[19,27],zero,zero,zero,zero,zero,zero
521 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm6, %ymm4, %ymm4
522 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm2, %ymm4, %ymm2
523 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm1[4,12],zero,zero,zero,zero,zero,zero,ymm1[5,13],zero,zero,zero,zero,zero,zero,zero,zero,ymm1[22,30],zero,zero,zero,zero,zero,zero,ymm1[23,31]
524 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,zero,zero,zero,zero,ymm3[4,12],zero,zero,zero,zero,zero,zero,ymm3[5,13],zero,zero,zero,zero,ymm3[22,30],zero,zero,zero,zero,zero,zero,ymm3[23,31],zero,zero
525 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm3, %ymm1, %ymm1
526 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[4,12],zero,zero,zero,zero,zero,zero,ymm0[5,13],zero,zero,zero,zero,zero,zero,zero,zero,ymm0[22,30],zero,zero,zero,zero,zero,zero,ymm0[23,31],zero,zero,zero,zero
527 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm3 = zero,zero,ymm5[4,12],zero,zero,zero,zero,zero,zero,ymm5[5,13],zero,zero,zero,zero,ymm5[22,30],zero,zero,zero,zero,zero,zero,ymm5[23,31],zero,zero,zero,zero,zero,zero
528 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm3, %ymm0, %ymm0
529 ; AVX2-FAST-PERLANE-NEXT: vpor %ymm1, %ymm0, %ymm0
530 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 32(%rax)
531 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, (%rax)
532 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
533 ; AVX2-FAST-PERLANE-NEXT: retq
535 ; AVX512F-SLOW-LABEL: store_i8_stride8_vf8:
536 ; AVX512F-SLOW: # %bb.0:
537 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
538 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
539 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r11
540 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
541 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
542 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
543 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
544 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
545 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
546 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
547 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
548 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
549 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
550 ; AVX512F-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
551 ; AVX512F-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
552 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
553 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm1
554 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm1[u,u,u,u,4,12],zero,zero,ymm1[u,u,u,u,5,13],zero,zero,ymm1[u,u,u,u],zero,zero,ymm1[22,30,u,u,u,u],zero,zero,ymm1[23,31]
555 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm0[4,12],zero,zero,ymm0[u,u,u,u,5,13],zero,zero,ymm0[u,u,u,u],zero,zero,ymm0[22,30,u,u,u,u],zero,zero,ymm0[23,31,u,u,u,u]
556 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4],ymm2[5],ymm3[6],ymm2[7]
557 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm1[u,u,u,u,0,8],zero,zero,ymm1[u,u,u,u,1,9],zero,zero,ymm1[u,u,u,u],zero,zero,ymm1[18,26,u,u,u,u],zero,zero,ymm1[19,27]
558 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm0[0,8],zero,zero,ymm0[u,u,u,u,1,9],zero,zero,ymm0[u,u,u,u],zero,zero,ymm0[18,26,u,u,u,u],zero,zero,ymm0[19,27,u,u,u,u]
559 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2],ymm3[3],ymm4[4],ymm3[5],ymm4[6],ymm3[7]
560 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
561 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[2,3,0,1]
562 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm1[u,u,u,u],zero,zero,ymm1[4,12,u,u,u,u],zero,zero,ymm1[5,13,u,u,u,u,22,30],zero,zero,ymm1[u,u,u,u,23,31],zero,zero
563 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,0,1]
564 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,ymm0[4,12,u,u,u,u],zero,zero,ymm0[5,13,u,u,u,u,22,30],zero,zero,ymm0[u,u,u,u,23,31],zero,zero,ymm0[u,u,u,u]
565 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2],ymm3[3],ymm4[4],ymm3[5],ymm4[6],ymm3[7]
566 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u],zero,zero,ymm1[0,8,u,u,u,u],zero,zero,ymm1[1,9,u,u,u,u,18,26],zero,zero,ymm1[u,u,u,u,19,27],zero,zero
567 ; AVX512F-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = zero,zero,ymm0[0,8,u,u,u,u],zero,zero,ymm0[1,9,u,u,u,u,18,26],zero,zero,ymm0[u,u,u,u,19,27],zero,zero,ymm0[u,u,u,u]
568 ; AVX512F-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
569 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
570 ; AVX512F-SLOW-NEXT: vpord %zmm0, %zmm2, %zmm0
571 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, (%rax)
572 ; AVX512F-SLOW-NEXT: vzeroupper
573 ; AVX512F-SLOW-NEXT: retq
575 ; AVX512-FAST-LABEL: store_i8_stride8_vf8:
576 ; AVX512-FAST: # %bb.0:
577 ; AVX512-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
578 ; AVX512-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
579 ; AVX512-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r11
580 ; AVX512-FAST-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
581 ; AVX512-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
582 ; AVX512-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
583 ; AVX512-FAST-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
584 ; AVX512-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
585 ; AVX512-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
586 ; AVX512-FAST-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
587 ; AVX512-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
588 ; AVX512-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
589 ; AVX512-FAST-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
590 ; AVX512-FAST-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
591 ; AVX512-FAST-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
592 ; AVX512-FAST-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
593 ; AVX512-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm1
594 ; AVX512-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [1,3,5,7,1,3,5,7]
595 ; AVX512-FAST-NEXT: # ymm2 = mem[0,1,0,1]
596 ; AVX512-FAST-NEXT: vpermd %ymm1, %ymm2, %ymm3
597 ; AVX512-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <u,u,u,u,0,4,8,12,u,u,u,u,1,5,9,13,u,u,u,u,2,6,10,14,u,u,u,u,3,7,11,15>
598 ; AVX512-FAST-NEXT: vpshufb %ymm4, %ymm3, %ymm3
599 ; AVX512-FAST-NEXT: vpermd %ymm0, %ymm2, %ymm2
600 ; AVX512-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <0,4,8,12,u,u,u,u,1,5,9,13,u,u,u,u,2,6,10,14,u,u,u,u,3,7,11,15,u,u,u,u>
601 ; AVX512-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm2
602 ; AVX512-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[3],ymm2[4],ymm3[5],ymm2[6],ymm3[7]
603 ; AVX512-FAST-NEXT: vbroadcasti128 {{.*#+}} ymm3 = [0,2,4,6,0,2,4,6]
604 ; AVX512-FAST-NEXT: # ymm3 = mem[0,1,0,1]
605 ; AVX512-FAST-NEXT: vpermd %ymm1, %ymm3, %ymm1
606 ; AVX512-FAST-NEXT: vpshufb %ymm4, %ymm1, %ymm1
607 ; AVX512-FAST-NEXT: vpermd %ymm0, %ymm3, %ymm0
608 ; AVX512-FAST-NEXT: vpshufb %ymm5, %ymm0, %ymm0
609 ; AVX512-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
610 ; AVX512-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
611 ; AVX512-FAST-NEXT: vmovdqa64 %zmm0, (%rax)
612 ; AVX512-FAST-NEXT: vzeroupper
613 ; AVX512-FAST-NEXT: retq
615 ; AVX512BW-SLOW-LABEL: store_i8_stride8_vf8:
616 ; AVX512BW-SLOW: # %bb.0:
617 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
618 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
619 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r11
620 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm0 = mem[0],zero
621 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
622 ; AVX512BW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
623 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm1 = mem[0],zero
624 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
625 ; AVX512BW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
626 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
627 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
628 ; AVX512BW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
629 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm3 = mem[0],zero
630 ; AVX512BW-SLOW-NEXT: vmovq {{.*#+}} xmm4 = mem[0],zero
631 ; AVX512BW-SLOW-NEXT: vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
632 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
633 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm1
634 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm2 = ymm1[u,u,u,u,4,12,u,u,u,u,u,u,5,13,u,u,u,u,u,u,u,u,22,30,u,u,u,u,u,u,23,31]
635 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm3 = ymm1[2,3,0,1]
636 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm3[u,u,u,u,u,u,4,12,u,u,u,u,u,u,5,13,u,u,u,u,22,30,u,u,u,u,u,u,23,31,u,u]
637 ; AVX512BW-SLOW-NEXT: movw $17544, %cx # imm = 0x4488
638 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
639 ; AVX512BW-SLOW-NEXT: vmovdqu16 %ymm4, %ymm2 {%k1}
640 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm4 = ymm0[4,12,u,u,u,u,u,u,5,13,u,u,u,u,u,u,u,u,22,30,u,u,u,u,u,u,23,31,u,u,u,u]
641 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm5 = ymm0[2,3,0,1]
642 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm6 = ymm5[u,u,4,12,u,u,u,u,u,u,5,13,u,u,u,u,22,30,u,u,u,u,u,u,23,31,u,u,u,u,u,u]
643 ; AVX512BW-SLOW-NEXT: movw $4386, %cx # imm = 0x1122
644 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k2
645 ; AVX512BW-SLOW-NEXT: vmovdqu16 %ymm6, %ymm4 {%k2}
646 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2],ymm2[3],ymm4[4],ymm2[5],ymm4[6],ymm2[7]
647 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,u,u,18,26,u,u,u,u,u,u,19,27]
648 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u]
649 ; AVX512BW-SLOW-NEXT: vmovdqu16 %ymm3, %ymm1 {%k1}
650 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u]
651 ; AVX512BW-SLOW-NEXT: vpshufb {{.*#+}} ymm3 = ymm5[u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,u,u]
652 ; AVX512BW-SLOW-NEXT: vmovdqu16 %ymm3, %ymm0 {%k2}
653 ; AVX512BW-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
654 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
655 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm0, (%rax)
656 ; AVX512BW-SLOW-NEXT: vzeroupper
657 ; AVX512BW-SLOW-NEXT: retq
658 %in.vec0 = load <8 x i8>, ptr %in.vecptr0, align 64
659 %in.vec1 = load <8 x i8>, ptr %in.vecptr1, align 64
660 %in.vec2 = load <8 x i8>, ptr %in.vecptr2, align 64
661 %in.vec3 = load <8 x i8>, ptr %in.vecptr3, align 64
662 %in.vec4 = load <8 x i8>, ptr %in.vecptr4, align 64
663 %in.vec5 = load <8 x i8>, ptr %in.vecptr5, align 64
664 %in.vec6 = load <8 x i8>, ptr %in.vecptr6, align 64
665 %in.vec7 = load <8 x i8>, ptr %in.vecptr7, align 64
666 %1 = shufflevector <8 x i8> %in.vec0, <8 x i8> %in.vec1, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
667 %2 = shufflevector <8 x i8> %in.vec2, <8 x i8> %in.vec3, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
668 %3 = shufflevector <8 x i8> %in.vec4, <8 x i8> %in.vec5, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
669 %4 = shufflevector <8 x i8> %in.vec6, <8 x i8> %in.vec7, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
670 %5 = shufflevector <16 x i8> %1, <16 x i8> %2, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
671 %6 = shufflevector <16 x i8> %3, <16 x i8> %4, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
672 %7 = shufflevector <32 x i8> %5, <32 x i8> %6, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
673 %interleaved.vec = shufflevector <64 x i8> %7, <64 x i8> poison, <64 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 56, i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 57, i32 2, i32 10, i32 18, i32 26, i32 34, i32 42, i32 50, i32 58, i32 3, i32 11, i32 19, i32 27, i32 35, i32 43, i32 51, i32 59, i32 4, i32 12, i32 20, i32 28, i32 36, i32 44, i32 52, i32 60, i32 5, i32 13, i32 21, i32 29, i32 37, i32 45, i32 53, i32 61, i32 6, i32 14, i32 22, i32 30, i32 38, i32 46, i32 54, i32 62, i32 7, i32 15, i32 23, i32 31, i32 39, i32 47, i32 55, i32 63>
674 store <64 x i8> %interleaved.vec, ptr %out.vec, align 64
678 define void @store_i8_stride8_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
679 ; SSE-LABEL: store_i8_stride8_vf16:
681 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
682 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
683 ; SSE-NEXT: movdqa (%rdi), %xmm10
684 ; SSE-NEXT: movdqa (%rsi), %xmm9
685 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
686 ; SSE-NEXT: movdqa (%rdx), %xmm1
687 ; SSE-NEXT: movdqa (%rcx), %xmm6
688 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
689 ; SSE-NEXT: movdqa (%r8), %xmm3
690 ; SSE-NEXT: movdqa (%r9), %xmm11
691 ; SSE-NEXT: movdqa (%r10), %xmm4
692 ; SSE-NEXT: movdqa (%rax), %xmm13
693 ; SSE-NEXT: movdqa %xmm4, %xmm12
694 ; SSE-NEXT: punpcklbw {{.*#+}} xmm12 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3],xmm12[4],xmm13[4],xmm12[5],xmm13[5],xmm12[6],xmm13[6],xmm12[7],xmm13[7]
695 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm12[0,0,2,1,4,5,6,7]
696 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
697 ; SSE-NEXT: movdqa {{.*#+}} xmm2 = [65535,65535,65535,0,65535,65535,65535,0]
698 ; SSE-NEXT: movdqa %xmm2, %xmm5
699 ; SSE-NEXT: pandn %xmm0, %xmm5
700 ; SSE-NEXT: movdqa %xmm3, %xmm14
701 ; SSE-NEXT: punpcklbw {{.*#+}} xmm14 = xmm14[0],xmm11[0],xmm14[1],xmm11[1],xmm14[2],xmm11[2],xmm14[3],xmm11[3],xmm14[4],xmm11[4],xmm14[5],xmm11[5],xmm14[6],xmm11[6],xmm14[7],xmm11[7]
702 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm14[0,1,1,3,4,5,6,7]
703 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm0[0,0,2,1]
704 ; SSE-NEXT: pand %xmm2, %xmm7
705 ; SSE-NEXT: por %xmm5, %xmm7
706 ; SSE-NEXT: movdqa %xmm1, %xmm15
707 ; SSE-NEXT: punpcklbw {{.*#+}} xmm15 = xmm15[0],xmm6[0],xmm15[1],xmm6[1],xmm15[2],xmm6[2],xmm15[3],xmm6[3],xmm15[4],xmm6[4],xmm15[5],xmm6[5],xmm15[6],xmm6[6],xmm15[7],xmm6[7]
708 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm15[0,0,2,1,4,5,6,7]
709 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm0[0,1,1,3]
710 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,0,65535,65535,65535,0,65535,65535]
711 ; SSE-NEXT: movdqa %xmm0, %xmm8
712 ; SSE-NEXT: pandn %xmm5, %xmm8
713 ; SSE-NEXT: movdqa %xmm10, %xmm6
714 ; SSE-NEXT: punpcklbw {{.*#+}} xmm6 = xmm6[0],xmm9[0],xmm6[1],xmm9[1],xmm6[2],xmm9[2],xmm6[3],xmm9[3],xmm6[4],xmm9[4],xmm6[5],xmm9[5],xmm6[6],xmm9[6],xmm6[7],xmm9[7]
715 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm6[0,0,0,0]
716 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,5,5,5,5]
717 ; SSE-NEXT: pand %xmm0, %xmm9
718 ; SSE-NEXT: por %xmm8, %xmm9
719 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,3,2,3]
720 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm9[0,2,2,3]
721 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1]
722 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
723 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm12[0,2,2,3,4,5,6,7]
724 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,0,2,1]
725 ; SSE-NEXT: movdqa %xmm2, %xmm8
726 ; SSE-NEXT: pandn %xmm7, %xmm8
727 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm14[2,1,3,3,4,5,6,7]
728 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,0,2,1]
729 ; SSE-NEXT: pand %xmm2, %xmm7
730 ; SSE-NEXT: por %xmm8, %xmm7
731 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm15[0,2,2,3,4,5,6,7]
732 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,1,1,3]
733 ; SSE-NEXT: movdqa %xmm0, %xmm9
734 ; SSE-NEXT: pandn %xmm8, %xmm9
735 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm6[1,1,1,1]
736 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,5,5,5,5]
737 ; SSE-NEXT: pand %xmm0, %xmm8
738 ; SSE-NEXT: por %xmm9, %xmm8
739 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm7[1,3,2,3]
740 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm8[0,2,2,3]
741 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm9[0],xmm7[1],xmm9[1]
742 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm12[0,1,2,3,4,6,6,7]
743 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,2,2,3]
744 ; SSE-NEXT: movdqa %xmm2, %xmm9
745 ; SSE-NEXT: pandn %xmm8, %xmm9
746 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm14[0,1,2,3,6,5,7,7]
747 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,2,2,3]
748 ; SSE-NEXT: pand %xmm2, %xmm8
749 ; SSE-NEXT: por %xmm9, %xmm8
750 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm15[0,1,2,3,4,6,6,7]
751 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[2,1,3,3]
752 ; SSE-NEXT: movdqa %xmm0, %xmm5
753 ; SSE-NEXT: pandn %xmm9, %xmm5
754 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm6[3,3,3,3]
755 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,5,5,5,5]
756 ; SSE-NEXT: pand %xmm0, %xmm9
757 ; SSE-NEXT: por %xmm5, %xmm9
758 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm8[1,3,2,3]
759 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm9[0,2,2,3]
760 ; SSE-NEXT: punpckldq {{.*#+}} xmm8 = xmm8[0],xmm5[0],xmm8[1],xmm5[1]
761 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm12[0,1,2,3,4,4,6,5]
762 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
763 ; SSE-NEXT: movdqa %xmm2, %xmm9
764 ; SSE-NEXT: pandn %xmm5, %xmm9
765 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm14[0,1,2,3,4,5,5,7]
766 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
767 ; SSE-NEXT: pand %xmm2, %xmm5
768 ; SSE-NEXT: por %xmm9, %xmm5
769 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm15[0,1,2,3,4,4,6,5]
770 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[2,1,3,3]
771 ; SSE-NEXT: movdqa %xmm0, %xmm12
772 ; SSE-NEXT: pandn %xmm9, %xmm12
773 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[2,2,2,2]
774 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,5,5,5,5]
775 ; SSE-NEXT: pand %xmm0, %xmm6
776 ; SSE-NEXT: por %xmm12, %xmm6
777 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,3,2,3]
778 ; SSE-NEXT: pshufd {{.*#+}} xmm12 = xmm6[0,2,2,3]
779 ; SSE-NEXT: punpckldq {{.*#+}} xmm12 = xmm12[0],xmm5[0],xmm12[1],xmm5[1]
780 ; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm13[8],xmm4[9],xmm13[9],xmm4[10],xmm13[10],xmm4[11],xmm13[11],xmm4[12],xmm13[12],xmm4[13],xmm13[13],xmm4[14],xmm13[14],xmm4[15],xmm13[15]
781 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm4[0,0,2,1,4,5,6,7]
782 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,0,2,1]
783 ; SSE-NEXT: movdqa %xmm2, %xmm6
784 ; SSE-NEXT: pandn %xmm5, %xmm6
785 ; SSE-NEXT: punpckhbw {{.*#+}} xmm3 = xmm3[8],xmm11[8],xmm3[9],xmm11[9],xmm3[10],xmm11[10],xmm3[11],xmm11[11],xmm3[12],xmm11[12],xmm3[13],xmm11[13],xmm3[14],xmm11[14],xmm3[15],xmm11[15]
786 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm3[0,1,1,3,4,5,6,7]
787 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,0,2,1]
788 ; SSE-NEXT: pand %xmm2, %xmm5
789 ; SSE-NEXT: por %xmm6, %xmm5
790 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,3,2,3]
791 ; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
792 ; SSE-NEXT: # xmm1 = xmm1[8],mem[8],xmm1[9],mem[9],xmm1[10],mem[10],xmm1[11],mem[11],xmm1[12],mem[12],xmm1[13],mem[13],xmm1[14],mem[14],xmm1[15],mem[15]
793 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm1[0,0,2,1,4,5,6,7]
794 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,1,1,3]
795 ; SSE-NEXT: movdqa %xmm0, %xmm9
796 ; SSE-NEXT: pandn %xmm6, %xmm9
797 ; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Folded Reload
798 ; SSE-NEXT: # xmm10 = xmm10[8],mem[8],xmm10[9],mem[9],xmm10[10],mem[10],xmm10[11],mem[11],xmm10[12],mem[12],xmm10[13],mem[13],xmm10[14],mem[14],xmm10[15],mem[15]
799 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm10[0,0,0,0]
800 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,5,5,5,5]
801 ; SSE-NEXT: pand %xmm0, %xmm6
802 ; SSE-NEXT: por %xmm9, %xmm6
803 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm6[0,2,2,3]
804 ; SSE-NEXT: punpckldq {{.*#+}} xmm9 = xmm9[0],xmm5[0],xmm9[1],xmm5[1]
805 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm4[0,2,2,3,4,5,6,7]
806 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,0,2,1]
807 ; SSE-NEXT: movdqa %xmm2, %xmm6
808 ; SSE-NEXT: pandn %xmm5, %xmm6
809 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm3[2,1,3,3,4,5,6,7]
810 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,0,2,1]
811 ; SSE-NEXT: pand %xmm2, %xmm5
812 ; SSE-NEXT: por %xmm6, %xmm5
813 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[1,3,2,3]
814 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm1[0,2,2,3,4,5,6,7]
815 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,1,1,3]
816 ; SSE-NEXT: movdqa %xmm0, %xmm11
817 ; SSE-NEXT: pandn %xmm5, %xmm11
818 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm10[1,1,1,1]
819 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,5,5,5,5]
820 ; SSE-NEXT: pand %xmm0, %xmm5
821 ; SSE-NEXT: por %xmm11, %xmm5
822 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
823 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1]
824 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm4[0,1,2,3,4,6,6,7]
825 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
826 ; SSE-NEXT: movdqa %xmm2, %xmm11
827 ; SSE-NEXT: pandn %xmm6, %xmm11
828 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm3[0,1,2,3,6,5,7,7]
829 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
830 ; SSE-NEXT: pand %xmm2, %xmm6
831 ; SSE-NEXT: por %xmm11, %xmm6
832 ; SSE-NEXT: pshufhw {{.*#+}} xmm11 = xmm1[0,1,2,3,4,6,6,7]
833 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[2,1,3,3]
834 ; SSE-NEXT: movdqa %xmm0, %xmm13
835 ; SSE-NEXT: pandn %xmm11, %xmm13
836 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm10[3,3,3,3]
837 ; SSE-NEXT: pshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,5,5,5,5]
838 ; SSE-NEXT: pand %xmm0, %xmm11
839 ; SSE-NEXT: por %xmm13, %xmm11
840 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
841 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[0,2,2,3]
842 ; SSE-NEXT: punpckldq {{.*#+}} xmm11 = xmm11[0],xmm6[0],xmm11[1],xmm6[1]
843 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,4,6,5]
844 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,2,2,3]
845 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,7]
846 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
847 ; SSE-NEXT: pand %xmm2, %xmm3
848 ; SSE-NEXT: pandn %xmm4, %xmm2
849 ; SSE-NEXT: por %xmm3, %xmm2
850 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,4,6,5]
851 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,3]
852 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm10[2,2,2,2]
853 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,5,5,5,5]
854 ; SSE-NEXT: pand %xmm0, %xmm3
855 ; SSE-NEXT: pandn %xmm1, %xmm0
856 ; SSE-NEXT: por %xmm3, %xmm0
857 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,3,2,3]
858 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
859 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
860 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
861 ; SSE-NEXT: movdqa %xmm0, 96(%rax)
862 ; SSE-NEXT: movdqa %xmm11, 112(%rax)
863 ; SSE-NEXT: movdqa %xmm5, 80(%rax)
864 ; SSE-NEXT: movdqa %xmm9, 64(%rax)
865 ; SSE-NEXT: movdqa %xmm12, 32(%rax)
866 ; SSE-NEXT: movdqa %xmm8, 48(%rax)
867 ; SSE-NEXT: movdqa %xmm7, 16(%rax)
868 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
869 ; SSE-NEXT: movaps %xmm0, (%rax)
870 ; SSE-NEXT: retq
871 ;
872 ; AVX1-ONLY-LABEL: store_i8_stride8_vf16:
873 ; AVX1-ONLY: # %bb.0:
874 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
875 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
876 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
877 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm1
878 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm2
879 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm3
880 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm4
881 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm5
882 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm6
883 ; AVX1-ONLY-NEXT: vmovdqa (%r11), %xmm8
884 ; AVX1-ONLY-NEXT: vmovdqa (%r10), %xmm9
885 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3],xmm8[4],xmm9[4],xmm8[5],xmm9[5],xmm8[6],xmm9[6],xmm8[7],xmm9[7]
886 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm7[0,2,2,3,4,5,6,7]
887 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
888 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3],xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
889 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm11 = xmm10[2,1,3,3,4,5,6,7]
890 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[0,0,2,1]
891 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm0 = xmm11[0,1,2],xmm0[3],xmm11[4,5,6],xmm0[7]
892 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm11 = xmm7[0,0,2,1,4,5,6,7]
893 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[0,0,2,1]
894 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm12 = xmm10[0,1,1,3,4,5,6,7]
895 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[0,0,2,1]
896 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm12[0,1,2],xmm11[3],xmm12[4,5,6],xmm11[7]
897 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm11, %ymm0
898 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
899 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm12 = xmm11[0,2,2,3,4,5,6,7]
900 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm12[0],zero,xmm12[1],zero
901 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
902 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[1,1,1,1]
903 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm14 = xmm14[0],zero,zero,zero,xmm14[1],zero,zero,zero
904 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm14[0],xmm12[1],xmm14[2,3,4],xmm12[5],xmm14[6,7]
905 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm14 = xmm13[0],zero,zero,zero,xmm13[1],zero,zero,zero
906 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm15 = xmm11[0,0,2,1,4,5,6,7]
907 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm15[0],zero,xmm15[1],zero
908 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm14 = xmm14[0],xmm15[1],xmm14[2,3,4],xmm15[5],xmm14[6,7]
909 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm14, %ymm12
910 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm12[0],ymm0[1],ymm12[2],ymm0[3],ymm12[4],ymm0[5],ymm12[6],ymm0[7]
911 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm12 = xmm7[0,1,2,3,4,6,6,7]
912 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[0,2,2,3]
913 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm14 = xmm10[0,1,2,3,6,5,7,7]
914 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm14 = xmm14[0,2,2,3]
915 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm12 = xmm14[0,1,2],xmm12[3],xmm14[4,5,6],xmm12[7]
916 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,4,6,5]
917 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,2,2,3]
918 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,5,5,7]
919 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[0,2,2,3]
920 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm7 = xmm10[0,1,2],xmm7[3],xmm10[4,5,6],xmm7[7]
921 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm12, %ymm7, %ymm7
922 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm10 = xmm11[0,1,2,3,4,6,6,7]
923 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[2,1,3,3]
924 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm13[3,3,3,3]
925 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm12 = xmm12[0],zero,zero,zero,xmm12[1],zero,zero,zero
926 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm10 = xmm12[0],xmm10[1],xmm12[2,3,4],xmm10[5],xmm12[6,7]
927 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,4,6,5]
928 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm11[2,1,3,3]
929 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm12 = xmm13[2,3,2,3]
930 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm12 = xmm12[0],zero,zero,zero,xmm12[1],zero,zero,zero
931 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm11 = xmm12[0],xmm11[1],xmm12[2,3,4],xmm11[5],xmm12[6,7]
932 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm11, %ymm10
933 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm10[0],ymm7[1],ymm10[2],ymm7[3],ymm10[4],ymm7[5],ymm10[6],ymm7[7]
934 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm8[8],xmm9[8],xmm8[9],xmm9[9],xmm8[10],xmm9[10],xmm8[11],xmm9[11],xmm8[12],xmm9[12],xmm8[13],xmm9[13],xmm8[14],xmm9[14],xmm8[15],xmm9[15]
935 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm9 = xmm8[0,2,2,3,4,5,6,7]
936 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[0,0,2,1]
937 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
938 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm5[2,1,3,3,4,5,6,7]
939 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
940 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2],xmm9[3],xmm6[4,5,6],xmm9[7]
941 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm9 = xmm8[0,0,2,1,4,5,6,7]
942 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm9[0,0,2,1]
943 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm10 = xmm5[0,1,1,3,4,5,6,7]
944 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[0,0,2,1]
945 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm9 = xmm10[0,1,2],xmm9[3],xmm10[4,5,6],xmm9[7]
946 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm9, %ymm6
947 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm4[8],xmm3[9],xmm4[9],xmm3[10],xmm4[10],xmm3[11],xmm4[11],xmm3[12],xmm4[12],xmm3[13],xmm4[13],xmm3[14],xmm4[14],xmm3[15],xmm4[15]
948 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm3[0,2,2,3,4,5,6,7]
949 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm4 = xmm4[0],zero,xmm4[1],zero
950 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm2[8],xmm1[9],xmm2[9],xmm1[10],xmm2[10],xmm1[11],xmm2[11],xmm1[12],xmm2[12],xmm1[13],xmm2[13],xmm1[14],xmm2[14],xmm1[15],xmm2[15]
951 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[1,1,1,1]
952 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
953 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm4[1],xmm2[2,3,4],xmm4[5],xmm2[6,7]
954 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
955 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm9 = xmm3[0,0,2,1,4,5,6,7]
956 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm9[0],zero,xmm9[1],zero
957 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm9[1],xmm4[2,3,4],xmm9[5],xmm4[6,7]
958 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm4, %ymm2
959 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm2[0],ymm6[1],ymm2[2],ymm6[3],ymm2[4],ymm6[5],ymm2[6],ymm6[7]
960 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm8[0,1,2,3,4,6,6,7]
961 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[0,2,2,3]
962 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm5[0,1,2,3,6,5,7,7]
963 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
964 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm4 = xmm6[0,1,2],xmm4[3],xmm6[4,5,6],xmm4[7]
965 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm8[0,1,2,3,4,4,6,5]
966 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
967 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,5,7]
968 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
969 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3],xmm5[4,5,6],xmm6[7]
970 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm5, %ymm4
971 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm3[0,1,2,3,4,6,6,7]
972 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[2,1,3,3]
973 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm1[3,3,3,3]
974 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
975 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3,4],xmm5[5],xmm6[6,7]
976 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,4,6,5]
977 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,1,3,3]
978 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,3,2,3]
979 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
980 ; AVX1-ONLY-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm3[1],xmm1[2,3,4],xmm3[5],xmm1[6,7]
981 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm1, %ymm1
982 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0],ymm4[1],ymm1[2],ymm4[3],ymm1[4],ymm4[5],ymm1[6],ymm4[7]
983 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 96(%rax)
984 ; AVX1-ONLY-NEXT: vmovaps %ymm2, 64(%rax)
985 ; AVX1-ONLY-NEXT: vmovaps %ymm7, 32(%rax)
986 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
987 ; AVX1-ONLY-NEXT: vzeroupper
988 ; AVX1-ONLY-NEXT: retq
989 ;
990 ; AVX2-ONLY-LABEL: store_i8_stride8_vf16:
991 ; AVX2-ONLY: # %bb.0:
992 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
993 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
994 ; AVX2-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r11
995 ; AVX2-ONLY-NEXT: vmovdqa (%rdi), %xmm0
996 ; AVX2-ONLY-NEXT: vmovdqa (%rdx), %xmm1
997 ; AVX2-ONLY-NEXT: vmovdqa (%r8), %xmm2
998 ; AVX2-ONLY-NEXT: vmovdqa (%r11), %xmm3
999 ; AVX2-ONLY-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
1000 ; AVX2-ONLY-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
1001 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm4
1002 ; AVX2-ONLY-NEXT: vinserti128 $1, (%r10), %ymm3, %ymm3
1003 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm5 = ymm3[0,2,0,2]
1004 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm6 = <u,u,u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,2,10,u,u,u,u,u,u,3,11>
1005 ; AVX2-ONLY-NEXT: vpshufb %ymm6, %ymm5, %ymm2
1006 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm7 = ymm4[0,2,0,2]
1007 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,2,10,u,u,u,u,u,u,3,11,u,u>
1008 ; AVX2-ONLY-NEXT: vpshufb %ymm8, %ymm7, %ymm9
1009 ; AVX2-ONLY-NEXT: vpblendw {{.*#+}} ymm2 = ymm9[0,1,2],ymm2[3],ymm9[4,5,6],ymm2[7],ymm9[8,9,10],ymm2[11],ymm9[12,13,14],ymm2[15]
1010 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm9 = ymm1[0,2,0,2]
1011 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,2,10,u,u,u,u,u,u,3,11,u,u,u,u>
1012 ; AVX2-ONLY-NEXT: vpshufb %ymm10, %ymm9, %ymm11
1013 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm12 = ymm0[0,2,0,2]
1014 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm13 = <0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,2,10,u,u,u,u,u,u,3,11,u,u,u,u,u,u>
1015 ; AVX2-ONLY-NEXT: vpshufb %ymm13, %ymm12, %ymm14
1016 ; AVX2-ONLY-NEXT: vpblendw {{.*#+}} ymm11 = ymm14[0],ymm11[1],ymm14[2,3,4],ymm11[5],ymm14[6,7,8],ymm11[9],ymm14[10,11,12],ymm11[13],ymm14[14,15]
1017 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm2 = ymm11[0],ymm2[1],ymm11[2],ymm2[3],ymm11[4],ymm2[5],ymm11[6],ymm2[7]
1018 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,4,12,u,u,u,u,u,u,5,13,u,u,u,u,u,u,6,14,u,u,u,u,u,u,7,15>
1019 ; AVX2-ONLY-NEXT: vpshufb %ymm11, %ymm5, %ymm5
1020 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,u,4,12,u,u,u,u,u,u,5,13,u,u,u,u,u,u,6,14,u,u,u,u,u,u,7,15,u,u>
1021 ; AVX2-ONLY-NEXT: vpshufb %ymm14, %ymm7, %ymm7
1022 ; AVX2-ONLY-NEXT: vpblendw {{.*#+}} ymm5 = ymm7[0,1,2],ymm5[3],ymm7[4,5,6],ymm5[7],ymm7[8,9,10],ymm5[11],ymm7[12,13,14],ymm5[15]
1023 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,4,12,u,u,u,u,u,u,5,13,u,u,u,u,u,u,6,14,u,u,u,u,u,u,7,15,u,u,u,u>
1024 ; AVX2-ONLY-NEXT: vpshufb %ymm7, %ymm9, %ymm9
1025 ; AVX2-ONLY-NEXT: vmovdqa {{.*#+}} ymm15 = <4,12,u,u,u,u,u,u,5,13,u,u,u,u,u,u,6,14,u,u,u,u,u,u,7,15,u,u,u,u,u,u>
1026 ; AVX2-ONLY-NEXT: vpshufb %ymm15, %ymm12, %ymm12
1027 ; AVX2-ONLY-NEXT: vpblendw {{.*#+}} ymm9 = ymm12[0],ymm9[1],ymm12[2,3,4],ymm9[5],ymm12[6,7,8],ymm9[9],ymm12[10,11,12],ymm9[13],ymm12[14,15]
1028 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm5 = ymm9[0],ymm5[1],ymm9[2],ymm5[3],ymm9[4],ymm5[5],ymm9[6],ymm5[7]
1029 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm3 = ymm3[1,3,1,3]
1030 ; AVX2-ONLY-NEXT: vpshufb %ymm6, %ymm3, %ymm6
1031 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm4 = ymm4[1,3,1,3]
1032 ; AVX2-ONLY-NEXT: vpshufb %ymm8, %ymm4, %ymm8
1033 ; AVX2-ONLY-NEXT: vpblendw {{.*#+}} ymm6 = ymm8[0,1,2],ymm6[3],ymm8[4,5,6],ymm6[7],ymm8[8,9,10],ymm6[11],ymm8[12,13,14],ymm6[15]
1034 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
1035 ; AVX2-ONLY-NEXT: vpshufb %ymm10, %ymm1, %ymm8
1036 ; AVX2-ONLY-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
1037 ; AVX2-ONLY-NEXT: vpshufb %ymm13, %ymm0, %ymm9
1038 ; AVX2-ONLY-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2,3,4],ymm8[5],ymm9[6,7,8],ymm8[9],ymm9[10,11,12],ymm8[13],ymm9[14,15]
1039 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0],ymm6[1],ymm8[2],ymm6[3],ymm8[4],ymm6[5],ymm8[6],ymm6[7]
1040 ; AVX2-ONLY-NEXT: vpshufb %ymm11, %ymm3, %ymm3
1041 ; AVX2-ONLY-NEXT: vpshufb %ymm14, %ymm4, %ymm4
1042 ; AVX2-ONLY-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7],ymm4[8,9,10],ymm3[11],ymm4[12,13,14],ymm3[15]
1043 ; AVX2-ONLY-NEXT: vpshufb %ymm7, %ymm1, %ymm1
1044 ; AVX2-ONLY-NEXT: vpshufb %ymm15, %ymm0, %ymm0
1045 ; AVX2-ONLY-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3,4],ymm1[5],ymm0[6,7,8],ymm1[9],ymm0[10,11,12],ymm1[13],ymm0[14,15]
1046 ; AVX2-ONLY-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2],ymm3[3],ymm0[4],ymm3[5],ymm0[6],ymm3[7]
1047 ; AVX2-ONLY-NEXT: vmovdqa %ymm0, 96(%rax)
1048 ; AVX2-ONLY-NEXT: vmovdqa %ymm6, 64(%rax)
1049 ; AVX2-ONLY-NEXT: vmovdqa %ymm5, 32(%rax)
1050 ; AVX2-ONLY-NEXT: vmovdqa %ymm2, (%rax)
1051 ; AVX2-ONLY-NEXT: vzeroupper
1052 ; AVX2-ONLY-NEXT: retq
1053 ;
1054 ; AVX512-LABEL: store_i8_stride8_vf16:
1055 ; AVX512: # %bb.0:
1056 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %rax
1057 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r10
1058 ; AVX512-NEXT: movq {{[0-9]+}}(%rsp), %r11
1059 ; AVX512-NEXT: vmovdqa (%rdi), %xmm0
1060 ; AVX512-NEXT: vmovdqa (%rdx), %xmm1
1061 ; AVX512-NEXT: vmovdqa (%r8), %xmm2
1062 ; AVX512-NEXT: vmovdqa (%r11), %xmm3
1063 ; AVX512-NEXT: vinserti128 $1, (%rsi), %ymm0, %ymm0
1064 ; AVX512-NEXT: vinserti128 $1, (%rcx), %ymm1, %ymm1
1065 ; AVX512-NEXT: vinserti128 $1, (%r9), %ymm2, %ymm2
1066 ; AVX512-NEXT: vinserti128 $1, (%r10), %ymm3, %ymm3
1067 ; AVX512-NEXT: vpermq {{.*#+}} ymm4 = ymm3[0,2,0,2]
1068 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm5 = <u,u,u,u,u,u,4,12,u,u,u,u,u,u,5,13,u,u,u,u,u,u,6,14,u,u,u,u,u,u,7,15>
1069 ; AVX512-NEXT: vpshufb %ymm5, %ymm4, %ymm6
1070 ; AVX512-NEXT: vpermq {{.*#+}} ymm7 = ymm2[0,2,0,2]
1071 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm8 = <u,u,u,u,4,12,u,u,u,u,u,u,5,13,u,u,u,u,u,u,6,14,u,u,u,u,u,u,7,15,u,u>
1072 ; AVX512-NEXT: vpshufb %ymm8, %ymm7, %ymm9
1073 ; AVX512-NEXT: vpblendw {{.*#+}} ymm6 = ymm9[0,1,2],ymm6[3],ymm9[4,5,6],ymm6[7],ymm9[8,9,10],ymm6[11],ymm9[12,13,14],ymm6[15]
1074 ; AVX512-NEXT: vpermq {{.*#+}} ymm9 = ymm1[0,2,0,2]
1075 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm10 = <u,u,4,12,u,u,u,u,u,u,5,13,u,u,u,u,u,u,6,14,u,u,u,u,u,u,7,15,u,u,u,u>
1076 ; AVX512-NEXT: vpshufb %ymm10, %ymm9, %ymm11
1077 ; AVX512-NEXT: vpermq {{.*#+}} ymm12 = ymm0[0,2,0,2]
1078 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm13 = <4,12,u,u,u,u,u,u,5,13,u,u,u,u,u,u,6,14,u,u,u,u,u,u,7,15,u,u,u,u,u,u>
1079 ; AVX512-NEXT: vpshufb %ymm13, %ymm12, %ymm14
1080 ; AVX512-NEXT: vpblendw {{.*#+}} ymm11 = ymm14[0],ymm11[1],ymm14[2,3,4],ymm11[5],ymm14[6,7,8],ymm11[9],ymm14[10,11,12],ymm11[13],ymm14[14,15]
1081 ; AVX512-NEXT: vpblendd {{.*#+}} ymm6 = ymm11[0],ymm6[1],ymm11[2],ymm6[3],ymm11[4],ymm6[5],ymm11[6],ymm6[7]
1082 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm11 = <u,u,u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,2,10,u,u,u,u,u,u,3,11>
1083 ; AVX512-NEXT: vpshufb %ymm11, %ymm4, %ymm4
1084 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm14 = <u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,2,10,u,u,u,u,u,u,3,11,u,u>
1085 ; AVX512-NEXT: vpshufb %ymm14, %ymm7, %ymm7
1086 ; AVX512-NEXT: vpblendw {{.*#+}} ymm4 = ymm7[0,1,2],ymm4[3],ymm7[4,5,6],ymm4[7],ymm7[8,9,10],ymm4[11],ymm7[12,13,14],ymm4[15]
1087 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm7 = <u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,2,10,u,u,u,u,u,u,3,11,u,u,u,u>
1088 ; AVX512-NEXT: vpshufb %ymm7, %ymm9, %ymm9
1089 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm15 = <0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,2,10,u,u,u,u,u,u,3,11,u,u,u,u,u,u>
1090 ; AVX512-NEXT: vpshufb %ymm15, %ymm12, %ymm12
1091 ; AVX512-NEXT: vpblendw {{.*#+}} ymm9 = ymm12[0],ymm9[1],ymm12[2,3,4],ymm9[5],ymm12[6,7,8],ymm9[9],ymm12[10,11,12],ymm9[13],ymm12[14,15]
1092 ; AVX512-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0],ymm4[1],ymm9[2],ymm4[3],ymm9[4],ymm4[5],ymm9[6],ymm4[7]
1093 ; AVX512-NEXT: vinserti64x4 $1, %ymm6, %zmm4, %zmm4
1094 ; AVX512-NEXT: vpermq {{.*#+}} ymm3 = ymm3[1,3,1,3]
1095 ; AVX512-NEXT: vpshufb %ymm5, %ymm3, %ymm5
1096 ; AVX512-NEXT: vpermq {{.*#+}} ymm2 = ymm2[1,3,1,3]
1097 ; AVX512-NEXT: vpshufb %ymm8, %ymm2, %ymm6
1098 ; AVX512-NEXT: vpblendw {{.*#+}} ymm5 = ymm6[0,1,2],ymm5[3],ymm6[4,5,6],ymm5[7],ymm6[8,9,10],ymm5[11],ymm6[12,13,14],ymm5[15]
1099 ; AVX512-NEXT: vpermq {{.*#+}} ymm1 = ymm1[1,3,1,3]
1100 ; AVX512-NEXT: vpshufb %ymm10, %ymm1, %ymm6
1101 ; AVX512-NEXT: vpermq {{.*#+}} ymm0 = ymm0[1,3,1,3]
1102 ; AVX512-NEXT: vpshufb %ymm13, %ymm0, %ymm8
1103 ; AVX512-NEXT: vpblendw {{.*#+}} ymm6 = ymm8[0],ymm6[1],ymm8[2,3,4],ymm6[5],ymm8[6,7,8],ymm6[9],ymm8[10,11,12],ymm6[13],ymm8[14,15]
1104 ; AVX512-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2],ymm5[3],ymm6[4],ymm5[5],ymm6[6],ymm5[7]
1105 ; AVX512-NEXT: vpshufb %ymm11, %ymm3, %ymm3
1106 ; AVX512-NEXT: vpshufb %ymm14, %ymm2, %ymm2
1107 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3],ymm2[4,5,6],ymm3[7],ymm2[8,9,10],ymm3[11],ymm2[12,13,14],ymm3[15]
1108 ; AVX512-NEXT: vpshufb %ymm7, %ymm1, %ymm1
1109 ; AVX512-NEXT: vpshufb %ymm15, %ymm0, %ymm0
1110 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3,4],ymm1[5],ymm0[6,7,8],ymm1[9],ymm0[10,11,12],ymm1[13],ymm0[14,15]
1111 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2],ymm2[3],ymm0[4],ymm2[5],ymm0[6],ymm2[7]
1112 ; AVX512-NEXT: vinserti64x4 $1, %ymm5, %zmm0, %zmm0
1113 ; AVX512-NEXT: vmovdqa64 %zmm0, 64(%rax)
1114 ; AVX512-NEXT: vmovdqa64 %zmm4, (%rax)
1115 ; AVX512-NEXT: vzeroupper
1116 ; AVX512-NEXT: retq
1117 %in.vec0 = load <16 x i8>, ptr %in.vecptr0, align 64
1118 %in.vec1 = load <16 x i8>, ptr %in.vecptr1, align 64
1119 %in.vec2 = load <16 x i8>, ptr %in.vecptr2, align 64
1120 %in.vec3 = load <16 x i8>, ptr %in.vecptr3, align 64
1121 %in.vec4 = load <16 x i8>, ptr %in.vecptr4, align 64
1122 %in.vec5 = load <16 x i8>, ptr %in.vecptr5, align 64
1123 %in.vec6 = load <16 x i8>, ptr %in.vecptr6, align 64
1124 %in.vec7 = load <16 x i8>, ptr %in.vecptr7, align 64
1125 %1 = shufflevector <16 x i8> %in.vec0, <16 x i8> %in.vec1, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1126 %2 = shufflevector <16 x i8> %in.vec2, <16 x i8> %in.vec3, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1127 %3 = shufflevector <16 x i8> %in.vec4, <16 x i8> %in.vec5, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1128 %4 = shufflevector <16 x i8> %in.vec6, <16 x i8> %in.vec7, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1129 %5 = shufflevector <32 x i8> %1, <32 x i8> %2, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1130 %6 = shufflevector <32 x i8> %3, <32 x i8> %4, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
1131 %7 = shufflevector <64 x i8> %5, <64 x i8> %6, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
1132 %interleaved.vec = shufflevector <128 x i8> %7, <128 x i8> poison, <128 x i32> <i32 0, i32 16, i32 32, i32 48, i32 64, i32 80, i32 96, i32 112, i32 1, i32 17, i32 33, i32 49, i32 65, i32 81, i32 97, i32 113, i32 2, i32 18, i32 34, i32 50, i32 66, i32 82, i32 98, i32 114, i32 3, i32 19, i32 35, i32 51, i32 67, i32 83, i32 99, i32 115, i32 4, i32 20, i32 36, i32 52, i32 68, i32 84, i32 100, i32 116, i32 5, i32 21, i32 37, i32 53, i32 69, i32 85, i32 101, i32 117, i32 6, i32 22, i32 38, i32 54, i32 70, i32 86, i32 102, i32 118, i32 7, i32 23, i32 39, i32 55, i32 71, i32 87, i32 103, i32 119, i32 8, i32 24, i32 40, i32 56, i32 72, i32 88, i32 104, i32 120, i32 9, i32 25, i32 41, i32 57, i32 73, i32 89, i32 105, i32 121, i32 10, i32 26, i32 42, i32 58, i32 74, i32 90, i32 106, i32 122, i32 11, i32 27, i32 43, i32 59, i32 75, i32 91, i32 107, i32 123, i32 12, i32 28, i32 44, i32 60, i32 76, i32 92, i32 108, i32 124, i32 13, i32 29, i32 45, i32 61, i32 77, i32 93, i32 109, i32 125, i32 14, i32 30, i32 46, i32 62, i32 78, i32 94, i32 110, i32 126, i32 15, i32 31, i32 47, i32 63, i32 79, i32 95, i32 111, i32 127>
1133 store <128 x i8> %interleaved.vec, ptr %out.vec, align 64
1134 ret void
1135 }
1137 define void @store_i8_stride8_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
1138 ; SSE-LABEL: store_i8_stride8_vf32:
1139 ; SSE: # %bb.0:
1140 ; SSE-NEXT: subq $232, %rsp
1141 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1142 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
1143 ; SSE-NEXT: movdqa (%rdi), %xmm5
1144 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1145 ; SSE-NEXT: movdqa (%rsi), %xmm4
1146 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1147 ; SSE-NEXT: movdqa (%rdx), %xmm1
1148 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1149 ; SSE-NEXT: movdqa (%rcx), %xmm8
1150 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1151 ; SSE-NEXT: movdqa (%r8), %xmm13
1152 ; SSE-NEXT: movdqa (%r9), %xmm12
1153 ; SSE-NEXT: movdqa (%r10), %xmm14
1154 ; SSE-NEXT: movdqa (%rax), %xmm11
1155 ; SSE-NEXT: movdqa %xmm14, %xmm2
1156 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm11[0],xmm2[1],xmm11[1],xmm2[2],xmm11[2],xmm2[3],xmm11[3],xmm2[4],xmm11[4],xmm2[5],xmm11[5],xmm2[6],xmm11[6],xmm2[7],xmm11[7]
1157 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[0,2,2,3,4,5,6,7]
1158 ; SSE-NEXT: movdqa %xmm2, %xmm15
1159 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1160 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm0[0,0,2,1]
1161 ; SSE-NEXT: movdqa {{.*#+}} xmm9 = [65535,65535,65535,0,65535,65535,65535,0]
1162 ; SSE-NEXT: movdqa %xmm9, %xmm6
1163 ; SSE-NEXT: pandn %xmm2, %xmm6
1164 ; SSE-NEXT: movdqa %xmm13, %xmm3
1165 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm12[0],xmm3[1],xmm12[1],xmm3[2],xmm12[2],xmm3[3],xmm12[3],xmm3[4],xmm12[4],xmm3[5],xmm12[5],xmm3[6],xmm12[6],xmm3[7],xmm12[7]
1166 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm3[2,1,3,3,4,5,6,7]
1167 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1168 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm2[0,0,2,1]
1169 ; SSE-NEXT: pand %xmm9, %xmm7
1170 ; SSE-NEXT: por %xmm6, %xmm7
1171 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm8[0],xmm1[1],xmm8[1],xmm1[2],xmm8[2],xmm1[3],xmm8[3],xmm1[4],xmm8[4],xmm1[5],xmm8[5],xmm1[6],xmm8[6],xmm1[7],xmm8[7]
1172 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm1[0,2,2,3,4,5,6,7]
1173 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1174 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm2[0,1,1,3]
1175 ; SSE-NEXT: movdqa {{.*#+}} xmm8 = [65535,0,65535,65535,65535,0,65535,65535]
1176 ; SSE-NEXT: movdqa %xmm8, %xmm10
1177 ; SSE-NEXT: pandn %xmm6, %xmm10
1178 ; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
1179 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[1,1,1,1]
1180 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1181 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,5,5,5,5]
1182 ; SSE-NEXT: pand %xmm8, %xmm6
1183 ; SSE-NEXT: por %xmm10, %xmm6
1184 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,3,2,3]
1185 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[0,2,2,3]
1186 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm7[0],xmm0[1],xmm7[1]
1187 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1188 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm15[0,0,2,1,4,5,6,7]
1189 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
1190 ; SSE-NEXT: movdqa %xmm9, %xmm7
1191 ; SSE-NEXT: pandn %xmm6, %xmm7
1192 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm3[0,1,1,3,4,5,6,7]
1193 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
1194 ; SSE-NEXT: pand %xmm9, %xmm6
1195 ; SSE-NEXT: por %xmm7, %xmm6
1196 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm1[0,0,2,1,4,5,6,7]
1197 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,1,1,3]
1198 ; SSE-NEXT: movdqa %xmm8, %xmm10
1199 ; SSE-NEXT: pandn %xmm7, %xmm10
1200 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm5[0,0,0,0]
1201 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,5,5,5,5]
1202 ; SSE-NEXT: pand %xmm8, %xmm7
1203 ; SSE-NEXT: por %xmm10, %xmm7
1204 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
1205 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm7[0,2,2,3]
1206 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1]
1207 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1208 ; SSE-NEXT: punpckhbw {{.*#+}} xmm14 = xmm14[8],xmm11[8],xmm14[9],xmm11[9],xmm14[10],xmm11[10],xmm14[11],xmm11[11],xmm14[12],xmm11[12],xmm14[13],xmm11[13],xmm14[14],xmm11[14],xmm14[15],xmm11[15]
1209 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm14[0,2,2,3,4,5,6,7]
1210 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1211 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,0,2,1]
1212 ; SSE-NEXT: movdqa %xmm9, %xmm6
1213 ; SSE-NEXT: pandn %xmm5, %xmm6
1214 ; SSE-NEXT: punpckhbw {{.*#+}} xmm13 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
1215 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm13[2,1,3,3,4,5,6,7]
1216 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1217 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,0,2,1]
1218 ; SSE-NEXT: pand %xmm9, %xmm4
1219 ; SSE-NEXT: por %xmm6, %xmm4
1220 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[1,3,2,3]
1221 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
1222 ; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
1223 ; SSE-NEXT: # xmm12 = xmm12[8],mem[8],xmm12[9],mem[9],xmm12[10],mem[10],xmm12[11],mem[11],xmm12[12],mem[12],xmm12[13],mem[13],xmm12[14],mem[14],xmm12[15],mem[15]
1224 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm12[0,2,2,3,4,5,6,7]
1225 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1226 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,1,1,3]
1227 ; SSE-NEXT: movdqa %xmm8, %xmm5
1228 ; SSE-NEXT: pandn %xmm3, %xmm5
1229 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
1230 ; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
1231 ; SSE-NEXT: # xmm11 = xmm11[8],mem[8],xmm11[9],mem[9],xmm11[10],mem[10],xmm11[11],mem[11],xmm11[12],mem[12],xmm11[13],mem[13],xmm11[14],mem[14],xmm11[15],mem[15]
1232 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm11[1,1,1,1]
1233 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1234 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,5,5,5]
1235 ; SSE-NEXT: pand %xmm8, %xmm1
1236 ; SSE-NEXT: por %xmm5, %xmm1
1237 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
1238 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
1239 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1240 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm14[0,0,2,1,4,5,6,7]
1241 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,1]
1242 ; SSE-NEXT: movdqa %xmm9, %xmm3
1243 ; SSE-NEXT: pandn %xmm1, %xmm3
1244 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm13[0,1,1,3,4,5,6,7]
1245 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,1]
1246 ; SSE-NEXT: pand %xmm9, %xmm1
1247 ; SSE-NEXT: por %xmm3, %xmm1
1248 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm12[0,0,2,1,4,5,6,7]
1249 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,1,1,3]
1250 ; SSE-NEXT: movdqa %xmm8, %xmm4
1251 ; SSE-NEXT: pandn %xmm3, %xmm4
1252 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm11[0,0,0,0]
1253 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,5,5,5,5]
1254 ; SSE-NEXT: pand %xmm8, %xmm3
1255 ; SSE-NEXT: por %xmm4, %xmm3
1256 ; SSE-NEXT: movdqa 16(%r10), %xmm10
1257 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,3,2,3]
1258 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm3[0,2,2,3]
1259 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
1260 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1261 ; SSE-NEXT: movdqa 16(%rax), %xmm1
1262 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1263 ; SSE-NEXT: movdqa %xmm10, %xmm5
1264 ; SSE-NEXT: punpcklbw {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3],xmm5[4],xmm1[4],xmm5[5],xmm1[5],xmm5[6],xmm1[6],xmm5[7],xmm1[7]
1265 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm5[0,2,2,3,4,5,6,7]
1266 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1267 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,0,2,1]
1268 ; SSE-NEXT: movdqa %xmm9, %xmm3
1269 ; SSE-NEXT: pandn %xmm1, %xmm3
1270 ; SSE-NEXT: movdqa 16(%r8), %xmm12
1271 ; SSE-NEXT: movdqa 16(%r9), %xmm11
1272 ; SSE-NEXT: movdqa %xmm12, %xmm4
1273 ; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0],xmm11[0],xmm4[1],xmm11[1],xmm4[2],xmm11[2],xmm4[3],xmm11[3],xmm4[4],xmm11[4],xmm4[5],xmm11[5],xmm4[6],xmm11[6],xmm4[7],xmm11[7]
1274 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm4[2,1,3,3,4,5,6,7]
1275 ; SSE-NEXT: movdqa %xmm4, (%rsp) # 16-byte Spill
1276 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm1[0,0,2,1]
1277 ; SSE-NEXT: pand %xmm9, %xmm0
1278 ; SSE-NEXT: por %xmm3, %xmm0
1279 ; SSE-NEXT: movdqa 16(%rdx), %xmm13
1280 ; SSE-NEXT: movdqa 16(%rcx), %xmm7
1281 ; SSE-NEXT: movdqa %xmm13, %xmm3
1282 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm7[0],xmm3[1],xmm7[1],xmm3[2],xmm7[2],xmm3[3],xmm7[3],xmm3[4],xmm7[4],xmm3[5],xmm7[5],xmm3[6],xmm7[6],xmm3[7],xmm7[7]
1283 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm3[0,2,2,3,4,5,6,7]
1284 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1285 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
1286 ; SSE-NEXT: movdqa %xmm8, %xmm2
1287 ; SSE-NEXT: pandn %xmm1, %xmm2
1288 ; SSE-NEXT: movdqa 16(%rdi), %xmm14
1289 ; SSE-NEXT: movdqa 16(%rsi), %xmm6
1290 ; SSE-NEXT: movdqa %xmm14, %xmm1
1291 ; SSE-NEXT: punpcklbw {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3],xmm1[4],xmm6[4],xmm1[5],xmm6[5],xmm1[6],xmm6[6],xmm1[7],xmm6[7]
1292 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm1[1,1,1,1]
1293 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1294 ; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,5,5,5,5]
1295 ; SSE-NEXT: pand %xmm8, %xmm15
1296 ; SSE-NEXT: por %xmm2, %xmm15
1297 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
1298 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm15[0,2,2,3]
1299 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
1300 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1301 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm5[0,0,2,1,4,5,6,7]
1302 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
1303 ; SSE-NEXT: movdqa %xmm9, %xmm2
1304 ; SSE-NEXT: pandn %xmm0, %xmm2
1305 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm4[0,1,1,3,4,5,6,7]
1306 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
1307 ; SSE-NEXT: pand %xmm9, %xmm0
1308 ; SSE-NEXT: por %xmm2, %xmm0
1309 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm3[0,0,2,1,4,5,6,7]
1310 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
1311 ; SSE-NEXT: movdqa %xmm8, %xmm15
1312 ; SSE-NEXT: pandn %xmm2, %xmm15
1313 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,0,0]
1314 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,5,5,5,5]
1315 ; SSE-NEXT: pand %xmm8, %xmm2
1316 ; SSE-NEXT: por %xmm15, %xmm2
1317 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
1318 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
1319 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
1320 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1321 ; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Folded Reload
1322 ; SSE-NEXT: # xmm10 = xmm10[8],mem[8],xmm10[9],mem[9],xmm10[10],mem[10],xmm10[11],mem[11],xmm10[12],mem[12],xmm10[13],mem[13],xmm10[14],mem[14],xmm10[15],mem[15]
1323 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm10[0,2,2,3,4,5,6,7]
1324 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1325 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
1326 ; SSE-NEXT: movdqa %xmm9, %xmm2
1327 ; SSE-NEXT: pandn %xmm0, %xmm2
1328 ; SSE-NEXT: movdqa %xmm12, %xmm15
1329 ; SSE-NEXT: punpckhbw {{.*#+}} xmm15 = xmm15[8],xmm11[8],xmm15[9],xmm11[9],xmm15[10],xmm11[10],xmm15[11],xmm11[11],xmm15[12],xmm11[12],xmm15[13],xmm11[13],xmm15[14],xmm11[14],xmm15[15],xmm11[15]
1330 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm15[2,1,3,3,4,5,6,7]
1331 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
1332 ; SSE-NEXT: pand %xmm9, %xmm0
1333 ; SSE-NEXT: por %xmm2, %xmm0
1334 ; SSE-NEXT: punpckhbw {{.*#+}} xmm13 = xmm13[8],xmm7[8],xmm13[9],xmm7[9],xmm13[10],xmm7[10],xmm13[11],xmm7[11],xmm13[12],xmm7[12],xmm13[13],xmm7[13],xmm13[14],xmm7[14],xmm13[15],xmm7[15]
1335 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm13[0,2,2,3,4,5,6,7]
1336 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
1337 ; SSE-NEXT: movdqa %xmm8, %xmm3
1338 ; SSE-NEXT: pandn %xmm2, %xmm3
1339 ; SSE-NEXT: punpckhbw {{.*#+}} xmm14 = xmm14[8],xmm6[8],xmm14[9],xmm6[9],xmm14[10],xmm6[10],xmm14[11],xmm6[11],xmm14[12],xmm6[12],xmm14[13],xmm6[13],xmm14[14],xmm6[14],xmm14[15],xmm6[15]
1340 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm14[1,1,1,1]
1341 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,5,5,5]
1342 ; SSE-NEXT: pand %xmm8, %xmm1
1343 ; SSE-NEXT: por %xmm3, %xmm1
1344 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
1345 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
1346 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1347 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1348 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm10[0,0,2,1,4,5,6,7]
1349 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
1350 ; SSE-NEXT: movdqa %xmm9, %xmm1
1351 ; SSE-NEXT: pandn %xmm0, %xmm1
1352 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm15[0,1,1,3,4,5,6,7]
1353 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
1354 ; SSE-NEXT: pand %xmm9, %xmm0
1355 ; SSE-NEXT: por %xmm1, %xmm0
1356 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm13[0,0,2,1,4,5,6,7]
1357 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,1,3]
1358 ; SSE-NEXT: movdqa %xmm8, %xmm2
1359 ; SSE-NEXT: pandn %xmm1, %xmm2
1360 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm14[0,0,0,0]
1361 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,5,5,5]
1362 ; SSE-NEXT: pand %xmm8, %xmm1
1363 ; SSE-NEXT: por %xmm2, %xmm1
1364 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
1365 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
1366 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1367 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1368 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
1369 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm3[0,1,2,3,4,6,6,7]
1370 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
1371 ; SSE-NEXT: movdqa %xmm9, %xmm1
1372 ; SSE-NEXT: pandn %xmm0, %xmm1
1373 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
1374 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,6,5,7,7]
1375 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
1376 ; SSE-NEXT: pand %xmm9, %xmm0
1377 ; SSE-NEXT: por %xmm1, %xmm0
1378 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
1379 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm5[0,1,2,3,4,6,6,7]
1380 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,3]
1381 ; SSE-NEXT: movdqa %xmm8, %xmm2
1382 ; SSE-NEXT: pandn %xmm1, %xmm2
1383 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
1384 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm10[3,3,3,3]
1385 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,5,5,5]
1386 ; SSE-NEXT: pand %xmm8, %xmm1
1387 ; SSE-NEXT: por %xmm2, %xmm1
1388 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
1389 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
1390 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1391 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1392 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm3[0,1,2,3,4,4,6,5]
1393 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
1394 ; SSE-NEXT: movdqa %xmm9, %xmm1
1395 ; SSE-NEXT: pandn %xmm0, %xmm1
1396 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,4,5,5,7]
1397 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
1398 ; SSE-NEXT: pand %xmm9, %xmm0
1399 ; SSE-NEXT: por %xmm1, %xmm0
1400 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm5[0,1,2,3,4,4,6,5]
1401 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[2,1,3,3]
1402 ; SSE-NEXT: movdqa %xmm8, %xmm2
1403 ; SSE-NEXT: pandn %xmm1, %xmm2
1404 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm10[2,2,2,2]
1405 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,5,5,5]
1406 ; SSE-NEXT: pand %xmm8, %xmm1
1407 ; SSE-NEXT: por %xmm2, %xmm1
1408 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
1409 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
1410 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1411 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
1412 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,4,6,6,7]
1413 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
1414 ; SSE-NEXT: movdqa %xmm9, %xmm2
1415 ; SSE-NEXT: pandn %xmm0, %xmm2
1416 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
1417 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm5[0,1,2,3,6,5,7,7]
1418 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
1419 ; SSE-NEXT: pand %xmm9, %xmm0
1420 ; SSE-NEXT: por %xmm2, %xmm0
1421 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
1422 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm10[0,1,2,3,4,6,6,7]
1423 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,1,3,3]
1424 ; SSE-NEXT: movdqa %xmm8, %xmm3
1425 ; SSE-NEXT: pandn %xmm2, %xmm3
1426 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
1427 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm12[3,3,3,3]
1428 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,5,5,5,5]
1429 ; SSE-NEXT: pand %xmm8, %xmm2
1430 ; SSE-NEXT: por %xmm3, %xmm2
1431 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
1432 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,2,2,3]
1433 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
1434 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm4[0,1,2,3,4,4,6,5]
1435 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
1436 ; SSE-NEXT: movdqa %xmm9, %xmm2
1437 ; SSE-NEXT: pandn %xmm0, %xmm2
1438 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm5[0,1,2,3,4,5,5,7]
1439 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
1440 ; SSE-NEXT: pand %xmm9, %xmm0
1441 ; SSE-NEXT: por %xmm2, %xmm0
1442 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm10[0,1,2,3,4,4,6,5]
1443 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,1,3,3]
1444 ; SSE-NEXT: movdqa %xmm8, %xmm4
1445 ; SSE-NEXT: pandn %xmm2, %xmm4
1446 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm12[2,2,2,2]
1447 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,5,5,5,5]
1448 ; SSE-NEXT: pand %xmm8, %xmm2
1449 ; SSE-NEXT: por %xmm4, %xmm2
1450 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[1,3,2,3]
1451 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,2,2,3]
1452 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
1453 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
1454 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm10[0,1,2,3,4,6,6,7]
1455 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
1456 ; SSE-NEXT: movdqa %xmm9, %xmm4
1457 ; SSE-NEXT: pandn %xmm2, %xmm4
1458 ; SSE-NEXT: movdqa (%rsp), %xmm6 # 16-byte Reload
1459 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm6[0,1,2,3,6,5,7,7]
1460 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
1461 ; SSE-NEXT: pand %xmm9, %xmm2
1462 ; SSE-NEXT: por %xmm4, %xmm2
1463 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
1464 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm7[0,1,2,3,4,6,6,7]
1465 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,1,3,3]
1466 ; SSE-NEXT: movdqa %xmm8, %xmm5
1467 ; SSE-NEXT: pandn %xmm4, %xmm5
1468 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
1469 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm11[3,3,3,3]
1470 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,5,5,5]
1471 ; SSE-NEXT: pand %xmm8, %xmm4
1472 ; SSE-NEXT: por %xmm5, %xmm4
1473 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[1,3,2,3]
1474 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[0,2,2,3]
1475 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
1476 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm10[0,1,2,3,4,4,6,5]
1477 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,2,2,3]
1478 ; SSE-NEXT: movdqa %xmm9, %xmm5
1479 ; SSE-NEXT: pandn %xmm4, %xmm5
1480 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm6[0,1,2,3,4,5,5,7]
1481 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,2,2,3]
1482 ; SSE-NEXT: pand %xmm9, %xmm4
1483 ; SSE-NEXT: por %xmm5, %xmm4
1484 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm7[0,1,2,3,4,4,6,5]
1485 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[2,1,3,3]
1486 ; SSE-NEXT: movdqa %xmm8, %xmm10
1487 ; SSE-NEXT: pandn %xmm5, %xmm10
1488 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm11[2,2,2,2]
1489 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,5,5,5,5]
1490 ; SSE-NEXT: pand %xmm8, %xmm5
1491 ; SSE-NEXT: por %xmm10, %xmm5
1492 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm4[1,3,2,3]
1493 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm5[0,2,2,3]
1494 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm10[0],xmm4[1],xmm10[1]
1495 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
1496 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm6[0,1,2,3,4,6,6,7]
1497 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
1498 ; SSE-NEXT: movdqa %xmm9, %xmm10
1499 ; SSE-NEXT: pandn %xmm5, %xmm10
1500 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm15[0,1,2,3,6,5,7,7]
1501 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
1502 ; SSE-NEXT: pand %xmm9, %xmm5
1503 ; SSE-NEXT: por %xmm10, %xmm5
1504 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm13[0,1,2,3,4,6,6,7]
1505 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[2,1,3,3]
1506 ; SSE-NEXT: movdqa %xmm8, %xmm12
1507 ; SSE-NEXT: pandn %xmm10, %xmm12
1508 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm14[3,3,3,3]
1509 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,5,5,5,5]
1510 ; SSE-NEXT: pand %xmm8, %xmm10
1511 ; SSE-NEXT: por %xmm12, %xmm10
1512 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,3,2,3]
1513 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,2,2,3]
1514 ; SSE-NEXT: punpckldq {{.*#+}} xmm10 = xmm10[0],xmm5[0],xmm10[1],xmm5[1]
1515 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm6[0,1,2,3,4,4,6,5]
1516 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
1517 ; SSE-NEXT: pshufhw {{.*#+}} xmm11 = xmm15[0,1,2,3,4,5,5,7]
1518 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[0,2,2,3]
1519 ; SSE-NEXT: pand %xmm9, %xmm11
1520 ; SSE-NEXT: pandn %xmm5, %xmm9
1521 ; SSE-NEXT: por %xmm11, %xmm9
1522 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm13[0,1,2,3,4,4,6,5]
1523 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[2,1,3,3]
1524 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm14[2,2,2,2]
1525 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,5,5,5,5]
1526 ; SSE-NEXT: pand %xmm8, %xmm6
1527 ; SSE-NEXT: pandn %xmm5, %xmm8
1528 ; SSE-NEXT: por %xmm6, %xmm8
1529 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm9[1,3,2,3]
1530 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm8[0,2,2,3]
1531 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
1532 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
1533 ; SSE-NEXT: movdqa %xmm6, 224(%rax)
1534 ; SSE-NEXT: movdqa %xmm10, 240(%rax)
1535 ; SSE-NEXT: movdqa %xmm4, 160(%rax)
1536 ; SSE-NEXT: movdqa %xmm2, 176(%rax)
1537 ; SSE-NEXT: movdqa %xmm0, 96(%rax)
1538 ; SSE-NEXT: movdqa %xmm3, 112(%rax)
1539 ; SSE-NEXT: movdqa %xmm1, 32(%rax)
1540 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1541 ; SSE-NEXT: movaps %xmm0, 48(%rax)
1542 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1543 ; SSE-NEXT: movaps %xmm0, 192(%rax)
1544 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1545 ; SSE-NEXT: movaps %xmm0, 208(%rax)
1546 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1547 ; SSE-NEXT: movaps %xmm0, 128(%rax)
1548 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1549 ; SSE-NEXT: movaps %xmm0, 144(%rax)
1550 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1551 ; SSE-NEXT: movaps %xmm0, 64(%rax)
1552 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1553 ; SSE-NEXT: movaps %xmm0, 80(%rax)
1554 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1555 ; SSE-NEXT: movaps %xmm0, (%rax)
1556 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1557 ; SSE-NEXT: movaps %xmm0, 16(%rax)
1558 ; SSE-NEXT: addq $232, %rsp
1559 ; SSE-NEXT: retq
1560 ;
1561 ; AVX1-ONLY-LABEL: store_i8_stride8_vf32:
1562 ; AVX1-ONLY: # %bb.0:
1563 ; AVX1-ONLY-NEXT: subq $72, %rsp
1564 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1565 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
1566 ; AVX1-ONLY-NEXT: vmovdqa (%r10), %xmm8
1567 ; AVX1-ONLY-NEXT: vmovdqa (%rax), %xmm4
1568 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm4[0],xmm8[0],xmm4[1],xmm8[1],xmm4[2],xmm8[2],xmm4[3],xmm8[3],xmm4[4],xmm8[4],xmm4[5],xmm8[5],xmm4[6],xmm8[6],xmm4[7],xmm8[7]
1569 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[0,0,2,1,4,5,6,7]
1570 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm1[0,2,2,3,4,5,6,7]
1571 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
1572 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm0[0,0,2,1,4,4,6,5]
1573 ; AVX1-ONLY-NEXT: vbroadcastsd {{.*#+}} ymm0 = [65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0]
1574 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm0, %ymm2
1575 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm7
1576 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm9
1577 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
1578 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm12[0,1,1,3,4,5,6,7]
1579 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm12[2,1,3,3,4,5,6,7]
1580 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm3, %ymm3
1581 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
1582 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm3, %ymm3
1583 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm3, %ymm5
1584 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm2
1585 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1586 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm14
1587 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm14[0],xmm2[0],xmm14[1],xmm2[1],xmm14[2],xmm2[2],xmm14[3],xmm2[3],xmm14[4],xmm2[4],xmm14[5],xmm2[5],xmm14[6],xmm2[6],xmm14[7],xmm2[7]
1588 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
1589 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[1,1,1,1]
1590 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
1591 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm2, %ymm6
1592 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm10
1593 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm11
1594 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3],xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
1595 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm15[0,0,2,1,4,5,6,7]
1596 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero
1597 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm13 = xmm15[0,2,2,3,4,5,6,7]
1598 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm13[0],zero,xmm13[1],zero
1599 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm2, %ymm13
1600 ; AVX1-ONLY-NEXT: vbroadcastsd {{.*#+}} ymm2 = [65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535]
1601 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm6, %ymm6
1602 ; AVX1-ONLY-NEXT: vandnps %ymm13, %ymm2, %ymm13
1603 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm13, %ymm6
1604 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2],ymm5[3],ymm6[4],ymm5[5],ymm6[6],ymm5[7]
1605 ; AVX1-ONLY-NEXT: vmovups %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1606 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm1[0,1,2,3,4,4,6,5]
1607 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
1608 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm5, %ymm1
1609 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm12[0,1,2,3,4,5,5,7]
1610 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm12[0,1,2,3,6,5,7,7]
1611 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
1612 ; AVX1-ONLY-NEXT: vmovdqa 16(%r10), %xmm12
1613 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
1614 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm0, %ymm1
1615 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm5[0,2,2,3,4,6,6,7]
1616 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm5, %ymm5
1617 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm5, %ymm1
1618 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm15[0,1,2,3,4,4,6,5]
1619 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm15[0,1,2,3,4,6,6,7]
1620 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
1621 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[2,3,2,3]
1622 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
1623 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[3,3,3,3]
1624 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
1625 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm6, %ymm3
1626 ; AVX1-ONLY-NEXT: vmovdqa 16(%rax), %xmm6
1627 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1628 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm5[2,1,3,3,6,5,7,7]
1629 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm2, %ymm5
1630 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm3, %ymm3
1631 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm3, %ymm3
1632 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm13
1633 ; AVX1-ONLY-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1634 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2],ymm1[3],ymm3[4],ymm1[5],ymm3[6],ymm1[7]
1635 ; AVX1-ONLY-NEXT: vmovups %ymm1, (%rsp) # 32-byte Spill
1636 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm4[8],xmm8[8],xmm4[9],xmm8[9],xmm4[10],xmm8[10],xmm4[11],xmm8[11],xmm4[12],xmm8[12],xmm4[13],xmm8[13],xmm4[14],xmm8[14],xmm4[15],xmm8[15]
1637 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1638 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm3[0,1,2,3,4,4,6,5]
1639 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
1640 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
1641 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm9[8],xmm7[8],xmm9[9],xmm7[9],xmm9[10],xmm7[10],xmm9[11],xmm7[11],xmm9[12],xmm7[12],xmm9[13],xmm7[13],xmm9[14],xmm7[14],xmm9[15],xmm7[15]
1642 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1643 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm4[0,1,2,3,4,5,5,7]
1644 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,6,5,7,7]
1645 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
1646 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm8
1647 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
1648 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm0, %ymm1
1649 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[0,2,2,3,4,6,6,7]
1650 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm3, %ymm3
1651 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm3, %ymm1
1652 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm11[8],xmm10[8],xmm11[9],xmm10[9],xmm11[10],xmm10[10],xmm11[11],xmm10[11],xmm11[12],xmm10[12],xmm11[13],xmm10[13],xmm11[14],xmm10[14],xmm11[15],xmm10[15]
1653 ; AVX1-ONLY-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1654 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm4[0,1,2,3,4,4,6,5]
1655 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,6,6,7]
1656 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
1657 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,1,3,3,6,5,7,7]
1658 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm2, %ymm3
1659 ; AVX1-ONLY-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm5 # 16-byte Folded Reload
1660 ; AVX1-ONLY-NEXT: # xmm5 = xmm14[8],mem[8],xmm14[9],mem[9],xmm14[10],mem[10],xmm14[11],mem[11],xmm14[12],mem[12],xmm14[13],mem[13],xmm14[14],mem[14],xmm14[15],mem[15]
1661 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1662 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[2,3,2,3]
1663 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
1664 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm5[3,3,3,3]
1665 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
1666 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm4, %ymm4
1667 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm4, %ymm4
1668 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm4, %ymm3
1669 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2],ymm1[3],ymm3[4],ymm1[5],ymm3[6],ymm1[7]
1670 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1671 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm6[0],xmm12[0],xmm6[1],xmm12[1],xmm6[2],xmm12[2],xmm6[3],xmm12[3],xmm6[4],xmm12[4],xmm6[5],xmm12[5],xmm6[6],xmm12[6],xmm6[7],xmm12[7]
1672 ; AVX1-ONLY-NEXT: vmovdqa %xmm12, %xmm6
1673 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm11[0,1,2,3,4,4,6,5]
1674 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm11[0,1,2,3,4,6,6,7]
1675 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
1676 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm8[0],xmm13[0],xmm8[1],xmm13[1],xmm8[2],xmm13[2],xmm8[3],xmm13[3],xmm8[4],xmm13[4],xmm8[5],xmm13[5],xmm8[6],xmm13[6],xmm8[7],xmm13[7]
1677 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm12[0,1,2,3,4,5,5,7]
1678 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm12[0,1,2,3,6,5,7,7]
1679 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
1680 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
1681 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm0, %ymm1
1682 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[0,2,2,3,4,6,6,7]
1683 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm3, %ymm3
1684 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm3, %ymm5
1685 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm7
1686 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm4
1687 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm4[0],xmm7[0],xmm4[1],xmm7[1],xmm4[2],xmm7[2],xmm4[3],xmm7[3],xmm4[4],xmm7[4],xmm4[5],xmm7[5],xmm4[6],xmm7[6],xmm4[7],xmm7[7]
1688 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm10[0,1,2,3,4,4,6,5]
1689 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm13 = xmm10[0,1,2,3,4,6,6,7]
1690 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm13, %ymm3, %ymm1
1691 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm3
1692 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm13
1693 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm13[0],xmm3[0],xmm13[1],xmm3[1],xmm13[2],xmm3[2],xmm13[3],xmm3[3],xmm13[4],xmm3[4],xmm13[5],xmm3[5],xmm13[6],xmm3[6],xmm13[7],xmm3[7]
1694 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm14[2,3,2,3]
1695 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm9 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
1696 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm15 = xmm14[3,3,3,3]
1697 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm15 = xmm15[0],zero,zero,zero,xmm15[1],zero,zero,zero
1698 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm15, %ymm9, %ymm9
1699 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[2,1,3,3,6,5,7,7]
1700 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm2, %ymm1
1701 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm9, %ymm9
1702 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm9, %ymm1
1703 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm1[0],ymm5[1],ymm1[2],ymm5[3],ymm1[4],ymm5[5],ymm1[6],ymm5[7]
1704 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
1705 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm1[8],xmm6[8],xmm1[9],xmm6[9],xmm1[10],xmm6[10],xmm1[11],xmm6[11],xmm1[12],xmm6[12],xmm1[13],xmm6[13],xmm1[14],xmm6[14],xmm1[15],xmm6[15]
1706 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm6[0,1,2,3,4,4,6,5]
1707 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm5 = xmm6[0,1,2,3,4,6,6,7]
1708 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm1, %ymm1
1709 ; AVX1-ONLY-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm8, %xmm5 # 16-byte Folded Reload
1710 ; AVX1-ONLY-NEXT: # xmm5 = xmm8[8],mem[8],xmm8[9],mem[9],xmm8[10],mem[10],xmm8[11],mem[11],xmm8[12],mem[12],xmm8[13],mem[13],xmm8[14],mem[14],xmm8[15],mem[15]
1711 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm8 = xmm5[0,1,2,3,4,5,5,7]
1712 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm9 = xmm5[0,1,2,3,6,5,7,7]
1713 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm8, %ymm8
1714 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
1715 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm0, %ymm1
1716 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm8[0,2,2,3,4,6,6,7]
1717 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm8, %ymm8
1718 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm8, %ymm8
1719 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm4[8],xmm7[8],xmm4[9],xmm7[9],xmm4[10],xmm7[10],xmm4[11],xmm7[11],xmm4[12],xmm7[12],xmm4[13],xmm7[13],xmm4[14],xmm7[14],xmm4[15],xmm7[15]
1720 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm1[0,1,2,3,4,4,6,5]
1721 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm7 = xmm1[0,1,2,3,4,6,6,7]
1722 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm4, %ymm4
1723 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm13[8],xmm3[8],xmm13[9],xmm3[9],xmm13[10],xmm3[10],xmm13[11],xmm3[11],xmm13[12],xmm3[12],xmm13[13],xmm3[13],xmm13[14],xmm3[14],xmm13[15],xmm3[15]
1724 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm3[2,3,2,3]
1725 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
1726 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm9 = xmm3[3,3,3,3]
1727 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm9 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
1728 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm7, %ymm7
1729 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[2,1,3,3,6,5,7,7]
1730 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm2, %ymm4
1731 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm7, %ymm7
1732 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm7, %ymm4
1733 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm13 = ymm4[0],ymm8[1],ymm4[2],ymm8[3],ymm4[4],ymm8[5],ymm4[6],ymm8[7]
1734 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm6[0,0,2,1,4,5,6,7]
1735 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,2,2,3,4,5,6,7]
1736 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm4
1737 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm5[0,1,1,3,4,5,6,7]
1738 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[2,1,3,3,4,5,6,7]
1739 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm6, %ymm5
1740 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
1741 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm0, %ymm4
1742 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm5[0,0,2,1,4,4,6,5]
1743 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm5, %ymm5
1744 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm5, %ymm4
1745 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
1746 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[1,1,1,1]
1747 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
1748 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
1749 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm1[0,0,2,1,4,5,6,7]
1750 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm5[0],zero,xmm5[1],zero
1751 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
1752 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
1753 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm5, %ymm1
1754 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm3, %ymm3
1755 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm2, %ymm1
1756 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm3, %ymm1
1757 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0],ymm4[1],ymm1[2],ymm4[3],ymm1[4],ymm4[5],ymm1[6],ymm4[7]
1758 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm11[0,0,2,1,4,5,6,7]
1759 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm11[0,2,2,3,4,5,6,7]
1760 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
1761 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm12[0,1,1,3,4,5,6,7]
1762 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm12[2,1,3,3,4,5,6,7]
1763 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
1764 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
1765 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm0, %ymm3
1766 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
1767 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm4, %ymm4
1768 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm4, %ymm3
1769 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm14[0],zero,zero,zero,xmm14[1],zero,zero,zero
1770 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm14[1,1,1,1]
1771 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm5[0],zero,zero,zero,xmm5[1],zero,zero,zero
1772 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
1773 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm10[0,0,2,1,4,5,6,7]
1774 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm5[0],zero,xmm5[1],zero
1775 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm10[0,2,2,3,4,5,6,7]
1776 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
1777 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
1778 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm4, %ymm4
1779 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm2, %ymm5
1780 ; AVX1-ONLY-NEXT: vorps %ymm5, %ymm4, %ymm4
1781 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2],ymm3[3],ymm4[4],ymm3[5],ymm4[6],ymm3[7]
1782 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
1783 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm5[0,0,2,1,4,5,6,7]
1784 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,2,2,3,4,5,6,7]
1785 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
1786 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
1787 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm6[0,1,1,3,4,5,6,7]
1788 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[2,1,3,3,4,5,6,7]
1789 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
1790 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
1791 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm0, %ymm4
1792 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm5 = ymm5[0,0,2,1,4,4,6,5]
1793 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm5, %ymm0
1794 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm0, %ymm0
1795 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
1796 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm5[0],zero,zero,zero,xmm5[1],zero,zero,zero
1797 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[1,1,1,1]
1798 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm5[0],zero,zero,zero,xmm5[1],zero,zero,zero
1799 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm5, %ymm4, %ymm4
1800 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
1801 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm5 = xmm6[0,0,2,1,4,5,6,7]
1802 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm5[0],zero,xmm5[1],zero
1803 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,2,2,3,4,5,6,7]
1804 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
1805 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm5
1806 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm4, %ymm4
1807 ; AVX1-ONLY-NEXT: vandnps %ymm5, %ymm2, %ymm2
1808 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm4, %ymm2
1809 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2],ymm0[3],ymm2[4],ymm0[5],ymm2[6],ymm0[7]
1810 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
1811 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 64(%rax)
1812 ; AVX1-ONLY-NEXT: vmovaps %ymm3, 128(%rax)
1813 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 192(%rax)
1814 ; AVX1-ONLY-NEXT: vmovaps %ymm13, 224(%rax)
1815 ; AVX1-ONLY-NEXT: vmovaps %ymm15, 160(%rax)
1816 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1817 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
1818 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
1819 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
1820 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
1821 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
1822 ; AVX1-ONLY-NEXT: addq $72, %rsp
1823 ; AVX1-ONLY-NEXT: vzeroupper
1824 ; AVX1-ONLY-NEXT: retq
1825 ;
1826 ; AVX2-SLOW-LABEL: store_i8_stride8_vf32:
1827 ; AVX2-SLOW: # %bb.0:
1828 ; AVX2-SLOW-NEXT: subq $88, %rsp
1829 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
1830 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
1831 ; AVX2-SLOW-NEXT: vmovdqa (%r10), %xmm2
1832 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %xmm3
1833 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
1834 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[0,0,2,1,4,5,6,7]
1835 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm1[0,2,2,3,4,5,6,7]
1836 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm0
1837 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
1838 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm4
1839 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm5
1840 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
1841 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm12[0,1,1,3,4,5,6,7]
1842 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm12[2,1,3,3,4,5,6,7]
1843 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm6, %ymm6
1844 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[0,0,2,1,4,4,6,5]
1845 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm6[0,1,2],ymm0[3],ymm6[4,5,6],ymm0[7],ymm6[8,9,10],ymm0[11],ymm6[12,13,14],ymm0[15]
1846 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm6
1847 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm7
1848 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
1849 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm13[0],zero,zero,zero,xmm13[1],zero,zero,zero
1850 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm13[1,1,1,1]
1851 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm9 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
1852 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm8, %ymm9
1853 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm10
1854 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm11
1855 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3],xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
1856 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm8 = xmm14[0,0,2,1,4,5,6,7]
1857 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm8[0],zero,xmm8[1],zero
1858 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm14[0,2,2,3,4,5,6,7]
1859 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm15[0],zero,xmm15[1],zero
1860 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm8, %ymm15
1861 ; AVX2-SLOW-NEXT: vmovaps 16(%r10), %xmm8
1862 ; AVX2-SLOW-NEXT: vmovaps %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1863 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm15 = ymm9[0],ymm15[1],ymm9[2,3,4],ymm15[5],ymm9[6,7,8],ymm15[9],ymm9[10,11,12],ymm15[13],ymm9[14,15]
1864 ; AVX2-SLOW-NEXT: vmovdqa 16(%rax), %xmm9
1865 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm15[0],ymm0[1],ymm15[2],ymm0[3],ymm15[4],ymm0[5],ymm15[6],ymm0[7]
1866 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1867 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,4,4,6,5]
1868 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
1869 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
1870 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm12[0,1,2,3,4,5,5,7]
1871 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,6,5,7,7]
1872 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm12, %ymm1, %ymm1
1873 ; AVX2-SLOW-NEXT: vmovdqa 16(%r9), %xmm8
1874 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
1875 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
1876 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
1877 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm13[2,3,2,3]
1878 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
1879 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm13[3,3,3,3]
1880 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm13 = xmm13[0],zero,zero,zero,xmm13[1],zero,zero,zero
1881 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm1, %ymm1
1882 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm13 = xmm14[0,1,2,3,4,4,6,5]
1883 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,6,6,7]
1884 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm13, %ymm13
1885 ; AVX2-SLOW-NEXT: vmovdqa 16(%r8), %xmm15
1886 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm13 = ymm13[2,1,3,3,6,5,7,7]
1887 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm13[1],ymm1[2,3,4],ymm13[5],ymm1[6,7,8],ymm13[9],ymm1[10,11,12],ymm13[13],ymm1[14,15]
1888 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
1889 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1890 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
1891 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1892 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,4,4,6,5]
1893 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm1[0,1,2,3,4,6,6,7]
1894 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
1895 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
1896 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
1897 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1898 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm1[0,1,2,3,4,5,5,7]
1899 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm1[0,1,2,3,6,5,7,7]
1900 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm2, %ymm2
1901 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
1902 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7],ymm2[8,9,10],ymm0[11],ymm2[12,13,14],ymm0[15]
1903 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm7[8],xmm6[8],xmm7[9],xmm6[9],xmm7[10],xmm6[10],xmm7[11],xmm6[11],xmm7[12],xmm6[12],xmm7[13],xmm6[13],xmm7[14],xmm6[14],xmm7[15],xmm6[15]
1904 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1905 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[2,3,2,3]
1906 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
1907 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm1[3,3,3,3]
1908 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
1909 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm2, %ymm2
1910 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm11[8],xmm10[8],xmm11[9],xmm10[9],xmm11[10],xmm10[10],xmm11[11],xmm10[11],xmm11[12],xmm10[12],xmm11[13],xmm10[13],xmm11[14],xmm10[14],xmm11[15],xmm10[15]
1911 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1912 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm1[0,1,2,3,4,4,6,5]
1913 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm10 = xmm1[0,1,2,3,4,6,6,7]
1914 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm7, %ymm7
1915 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[2,1,3,3,6,5,7,7]
1916 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm7[1],ymm2[2,3,4],ymm7[5],ymm2[6,7,8],ymm7[9],ymm2[10,11,12],ymm7[13],ymm2[14,15]
1917 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2],ymm0[3],ymm2[4],ymm0[5],ymm2[6],ymm0[7]
1918 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1919 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
1920 ; AVX2-SLOW-NEXT: vmovdqa %xmm9, %xmm5
1921 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm10 = xmm9[0],xmm6[0],xmm9[1],xmm6[1],xmm9[2],xmm6[2],xmm9[3],xmm6[3],xmm9[4],xmm6[4],xmm9[5],xmm6[5],xmm9[6],xmm6[6],xmm9[7],xmm6[7]
1922 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm10[0,1,2,3,4,4,6,5]
1923 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm10[0,1,2,3,4,6,6,7]
1924 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
1925 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm15[0],xmm8[0],xmm15[1],xmm8[1],xmm15[2],xmm8[2],xmm15[3],xmm8[3],xmm15[4],xmm8[4],xmm15[5],xmm8[5],xmm15[6],xmm8[6],xmm15[7],xmm8[7]
1926 ; AVX2-SLOW-NEXT: vmovdqa %xmm8, %xmm9
1927 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm11[0,1,2,3,4,5,5,7]
1928 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm13 = xmm11[0,1,2,3,6,5,7,7]
1929 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
1930 ; AVX2-SLOW-NEXT: vmovdqa 16(%rsi), %xmm8
1931 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[0,2,2,3,4,6,6,7]
1932 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm13 = ymm13[0,2,2,3,4,6,6,7]
1933 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm7 = ymm13[0,1,2],ymm7[3],ymm13[4,5,6],ymm7[7],ymm13[8,9,10],ymm7[11],ymm13[12,13,14],ymm7[15]
1934 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdi), %xmm4
1935 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm13 = xmm4[0],xmm8[0],xmm4[1],xmm8[1],xmm4[2],xmm8[2],xmm4[3],xmm8[3],xmm4[4],xmm8[4],xmm4[5],xmm8[5],xmm4[6],xmm8[6],xmm4[7],xmm8[7]
1936 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm14 = xmm13[2,3,2,3]
1937 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm14 = xmm14[0],zero,zero,zero,xmm14[1],zero,zero,zero
1938 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm13[3,3,3,3]
1939 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
1940 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm14, %ymm2
1941 ; AVX2-SLOW-NEXT: vmovdqa 16(%rcx), %xmm3
1942 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdx), %xmm1
1943 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3],xmm1[4],xmm3[4],xmm1[5],xmm3[5],xmm1[6],xmm3[6],xmm1[7],xmm3[7]
1944 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm14[0,1,2,3,4,4,6,5]
1945 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm12 = xmm14[0,1,2,3,4,6,6,7]
1946 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm12, %ymm0, %ymm0
1947 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
1948 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3,4],ymm0[5],ymm2[6,7,8],ymm0[9],ymm2[10,11,12],ymm0[13],ymm2[14,15]
1949 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm7[1],ymm0[2],ymm7[3],ymm0[4],ymm7[5],ymm0[6],ymm7[7]
1950 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
1951 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm5[8],xmm6[8],xmm5[9],xmm6[9],xmm5[10],xmm6[10],xmm5[11],xmm6[11],xmm5[12],xmm6[12],xmm5[13],xmm6[13],xmm5[14],xmm6[14],xmm5[15],xmm6[15]
1952 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm2[0,1,2,3,4,4,6,5]
1953 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm12 = xmm2[0,1,2,3,4,6,6,7]
1954 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm12, %ymm0, %ymm0
1955 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm12 = xmm15[8],xmm9[8],xmm15[9],xmm9[9],xmm15[10],xmm9[10],xmm15[11],xmm9[11],xmm15[12],xmm9[12],xmm15[13],xmm9[13],xmm15[14],xmm9[14],xmm15[15],xmm9[15]
1956 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm15 = xmm12[0,1,2,3,4,5,5,7]
1957 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm12[0,1,2,3,6,5,7,7]
1958 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm15, %ymm7
1959 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
1960 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[0,2,2,3,4,6,6,7]
1961 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm7[0,1,2],ymm0[3],ymm7[4,5,6],ymm0[7],ymm7[8,9,10],ymm0[11],ymm7[12,13,14],ymm0[15]
1962 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm4[8],xmm8[8],xmm4[9],xmm8[9],xmm4[10],xmm8[10],xmm4[11],xmm8[11],xmm4[12],xmm8[12],xmm4[13],xmm8[13],xmm4[14],xmm8[14],xmm4[15],xmm8[15]
1963 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm4[2,3,2,3]
1964 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm5[0],zero,zero,zero,xmm5[1],zero,zero,zero
1965 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm4[3,3,3,3]
1966 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
1967 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm5, %ymm5
1968 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm3[8],xmm1[9],xmm3[9],xmm1[10],xmm3[10],xmm1[11],xmm3[11],xmm1[12],xmm3[12],xmm1[13],xmm3[13],xmm1[14],xmm3[14],xmm1[15],xmm3[15]
1969 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm1[0,1,2,3,4,4,6,5]
1970 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm1[0,1,2,3,4,6,6,7]
1971 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm3, %ymm3
1972 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,1,3,3,6,5,7,7]
1973 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3,4],ymm3[5],ymm5[6,7,8],ymm3[9],ymm5[10,11,12],ymm3[13],ymm5[14,15]
1974 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2],ymm0[3],ymm3[4],ymm0[5],ymm3[6],ymm0[7]
1975 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm2[0,0,2,1,4,5,6,7]
1976 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,2,2,3,4,5,6,7]
1977 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
1978 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm12[0,1,1,3,4,5,6,7]
1979 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm12[2,1,3,3,4,5,6,7]
1980 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm3, %ymm3
1981 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,2,1,4,4,6,5]
1982 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
1983 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7],ymm3[8,9,10],ymm2[11],ymm3[12,13,14],ymm2[15]
1984 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
1985 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[1,1,1,1]
1986 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
1987 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
1988 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm1[0,0,2,1,4,5,6,7]
1989 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm4 = xmm4[0],zero,xmm4[1],zero
1990 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
1991 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
1992 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm4, %ymm1
1993 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2,3,4],ymm1[5],ymm3[6,7,8],ymm1[9],ymm3[10,11,12],ymm1[13],ymm3[14,15]
1994 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2],ymm2[3],ymm1[4],ymm2[5],ymm1[6],ymm2[7]
1995 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm10[0,0,2,1,4,5,6,7]
1996 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm10[0,2,2,3,4,5,6,7]
1997 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
1998 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm11[0,1,1,3,4,5,6,7]
1999 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm11[2,1,3,3,4,5,6,7]
2000 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
2001 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,2,1,4,4,6,5]
2002 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
2003 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7],ymm3[8,9,10],ymm2[11],ymm3[12,13,14],ymm2[15]
2004 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm13[0],zero,zero,zero,xmm13[1],zero,zero,zero
2005 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm13[1,1,1,1]
2006 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
2007 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
2008 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm14[0,0,2,1,4,5,6,7]
2009 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm4 = xmm4[0],zero,xmm4[1],zero
2010 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm14[0,2,2,3,4,5,6,7]
2011 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm5[0],zero,xmm5[1],zero
2012 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm4
2013 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3,4],ymm4[5],ymm3[6,7,8],ymm4[9],ymm3[10,11,12],ymm4[13],ymm3[14,15]
2014 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4],ymm2[5],ymm3[6],ymm2[7]
2015 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2016 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm4[0,0,2,1,4,5,6,7]
2017 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,2,2,3,4,5,6,7]
2018 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
2019 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
2020 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm5[0,1,1,3,4,5,6,7]
2021 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[2,1,3,3,4,5,6,7]
2022 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm4
2023 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
2024 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
2025 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7],ymm4[8,9,10],ymm3[11],ymm4[12,13,14],ymm3[15]
2026 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
2027 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm5[0],zero,zero,zero,xmm5[1],zero,zero,zero
2028 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[1,1,1,1]
2029 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm5[0],zero,zero,zero,xmm5[1],zero,zero,zero
2030 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm4
2031 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
2032 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm6[0,0,2,1,4,5,6,7]
2033 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm5[0],zero,xmm5[1],zero
2034 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,2,2,3,4,5,6,7]
2035 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
2036 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm5, %ymm5
2037 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm5[1],ymm4[2,3,4],ymm5[5],ymm4[6,7,8],ymm5[9],ymm4[10,11,12],ymm5[13],ymm4[14,15]
2038 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2],ymm3[3],ymm4[4],ymm3[5],ymm4[6],ymm3[7]
2039 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2040 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, 64(%rax)
2041 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, 128(%rax)
2042 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 192(%rax)
2043 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 224(%rax)
2044 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2045 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 160(%rax)
2046 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2047 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 96(%rax)
2048 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2049 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
2050 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2051 ; AVX2-SLOW-NEXT: vmovaps %ymm0, (%rax)
2052 ; AVX2-SLOW-NEXT: addq $88, %rsp
2053 ; AVX2-SLOW-NEXT: vzeroupper
2054 ; AVX2-SLOW-NEXT: retq
2055 ;
2056 ; AVX2-FAST-LABEL: store_i8_stride8_vf32:
2057 ; AVX2-FAST: # %bb.0:
2058 ; AVX2-FAST-NEXT: subq $72, %rsp
2059 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2060 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
2061 ; AVX2-FAST-NEXT: vmovdqa (%r10), %xmm5
2062 ; AVX2-FAST-NEXT: vmovdqa (%rax), %xmm6
2063 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3],xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
2064 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm8
2065 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm8[0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,16,17,20,21,16,17,20,21,24,25,26,27,20,21,22,23]
2066 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm1
2067 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm2
2068 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
2069 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm9
2070 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm9[0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,20,21,18,19,20,21,18,19,24,25,26,27,22,23,22,23]
2071 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm11 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7],ymm4[8,9,10],ymm3[11],ymm4[12,13,14],ymm3[15]
2072 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm7
2073 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm3
2074 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm10
2075 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm10[0],xmm3[0],xmm10[1],xmm3[1],xmm10[2],xmm3[2],xmm10[3],xmm3[3],xmm10[4],xmm3[4],xmm10[5],xmm3[5],xmm10[6],xmm3[6],xmm10[7],xmm3[7]
2076 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm4 = xmm12[4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u]
2077 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm13 = xmm12[0],zero,zero,zero,xmm12[1],zero,zero,zero
2078 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm4, %ymm13, %ymm13
2079 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm15
2080 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm15[0],xmm7[0],xmm15[1],xmm7[1],xmm15[2],xmm7[2],xmm15[3],xmm7[3],xmm15[4],xmm7[4],xmm15[5],xmm7[5],xmm15[6],xmm7[6],xmm15[7],xmm7[7]
2081 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm0
2082 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm0[0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,16,17,20,21,20,21,22,23,20,21,22,23,28,29,30,31]
2083 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm13 = ymm13[0],ymm14[1],ymm13[2,3,4],ymm14[5],ymm13[6,7,8],ymm14[9],ymm13[10,11,12],ymm14[13],ymm13[14,15]
2084 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm13[0],ymm11[1],ymm13[2],ymm11[3],ymm13[4],ymm11[5],ymm13[6],ymm11[7]
2085 ; AVX2-FAST-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2086 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
2087 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm8, %ymm8
2088 ; AVX2-FAST-NEXT: vmovdqa %ymm4, %ymm14
2089 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
2090 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm9, %ymm9
2091 ; AVX2-FAST-NEXT: vmovdqa %ymm4, %ymm13
2092 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7],ymm9[8,9,10],ymm8[11],ymm9[12,13,14],ymm8[15]
2093 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
2094 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm0, %ymm0
2095 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm12, %ymm12, %ymm9
2096 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
2097 ; AVX2-FAST-NEXT: vpshufb %ymm11, %ymm9, %ymm9
2098 ; AVX2-FAST-NEXT: vmovdqa %ymm11, %ymm12
2099 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0],ymm0[1],ymm9[2,3,4],ymm0[5],ymm9[6,7,8],ymm0[9],ymm9[10,11,12],ymm0[13],ymm9[14,15]
2100 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm8[1],ymm0[2],ymm8[3],ymm0[4],ymm8[5],ymm0[6],ymm8[7]
2101 ; AVX2-FAST-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
2102 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm6[8],xmm5[8],xmm6[9],xmm5[9],xmm6[10],xmm5[10],xmm6[11],xmm5[11],xmm6[12],xmm5[12],xmm6[13],xmm5[13],xmm6[14],xmm5[14],xmm6[15],xmm5[15]
2103 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
2104 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
2105 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2106 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
2107 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2108 ; AVX2-FAST-NEXT: vpshufb %ymm14, %ymm0, %ymm0
2109 ; AVX2-FAST-NEXT: vpshufb %ymm13, %ymm1, %ymm1
2110 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
2111 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm15[8],xmm7[8],xmm15[9],xmm7[9],xmm15[10],xmm7[10],xmm15[11],xmm7[11],xmm15[12],xmm7[12],xmm15[13],xmm7[13],xmm15[14],xmm7[14],xmm15[15],xmm7[15]
2112 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm2
2113 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2114 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm10[8],xmm3[8],xmm10[9],xmm3[9],xmm10[10],xmm3[10],xmm10[11],xmm3[11],xmm10[12],xmm3[12],xmm10[13],xmm3[13],xmm10[14],xmm3[14],xmm10[15],xmm3[15]
2115 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm11, %ymm11, %ymm1
2116 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm1, %ymm1
2117 ; AVX2-FAST-NEXT: vpshufb %ymm4, %ymm2, %ymm2
2118 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
2119 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
2120 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2121 ; AVX2-FAST-NEXT: vmovdqa 16(%r10), %xmm8
2122 ; AVX2-FAST-NEXT: vmovdqa 16(%rax), %xmm6
2123 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm6[0],xmm8[0],xmm6[1],xmm8[1],xmm6[2],xmm8[2],xmm6[3],xmm8[3],xmm6[4],xmm8[4],xmm6[5],xmm8[5],xmm6[6],xmm8[6],xmm6[7],xmm8[7]
2124 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm12
2125 ; AVX2-FAST-NEXT: vmovdqa 16(%r9), %xmm7
2126 ; AVX2-FAST-NEXT: vmovdqa 16(%r8), %xmm5
2127 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm5[0],xmm7[0],xmm5[1],xmm7[1],xmm5[2],xmm7[2],xmm5[3],xmm7[3],xmm5[4],xmm7[4],xmm5[5],xmm7[5],xmm5[6],xmm7[6],xmm5[7],xmm7[7]
2128 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm10
2129 ; AVX2-FAST-NEXT: vpshufb %ymm14, %ymm12, %ymm3
2130 ; AVX2-FAST-NEXT: vpshufb %ymm13, %ymm10, %ymm4
2131 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm15 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7],ymm4[8,9,10],ymm3[11],ymm4[12,13,14],ymm3[15]
2132 ; AVX2-FAST-NEXT: vmovdqa 16(%rcx), %xmm4
2133 ; AVX2-FAST-NEXT: vmovdqa 16(%rdx), %xmm2
2134 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3],xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
2135 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm9
2136 ; AVX2-FAST-NEXT: vmovdqa 16(%rsi), %xmm1
2137 ; AVX2-FAST-NEXT: vmovdqa 16(%rdi), %xmm0
2138 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
2139 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm14
2140 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm14[8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,28,29,u,u,u,u,u,u,30,31,u,u,u,u,u,u]
2141 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm13 = ymm9[8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,24,25,28,29,20,21,22,23,28,29,30,31,28,29,30,31]
2142 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm13 = ymm14[0],ymm13[1],ymm14[2,3,4],ymm13[5],ymm14[6,7,8],ymm13[9],ymm14[10,11,12],ymm13[13],ymm14[14,15]
2143 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm15 = ymm13[0],ymm15[1],ymm13[2],ymm15[3],ymm13[4],ymm15[5],ymm13[6],ymm15[7]
2144 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm6[8],xmm8[8],xmm6[9],xmm8[9],xmm6[10],xmm8[10],xmm6[11],xmm8[11],xmm6[12],xmm8[12],xmm6[13],xmm8[13],xmm6[14],xmm8[14],xmm6[15],xmm8[15]
2145 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm5[8],xmm7[8],xmm5[9],xmm7[9],xmm5[10],xmm7[10],xmm5[11],xmm7[11],xmm5[12],xmm7[12],xmm5[13],xmm7[13],xmm5[14],xmm7[14],xmm5[15],xmm7[15]
2146 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm6, %ymm6, %ymm6
2147 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm7 = ymm6[0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,16,17,18,19,24,25,28,29,24,25,28,29,28,29,30,31]
2148 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm5, %ymm5, %ymm5
2149 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm8 = ymm5[0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,16,17,18,19,28,29,26,27,28,29,26,27,30,31,30,31]
2150 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm8[0,1,2],ymm7[3],ymm8[4,5,6],ymm7[7],ymm8[8,9,10],ymm7[11],ymm8[12,13,14],ymm7[15]
2151 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm2[8],xmm4[8],xmm2[9],xmm4[9],xmm2[10],xmm4[10],xmm2[11],xmm4[11],xmm2[12],xmm4[12],xmm2[13],xmm4[13],xmm2[14],xmm4[14],xmm2[15],xmm4[15]
2152 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
2153 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm0
2154 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,28,29,u,u,u,u,u,u,30,31,u,u,u,u,u,u]
2155 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
2156 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm2[8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,24,25,28,29,20,21,22,23,28,29,30,31,28,29,30,31]
2157 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm4[1],ymm0[2,3,4],ymm4[5],ymm0[6,7,8],ymm4[9],ymm0[10,11,12],ymm4[13],ymm0[14,15]
2158 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm7[1],ymm0[2],ymm7[3],ymm0[4],ymm7[5],ymm0[6],ymm7[7]
2159 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
2160 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm6, %ymm4
2161 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
2162 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm5, %ymm5
2163 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7],ymm5[8,9,10],ymm4[11],ymm5[12,13,14],ymm4[15]
2164 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm8 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
2165 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm1, %xmm5
2166 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
2167 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm5, %ymm1, %ymm1
2168 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
2169 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm2
2170 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
2171 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm4[1],ymm1[2],ymm4[3],ymm1[4],ymm4[5],ymm1[6],ymm4[7]
2172 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm12, %ymm2
2173 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm10, %ymm4
2174 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7],ymm4[8,9,10],ymm2[11],ymm4[12,13,14],ymm2[15]
2175 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm3, %xmm4
2176 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
2177 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
2178 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm9, %ymm4
2179 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3,4],ymm4[5],ymm3[6,7,8],ymm4[9],ymm3[10,11,12],ymm4[13],ymm3[14,15]
2180 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4],ymm2[5],ymm3[6],ymm2[7]
2181 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2182 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm3, %ymm3
2183 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2184 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm4, %ymm4
2185 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7],ymm4[8,9,10],ymm3[11],ymm4[12,13,14],ymm3[15]
2186 ; AVX2-FAST-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2187 ; AVX2-FAST-NEXT: vpshufb %ymm5, %ymm4, %ymm4
2188 ; AVX2-FAST-NEXT: vpshufb %xmm8, %xmm11, %xmm5
2189 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm11[0],zero,zero,zero,xmm11[1],zero,zero,zero
2190 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
2191 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7,8],ymm4[9],ymm5[10,11,12],ymm4[13],ymm5[14,15]
2192 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2],ymm3[3],ymm4[4],ymm3[5],ymm4[6],ymm3[7]
2193 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2194 ; AVX2-FAST-NEXT: vmovdqa %ymm3, 64(%rax)
2195 ; AVX2-FAST-NEXT: vmovdqa %ymm2, 128(%rax)
2196 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 192(%rax)
2197 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 224(%rax)
2198 ; AVX2-FAST-NEXT: vmovdqa %ymm15, 160(%rax)
2199 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2200 ; AVX2-FAST-NEXT: vmovaps %ymm0, 96(%rax)
2201 ; AVX2-FAST-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
2202 ; AVX2-FAST-NEXT: vmovaps %ymm0, 32(%rax)
2203 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2204 ; AVX2-FAST-NEXT: vmovaps %ymm0, (%rax)
2205 ; AVX2-FAST-NEXT: addq $72, %rsp
2206 ; AVX2-FAST-NEXT: vzeroupper
2207 ; AVX2-FAST-NEXT: retq
2208 ;
2209 ; AVX2-FAST-PERLANE-LABEL: store_i8_stride8_vf32:
2210 ; AVX2-FAST-PERLANE: # %bb.0:
2211 ; AVX2-FAST-PERLANE-NEXT: subq $72, %rsp
2212 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2213 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r10
2214 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r10), %xmm5
2215 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %xmm6
2216 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3],xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
2217 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm8
2218 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm3 = ymm8[0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,16,17,20,21,16,17,20,21,24,25,26,27,20,21,22,23]
2219 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm1
2220 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm2
2221 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
2222 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm9
2223 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm9[0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,20,21,18,19,20,21,18,19,24,25,26,27,22,23,22,23]
2224 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm11 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7],ymm4[8,9,10],ymm3[11],ymm4[12,13,14],ymm3[15]
2225 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm7
2226 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm3
2227 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm10
2228 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm10[0],xmm3[0],xmm10[1],xmm3[1],xmm10[2],xmm3[2],xmm10[3],xmm3[3],xmm10[4],xmm3[4],xmm10[5],xmm3[5],xmm10[6],xmm3[6],xmm10[7],xmm3[7]
2229 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} xmm4 = xmm12[4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u]
2230 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm13 = xmm12[0],zero,zero,zero,xmm12[1],zero,zero,zero
2231 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm13, %ymm13
2232 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm15
2233 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm15[0],xmm7[0],xmm15[1],xmm7[1],xmm15[2],xmm7[2],xmm15[3],xmm7[3],xmm15[4],xmm7[4],xmm15[5],xmm7[5],xmm15[6],xmm7[6],xmm15[7],xmm7[7]
2234 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm0
2235 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm14 = ymm0[0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,16,17,20,21,20,21,22,23,20,21,22,23,28,29,30,31]
2236 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm13 = ymm13[0],ymm14[1],ymm13[2,3,4],ymm14[5],ymm13[6,7,8],ymm14[9],ymm13[10,11,12],ymm14[13],ymm13[14,15]
2237 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm13[0],ymm11[1],ymm13[2],ymm11[3],ymm13[4],ymm11[5],ymm13[6],ymm11[7]
2238 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2239 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
2240 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm8, %ymm8
2241 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, %ymm14
2242 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
2243 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm9, %ymm9
2244 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, %ymm13
2245 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7],ymm9[8,9,10],ymm8[11],ymm9[12,13,14],ymm8[15]
2246 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm4 = [8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
2247 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm0, %ymm0
2248 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm12, %ymm12, %ymm9
2249 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
2250 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm9, %ymm9
2251 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm11, %ymm12
2252 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0],ymm0[1],ymm9[2,3,4],ymm0[5],ymm9[6,7,8],ymm0[9],ymm9[10,11,12],ymm0[13],ymm9[14,15]
2253 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm8[1],ymm0[2],ymm8[3],ymm0[4],ymm8[5],ymm0[6],ymm8[7]
2254 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
2255 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm6[8],xmm5[8],xmm6[9],xmm5[9],xmm6[10],xmm5[10],xmm6[11],xmm5[11],xmm6[12],xmm5[12],xmm6[13],xmm5[13],xmm6[14],xmm5[14],xmm6[15],xmm5[15]
2256 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
2257 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
2258 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2259 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
2260 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2261 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm14, %ymm0, %ymm0
2262 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm13, %ymm1, %ymm1
2263 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
2264 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm15[8],xmm7[8],xmm15[9],xmm7[9],xmm15[10],xmm7[10],xmm15[11],xmm7[11],xmm15[12],xmm7[12],xmm15[13],xmm7[13],xmm15[14],xmm7[14],xmm15[15],xmm7[15]
2265 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm2
2266 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2267 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm10[8],xmm3[8],xmm10[9],xmm3[9],xmm10[10],xmm3[10],xmm10[11],xmm3[11],xmm10[12],xmm3[12],xmm10[13],xmm3[13],xmm10[14],xmm3[14],xmm10[15],xmm3[15]
2268 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm11, %ymm11, %ymm1
2269 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm1, %ymm1
2270 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm4, %ymm2, %ymm2
2271 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
2272 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
2273 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2274 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%r10), %xmm8
2275 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rax), %xmm6
2276 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm6[0],xmm8[0],xmm6[1],xmm8[1],xmm6[2],xmm8[2],xmm6[3],xmm8[3],xmm6[4],xmm8[4],xmm6[5],xmm8[5],xmm6[6],xmm8[6],xmm6[7],xmm8[7]
2277 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm12
2278 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%r9), %xmm7
2279 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%r8), %xmm5
2280 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm5[0],xmm7[0],xmm5[1],xmm7[1],xmm5[2],xmm7[2],xmm5[3],xmm7[3],xmm5[4],xmm7[4],xmm5[5],xmm7[5],xmm5[6],xmm7[6],xmm5[7],xmm7[7]
2281 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm10
2282 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm14, %ymm12, %ymm3
2283 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm13, %ymm10, %ymm4
2284 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm15 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7],ymm4[8,9,10],ymm3[11],ymm4[12,13,14],ymm3[15]
2285 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rcx), %xmm4
2286 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdx), %xmm2
2287 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3],xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
2288 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm9
2289 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rsi), %xmm1
2290 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdi), %xmm0
2291 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
2292 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm14
2293 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm14 = ymm14[8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,28,29,u,u,u,u,u,u,30,31,u,u,u,u,u,u]
2294 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm13 = ymm9[8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,24,25,28,29,20,21,22,23,28,29,30,31,28,29,30,31]
2295 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm13 = ymm14[0],ymm13[1],ymm14[2,3,4],ymm13[5],ymm14[6,7,8],ymm13[9],ymm14[10,11,12],ymm13[13],ymm14[14,15]
2296 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm15 = ymm13[0],ymm15[1],ymm13[2],ymm15[3],ymm13[4],ymm15[5],ymm13[6],ymm15[7]
2297 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm6[8],xmm8[8],xmm6[9],xmm8[9],xmm6[10],xmm8[10],xmm6[11],xmm8[11],xmm6[12],xmm8[12],xmm6[13],xmm8[13],xmm6[14],xmm8[14],xmm6[15],xmm8[15]
2298 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm5[8],xmm7[8],xmm5[9],xmm7[9],xmm5[10],xmm7[10],xmm5[11],xmm7[11],xmm5[12],xmm7[12],xmm5[13],xmm7[13],xmm5[14],xmm7[14],xmm5[15],xmm7[15]
2299 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm6, %ymm6, %ymm6
2300 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm7 = ymm6[0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,16,17,18,19,24,25,28,29,24,25,28,29,28,29,30,31]
2301 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm5, %ymm5
2302 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm8 = ymm5[0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,16,17,18,19,28,29,26,27,28,29,26,27,30,31,30,31]
2303 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm7 = ymm8[0,1,2],ymm7[3],ymm8[4,5,6],ymm7[7],ymm8[8,9,10],ymm7[11],ymm8[12,13,14],ymm7[15]
2304 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm2[8],xmm4[8],xmm2[9],xmm4[9],xmm2[10],xmm4[10],xmm2[11],xmm4[11],xmm2[12],xmm4[12],xmm2[13],xmm4[13],xmm2[14],xmm4[14],xmm2[15],xmm4[15]
2305 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
2306 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm0
2307 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,28,29,u,u,u,u,u,u,30,31,u,u,u,u,u,u]
2308 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
2309 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm4 = ymm2[8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,24,25,28,29,20,21,22,23,28,29,30,31,28,29,30,31]
2310 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm4[1],ymm0[2,3,4],ymm4[5],ymm0[6,7,8],ymm4[9],ymm0[10,11,12],ymm4[13],ymm0[14,15]
2311 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm7[1],ymm0[2],ymm7[3],ymm0[4],ymm7[5],ymm0[6],ymm7[7]
2312 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
2313 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm6, %ymm4
2314 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
2315 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm5, %ymm5
2316 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7],ymm5[8,9,10],ymm4[11],ymm5[12,13,14],ymm4[15]
2317 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm8 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
2318 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm8, %xmm1, %xmm5
2319 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
2320 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm1, %ymm1
2321 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
2322 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm5, %ymm2, %ymm2
2323 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
2324 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm4[1],ymm1[2],ymm4[3],ymm1[4],ymm4[5],ymm1[6],ymm4[7]
2325 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm12, %ymm2
2326 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm10, %ymm4
2327 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7],ymm4[8,9,10],ymm2[11],ymm4[12,13,14],ymm2[15]
2328 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm8, %xmm3, %xmm4
2329 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
2330 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
2331 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm5, %ymm9, %ymm4
2332 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3,4],ymm4[5],ymm3[6,7,8],ymm4[9],ymm3[10,11,12],ymm4[13],ymm3[14,15]
2333 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4],ymm2[5],ymm3[6],ymm2[7]
2334 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
2335 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm3, %ymm3
2336 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2337 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm4, %ymm4
2338 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7],ymm4[8,9,10],ymm3[11],ymm4[12,13,14],ymm3[15]
2339 ; AVX2-FAST-PERLANE-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
2340 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm5, %ymm4, %ymm4
2341 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm8, %xmm11, %xmm5
2342 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm11[0],zero,zero,zero,xmm11[1],zero,zero,zero
2343 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
2344 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7,8],ymm4[9],ymm5[10,11,12],ymm4[13],ymm5[14,15]
2345 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2],ymm3[3],ymm4[4],ymm3[5],ymm4[6],ymm3[7]
2346 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
2347 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm3, 64(%rax)
2348 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, 128(%rax)
2349 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 192(%rax)
2350 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 224(%rax)
2351 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm15, 160(%rax)
2352 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2353 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 96(%rax)
2354 ; AVX2-FAST-PERLANE-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
2355 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 32(%rax)
2356 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
2357 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, (%rax)
2358 ; AVX2-FAST-PERLANE-NEXT: addq $72, %rsp
2359 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
2360 ; AVX2-FAST-PERLANE-NEXT: retq
2361 ;
2362 ; AVX512F-ONLY-SLOW-LABEL: store_i8_stride8_vf32:
2363 ; AVX512F-ONLY-SLOW: # %bb.0:
2364 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2365 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
2366 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r10), %xmm1
2367 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%r10), %xmm11
2368 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rax), %xmm2
2369 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rax), %xmm12
2370 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
2371 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm2, %xmm21
2372 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm1, %xmm22
2373 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
2374 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm0[0,1,2,3,4,6,6,7]
2375 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
2376 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
2377 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r9), %xmm3
2378 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%r9), %xmm13
2379 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%r8), %xmm4
2380 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%r8), %xmm14
2381 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
2382 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm4, %xmm23
2383 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm3, %xmm24
2384 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm2[0,1,2,3,4,5,5,7]
2385 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm2[0,1,2,3,6,5,7,7]
2386 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm5, %ymm5
2387 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,2,2,3,4,6,6,7]
2388 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm5[0,1,2],ymm1[3],ymm5[4,5,6],ymm1[7],ymm5[8,9,10],ymm1[11],ymm5[12,13,14],ymm1[15]
2389 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm0[0,0,2,1,4,5,6,7]
2390 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,2,3,4,5,6,7]
2391 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm5, %ymm0
2392 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
2393 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm2[0,1,1,3,4,5,6,7]
2394 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,1,3,3,4,5,6,7]
2395 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm5, %ymm2
2396 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,2,1,4,4,6,5]
2397 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7],ymm2[8,9,10],ymm0[11],ymm2[12,13,14],ymm0[15]
2398 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm16
2399 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rsi), %xmm1
2400 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdi), %xmm7
2401 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm7[8],xmm1[8],xmm7[9],xmm1[9],xmm7[10],xmm1[10],xmm7[11],xmm1[11],xmm7[12],xmm1[12],xmm7[13],xmm1[13],xmm7[14],xmm1[14],xmm7[15],xmm1[15]
2402 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm1, %xmm25
2403 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,2,3]
2404 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
2405 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[3,3,3,3]
2406 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
2407 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rcx), %xmm8
2408 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa (%rdx), %xmm9
2409 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm9[8],xmm8[8],xmm9[9],xmm8[9],xmm9[10],xmm8[10],xmm9[11],xmm8[11],xmm9[12],xmm8[12],xmm9[13],xmm8[13],xmm9[14],xmm8[14],xmm9[15],xmm8[15]
2410 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm15 = xmm10[0,1,2,3,4,4,6,5]
2411 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm10[0,1,2,3,4,6,6,7]
2412 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm15, %ymm3
2413 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
2414 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm3[2,1,3,3,6,5,7,7]
2415 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
2416 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2417 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
2418 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2419 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
2420 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm10[0,0,2,1,4,5,6,7]
2421 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero
2422 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm10[0,2,2,3,4,5,6,7]
2423 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero
2424 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
2425 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3,4],ymm2[5],ymm0[6,7,8],ymm2[9],ymm0[10,11,12],ymm2[13],ymm0[14,15]
2426 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm18
2427 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
2428 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
2429 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm0[0,1,2,3,4,6,6,7]
2430 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
2431 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
2432 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,4,5,5,7]
2433 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm10 = xmm3[0,1,2,3,6,5,7,7]
2434 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm2, %ymm10
2435 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rcx), %xmm5
2436 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm0[0,0,2,1,4,5,6,7]
2437 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,2,3,4,5,6,7]
2438 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm15, %ymm0
2439 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
2440 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm10[0,2,2,3,4,6,6,7]
2441 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm10[0,1,2],ymm1[3],ymm10[4,5,6],ymm1[7],ymm10[8,9,10],ymm1[11],ymm10[12,13,14],ymm1[15]
2442 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm10 = xmm3[0,1,1,3,4,5,6,7]
2443 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,3,3,4,5,6,7]
2444 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm10, %ymm3
2445 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
2446 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
2447 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0,1,2],ymm0[3],ymm3[4,5,6],ymm0[7],ymm3[8,9,10],ymm0[11],ymm3[12,13,14],ymm0[15]
2448 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rdx), %xmm10
2449 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm17
2450 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm10[0],xmm5[0],xmm10[1],xmm5[1],xmm10[2],xmm5[2],xmm10[3],xmm5[3],xmm10[4],xmm5[4],xmm10[5],xmm5[5],xmm10[6],xmm5[6],xmm10[7],xmm5[7]
2451 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm3[0,1,2,3,4,4,6,5]
2452 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm3[0,1,2,3,4,6,6,7]
2453 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm15
2454 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rsi), %xmm4
2455 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa 16(%rdi), %xmm2
2456 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3],xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
2457 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm19 = xmm0[2,3,2,3]
2458 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm19 = xmm19[0],zero,zero,zero,xmm19[1],zero,zero,zero
2459 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm20 = xmm0[3,3,3,3]
2460 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm20 = xmm20[0],zero,zero,zero,xmm20[1],zero,zero,zero
2461 ; AVX512F-ONLY-SLOW-NEXT: vinserti32x4 $1, %xmm20, %ymm19, %ymm1
2462 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm15 = ymm15[2,1,3,3,6,5,7,7]
2463 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm15[1],ymm1[2,3,4],ymm15[5],ymm1[6,7,8],ymm15[9],ymm1[10,11,12],ymm15[13],ymm1[14,15]
2464 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm15 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2465 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
2466 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm3[0,0,2,1,4,5,6,7]
2467 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
2468 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,2,2,3,4,5,6,7]
2469 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero
2470 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2471 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm15, %ymm0
2472 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm6, %ymm3
2473 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm3[1],ymm0[2,3,4],ymm3[5],ymm0[6,7,8],ymm3[9],ymm0[10,11,12],ymm3[13],ymm0[14,15]
2474 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm15
2475 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm12[8],xmm11[8],xmm12[9],xmm11[9],xmm12[10],xmm11[10],xmm12[11],xmm11[11],xmm12[12],xmm11[12],xmm12[13],xmm11[13],xmm12[14],xmm11[14],xmm12[15],xmm11[15]
2476 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
2477 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm0[0,1,2,3,4,6,6,7]
2478 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm1, %ymm1
2479 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
2480 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm6 = xmm3[0,1,2,3,4,5,5,7]
2481 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm3[0,1,2,3,6,5,7,7]
2482 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm6, %ymm6
2483 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm11 = xmm0[0,0,2,1,4,5,6,7]
2484 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,2,3,4,5,6,7]
2485 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm11, %ymm0
2486 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
2487 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm6 = ymm6[0,2,2,3,4,6,6,7]
2488 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm6[0,1,2],ymm1[3],ymm6[4,5,6],ymm1[7],ymm6[8,9,10],ymm1[11],ymm6[12,13,14],ymm1[15]
2489 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm3[0,1,1,3,4,5,6,7]
2490 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,3,3,4,5,6,7]
2491 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm6, %ymm3
2492 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
2493 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
2494 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0,1,2],ymm0[3],ymm3[4,5,6],ymm0[7],ymm3[8,9,10],ymm0[11],ymm3[12,13,14],ymm0[15]
2495 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm11
2496 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm4[8],xmm2[9],xmm4[9],xmm2[10],xmm4[10],xmm2[11],xmm4[11],xmm2[12],xmm4[12],xmm2[13],xmm4[13],xmm2[14],xmm4[14],xmm2[15],xmm4[15]
2497 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,2,3]
2498 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
2499 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[3,3,3,3]
2500 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
2501 ; AVX512F-ONLY-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm10[8],xmm5[8],xmm10[9],xmm5[9],xmm10[10],xmm5[10],xmm10[11],xmm5[11],xmm10[12],xmm5[12],xmm10[13],xmm5[13],xmm10[14],xmm5[14],xmm10[15],xmm5[15]
2502 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,4,6,5]
2503 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm3[0,1,2,3,4,6,6,7]
2504 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm4
2505 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
2506 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm4[2,1,3,3,6,5,7,7]
2507 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
2508 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2509 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
2510 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2511 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm3[0,0,2,1,4,5,6,7]
2512 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm4 = xmm4[0],zero,xmm4[1],zero
2513 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
2514 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm3[0,2,2,3,4,5,6,7]
2515 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero
2516 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
2517 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3,4],ymm2[5],ymm0[6,7,8],ymm2[9],ymm0[10,11,12],ymm2[13],ymm0[14,15]
2518 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
2519 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm21, %xmm1
2520 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm22, %xmm2
2521 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
2522 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm1[0,1,2,3,4,4,6,5]
2523 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm1[0,1,2,3,4,6,6,7]
2524 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
2525 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm23, %xmm3
2526 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm24, %xmm4
2527 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
2528 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5,5,7]
2529 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm3[0,1,2,3,6,5,7,7]
2530 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm4
2531 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm1[0,0,2,1,4,5,6,7]
2532 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
2533 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm5, %ymm1
2534 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
2535 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,2,2,3,4,6,6,7]
2536 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7],ymm4[8,9,10],ymm2[11],ymm4[12,13,14],ymm2[15]
2537 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm3[0,1,1,3,4,5,6,7]
2538 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,3,3,4,5,6,7]
2539 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm4, %ymm3
2540 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
2541 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
2542 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7],ymm3[8,9,10],ymm1[11],ymm3[12,13,14],ymm1[15]
2543 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
2544 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %xmm25, %xmm2
2545 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm7[0],xmm2[0],xmm7[1],xmm2[1],xmm7[2],xmm2[2],xmm7[3],xmm2[3],xmm7[4],xmm2[4],xmm7[5],xmm2[5],xmm7[6],xmm2[6],xmm7[7],xmm2[7]
2546 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[2,3,2,3]
2547 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
2548 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[3,3,3,3]
2549 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
2550 ; AVX512F-ONLY-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3],xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
2551 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm6 = xmm5[0,1,2,3,4,4,6,5]
2552 ; AVX512F-ONLY-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm5[0,1,2,3,4,6,6,7]
2553 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm6, %ymm6
2554 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
2555 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm6[2,1,3,3,6,5,7,7]
2556 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3,4],ymm4[5],ymm3[6,7,8],ymm4[9],ymm3[10,11,12],ymm4[13],ymm3[14,15]
2557 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
2558 ; AVX512F-ONLY-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,1,1]
2559 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
2560 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm5[0,0,2,1,4,5,6,7]
2561 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
2562 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
2563 ; AVX512F-ONLY-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm5[0,2,2,3,4,5,6,7]
2564 ; AVX512F-ONLY-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm4 = xmm4[0],zero,xmm4[1],zero
2565 ; AVX512F-ONLY-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm6, %ymm4
2566 ; AVX512F-ONLY-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2,3,4],ymm4[5],ymm2[6,7,8],ymm4[9],ymm2[10,11,12],ymm4[13],ymm2[14,15]
2567 ; AVX512F-ONLY-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
2568 ; AVX512F-ONLY-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2569 ; AVX512F-ONLY-SLOW-NEXT: movw $-21846, %cx # imm = 0xAAAA
2570 ; AVX512F-ONLY-SLOW-NEXT: kmovw %ecx, %k1
2571 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm16, %zmm18 {%k1}
2572 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm17, %zmm15 {%k1}
2573 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm11, %zmm0 {%k1}
2574 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa32 %zmm1, %zmm2 {%k1}
2575 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm2, (%rax)
2576 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm0, 192(%rax)
2577 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm15, 128(%rax)
2578 ; AVX512F-ONLY-SLOW-NEXT: vmovdqa64 %zmm18, 64(%rax)
2579 ; AVX512F-ONLY-SLOW-NEXT: vzeroupper
2580 ; AVX512F-ONLY-SLOW-NEXT: retq
2581 ;
2582 ; AVX512F-ONLY-FAST-LABEL: store_i8_stride8_vf32:
2583 ; AVX512F-ONLY-FAST: # %bb.0:
2584 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2585 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
2586 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r10), %xmm1
2587 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rax), %xmm2
2588 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
2589 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm2, %xmm21
2590 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm1, %xmm22
2591 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
2592 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,16,17,18,19,24,25,28,29,24,25,28,29,28,29,30,31]
2593 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r9), %xmm3
2594 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%r8), %xmm4
2595 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
2596 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm4, %xmm24
2597 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm3, %xmm25
2598 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
2599 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
2600 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm4, %ymm2, %ymm3
2601 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm4, %ymm20
2602 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7],ymm3[8,9,10],ymm1[11],ymm3[12,13,14],ymm1[15]
2603 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
2604 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm3, %ymm0, %ymm0
2605 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm3, %ymm26
2606 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
2607 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm3, %ymm2, %ymm2
2608 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm3, %ymm27
2609 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7],ymm2[8,9,10],ymm0[11],ymm2[12,13,14],ymm0[15]
2610 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm23
2611 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rcx), %xmm1
2612 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdx), %xmm10
2613 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm10[8],xmm1[8],xmm10[9],xmm1[9],xmm10[10],xmm1[10],xmm10[11],xmm1[11],xmm10[12],xmm1[12],xmm10[13],xmm1[13],xmm10[14],xmm1[14],xmm10[15],xmm1[15]
2614 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm1, %xmm30
2615 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
2616 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rsi), %xmm12
2617 ; AVX512F-ONLY-FAST-NEXT: vmovdqa (%rdi), %xmm13
2618 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
2619 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm2
2620 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
2621 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm3, %ymm2, %ymm2
2622 ; AVX512F-ONLY-FAST-NEXT: vmovdqa %ymm3, %ymm14
2623 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
2624 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm4, %ymm0, %ymm3
2625 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm4, %ymm28
2626 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2,3,4],ymm3[5],ymm2[6,7,8],ymm3[9],ymm2[10,11,12],ymm3[13],ymm2[14,15]
2627 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
2628 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm3, %ymm0, %ymm0
2629 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm3, %ymm29
2630 ; AVX512F-ONLY-FAST-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
2631 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} xmm9 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
2632 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm9, %xmm1, %xmm1
2633 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm1, %ymm3, %ymm1
2634 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3,4],ymm0[5],ymm1[6,7,8],ymm0[9],ymm1[10,11,12],ymm0[13],ymm1[14,15]
2635 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm19
2636 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 16(%r10), %xmm8
2637 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 16(%rax), %xmm11
2638 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm11[0],xmm8[0],xmm11[1],xmm8[1],xmm11[2],xmm8[2],xmm11[3],xmm8[3],xmm11[4],xmm8[4],xmm11[5],xmm8[5],xmm11[6],xmm8[6],xmm11[7],xmm8[7]
2639 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
2640 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 16(%r9), %xmm7
2641 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 16(%r8), %xmm6
2642 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3],xmm6[4],xmm7[4],xmm6[5],xmm7[5],xmm6[6],xmm7[6],xmm6[7],xmm7[7]
2643 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
2644 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm0[u,u,u,u,u,u,8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,28,29,u,u,u,u,u,u,30,31]
2645 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm1[0,1,2,3,8,9,u,u,8,9,10,11,10,11,u,u,16,17,18,19,28,29,u,u,28,29,26,27,30,31,u,u]
2646 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7],ymm5[8,9,10],ymm4[11],ymm5[12,13,14],ymm4[15]
2647 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,0,1,u,u,u,u,u,u,2,3,u,u,u,u,u,u,20,21,u,u,u,u,u,u,22,23]
2648 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[0,1,2,3,0,1,u,u,8,9,10,11,2,3,u,u,20,21,18,19,20,21,u,u,24,25,26,27,22,23,u,u]
2649 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
2650 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm17
2651 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 16(%rsi), %xmm5
2652 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 16(%rdi), %xmm4
2653 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3],xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
2654 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm1
2655 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm1, %ymm1
2656 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm14, %ymm16
2657 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 16(%rcx), %xmm3
2658 ; AVX512F-ONLY-FAST-NEXT: vmovdqa 16(%rdx), %xmm2
2659 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3],xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
2660 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm15, %ymm15, %ymm15
2661 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm15[u,u,8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,28,29,u,u,u,u,u,u,30,31,u,u,u,u]
2662 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm14[1],ymm1[2,3,4],ymm14[5],ymm1[6,7,8],ymm14[9],ymm1[10,11,12],ymm14[13],ymm1[14,15]
2663 ; AVX512F-ONLY-FAST-NEXT: vpmovzxwq {{.*#+}} xmm14 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2664 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm9, %xmm0, %xmm0
2665 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm9, %xmm31
2666 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm14, %ymm0
2667 ; AVX512F-ONLY-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm15[u,u,0,1,u,u,u,u,u,u,2,3,u,u,u,u,u,u,20,21,u,u,u,u,u,u,22,23,u,u,u,u]
2668 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm14[1],ymm0[2,3,4],ymm14[5],ymm0[6,7,8],ymm14[9],ymm0[10,11,12],ymm14[13],ymm0[14,15]
2669 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm18
2670 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm11[8],xmm8[8],xmm11[9],xmm8[9],xmm11[10],xmm8[10],xmm11[11],xmm8[11],xmm11[12],xmm8[12],xmm11[13],xmm8[13],xmm11[14],xmm8[14],xmm11[15],xmm8[15]
2671 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm6[8],xmm7[8],xmm6[9],xmm7[9],xmm6[10],xmm7[10],xmm6[11],xmm7[11],xmm6[12],xmm7[12],xmm6[13],xmm7[13],xmm6[14],xmm7[14],xmm6[15],xmm7[15]
2672 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
2673 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
2674 ; AVX512F-ONLY-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
2675 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm6
2676 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm20, %ymm11
2677 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm11, %ymm1, %ymm7
2678 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm7[0,1,2],ymm6[3],ymm7[4,5,6],ymm6[7],ymm7[8,9,10],ymm6[11],ymm7[12,13,14],ymm6[15]
2679 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm26, %ymm7
2680 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm7, %ymm0, %ymm0
2681 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm27, %ymm14
2682 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm1, %ymm1
2683 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
2684 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm0, %zmm20
2685 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm2[8],xmm3[8],xmm2[9],xmm3[9],xmm2[10],xmm3[10],xmm2[11],xmm3[11],xmm2[12],xmm3[12],xmm2[13],xmm3[13],xmm2[14],xmm3[14],xmm2[15],xmm3[15]
2686 ; AVX512F-ONLY-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm5[8],xmm4[9],xmm5[9],xmm4[10],xmm5[10],xmm4[11],xmm5[11],xmm4[12],xmm5[12],xmm4[13],xmm5[13],xmm4[14],xmm5[14],xmm4[15],xmm5[15]
2687 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
2688 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm3
2689 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm16, %ymm15
2690 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm3, %ymm3
2691 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm28, %ymm6
2692 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm1, %ymm4
2693 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3,4],ymm4[5],ymm3[6,7,8],ymm4[9],ymm3[10,11,12],ymm4[13],ymm3[14,15]
2694 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %ymm29, %ymm9
2695 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm9, %ymm1, %ymm1
2696 ; AVX512F-ONLY-FAST-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
2697 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm31, %xmm0
2698 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm0, %xmm2, %xmm2
2699 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
2700 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7,8],ymm1[9],ymm2[10,11,12],ymm1[13],ymm2[14,15]
2701 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm1, %zmm1
2702 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm21, %xmm2
2703 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm22, %xmm3
2704 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3],xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
2705 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm24, %xmm3
2706 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm25, %xmm4
2707 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
2708 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
2709 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm8, %ymm2, %ymm4
2710 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm3
2711 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm11, %ymm3, %ymm5
2712 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7],ymm5[8,9,10],ymm4[11],ymm5[12,13,14],ymm4[15]
2713 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm2
2714 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm14, %ymm3, %ymm3
2715 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7],ymm3[8,9,10],ymm2[11],ymm3[12,13,14],ymm2[15]
2716 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm4, %zmm2, %zmm2
2717 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %xmm30, %xmm3
2718 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm10[0],xmm3[0],xmm10[1],xmm3[1],xmm10[2],xmm3[2],xmm10[3],xmm3[3],xmm10[4],xmm3[4],xmm10[5],xmm3[5],xmm10[6],xmm3[6],xmm10[7],xmm3[7]
2719 ; AVX512F-ONLY-FAST-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3],xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
2720 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm5
2721 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm15, %ymm5, %ymm5
2722 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm3
2723 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm6, %ymm3, %ymm6
2724 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7,8],ymm6[9],ymm5[10,11,12],ymm6[13],ymm5[14,15]
2725 ; AVX512F-ONLY-FAST-NEXT: vpshufb %ymm9, %ymm3, %ymm3
2726 ; AVX512F-ONLY-FAST-NEXT: vpshufb %xmm0, %xmm4, %xmm6
2727 ; AVX512F-ONLY-FAST-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
2728 ; AVX512F-ONLY-FAST-NEXT: vinserti128 $1, %xmm6, %ymm4, %ymm4
2729 ; AVX512F-ONLY-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3,4],ymm3[5],ymm4[6,7,8],ymm3[9],ymm4[10,11,12],ymm3[13],ymm4[14,15]
2730 ; AVX512F-ONLY-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm3, %zmm3
2731 ; AVX512F-ONLY-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2732 ; AVX512F-ONLY-FAST-NEXT: movw $-21846, %cx # imm = 0xAAAA
2733 ; AVX512F-ONLY-FAST-NEXT: kmovw %ecx, %k1
2734 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm23, %zmm19 {%k1}
2735 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm17, %zmm18 {%k1}
2736 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm20, %zmm1 {%k1}
2737 ; AVX512F-ONLY-FAST-NEXT: vmovdqa32 %zmm2, %zmm3 {%k1}
2738 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm3, (%rax)
2739 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm1, 192(%rax)
2740 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm18, 128(%rax)
2741 ; AVX512F-ONLY-FAST-NEXT: vmovdqa64 %zmm19, 64(%rax)
2742 ; AVX512F-ONLY-FAST-NEXT: vzeroupper
2743 ; AVX512F-ONLY-FAST-NEXT: retq
2744 ;
2745 ; AVX512DQ-SLOW-LABEL: store_i8_stride8_vf32:
2746 ; AVX512DQ-SLOW: # %bb.0:
2747 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2748 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
2749 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rsi), %xmm1
2750 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rsi), %xmm10
2751 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdi), %xmm2
2752 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rdi), %xmm11
2753 ; AVX512DQ-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
2754 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm2, %xmm19
2755 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm1, %xmm20
2756 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[2,3,2,3]
2757 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
2758 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[3,3,3,3]
2759 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
2760 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm5
2761 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rcx), %xmm1
2762 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rcx), %xmm12
2763 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rdx), %xmm2
2764 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rdx), %xmm13
2765 ; AVX512DQ-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
2766 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm2, %xmm21
2767 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm1, %xmm22
2768 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm6[0,1,2,3,4,4,6,5]
2769 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm8 = xmm6[0,1,2,3,4,6,6,7]
2770 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm7, %ymm7
2771 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[2,1,3,3,6,5,7,7]
2772 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0],ymm7[1],ymm5[2,3,4],ymm7[5],ymm5[6,7,8],ymm7[9],ymm5[10,11,12],ymm7[13],ymm5[14,15]
2773 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2774 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
2775 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2776 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm7, %ymm0
2777 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm6[0,0,2,1,4,5,6,7]
2778 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm7[0],zero,xmm7[1],zero
2779 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,2,2,3,4,5,6,7]
2780 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
2781 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
2782 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm6[1],ymm0[2,3,4],ymm6[5],ymm0[6,7,8],ymm6[9],ymm0[10,11,12],ymm6[13],ymm0[14,15]
2783 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm0, %zmm16
2784 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r10), %xmm5
2785 ; AVX512DQ-SLOW-NEXT: vmovdqa (%rax), %xmm6
2786 ; AVX512DQ-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm6[8],xmm5[8],xmm6[9],xmm5[9],xmm6[10],xmm5[10],xmm6[11],xmm5[11],xmm6[12],xmm5[12],xmm6[13],xmm5[13],xmm6[14],xmm5[14],xmm6[15],xmm5[15]
2787 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm0[0,1,2,3,4,4,6,5]
2788 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm8 = xmm0[0,1,2,3,4,6,6,7]
2789 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm7, %ymm9
2790 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r9), %xmm7
2791 ; AVX512DQ-SLOW-NEXT: vmovdqa (%r8), %xmm8
2792 ; AVX512DQ-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
2793 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm15 = xmm14[0,1,2,3,4,5,5,7]
2794 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm14[0,1,2,3,6,5,7,7]
2795 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm15, %ymm1
2796 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[0,2,2,3,4,6,6,7]
2797 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
2798 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm9[3],ymm1[4,5,6],ymm9[7],ymm1[8,9,10],ymm9[11],ymm1[12,13,14],ymm9[15]
2799 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm0[0,0,2,1,4,5,6,7]
2800 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,2,3,4,5,6,7]
2801 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm9, %ymm0
2802 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
2803 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm14[0,1,1,3,4,5,6,7]
2804 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm14[2,1,3,3,4,5,6,7]
2805 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm9, %ymm9
2806 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[0,0,2,1,4,4,6,5]
2807 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0,1,2],ymm0[3],ymm9[4,5,6],ymm0[7],ymm9[8,9,10],ymm0[11],ymm9[12,13,14],ymm0[15]
2808 ; AVX512DQ-SLOW-NEXT: movw $-21846, %cx # imm = 0xAAAA
2809 ; AVX512DQ-SLOW-NEXT: kmovw %ecx, %k1
2810 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm16 {%k1}
2811 ; AVX512DQ-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3],xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
2812 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,2,3]
2813 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
2814 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm0[3,3,3,3]
2815 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm9 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
2816 ; AVX512DQ-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3],xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
2817 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm14 = xmm15[0,1,2,3,4,4,6,5]
2818 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm15[0,1,2,3,4,6,6,7]
2819 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm14, %ymm2
2820 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm1, %ymm1
2821 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,3,3,6,5,7,7]
2822 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
2823 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%r10), %xmm14
2824 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2825 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
2826 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2827 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm15[0,0,2,1,4,5,6,7]
2828 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm9[0],zero,xmm9[1],zero
2829 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
2830 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm15[0,2,2,3,4,5,6,7]
2831 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero
2832 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm9, %ymm2
2833 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3,4],ymm2[5],ymm0[6,7,8],ymm2[9],ymm0[10,11,12],ymm2[13],ymm0[14,15]
2834 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%rax), %xmm15
2835 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm17
2836 ; AVX512DQ-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3],xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
2837 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,4,4,6,5]
2838 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm1[0,1,2,3,4,6,6,7]
2839 ; AVX512DQ-SLOW-NEXT: vinserti32x4 $1, %xmm2, %ymm0, %ymm18
2840 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%r9), %xmm3
2841 ; AVX512DQ-SLOW-NEXT: vmovdqa 16(%r8), %xmm9
2842 ; AVX512DQ-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm9[0],xmm3[0],xmm9[1],xmm3[1],xmm9[2],xmm3[2],xmm9[3],xmm3[3],xmm9[4],xmm3[4],xmm9[5],xmm3[5],xmm9[6],xmm3[6],xmm9[7],xmm3[7]
2843 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm0[0,1,2,3,4,5,5,7]
2844 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm0[0,1,2,3,6,5,7,7]
2845 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm2, %ymm2
2846 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm1[0,0,2,1,4,5,6,7]
2847 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
2848 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm4, %ymm1
2849 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm0[0,1,1,3,4,5,6,7]
2850 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[2,1,3,3,4,5,6,7]
2851 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm4, %ymm0
2852 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm18[0,2,2,3,4,6,6,7]
2853 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
2854 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm4[3],ymm2[4,5,6],ymm4[7],ymm2[8,9,10],ymm4[11],ymm2[12,13,14],ymm4[15]
2855 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
2856 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
2857 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3],ymm0[4,5,6],ymm1[7],ymm0[8,9,10],ymm1[11],ymm0[12,13,14],ymm1[15]
2858 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm2, %zmm0, %zmm17 {%k1}
2859 ; AVX512DQ-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm11[8],xmm10[8],xmm11[9],xmm10[9],xmm11[10],xmm10[10],xmm11[11],xmm10[11],xmm11[12],xmm10[12],xmm11[13],xmm10[13],xmm11[14],xmm10[14],xmm11[15],xmm10[15]
2860 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,2,3]
2861 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
2862 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[3,3,3,3]
2863 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
2864 ; AVX512DQ-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
2865 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm10 = xmm4[0,1,2,3,4,4,6,5]
2866 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm4[0,1,2,3,4,6,6,7]
2867 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm10, %ymm10
2868 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
2869 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm10[2,1,3,3,6,5,7,7]
2870 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
2871 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2872 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
2873 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2874 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm10 = xmm4[0,0,2,1,4,5,6,7]
2875 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm10[0],zero,xmm10[1],zero
2876 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
2877 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm4[0,2,2,3,4,5,6,7]
2878 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero
2879 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm10, %ymm2
2880 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3,4],ymm2[5],ymm0[6,7,8],ymm2[9],ymm0[10,11,12],ymm2[13],ymm0[14,15]
2881 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm10
2882 ; AVX512DQ-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm15[8],xmm14[8],xmm15[9],xmm14[9],xmm15[10],xmm14[10],xmm15[11],xmm14[11],xmm15[12],xmm14[12],xmm15[13],xmm14[13],xmm15[14],xmm14[14],xmm15[15],xmm14[15]
2883 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
2884 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm0[0,1,2,3,4,6,6,7]
2885 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
2886 ; AVX512DQ-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm9[8],xmm3[8],xmm9[9],xmm3[9],xmm9[10],xmm3[10],xmm9[11],xmm3[11],xmm9[12],xmm3[12],xmm9[13],xmm3[13],xmm9[14],xmm3[14],xmm9[15],xmm3[15]
2887 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm2[0,1,2,3,4,5,5,7]
2888 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm2[0,1,2,3,6,5,7,7]
2889 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
2890 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm0[0,0,2,1,4,5,6,7]
2891 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,2,2,3,4,5,6,7]
2892 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm4, %ymm0
2893 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
2894 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,2,2,3,4,6,6,7]
2895 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7],ymm3[8,9,10],ymm1[11],ymm3[12,13,14],ymm1[15]
2896 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm2[0,1,1,3,4,5,6,7]
2897 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,1,3,3,4,5,6,7]
2898 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
2899 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
2900 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,2,1,4,4,6,5]
2901 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7],ymm2[8,9,10],ymm0[11],ymm2[12,13,14],ymm0[15]
2902 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm10 {%k1}
2903 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm19, %xmm0
2904 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm20, %xmm1
2905 ; AVX512DQ-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
2906 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,2,3]
2907 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
2908 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[3,3,3,3]
2909 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
2910 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm21, %xmm3
2911 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %xmm22, %xmm4
2912 ; AVX512DQ-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
2913 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,4,6,5]
2914 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm3[0,1,2,3,4,6,6,7]
2915 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm4, %ymm4
2916 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
2917 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm4[2,1,3,3,6,5,7,7]
2918 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
2919 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2920 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
2921 ; AVX512DQ-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
2922 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm3[0,0,2,1,4,5,6,7]
2923 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm4 = xmm4[0],zero,xmm4[1],zero
2924 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
2925 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm3[0,2,2,3,4,5,6,7]
2926 ; AVX512DQ-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm2[0],zero,xmm2[1],zero
2927 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
2928 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3,4],ymm2[5],ymm0[6,7,8],ymm2[9],ymm0[10,11,12],ymm2[13],ymm0[14,15]
2929 ; AVX512DQ-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
2930 ; AVX512DQ-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3],xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
2931 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm1[0,1,2,3,4,4,6,5]
2932 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm1[0,1,2,3,4,6,6,7]
2933 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
2934 ; AVX512DQ-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
2935 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5,5,7]
2936 ; AVX512DQ-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm3[0,1,2,3,6,5,7,7]
2937 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm4
2938 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm1[0,0,2,1,4,5,6,7]
2939 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
2940 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm5, %ymm1
2941 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
2942 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,2,2,3,4,6,6,7]
2943 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7],ymm4[8,9,10],ymm2[11],ymm4[12,13,14],ymm2[15]
2944 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm3[0,1,1,3,4,5,6,7]
2945 ; AVX512DQ-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,3,3,4,5,6,7]
2946 ; AVX512DQ-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm4, %ymm3
2947 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
2948 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
2949 ; AVX512DQ-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7],ymm3[8,9,10],ymm1[11],ymm3[12,13,14],ymm1[15]
2950 ; AVX512DQ-SLOW-NEXT: vinserti32x8 $1, %ymm2, %zmm1, %zmm0 {%k1}
2951 ; AVX512DQ-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
2952 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, (%rax)
2953 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm10, 192(%rax)
2954 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm17, 128(%rax)
2955 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm16, 64(%rax)
2956 ; AVX512DQ-SLOW-NEXT: vzeroupper
2957 ; AVX512DQ-SLOW-NEXT: retq
2958 ;
2959 ; AVX512DQ-FAST-LABEL: store_i8_stride8_vf32:
2960 ; AVX512DQ-FAST: # %bb.0:
2961 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
2962 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
2963 ; AVX512DQ-FAST-NEXT: vmovdqa (%rcx), %xmm1
2964 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdx), %xmm2
2965 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
2966 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm2, %xmm19
2967 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm1, %xmm20
2968 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
2969 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
2970 ; AVX512DQ-FAST-NEXT: vpshufb %ymm2, %ymm0, %ymm1
2971 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm2, %ymm18
2972 ; AVX512DQ-FAST-NEXT: vmovdqa (%rsi), %xmm3
2973 ; AVX512DQ-FAST-NEXT: vmovdqa (%rdi), %xmm4
2974 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
2975 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm4, %xmm21
2976 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm3, %xmm22
2977 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm3
2978 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
2979 ; AVX512DQ-FAST-NEXT: vpshufb %ymm4, %ymm3, %ymm3
2980 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm4, %ymm5
2981 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2,3,4],ymm1[5],ymm3[6,7,8],ymm1[9],ymm3[10,11,12],ymm1[13],ymm3[14,15]
2982 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
2983 ; AVX512DQ-FAST-NEXT: vpshufb %ymm3, %ymm0, %ymm0
2984 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm3, %ymm23
2985 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} xmm4 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
2986 ; AVX512DQ-FAST-NEXT: vpshufb %xmm4, %xmm2, %xmm3
2987 ; AVX512DQ-FAST-NEXT: vmovdqa %xmm4, %xmm10
2988 ; AVX512DQ-FAST-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
2989 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
2990 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3,4],ymm0[5],ymm2[6,7,8],ymm0[9],ymm2[10,11,12],ymm0[13],ymm2[14,15]
2991 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm17
2992 ; AVX512DQ-FAST-NEXT: vmovdqa (%r10), %xmm1
2993 ; AVX512DQ-FAST-NEXT: vmovdqa (%rax), %xmm11
2994 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm11[8],xmm1[8],xmm11[9],xmm1[9],xmm11[10],xmm1[10],xmm11[11],xmm1[11],xmm11[12],xmm1[12],xmm11[13],xmm1[13],xmm11[14],xmm1[14],xmm11[15],xmm1[15]
2995 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm1, %xmm28
2996 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
2997 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm2 = [0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
2998 ; AVX512DQ-FAST-NEXT: vpshufb %ymm2, %ymm0, %ymm1
2999 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm2, %ymm24
3000 ; AVX512DQ-FAST-NEXT: vmovdqa (%r9), %xmm12
3001 ; AVX512DQ-FAST-NEXT: vmovdqa (%r8), %xmm13
3002 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
3003 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
3004 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
3005 ; AVX512DQ-FAST-NEXT: vpshufb %ymm4, %ymm2, %ymm3
3006 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm4, %ymm25
3007 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7],ymm3[8,9,10],ymm1[11],ymm3[12,13,14],ymm1[15]
3008 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
3009 ; AVX512DQ-FAST-NEXT: vpshufb %ymm3, %ymm0, %ymm0
3010 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm3, %ymm26
3011 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
3012 ; AVX512DQ-FAST-NEXT: vpshufb %ymm3, %ymm2, %ymm2
3013 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm3, %ymm27
3014 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7],ymm2[8,9,10],ymm0[11],ymm2[12,13,14],ymm0[15]
3015 ; AVX512DQ-FAST-NEXT: movw $-21846, %r11w # imm = 0xAAAA
3016 ; AVX512DQ-FAST-NEXT: kmovw %r11d, %k1
3017 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm17 {%k1}
3018 ; AVX512DQ-FAST-NEXT: vmovdqa 16(%rsi), %xmm9
3019 ; AVX512DQ-FAST-NEXT: vmovdqa 16(%rdi), %xmm8
3020 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3],xmm8[4],xmm9[4],xmm8[5],xmm9[5],xmm8[6],xmm9[6],xmm8[7],xmm9[7]
3021 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm0
3022 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm0, %ymm3
3023 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm5, %ymm30
3024 ; AVX512DQ-FAST-NEXT: vmovdqa 16(%rcx), %xmm7
3025 ; AVX512DQ-FAST-NEXT: vmovdqa 16(%rdx), %xmm6
3026 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3],xmm6[4],xmm7[4],xmm6[5],xmm7[5],xmm6[6],xmm7[6],xmm6[7],xmm7[7]
3027 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm4
3028 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm4[u,u,8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,28,29,u,u,u,u,u,u,30,31,u,u,u,u]
3029 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0],ymm5[1],ymm3[2,3,4],ymm5[5],ymm3[6,7,8],ymm5[9],ymm3[10,11,12],ymm5[13],ymm3[14,15]
3030 ; AVX512DQ-FAST-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
3031 ; AVX512DQ-FAST-NEXT: vpshufb %xmm10, %xmm2, %xmm2
3032 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm10, %xmm29
3033 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm2, %ymm5, %ymm2
3034 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm4 = ymm4[u,u,0,1,u,u,u,u,u,u,2,3,u,u,u,u,u,u,20,21,u,u,u,u,u,u,22,23,u,u,u,u]
3035 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2,3,4],ymm4[5],ymm2[6,7,8],ymm4[9],ymm2[10,11,12],ymm4[13],ymm2[14,15]
3036 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm16
3037 ; AVX512DQ-FAST-NEXT: vmovdqa 16(%r10), %xmm5
3038 ; AVX512DQ-FAST-NEXT: vmovdqa 16(%rax), %xmm4
3039 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3],xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
3040 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm0
3041 ; AVX512DQ-FAST-NEXT: vmovdqa 16(%r9), %xmm3
3042 ; AVX512DQ-FAST-NEXT: vmovdqa 16(%r8), %xmm2
3043 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3],xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
3044 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm15, %ymm15, %ymm15
3045 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm1 = ymm0[u,u,u,u,u,u,8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,28,29,u,u,u,u,u,u,30,31]
3046 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm15[0,1,2,3,8,9,u,u,8,9,10,11,10,11,u,u,16,17,18,19,28,29,u,u,28,29,26,27,30,31,u,u]
3047 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm14[0,1,2],ymm1[3],ymm14[4,5,6],ymm1[7],ymm14[8,9,10],ymm1[11],ymm14[12,13,14],ymm1[15]
3048 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,0,1,u,u,u,u,u,u,2,3,u,u,u,u,u,u,20,21,u,u,u,u,u,u,22,23]
3049 ; AVX512DQ-FAST-NEXT: vpshufb {{.*#+}} ymm14 = ymm15[0,1,2,3,0,1,u,u,8,9,10,11,2,3,u,u,20,21,18,19,20,21,u,u,24,25,26,27,22,23,u,u]
3050 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm14[0,1,2],ymm0[3],ymm14[4,5,6],ymm0[7],ymm14[8,9,10],ymm0[11],ymm14[12,13,14],ymm0[15]
3051 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm1, %zmm0, %zmm16 {%k1}
3052 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm6[8],xmm7[8],xmm6[9],xmm7[9],xmm6[10],xmm7[10],xmm6[11],xmm7[11],xmm6[12],xmm7[12],xmm6[13],xmm7[13],xmm6[14],xmm7[14],xmm6[15],xmm7[15]
3053 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm8[8],xmm9[8],xmm8[9],xmm9[9],xmm8[10],xmm9[10],xmm8[11],xmm9[11],xmm8[12],xmm9[12],xmm8[13],xmm9[13],xmm8[14],xmm9[14],xmm8[15],xmm9[15]
3054 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3055 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm6
3056 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm30, %ymm14
3057 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm6, %ymm6
3058 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm18, %ymm10
3059 ; AVX512DQ-FAST-NEXT: vpshufb %ymm10, %ymm0, %ymm7
3060 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2,3,4],ymm7[5],ymm6[6,7,8],ymm7[9],ymm6[10,11,12],ymm7[13],ymm6[14,15]
3061 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm23, %ymm9
3062 ; AVX512DQ-FAST-NEXT: vpshufb %ymm9, %ymm0, %ymm0
3063 ; AVX512DQ-FAST-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
3064 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm29, %xmm15
3065 ; AVX512DQ-FAST-NEXT: vpshufb %xmm15, %xmm1, %xmm1
3066 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm1, %ymm7, %ymm1
3067 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3,4],ymm0[5],ymm1[6,7,8],ymm0[9],ymm1[10,11,12],ymm0[13],ymm1[14,15]
3068 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm0, %zmm0
3069 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm4[8],xmm5[8],xmm4[9],xmm5[9],xmm4[10],xmm5[10],xmm4[11],xmm5[11],xmm4[12],xmm5[12],xmm4[13],xmm5[13],xmm4[14],xmm5[14],xmm4[15],xmm5[15]
3070 ; AVX512DQ-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm2[8],xmm3[8],xmm2[9],xmm3[9],xmm2[10],xmm3[10],xmm2[11],xmm3[11],xmm2[12],xmm3[12],xmm2[13],xmm3[13],xmm2[14],xmm3[14],xmm2[15],xmm3[15]
3071 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
3072 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
3073 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm24, %ymm5
3074 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm1, %ymm3
3075 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm25, %ymm6
3076 ; AVX512DQ-FAST-NEXT: vpshufb %ymm6, %ymm2, %ymm4
3077 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7],ymm4[8,9,10],ymm3[11],ymm4[12,13,14],ymm3[15]
3078 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm26, %ymm7
3079 ; AVX512DQ-FAST-NEXT: vpshufb %ymm7, %ymm1, %ymm1
3080 ; AVX512DQ-FAST-NEXT: vmovdqa64 %ymm27, %ymm8
3081 ; AVX512DQ-FAST-NEXT: vpshufb %ymm8, %ymm2, %ymm2
3082 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7],ymm2[8,9,10],ymm1[11],ymm2[12,13,14],ymm1[15]
3083 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm3, %zmm1, %zmm0 {%k1}
3084 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm19, %xmm1
3085 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm20, %xmm2
3086 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
3087 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm21, %xmm2
3088 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm22, %xmm3
3089 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3],xmm2[4],xmm3[4],xmm2[5],xmm3[5],xmm2[6],xmm3[6],xmm2[7],xmm3[7]
3090 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm3
3091 ; AVX512DQ-FAST-NEXT: vpshufb %ymm14, %ymm3, %ymm3
3092 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
3093 ; AVX512DQ-FAST-NEXT: vpshufb %ymm10, %ymm1, %ymm4
3094 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0],ymm4[1],ymm3[2,3,4],ymm4[5],ymm3[6,7,8],ymm4[9],ymm3[10,11,12],ymm4[13],ymm3[14,15]
3095 ; AVX512DQ-FAST-NEXT: vpshufb %ymm9, %ymm1, %ymm1
3096 ; AVX512DQ-FAST-NEXT: vpshufb %xmm15, %xmm2, %xmm4
3097 ; AVX512DQ-FAST-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
3098 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm4, %ymm2, %ymm2
3099 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7,8],ymm1[9],ymm2[10,11,12],ymm1[13],ymm2[14,15]
3100 ; AVX512DQ-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm1, %zmm1
3101 ; AVX512DQ-FAST-NEXT: vmovdqa64 %xmm28, %xmm2
3102 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm11[0],xmm2[0],xmm11[1],xmm2[1],xmm11[2],xmm2[2],xmm11[3],xmm2[3],xmm11[4],xmm2[4],xmm11[5],xmm2[5],xmm11[6],xmm2[6],xmm11[7],xmm2[7]
3103 ; AVX512DQ-FAST-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3],xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
3104 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
3105 ; AVX512DQ-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm4
3106 ; AVX512DQ-FAST-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm3
3107 ; AVX512DQ-FAST-NEXT: vpshufb %ymm6, %ymm3, %ymm5
3108 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7],ymm5[8,9,10],ymm4[11],ymm5[12,13,14],ymm4[15]
3109 ; AVX512DQ-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm2
3110 ; AVX512DQ-FAST-NEXT: vpshufb %ymm8, %ymm3, %ymm3
3111 ; AVX512DQ-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7],ymm3[8,9,10],ymm2[11],ymm3[12,13,14],ymm2[15]
3112 ; AVX512DQ-FAST-NEXT: vinserti32x8 $1, %ymm4, %zmm2, %zmm1 {%k1}
3113 ; AVX512DQ-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3114 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, (%rax)
3115 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, 192(%rax)
3116 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm16, 128(%rax)
3117 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm17, 64(%rax)
3118 ; AVX512DQ-FAST-NEXT: vzeroupper
3119 ; AVX512DQ-FAST-NEXT: retq
3121 ; AVX512BW-SLOW-LABEL: store_i8_stride8_vf32:
3122 ; AVX512BW-SLOW: # %bb.0:
3123 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
3124 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
3125 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r11
3126 ; AVX512BW-SLOW-NEXT: vmovdqa64 (%r11), %xmm25
3127 ; AVX512BW-SLOW-NEXT: vmovdqa 16(%r11), %xmm11
3128 ; AVX512BW-SLOW-NEXT: vmovdqa (%r10), %xmm1
3129 ; AVX512BW-SLOW-NEXT: vmovdqa 16(%r10), %xmm12
3130 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm1[8],xmm25[8],xmm1[9],xmm25[9],xmm1[10],xmm25[10],xmm1[11],xmm25[11],xmm1[12],xmm25[12],xmm1[13],xmm25[13],xmm1[14],xmm25[14],xmm1[15],xmm25[15]
3131 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm2, %zmm5
3132 ; AVX512BW-SLOW-NEXT: vmovdqa (%r9), %xmm2
3133 ; AVX512BW-SLOW-NEXT: vmovdqa 16(%r9), %xmm13
3134 ; AVX512BW-SLOW-NEXT: vmovdqa (%r8), %xmm3
3135 ; AVX512BW-SLOW-NEXT: vmovdqa 16(%r8), %xmm14
3136 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
3137 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm4, %zmm4, %zmm19
3138 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm23 = [0,1,0,32,4,5,1,33,2,1,2,34,4,5,3,35,16,17,20,52,20,21,21,53,16,17,22,54,22,21,23,55]
3139 ; AVX512BW-SLOW-NEXT: vpermt2w %zmm5, %zmm23, %zmm19
3140 ; AVX512BW-SLOW-NEXT: vmovdqa (%rsi), %xmm5
3141 ; AVX512BW-SLOW-NEXT: vmovdqa 16(%rsi), %xmm15
3142 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdi), %xmm6
3143 ; AVX512BW-SLOW-NEXT: vmovdqa64 16(%rdi), %xmm16
3144 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm6[8],xmm5[8],xmm6[9],xmm5[9],xmm6[10],xmm5[10],xmm6[11],xmm5[11],xmm6[12],xmm5[12],xmm6[13],xmm5[13],xmm6[14],xmm5[14],xmm6[15],xmm5[15]
3145 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
3146 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm7[1,1,1,1]
3147 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm9 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
3148 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm8, %ymm10
3149 ; AVX512BW-SLOW-NEXT: vmovdqa (%rcx), %xmm8
3150 ; AVX512BW-SLOW-NEXT: vmovdqa64 16(%rcx), %xmm17
3151 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdx), %xmm9
3152 ; AVX512BW-SLOW-NEXT: vmovdqa64 16(%rdx), %xmm18
3153 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm9[8],xmm8[8],xmm9[9],xmm8[9],xmm9[10],xmm8[10],xmm9[11],xmm8[11],xmm9[12],xmm8[12],xmm9[13],xmm8[13],xmm9[14],xmm8[14],xmm9[15],xmm8[15]
3154 ; AVX512BW-SLOW-NEXT: vpshuflw {{.*#+}} xmm21 = xmm20[0,0,2,1,4,5,6,7]
3155 ; AVX512BW-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm21 = xmm21[0],zero,xmm21[1],zero
3156 ; AVX512BW-SLOW-NEXT: vpshuflw {{.*#+}} xmm22 = xmm20[0,2,2,3,4,5,6,7]
3157 ; AVX512BW-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm22 = xmm22[0],zero,xmm22[1],zero
3158 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm22, %ymm21, %ymm4
3159 ; AVX512BW-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm10[0],ymm4[1],ymm10[2,3,4],ymm4[5],ymm10[6,7,8],ymm4[9],ymm10[10,11,12],ymm4[13],ymm10[14,15]
3160 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{.*#+}} ymm22 = <4,20,u,u,5,21,u,u,6,22,u,u,7,23,u,u>
3161 ; AVX512BW-SLOW-NEXT: vpermt2w %ymm20, %ymm22, %ymm7
3162 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm4, %zmm7
3163 ; AVX512BW-SLOW-NEXT: movw $-21846, %cx # imm = 0xAAAA
3164 ; AVX512BW-SLOW-NEXT: kmovd %ecx, %k1
3165 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm19, %zmm7 {%k1}
3166 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
3167 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm4, %zmm4, %zmm4
3168 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm19 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
3169 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm19, %zmm19, %zmm20
3170 ; AVX512BW-SLOW-NEXT: vpermt2w %zmm4, %zmm23, %zmm20
3171 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm16[0],xmm15[0],xmm16[1],xmm15[1],xmm16[2],xmm15[2],xmm16[3],xmm15[3],xmm16[4],xmm15[4],xmm16[5],xmm15[5],xmm16[6],xmm15[6],xmm16[7],xmm15[7]
3172 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm19 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
3173 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm21 = xmm4[1,1,1,1]
3174 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm21 = xmm21[0],zero,zero,zero,xmm21[1],zero,zero,zero
3175 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm21, %ymm19, %ymm10
3176 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm19 = xmm18[0],xmm17[0],xmm18[1],xmm17[1],xmm18[2],xmm17[2],xmm18[3],xmm17[3],xmm18[4],xmm17[4],xmm18[5],xmm17[5],xmm18[6],xmm17[6],xmm18[7],xmm17[7]
3177 ; AVX512BW-SLOW-NEXT: vpshuflw {{.*#+}} xmm21 = xmm19[0,0,2,1,4,5,6,7]
3178 ; AVX512BW-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm21 = xmm21[0],zero,xmm21[1],zero
3179 ; AVX512BW-SLOW-NEXT: vpshuflw {{.*#+}} xmm24 = xmm19[0,2,2,3,4,5,6,7]
3180 ; AVX512BW-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm24 = xmm24[0],zero,xmm24[1],zero
3181 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm24, %ymm21, %ymm0
3182 ; AVX512BW-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm10[0],ymm0[1],ymm10[2,3,4],ymm0[5],ymm10[6,7,8],ymm0[9],ymm10[10,11,12],ymm0[13],ymm10[14,15]
3183 ; AVX512BW-SLOW-NEXT: vpermt2w %ymm19, %ymm22, %ymm4
3184 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm19
3185 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm20, %zmm19 {%k1}
3186 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm12[8],xmm11[8],xmm12[9],xmm11[9],xmm12[10],xmm11[10],xmm12[11],xmm11[11],xmm12[12],xmm11[12],xmm12[13],xmm11[13],xmm12[14],xmm11[14],xmm12[15],xmm11[15]
3187 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
3188 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
3189 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm4, %zmm4, %zmm4
3190 ; AVX512BW-SLOW-NEXT: vpermt2w %zmm0, %zmm23, %zmm4
3191 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm16[8],xmm15[8],xmm16[9],xmm15[9],xmm16[10],xmm15[10],xmm16[11],xmm15[11],xmm16[12],xmm15[12],xmm16[13],xmm15[13],xmm16[14],xmm15[14],xmm16[15],xmm15[15]
3192 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
3193 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm11 = xmm0[1,1,1,1]
3194 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm11 = xmm11[0],zero,zero,zero,xmm11[1],zero,zero,zero
3195 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm10, %ymm10
3196 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm18[8],xmm17[8],xmm18[9],xmm17[9],xmm18[10],xmm17[10],xmm18[11],xmm17[11],xmm18[12],xmm17[12],xmm18[13],xmm17[13],xmm18[14],xmm17[14],xmm18[15],xmm17[15]
3197 ; AVX512BW-SLOW-NEXT: vpshuflw {{.*#+}} xmm12 = xmm11[0,0,2,1,4,5,6,7]
3198 ; AVX512BW-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm12[0],zero,xmm12[1],zero
3199 ; AVX512BW-SLOW-NEXT: vpshuflw {{.*#+}} xmm13 = xmm11[0,2,2,3,4,5,6,7]
3200 ; AVX512BW-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm13 = xmm13[0],zero,xmm13[1],zero
3201 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm12, %ymm12
3202 ; AVX512BW-SLOW-NEXT: vpblendw {{.*#+}} ymm10 = ymm10[0],ymm12[1],ymm10[2,3,4],ymm12[5],ymm10[6,7,8],ymm12[9],ymm10[10,11,12],ymm12[13],ymm10[14,15]
3203 ; AVX512BW-SLOW-NEXT: vpermt2w %ymm11, %ymm22, %ymm0
3204 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm10, %zmm0
3205 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm4, %zmm0 {%k1}
3206 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm1[0],xmm25[0],xmm1[1],xmm25[1],xmm1[2],xmm25[2],xmm1[3],xmm25[3],xmm1[4],xmm25[4],xmm1[5],xmm25[5],xmm1[6],xmm25[6],xmm1[7],xmm25[7]
3207 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm1
3208 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
3209 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm2, %zmm2
3210 ; AVX512BW-SLOW-NEXT: vpermt2w %zmm1, %zmm23, %zmm2
3211 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3],xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
3212 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
3213 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm1[1,1,1,1]
3214 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
3215 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
3216 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3],xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
3217 ; AVX512BW-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm4[0,0,2,1,4,5,6,7]
3218 ; AVX512BW-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm5[0],zero,xmm5[1],zero
3219 ; AVX512BW-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm4[0,2,2,3,4,5,6,7]
3220 ; AVX512BW-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
3221 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm5, %ymm5
3222 ; AVX512BW-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0],ymm5[1],ymm3[2,3,4],ymm5[5],ymm3[6,7,8],ymm5[9],ymm3[10,11,12],ymm5[13],ymm3[14,15]
3223 ; AVX512BW-SLOW-NEXT: vpermt2w %ymm4, %ymm22, %ymm1
3224 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm3, %zmm1
3225 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm2, %zmm1 {%k1}
3226 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm1, (%rax)
3227 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm0, 192(%rax)
3228 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm19, 128(%rax)
3229 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm7, 64(%rax)
3230 ; AVX512BW-SLOW-NEXT: vzeroupper
3231 ; AVX512BW-SLOW-NEXT: retq
3233 ; AVX512BW-FAST-LABEL: store_i8_stride8_vf32:
3234 ; AVX512BW-FAST: # %bb.0:
3235 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
3236 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
3237 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r11
3238 ; AVX512BW-FAST-NEXT: vmovdqa (%r11), %xmm0
3239 ; AVX512BW-FAST-NEXT: vmovdqa 16(%r11), %xmm7
3240 ; AVX512BW-FAST-NEXT: vmovdqa (%r10), %xmm1
3241 ; AVX512BW-FAST-NEXT: vmovdqa 16(%r10), %xmm8
3242 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
3243 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm2, %zmm5
3244 ; AVX512BW-FAST-NEXT: vmovdqa (%r9), %xmm2
3245 ; AVX512BW-FAST-NEXT: vmovdqa 16(%r9), %xmm9
3246 ; AVX512BW-FAST-NEXT: vmovdqa (%r8), %xmm3
3247 ; AVX512BW-FAST-NEXT: vmovdqa 16(%r8), %xmm10
3248 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
3249 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm4, %zmm4, %zmm11
3250 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,0,32,4,5,1,33,2,1,2,34,4,5,3,35,16,17,20,52,20,21,21,53,16,17,22,54,22,21,23,55]
3251 ; AVX512BW-FAST-NEXT: vpermt2w %zmm5, %zmm4, %zmm11
3252 ; AVX512BW-FAST-NEXT: vmovdqa (%rcx), %xmm5
3253 ; AVX512BW-FAST-NEXT: vmovdqa 16(%rcx), %xmm12
3254 ; AVX512BW-FAST-NEXT: vmovdqa (%rdx), %xmm13
3255 ; AVX512BW-FAST-NEXT: vmovdqa 16(%rdx), %xmm14
3256 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm13[8],xmm5[8],xmm13[9],xmm5[9],xmm13[10],xmm5[10],xmm13[11],xmm5[11],xmm13[12],xmm5[12],xmm13[13],xmm5[13],xmm13[14],xmm5[14],xmm13[15],xmm5[15]
3257 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm6, %zmm6, %zmm15
3258 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rsi), %xmm16
3259 ; AVX512BW-FAST-NEXT: vmovdqa64 16(%rsi), %xmm17
3260 ; AVX512BW-FAST-NEXT: vmovdqa64 (%rdi), %xmm18
3261 ; AVX512BW-FAST-NEXT: vmovdqa64 16(%rdi), %xmm19
3262 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm6 = xmm18[8],xmm16[8],xmm18[9],xmm16[9],xmm18[10],xmm16[10],xmm18[11],xmm16[11],xmm18[12],xmm16[12],xmm18[13],xmm16[13],xmm18[14],xmm16[14],xmm18[15],xmm16[15]
3263 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm6, %zmm6, %zmm6
3264 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm20 = <0,32,u,u,1,33,u,u,2,34,u,u,3,35,u,u,20,52,u,u,21,53,u,u,22,54,u,u,23,55,u,u>
3265 ; AVX512BW-FAST-NEXT: vpermt2w %zmm15, %zmm20, %zmm6
3266 ; AVX512BW-FAST-NEXT: movw $-21846, %cx # imm = 0xAAAA
3267 ; AVX512BW-FAST-NEXT: kmovd %ecx, %k1
3268 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm11, %zmm6 {%k1}
3269 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3],xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
3270 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm11, %zmm11, %zmm11
3271 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3],xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
3272 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm15, %zmm15, %zmm15
3273 ; AVX512BW-FAST-NEXT: vpermt2w %zmm11, %zmm4, %zmm15
3274 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3],xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
3275 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm11, %zmm11, %zmm11
3276 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm21 = xmm19[0],xmm17[0],xmm19[1],xmm17[1],xmm19[2],xmm17[2],xmm19[3],xmm17[3],xmm19[4],xmm17[4],xmm19[5],xmm17[5],xmm19[6],xmm17[6],xmm19[7],xmm17[7]
3277 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm21, %zmm21, %zmm21
3278 ; AVX512BW-FAST-NEXT: vpermt2w %zmm11, %zmm20, %zmm21
3279 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm15, %zmm21 {%k1}
3280 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
3281 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm7, %zmm7, %zmm7
3282 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm10[8],xmm9[8],xmm10[9],xmm9[9],xmm10[10],xmm9[10],xmm10[11],xmm9[11],xmm10[12],xmm9[12],xmm10[13],xmm9[13],xmm10[14],xmm9[14],xmm10[15],xmm9[15]
3283 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm8, %zmm8, %zmm8
3284 ; AVX512BW-FAST-NEXT: vpermt2w %zmm7, %zmm4, %zmm8
3285 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm7 = xmm14[8],xmm12[8],xmm14[9],xmm12[9],xmm14[10],xmm12[10],xmm14[11],xmm12[11],xmm14[12],xmm12[12],xmm14[13],xmm12[13],xmm14[14],xmm12[14],xmm14[15],xmm12[15]
3286 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm7, %zmm7, %zmm7
3287 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm19[8],xmm17[8],xmm19[9],xmm17[9],xmm19[10],xmm17[10],xmm19[11],xmm17[11],xmm19[12],xmm17[12],xmm19[13],xmm17[13],xmm19[14],xmm17[14],xmm19[15],xmm17[15]
3288 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm9, %zmm9, %zmm9
3289 ; AVX512BW-FAST-NEXT: vpermt2w %zmm7, %zmm20, %zmm9
3290 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm8, %zmm9 {%k1}
3291 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
3292 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
3293 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
3294 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm1
3295 ; AVX512BW-FAST-NEXT: vpermt2w %zmm0, %zmm4, %zmm1
3296 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm13[0],xmm5[0],xmm13[1],xmm5[1],xmm13[2],xmm5[2],xmm13[3],xmm5[3],xmm13[4],xmm5[4],xmm13[5],xmm5[5],xmm13[6],xmm5[6],xmm13[7],xmm5[7]
3297 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
3298 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm18[0],xmm16[0],xmm18[1],xmm16[1],xmm18[2],xmm16[2],xmm18[3],xmm16[3],xmm18[4],xmm16[4],xmm18[5],xmm16[5],xmm18[6],xmm16[6],xmm18[7],xmm16[7]
3299 ; AVX512BW-FAST-NEXT: vinserti32x4 $2, %xmm2, %zmm2, %zmm2
3300 ; AVX512BW-FAST-NEXT: vpermt2w %zmm0, %zmm20, %zmm2
3301 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm1, %zmm2 {%k1}
3302 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm2, (%rax)
3303 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm9, 192(%rax)
3304 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm21, 128(%rax)
3305 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm6, 64(%rax)
3306 ; AVX512BW-FAST-NEXT: vzeroupper
3307 ; AVX512BW-FAST-NEXT: retq
3308 %in.vec0 = load <32 x i8>, ptr %in.vecptr0, align 64
3309 %in.vec1 = load <32 x i8>, ptr %in.vecptr1, align 64
3310 %in.vec2 = load <32 x i8>, ptr %in.vecptr2, align 64
3311 %in.vec3 = load <32 x i8>, ptr %in.vecptr3, align 64
3312 %in.vec4 = load <32 x i8>, ptr %in.vecptr4, align 64
3313 %in.vec5 = load <32 x i8>, ptr %in.vecptr5, align 64
3314 %in.vec6 = load <32 x i8>, ptr %in.vecptr6, align 64
3315 %in.vec7 = load <32 x i8>, ptr %in.vecptr7, align 64
3316 %1 = shufflevector <32 x i8> %in.vec0, <32 x i8> %in.vec1, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3317 %2 = shufflevector <32 x i8> %in.vec2, <32 x i8> %in.vec3, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3318 %3 = shufflevector <32 x i8> %in.vec4, <32 x i8> %in.vec5, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3319 %4 = shufflevector <32 x i8> %in.vec6, <32 x i8> %in.vec7, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3320 %5 = shufflevector <64 x i8> %1, <64 x i8> %2, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
3321 %6 = shufflevector <64 x i8> %3, <64 x i8> %4, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
3322 %7 = shufflevector <128 x i8> %5, <128 x i8> %6, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
3323 %interleaved.vec = shufflevector <256 x i8> %7, <256 x i8> poison, <256 x i32> <i32 0, i32 32, i32 64, i32 96, i32 128, i32 160, i32 192, i32 224, i32 1, i32 33, i32 65, i32 97, i32 129, i32 161, i32 193, i32 225, i32 2, i32 34, i32 66, i32 98, i32 130, i32 162, i32 194, i32 226, i32 3, i32 35, i32 67, i32 99, i32 131, i32 163, i32 195, i32 227, i32 4, i32 36, i32 68, i32 100, i32 132, i32 164, i32 196, i32 228, i32 5, i32 37, i32 69, i32 101, i32 133, i32 165, i32 197, i32 229, i32 6, i32 38, i32 70, i32 102, i32 134, i32 166, i32 198, i32 230, i32 7, i32 39, i32 71, i32 103, i32 135, i32 167, i32 199, i32 231, i32 8, i32 40, i32 72, i32 104, i32 136, i32 168, i32 200, i32 232, i32 9, i32 41, i32 73, i32 105, i32 137, i32 169, i32 201, i32 233, i32 10, i32 42, i32 74, i32 106, i32 138, i32 170, i32 202, i32 234, i32 11, i32 43, i32 75, i32 107, i32 139, i32 171, i32 203, i32 235, i32 12, i32 44, i32 76, i32 108, i32 140, i32 172, i32 204, i32 236, i32 13, i32 45, i32 77, i32 109, i32 141, i32 173, i32 205, i32 237, i32 14, i32 46, i32 78, i32 110, i32 142, i32 174, i32 206, i32 238, i32 15, i32 47, i32 79, i32 111, i32 143, i32 175, i32 207, i32 239, i32 16, i32 48, i32 80, i32 112, i32 144, i32 176, i32 208, i32 240, i32 17, i32 49, i32 81, i32 113, i32 145, i32 177, i32 209, i32 241, i32 18, i32 50, i32 82, i32 114, i32 146, i32 178, i32 210, i32 242, i32 19, i32 51, i32 83, i32 115, i32 147, i32 179, i32 211, i32 243, i32 20, i32 52, i32 84, i32 116, i32 148, i32 180, i32 212, i32 244, i32 21, i32 53, i32 85, i32 117, i32 149, i32 181, i32 213, i32 245, i32 22, i32 54, i32 86, i32 118, i32 150, i32 182, i32 214, i32 246, i32 23, i32 55, i32 87, i32 119, i32 151, i32 183, i32 215, i32 247, i32 24, i32 56, i32 88, i32 120, i32 152, i32 184, i32 216, i32 248, i32 25, i32 57, i32 89, i32 121, i32 153, i32 185, i32 217, i32 249, i32 26, i32 58, i32 90, i32 122, i32 154, i32 186, i32 218, i32 250, i32 27, i32 59, i32 91, i32 123, i32 155, i32 187, i32 219, i32 251, i32 28, i32 60, i32 92, i32 124, i32 156, i32 188, i32 220, i32 252, i32 29, i32 61, i32 93, i32 125, i32 157, i32 189, i32 221, i32 253, i32 30, i32 62, i32 94, i32 126, i32 158, i32 190, i32 222, i32 254, i32 31, i32 63, i32 95, i32 127, i32 159, i32 191, i32 223, i32 255>
3324 store <256 x i8> %interleaved.vec, ptr %out.vec, align 64
3325   ret void
3326 }
3328 define void @store_i8_stride8_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecptr2, ptr %in.vecptr3, ptr %in.vecptr4, ptr %in.vecptr5, ptr %in.vecptr6, ptr %in.vecptr7, ptr %out.vec) nounwind {
3329 ; SSE-LABEL: store_i8_stride8_vf64:
3331 ; SSE-NEXT: subq $312, %rsp # imm = 0x138
3332 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
3333 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %r10
3334 ; SSE-NEXT: movdqa (%rdi), %xmm3
3335 ; SSE-NEXT: movdqa (%rsi), %xmm5
3336 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3337 ; SSE-NEXT: movdqa (%rdx), %xmm4
3338 ; SSE-NEXT: movdqa (%rcx), %xmm8
3339 ; SSE-NEXT: movdqa (%r8), %xmm6
3340 ; SSE-NEXT: movdqa (%r9), %xmm9
3341 ; SSE-NEXT: movdqa (%r10), %xmm7
3342 ; SSE-NEXT: movdqa (%rax), %xmm10
3343 ; SSE-NEXT: movdqa %xmm7, %xmm0
3344 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm10[0],xmm0[1],xmm10[1],xmm0[2],xmm10[2],xmm0[3],xmm10[3],xmm0[4],xmm10[4],xmm0[5],xmm10[5],xmm0[6],xmm10[6],xmm0[7],xmm10[7]
3345 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm0[0,0,2,1,4,5,6,7]
3346 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,2,1]
3347 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [65535,65535,65535,0,65535,65535,65535,0]
3348 ; SSE-NEXT: movdqa %xmm13, %xmm12
3349 ; SSE-NEXT: pandn %xmm2, %xmm12
3350 ; SSE-NEXT: movdqa %xmm6, %xmm11
3351 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3],xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
3352 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm11[0,1,1,3,4,5,6,7]
3353 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm2[0,0,2,1]
3354 ; SSE-NEXT: pand %xmm13, %xmm14
3355 ; SSE-NEXT: por %xmm12, %xmm14
3356 ; SSE-NEXT: movdqa %xmm4, %xmm12
3357 ; SSE-NEXT: punpcklbw {{.*#+}} xmm12 = xmm12[0],xmm8[0],xmm12[1],xmm8[1],xmm12[2],xmm8[2],xmm12[3],xmm8[3],xmm12[4],xmm8[4],xmm12[5],xmm8[5],xmm12[6],xmm8[6],xmm12[7],xmm8[7]
3358 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm12[0,0,2,1,4,5,6,7]
3359 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
3360 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,0,65535,65535,65535,0,65535,65535]
3361 ; SSE-NEXT: movdqa %xmm1, %xmm15
3362 ; SSE-NEXT: pandn %xmm2, %xmm15
3363 ; SSE-NEXT: movdqa %xmm3, %xmm2
3364 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3],xmm2[4],xmm5[4],xmm2[5],xmm5[5],xmm2[6],xmm5[6],xmm2[7],xmm5[7]
3365 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,0,0,0]
3366 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,5,5,5,5]
3367 ; SSE-NEXT: pand %xmm1, %xmm5
3368 ; SSE-NEXT: por %xmm15, %xmm5
3369 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[1,3,2,3]
3370 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
3371 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm14[0],xmm5[1],xmm14[1]
3372 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3373 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm0[0,2,2,3,4,5,6,7]
3374 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,0,2,1]
3375 ; SSE-NEXT: movdqa %xmm13, %xmm14
3376 ; SSE-NEXT: pandn %xmm5, %xmm14
3377 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm11[2,1,3,3,4,5,6,7]
3378 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,0,2,1]
3379 ; SSE-NEXT: pand %xmm13, %xmm5
3380 ; SSE-NEXT: por %xmm14, %xmm5
3381 ; SSE-NEXT: pshuflw {{.*#+}} xmm14 = xmm12[0,2,2,3,4,5,6,7]
3382 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[0,1,1,3]
3383 ; SSE-NEXT: movdqa %xmm1, %xmm15
3384 ; SSE-NEXT: pandn %xmm14, %xmm15
3385 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm2[1,1,1,1]
3386 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,5,5,5,5]
3387 ; SSE-NEXT: pand %xmm1, %xmm14
3388 ; SSE-NEXT: por %xmm15, %xmm14
3389 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,3,2,3]
3390 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[0,2,2,3]
3391 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm5[0],xmm14[1],xmm5[1]
3392 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3393 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm0[0,1,2,3,4,4,6,5]
3394 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
3395 ; SSE-NEXT: movdqa %xmm13, %xmm14
3396 ; SSE-NEXT: pandn %xmm5, %xmm14
3397 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm11[0,1,2,3,4,5,5,7]
3398 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
3399 ; SSE-NEXT: pand %xmm13, %xmm5
3400 ; SSE-NEXT: por %xmm14, %xmm5
3401 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm12[0,1,2,3,4,4,6,5]
3402 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[2,1,3,3]
3403 ; SSE-NEXT: movdqa %xmm1, %xmm15
3404 ; SSE-NEXT: pandn %xmm14, %xmm15
3405 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm2[2,2,2,2]
3406 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,5,5,5,5]
3407 ; SSE-NEXT: pand %xmm1, %xmm14
3408 ; SSE-NEXT: por %xmm15, %xmm14
3409 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,3,2,3]
3410 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[0,2,2,3]
3411 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm5[0],xmm14[1],xmm5[1]
3412 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3413 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
3414 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
3415 ; SSE-NEXT: movdqa %xmm13, %xmm5
3416 ; SSE-NEXT: pandn %xmm0, %xmm5
3417 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm11[0,1,2,3,6,5,7,7]
3418 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm0[0,2,2,3]
3419 ; SSE-NEXT: pand %xmm13, %xmm11
3420 ; SSE-NEXT: por %xmm5, %xmm11
3421 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm12[0,1,2,3,4,6,6,7]
3422 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,3]
3423 ; SSE-NEXT: movdqa %xmm1, %xmm5
3424 ; SSE-NEXT: pandn %xmm0, %xmm5
3425 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[3,3,3,3]
3426 ; SSE-NEXT: pshufhw {{.*#+}} xmm12 = xmm0[0,1,2,3,5,5,5,5]
3427 ; SSE-NEXT: pand %xmm1, %xmm12
3428 ; SSE-NEXT: por %xmm5, %xmm12
3429 ; SSE-NEXT: movdqa 16(%r8), %xmm0
3430 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm11[1,3,2,3]
3431 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm12[0,2,2,3]
3432 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
3433 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3434 ; SSE-NEXT: punpckhbw {{.*#+}} xmm7 = xmm7[8],xmm10[8],xmm7[9],xmm10[9],xmm7[10],xmm10[10],xmm7[11],xmm10[11],xmm7[12],xmm10[12],xmm7[13],xmm10[13],xmm7[14],xmm10[14],xmm7[15],xmm10[15]
3435 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm7[0,0,2,1,4,5,6,7]
3436 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,0,2,1]
3437 ; SSE-NEXT: movdqa %xmm13, %xmm10
3438 ; SSE-NEXT: pandn %xmm5, %xmm10
3439 ; SSE-NEXT: punpckhbw {{.*#+}} xmm6 = xmm6[8],xmm9[8],xmm6[9],xmm9[9],xmm6[10],xmm9[10],xmm6[11],xmm9[11],xmm6[12],xmm9[12],xmm6[13],xmm9[13],xmm6[14],xmm9[14],xmm6[15],xmm9[15]
3440 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm6[0,1,1,3,4,5,6,7]
3441 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,0,2,1]
3442 ; SSE-NEXT: pand %xmm13, %xmm5
3443 ; SSE-NEXT: por %xmm10, %xmm5
3444 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,3,2,3]
3445 ; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm8[8],xmm4[9],xmm8[9],xmm4[10],xmm8[10],xmm4[11],xmm8[11],xmm4[12],xmm8[12],xmm4[13],xmm8[13],xmm4[14],xmm8[14],xmm4[15],xmm8[15]
3446 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm4[0,0,2,1,4,5,6,7]
3447 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,1,1,3]
3448 ; SSE-NEXT: movdqa %xmm1, %xmm9
3449 ; SSE-NEXT: pandn %xmm8, %xmm9
3450 ; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
3451 ; SSE-NEXT: # xmm3 = xmm3[8],mem[8],xmm3[9],mem[9],xmm3[10],mem[10],xmm3[11],mem[11],xmm3[12],mem[12],xmm3[13],mem[13],xmm3[14],mem[14],xmm3[15],mem[15]
3452 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm3[0,0,0,0]
3453 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,5,5,5,5]
3454 ; SSE-NEXT: pand %xmm1, %xmm8
3455 ; SSE-NEXT: por %xmm9, %xmm8
3456 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm8[0,2,2,3]
3457 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
3458 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3459 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm7[0,2,2,3,4,5,6,7]
3460 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,0,2,1]
3461 ; SSE-NEXT: movdqa %xmm13, %xmm8
3462 ; SSE-NEXT: pandn %xmm5, %xmm8
3463 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm6[2,1,3,3,4,5,6,7]
3464 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,0,2,1]
3465 ; SSE-NEXT: pand %xmm13, %xmm5
3466 ; SSE-NEXT: por %xmm8, %xmm5
3467 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,3,2,3]
3468 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm4[0,2,2,3,4,5,6,7]
3469 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,1,1,3]
3470 ; SSE-NEXT: movdqa %xmm1, %xmm9
3471 ; SSE-NEXT: pandn %xmm8, %xmm9
3472 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm3[1,1,1,1]
3473 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,5,5,5,5]
3474 ; SSE-NEXT: pand %xmm1, %xmm8
3475 ; SSE-NEXT: por %xmm9, %xmm8
3476 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm8[0,2,2,3]
3477 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
3478 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3479 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm7[0,1,2,3,4,4,6,5]
3480 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
3481 ; SSE-NEXT: movdqa %xmm13, %xmm8
3482 ; SSE-NEXT: pandn %xmm5, %xmm8
3483 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm6[0,1,2,3,4,5,5,7]
3484 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm5[0,2,2,3]
3485 ; SSE-NEXT: pand %xmm13, %xmm9
3486 ; SSE-NEXT: por %xmm8, %xmm9
3487 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm4[0,1,2,3,4,4,6,5]
3488 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[2,1,3,3]
3489 ; SSE-NEXT: movdqa %xmm1, %xmm8
3490 ; SSE-NEXT: pandn %xmm5, %xmm8
3491 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm3[2,2,2,2]
3492 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm5[0,1,2,3,5,5,5,5]
3493 ; SSE-NEXT: pand %xmm1, %xmm10
3494 ; SSE-NEXT: por %xmm8, %xmm10
3495 ; SSE-NEXT: movdqa 16(%r10), %xmm5
3496 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm9[1,3,2,3]
3497 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm10[0,2,2,3]
3498 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm8[0],xmm2[1],xmm8[1]
3499 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3500 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,6,6,7]
3501 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,2,2,3]
3502 ; SSE-NEXT: movdqa %xmm13, %xmm8
3503 ; SSE-NEXT: pandn %xmm7, %xmm8
3504 ; SSE-NEXT: movdqa 16(%rax), %xmm7
3505 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3506 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,6,5,7,7]
3507 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
3508 ; SSE-NEXT: pand %xmm13, %xmm6
3509 ; SSE-NEXT: por %xmm8, %xmm6
3510 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
3511 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,6,6,7]
3512 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[2,1,3,3]
3513 ; SSE-NEXT: movdqa %xmm1, %xmm8
3514 ; SSE-NEXT: pandn %xmm4, %xmm8
3515 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[3,3,3,3]
3516 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,5,5,5,5]
3517 ; SSE-NEXT: pand %xmm1, %xmm3
3518 ; SSE-NEXT: por %xmm8, %xmm3
3519 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[0,2,2,3]
3520 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1]
3521 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3522 ; SSE-NEXT: movdqa %xmm5, %xmm10
3523 ; SSE-NEXT: punpcklbw {{.*#+}} xmm10 = xmm10[0],xmm7[0],xmm10[1],xmm7[1],xmm10[2],xmm7[2],xmm10[3],xmm7[3],xmm10[4],xmm7[4],xmm10[5],xmm7[5],xmm10[6],xmm7[6],xmm10[7],xmm7[7]
3524 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm10[0,0,2,1,4,5,6,7]
3525 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,0,2,1]
3526 ; SSE-NEXT: movdqa %xmm13, %xmm4
3527 ; SSE-NEXT: pandn %xmm3, %xmm4
3528 ; SSE-NEXT: movdqa 16(%r9), %xmm6
3529 ; SSE-NEXT: movdqa %xmm0, %xmm11
3530 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm6[0],xmm11[1],xmm6[1],xmm11[2],xmm6[2],xmm11[3],xmm6[3],xmm11[4],xmm6[4],xmm11[5],xmm6[5],xmm11[6],xmm6[6],xmm11[7],xmm6[7]
3531 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm11[0,1,1,3,4,5,6,7]
3532 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm3[0,0,2,1]
3533 ; SSE-NEXT: pand %xmm13, %xmm14
3534 ; SSE-NEXT: por %xmm4, %xmm14
3535 ; SSE-NEXT: movdqa 16(%rdx), %xmm3
3536 ; SSE-NEXT: movdqa 16(%rcx), %xmm8
3537 ; SSE-NEXT: movdqa %xmm3, %xmm12
3538 ; SSE-NEXT: punpcklbw {{.*#+}} xmm12 = xmm12[0],xmm8[0],xmm12[1],xmm8[1],xmm12[2],xmm8[2],xmm12[3],xmm8[3],xmm12[4],xmm8[4],xmm12[5],xmm8[5],xmm12[6],xmm8[6],xmm12[7],xmm8[7]
3539 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm12[0,0,2,1,4,5,6,7]
3540 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,1,1,3]
3541 ; SSE-NEXT: movdqa %xmm1, %xmm15
3542 ; SSE-NEXT: pandn %xmm4, %xmm15
3543 ; SSE-NEXT: movdqa 16(%rdi), %xmm4
3544 ; SSE-NEXT: movdqa 16(%rsi), %xmm9
3545 ; SSE-NEXT: movdqa %xmm4, %xmm2
3546 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm9[0],xmm2[1],xmm9[1],xmm2[2],xmm9[2],xmm2[3],xmm9[3],xmm2[4],xmm9[4],xmm2[5],xmm9[5],xmm2[6],xmm9[6],xmm2[7],xmm9[7]
3547 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm2[0,0,0,0]
3548 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,5,5,5,5]
3549 ; SSE-NEXT: pand %xmm1, %xmm7
3550 ; SSE-NEXT: por %xmm15, %xmm7
3551 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[1,3,2,3]
3552 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,2,2,3]
3553 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm14[0],xmm7[1],xmm14[1]
3554 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3555 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm10[0,2,2,3,4,5,6,7]
3556 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,0,2,1]
3557 ; SSE-NEXT: movdqa %xmm13, %xmm14
3558 ; SSE-NEXT: pandn %xmm7, %xmm14
3559 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm11[2,1,3,3,4,5,6,7]
3560 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,0,2,1]
3561 ; SSE-NEXT: pand %xmm13, %xmm7
3562 ; SSE-NEXT: por %xmm14, %xmm7
3563 ; SSE-NEXT: pshuflw {{.*#+}} xmm14 = xmm12[0,2,2,3,4,5,6,7]
3564 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[0,1,1,3]
3565 ; SSE-NEXT: movdqa %xmm1, %xmm15
3566 ; SSE-NEXT: pandn %xmm14, %xmm15
3567 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm2[1,1,1,1]
3568 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,5,5,5,5]
3569 ; SSE-NEXT: pand %xmm1, %xmm14
3570 ; SSE-NEXT: por %xmm15, %xmm14
3571 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,3,2,3]
3572 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[0,2,2,3]
3573 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm7[0],xmm14[1],xmm7[1]
3574 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3575 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm10[0,1,2,3,4,4,6,5]
3576 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,2,2,3]
3577 ; SSE-NEXT: movdqa %xmm13, %xmm14
3578 ; SSE-NEXT: pandn %xmm7, %xmm14
3579 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm11[0,1,2,3,4,5,5,7]
3580 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,2,2,3]
3581 ; SSE-NEXT: pand %xmm13, %xmm7
3582 ; SSE-NEXT: por %xmm14, %xmm7
3583 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm12[0,1,2,3,4,4,6,5]
3584 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[2,1,3,3]
3585 ; SSE-NEXT: movdqa %xmm1, %xmm15
3586 ; SSE-NEXT: pandn %xmm14, %xmm15
3587 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm2[2,2,2,2]
3588 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,5,5,5,5]
3589 ; SSE-NEXT: pand %xmm1, %xmm14
3590 ; SSE-NEXT: por %xmm15, %xmm14
3591 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,3,2,3]
3592 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[0,2,2,3]
3593 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm7[0],xmm14[1],xmm7[1]
3594 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3595 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm10[0,1,2,3,4,6,6,7]
3596 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,2,2,3]
3597 ; SSE-NEXT: movdqa %xmm13, %xmm10
3598 ; SSE-NEXT: pandn %xmm7, %xmm10
3599 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm11[0,1,2,3,6,5,7,7]
3600 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,2,2,3]
3601 ; SSE-NEXT: pand %xmm13, %xmm7
3602 ; SSE-NEXT: por %xmm10, %xmm7
3603 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm12[0,1,2,3,4,6,6,7]
3604 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[2,1,3,3]
3605 ; SSE-NEXT: movdqa %xmm1, %xmm11
3606 ; SSE-NEXT: pandn %xmm10, %xmm11
3607 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm2[3,3,3,3]
3608 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,5,5,5,5]
3609 ; SSE-NEXT: pand %xmm1, %xmm10
3610 ; SSE-NEXT: por %xmm11, %xmm10
3611 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[1,3,2,3]
3612 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm10[0,2,2,3]
3613 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm7[0],xmm2[1],xmm7[1]
3614 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3615 ; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
3616 ; SSE-NEXT: # xmm5 = xmm5[8],mem[8],xmm5[9],mem[9],xmm5[10],mem[10],xmm5[11],mem[11],xmm5[12],mem[12],xmm5[13],mem[13],xmm5[14],mem[14],xmm5[15],mem[15]
3617 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm5[0,0,2,1,4,5,6,7]
3618 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,0,2,1]
3619 ; SSE-NEXT: movdqa %xmm13, %xmm10
3620 ; SSE-NEXT: pandn %xmm7, %xmm10
3621 ; SSE-NEXT: punpckhbw {{.*#+}} xmm0 = xmm0[8],xmm6[8],xmm0[9],xmm6[9],xmm0[10],xmm6[10],xmm0[11],xmm6[11],xmm0[12],xmm6[12],xmm0[13],xmm6[13],xmm0[14],xmm6[14],xmm0[15],xmm6[15]
3622 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm0[0,1,1,3,4,5,6,7]
3623 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
3624 ; SSE-NEXT: pand %xmm13, %xmm6
3625 ; SSE-NEXT: por %xmm10, %xmm6
3626 ; SSE-NEXT: punpckhbw {{.*#+}} xmm3 = xmm3[8],xmm8[8],xmm3[9],xmm8[9],xmm3[10],xmm8[10],xmm3[11],xmm8[11],xmm3[12],xmm8[12],xmm3[13],xmm8[13],xmm3[14],xmm8[14],xmm3[15],xmm8[15]
3627 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm3[0,0,2,1,4,5,6,7]
3628 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,1,1,3]
3629 ; SSE-NEXT: movdqa %xmm1, %xmm8
3630 ; SSE-NEXT: pandn %xmm7, %xmm8
3631 ; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm9[8],xmm4[9],xmm9[9],xmm4[10],xmm9[10],xmm4[11],xmm9[11],xmm4[12],xmm9[12],xmm4[13],xmm9[13],xmm4[14],xmm9[14],xmm4[15],xmm9[15]
3632 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm4[0,0,0,0]
3633 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,5,5,5,5]
3634 ; SSE-NEXT: pand %xmm1, %xmm7
3635 ; SSE-NEXT: por %xmm8, %xmm7
3636 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
3637 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[0,2,2,3]
3638 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1]
3639 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3640 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm5[0,2,2,3,4,5,6,7]
3641 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
3642 ; SSE-NEXT: movdqa %xmm13, %xmm7
3643 ; SSE-NEXT: pandn %xmm6, %xmm7
3644 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm0[2,1,3,3,4,5,6,7]
3645 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
3646 ; SSE-NEXT: pand %xmm13, %xmm6
3647 ; SSE-NEXT: por %xmm7, %xmm6
3648 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm3[0,2,2,3,4,5,6,7]
3649 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,1,1,3]
3650 ; SSE-NEXT: movdqa %xmm1, %xmm8
3651 ; SSE-NEXT: pandn %xmm7, %xmm8
3652 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm4[1,1,1,1]
3653 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,5,5,5,5]
3654 ; SSE-NEXT: pand %xmm1, %xmm7
3655 ; SSE-NEXT: por %xmm8, %xmm7
3656 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
3657 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[0,2,2,3]
3658 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1]
3659 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3660 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm5[0,1,2,3,4,4,6,5]
3661 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
3662 ; SSE-NEXT: movdqa %xmm13, %xmm7
3663 ; SSE-NEXT: pandn %xmm6, %xmm7
3664 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm0[0,1,2,3,4,5,5,7]
3665 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
3666 ; SSE-NEXT: pand %xmm13, %xmm6
3667 ; SSE-NEXT: por %xmm7, %xmm6
3668 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm3[0,1,2,3,4,4,6,5]
3669 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,1,3,3]
3670 ; SSE-NEXT: movdqa %xmm1, %xmm8
3671 ; SSE-NEXT: pandn %xmm7, %xmm8
3672 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm4[2,2,2,2]
3673 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,5,5,5,5]
3674 ; SSE-NEXT: pand %xmm1, %xmm7
3675 ; SSE-NEXT: por %xmm8, %xmm7
3676 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
3677 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[0,2,2,3]
3678 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1]
3679 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3680 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,6,6,7]
3681 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,2,2,3]
3682 ; SSE-NEXT: movdqa %xmm13, %xmm6
3683 ; SSE-NEXT: pandn %xmm5, %xmm6
3684 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,6,5,7,7]
3685 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
3686 ; SSE-NEXT: pand %xmm13, %xmm0
3687 ; SSE-NEXT: por %xmm6, %xmm0
3688 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
3689 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,3]
3690 ; SSE-NEXT: movdqa %xmm1, %xmm5
3691 ; SSE-NEXT: pandn %xmm3, %xmm5
3692 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm4[3,3,3,3]
3693 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,5,5,5,5]
3694 ; SSE-NEXT: pand %xmm1, %xmm3
3695 ; SSE-NEXT: por %xmm5, %xmm3
3696 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
3697 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[0,2,2,3]
3698 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
3699 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3700 ; SSE-NEXT: movdqa 32(%r10), %xmm0
3701 ; SSE-NEXT: movdqa 32(%rax), %xmm2
3702 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3703 ; SSE-NEXT: movdqa %xmm0, %xmm10
3704 ; SSE-NEXT: punpcklbw {{.*#+}} xmm10 = xmm10[0],xmm2[0],xmm10[1],xmm2[1],xmm10[2],xmm2[2],xmm10[3],xmm2[3],xmm10[4],xmm2[4],xmm10[5],xmm2[5],xmm10[6],xmm2[6],xmm10[7],xmm2[7]
3705 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm10[0,0,2,1,4,5,6,7]
3706 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,0,2,1]
3707 ; SSE-NEXT: movdqa %xmm13, %xmm4
3708 ; SSE-NEXT: pandn %xmm3, %xmm4
3709 ; SSE-NEXT: movdqa 32(%r8), %xmm3
3710 ; SSE-NEXT: movdqa 32(%r9), %xmm7
3711 ; SSE-NEXT: movdqa %xmm3, %xmm11
3712 ; SSE-NEXT: punpcklbw {{.*#+}} xmm11 = xmm11[0],xmm7[0],xmm11[1],xmm7[1],xmm11[2],xmm7[2],xmm11[3],xmm7[3],xmm11[4],xmm7[4],xmm11[5],xmm7[5],xmm11[6],xmm7[6],xmm11[7],xmm7[7]
3713 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm11[0,1,1,3,4,5,6,7]
3714 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm5[0,0,2,1]
3715 ; SSE-NEXT: pand %xmm13, %xmm14
3716 ; SSE-NEXT: por %xmm4, %xmm14
3717 ; SSE-NEXT: movdqa 32(%rdx), %xmm4
3718 ; SSE-NEXT: movdqa 32(%rcx), %xmm8
3719 ; SSE-NEXT: movdqa %xmm4, %xmm12
3720 ; SSE-NEXT: punpcklbw {{.*#+}} xmm12 = xmm12[0],xmm8[0],xmm12[1],xmm8[1],xmm12[2],xmm8[2],xmm12[3],xmm8[3],xmm12[4],xmm8[4],xmm12[5],xmm8[5],xmm12[6],xmm8[6],xmm12[7],xmm8[7]
3721 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm12[0,0,2,1,4,5,6,7]
3722 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[0,1,1,3]
3723 ; SSE-NEXT: movdqa %xmm1, %xmm15
3724 ; SSE-NEXT: pandn %xmm5, %xmm15
3725 ; SSE-NEXT: movdqa 32(%rdi), %xmm5
3726 ; SSE-NEXT: movdqa 32(%rsi), %xmm9
3727 ; SSE-NEXT: movdqa %xmm5, %xmm2
3728 ; SSE-NEXT: punpcklbw {{.*#+}} xmm2 = xmm2[0],xmm9[0],xmm2[1],xmm9[1],xmm2[2],xmm9[2],xmm2[3],xmm9[3],xmm2[4],xmm9[4],xmm2[5],xmm9[5],xmm2[6],xmm9[6],xmm2[7],xmm9[7]
3729 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm2[0,0,0,0]
3730 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,5,5,5,5]
3731 ; SSE-NEXT: pand %xmm1, %xmm6
3732 ; SSE-NEXT: por %xmm15, %xmm6
3733 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[1,3,2,3]
3734 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
3735 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm14[0],xmm6[1],xmm14[1]
3736 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3737 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm10[0,2,2,3,4,5,6,7]
3738 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
3739 ; SSE-NEXT: movdqa %xmm13, %xmm14
3740 ; SSE-NEXT: pandn %xmm6, %xmm14
3741 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm11[2,1,3,3,4,5,6,7]
3742 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
3743 ; SSE-NEXT: pand %xmm13, %xmm6
3744 ; SSE-NEXT: por %xmm14, %xmm6
3745 ; SSE-NEXT: pshuflw {{.*#+}} xmm14 = xmm12[0,2,2,3,4,5,6,7]
3746 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[0,1,1,3]
3747 ; SSE-NEXT: movdqa %xmm1, %xmm15
3748 ; SSE-NEXT: pandn %xmm14, %xmm15
3749 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm2[1,1,1,1]
3750 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,5,5,5,5]
3751 ; SSE-NEXT: pand %xmm1, %xmm14
3752 ; SSE-NEXT: por %xmm15, %xmm14
3753 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
3754 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[0,2,2,3]
3755 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm6[0],xmm14[1],xmm6[1]
3756 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3757 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm10[0,1,2,3,4,4,6,5]
3758 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
3759 ; SSE-NEXT: movdqa %xmm13, %xmm14
3760 ; SSE-NEXT: pandn %xmm6, %xmm14
3761 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm11[0,1,2,3,4,5,5,7]
3762 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
3763 ; SSE-NEXT: pand %xmm13, %xmm6
3764 ; SSE-NEXT: por %xmm14, %xmm6
3765 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm12[0,1,2,3,4,4,6,5]
3766 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[2,1,3,3]
3767 ; SSE-NEXT: movdqa %xmm1, %xmm15
3768 ; SSE-NEXT: pandn %xmm14, %xmm15
3769 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm2[2,2,2,2]
3770 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,5,5,5,5]
3771 ; SSE-NEXT: pand %xmm1, %xmm14
3772 ; SSE-NEXT: por %xmm15, %xmm14
3773 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
3774 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[0,2,2,3]
3775 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm6[0],xmm14[1],xmm6[1]
3776 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3777 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm10[0,1,2,3,4,6,6,7]
3778 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
3779 ; SSE-NEXT: movdqa %xmm13, %xmm10
3780 ; SSE-NEXT: pandn %xmm6, %xmm10
3781 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm11[0,1,2,3,6,5,7,7]
3782 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
3783 ; SSE-NEXT: pand %xmm13, %xmm6
3784 ; SSE-NEXT: por %xmm10, %xmm6
3785 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm12[0,1,2,3,4,6,6,7]
3786 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[2,1,3,3]
3787 ; SSE-NEXT: movdqa %xmm1, %xmm11
3788 ; SSE-NEXT: pandn %xmm10, %xmm11
3789 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm2[3,3,3,3]
3790 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,5,5,5,5]
3791 ; SSE-NEXT: pand %xmm1, %xmm10
3792 ; SSE-NEXT: por %xmm11, %xmm10
3793 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
3794 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm10[0,2,2,3]
3795 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1]
3796 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3797 ; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
3798 ; SSE-NEXT: # xmm0 = xmm0[8],mem[8],xmm0[9],mem[9],xmm0[10],mem[10],xmm0[11],mem[11],xmm0[12],mem[12],xmm0[13],mem[13],xmm0[14],mem[14],xmm0[15],mem[15]
3799 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm0[0,0,2,1,4,5,6,7]
3800 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
3801 ; SSE-NEXT: movdqa %xmm13, %xmm10
3802 ; SSE-NEXT: pandn %xmm6, %xmm10
3803 ; SSE-NEXT: punpckhbw {{.*#+}} xmm3 = xmm3[8],xmm7[8],xmm3[9],xmm7[9],xmm3[10],xmm7[10],xmm3[11],xmm7[11],xmm3[12],xmm7[12],xmm3[13],xmm7[13],xmm3[14],xmm7[14],xmm3[15],xmm7[15]
3804 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm3[0,1,1,3,4,5,6,7]
3805 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
3806 ; SSE-NEXT: pand %xmm13, %xmm6
3807 ; SSE-NEXT: por %xmm10, %xmm6
3808 ; SSE-NEXT: punpckhbw {{.*#+}} xmm4 = xmm4[8],xmm8[8],xmm4[9],xmm8[9],xmm4[10],xmm8[10],xmm4[11],xmm8[11],xmm4[12],xmm8[12],xmm4[13],xmm8[13],xmm4[14],xmm8[14],xmm4[15],xmm8[15]
3809 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm4[0,0,2,1,4,5,6,7]
3810 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,1,1,3]
3811 ; SSE-NEXT: movdqa %xmm1, %xmm8
3812 ; SSE-NEXT: pandn %xmm7, %xmm8
3813 ; SSE-NEXT: punpckhbw {{.*#+}} xmm5 = xmm5[8],xmm9[8],xmm5[9],xmm9[9],xmm5[10],xmm9[10],xmm5[11],xmm9[11],xmm5[12],xmm9[12],xmm5[13],xmm9[13],xmm5[14],xmm9[14],xmm5[15],xmm9[15]
3814 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm5[0,0,0,0]
3815 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,5,5,5,5]
3816 ; SSE-NEXT: pand %xmm1, %xmm7
3817 ; SSE-NEXT: por %xmm8, %xmm7
3818 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
3819 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[0,2,2,3]
3820 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1]
3821 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3822 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm0[0,2,2,3,4,5,6,7]
3823 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
3824 ; SSE-NEXT: movdqa %xmm13, %xmm7
3825 ; SSE-NEXT: pandn %xmm6, %xmm7
3826 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm3[2,1,3,3,4,5,6,7]
3827 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,0,2,1]
3828 ; SSE-NEXT: pand %xmm13, %xmm6
3829 ; SSE-NEXT: por %xmm7, %xmm6
3830 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm4[0,2,2,3,4,5,6,7]
3831 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,1,1,3]
3832 ; SSE-NEXT: movdqa %xmm1, %xmm8
3833 ; SSE-NEXT: pandn %xmm7, %xmm8
3834 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm5[1,1,1,1]
3835 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,5,5,5,5]
3836 ; SSE-NEXT: pand %xmm1, %xmm7
3837 ; SSE-NEXT: por %xmm8, %xmm7
3838 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
3839 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[0,2,2,3]
3840 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1]
3841 ; SSE-NEXT: movdqa %xmm2, (%rsp) # 16-byte Spill
3842 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm0[0,1,2,3,4,4,6,5]
3843 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
3844 ; SSE-NEXT: movdqa %xmm13, %xmm7
3845 ; SSE-NEXT: pandn %xmm6, %xmm7
3846 ; SSE-NEXT: pshufhw {{.*#+}} xmm6 = xmm3[0,1,2,3,4,5,5,7]
3847 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
3848 ; SSE-NEXT: pand %xmm13, %xmm6
3849 ; SSE-NEXT: por %xmm7, %xmm6
3850 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm4[0,1,2,3,4,4,6,5]
3851 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[2,1,3,3]
3852 ; SSE-NEXT: movdqa %xmm1, %xmm8
3853 ; SSE-NEXT: pandn %xmm7, %xmm8
3854 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm5[2,2,2,2]
3855 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,5,5,5,5]
3856 ; SSE-NEXT: pand %xmm1, %xmm7
3857 ; SSE-NEXT: por %xmm8, %xmm7
3858 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[1,3,2,3]
3859 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[0,2,2,3]
3860 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1]
3861 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3862 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
3863 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
3864 ; SSE-NEXT: movdqa %xmm13, %xmm6
3865 ; SSE-NEXT: pandn %xmm0, %xmm6
3866 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm3[0,1,2,3,6,5,7,7]
3867 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
3868 ; SSE-NEXT: pand %xmm13, %xmm0
3869 ; SSE-NEXT: por %xmm6, %xmm0
3870 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm4[0,1,2,3,4,6,6,7]
3871 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,3]
3872 ; SSE-NEXT: movdqa %xmm1, %xmm4
3873 ; SSE-NEXT: pandn %xmm3, %xmm4
3874 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm5[3,3,3,3]
3875 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,5,5,5,5]
3876 ; SSE-NEXT: pand %xmm1, %xmm3
3877 ; SSE-NEXT: por %xmm4, %xmm3
3878 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
3879 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[0,2,2,3]
3880 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
3881 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3882 ; SSE-NEXT: movdqa 48(%r10), %xmm9
3883 ; SSE-NEXT: movdqa 48(%rax), %xmm0
3884 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3885 ; SSE-NEXT: movdqa %xmm9, %xmm6
3886 ; SSE-NEXT: punpcklbw {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3],xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
3887 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm6[0,0,2,1,4,5,6,7]
3888 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
3889 ; SSE-NEXT: movdqa %xmm13, %xmm3
3890 ; SSE-NEXT: pandn %xmm0, %xmm3
3891 ; SSE-NEXT: movdqa 48(%r8), %xmm8
3892 ; SSE-NEXT: movdqa 48(%r9), %xmm0
3893 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3894 ; SSE-NEXT: movdqa %xmm8, %xmm4
3895 ; SSE-NEXT: punpcklbw {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3],xmm4[4],xmm0[4],xmm4[5],xmm0[5],xmm4[6],xmm0[6],xmm4[7],xmm0[7]
3896 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm4[0,1,1,3,4,5,6,7]
3897 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm0[0,0,2,1]
3898 ; SSE-NEXT: pand %xmm13, %xmm10
3899 ; SSE-NEXT: por %xmm3, %xmm10
3900 ; SSE-NEXT: movdqa 48(%rdx), %xmm7
3901 ; SSE-NEXT: movdqa 48(%rcx), %xmm12
3902 ; SSE-NEXT: movdqa %xmm7, %xmm3
3903 ; SSE-NEXT: punpcklbw {{.*#+}} xmm3 = xmm3[0],xmm12[0],xmm3[1],xmm12[1],xmm3[2],xmm12[2],xmm3[3],xmm12[3],xmm3[4],xmm12[4],xmm3[5],xmm12[5],xmm3[6],xmm12[6],xmm3[7],xmm12[7]
3904 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm3[0,0,2,1,4,5,6,7]
3905 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,1,3]
3906 ; SSE-NEXT: movdqa %xmm1, %xmm15
3907 ; SSE-NEXT: pandn %xmm0, %xmm15
3908 ; SSE-NEXT: movdqa 48(%rdi), %xmm5
3909 ; SSE-NEXT: movdqa 48(%rsi), %xmm11
3910 ; SSE-NEXT: movdqa %xmm5, %xmm0
3911 ; SSE-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm11[0],xmm0[1],xmm11[1],xmm0[2],xmm11[2],xmm0[3],xmm11[3],xmm0[4],xmm11[4],xmm0[5],xmm11[5],xmm0[6],xmm11[6],xmm0[7],xmm11[7]
3912 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm0[0,0,0,0]
3913 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,5,5,5,5]
3914 ; SSE-NEXT: pand %xmm1, %xmm14
3915 ; SSE-NEXT: por %xmm15, %xmm14
3916 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[1,3,2,3]
3917 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm14[0,2,2,3]
3918 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm10[0],xmm2[1],xmm10[1]
3919 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3920 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm6[0,2,2,3,4,5,6,7]
3921 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,0,2,1]
3922 ; SSE-NEXT: movdqa %xmm13, %xmm14
3923 ; SSE-NEXT: pandn %xmm10, %xmm14
3924 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm4[2,1,3,3,4,5,6,7]
3925 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,0,2,1]
3926 ; SSE-NEXT: pand %xmm13, %xmm10
3927 ; SSE-NEXT: por %xmm14, %xmm10
3928 ; SSE-NEXT: pshuflw {{.*#+}} xmm14 = xmm3[0,2,2,3,4,5,6,7]
3929 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[0,1,1,3]
3930 ; SSE-NEXT: movdqa %xmm1, %xmm15
3931 ; SSE-NEXT: pandn %xmm14, %xmm15
3932 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm0[1,1,1,1]
3933 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,5,5,5,5]
3934 ; SSE-NEXT: pand %xmm1, %xmm14
3935 ; SSE-NEXT: por %xmm15, %xmm14
3936 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[1,3,2,3]
3937 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm14[0,2,2,3]
3938 ; SSE-NEXT: punpckldq {{.*#+}} xmm15 = xmm15[0],xmm10[0],xmm15[1],xmm10[1]
3939 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm6[0,1,2,3,4,4,6,5]
3940 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,2,2,3]
3941 ; SSE-NEXT: movdqa %xmm13, %xmm14
3942 ; SSE-NEXT: pandn %xmm10, %xmm14
3943 ; SSE-NEXT: pshufhw {{.*#+}} xmm10 = xmm4[0,1,2,3,4,5,5,7]
3944 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm10[0,2,2,3]
3945 ; SSE-NEXT: pand %xmm13, %xmm10
3946 ; SSE-NEXT: por %xmm14, %xmm10
3947 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm3[0,1,2,3,4,4,6,5]
3948 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm14[2,1,3,3]
3949 ; SSE-NEXT: movdqa %xmm1, %xmm2
3950 ; SSE-NEXT: pandn %xmm14, %xmm2
3951 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm0[2,2,2,2]
3952 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,5,5,5,5]
3953 ; SSE-NEXT: pand %xmm1, %xmm14
3954 ; SSE-NEXT: por %xmm2, %xmm14
3955 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm10[1,3,2,3]
3956 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm14[0,2,2,3]
3957 ; SSE-NEXT: punpckldq {{.*#+}} xmm10 = xmm10[0],xmm2[0],xmm10[1],xmm2[1]
3958 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm6[0,1,2,3,4,6,6,7]
3959 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
3960 ; SSE-NEXT: movdqa %xmm13, %xmm6
3961 ; SSE-NEXT: pandn %xmm2, %xmm6
3962 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm4[0,1,2,3,6,5,7,7]
3963 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
3964 ; SSE-NEXT: pand %xmm13, %xmm2
3965 ; SSE-NEXT: por %xmm6, %xmm2
3966 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
3967 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,3]
3968 ; SSE-NEXT: movdqa %xmm1, %xmm4
3969 ; SSE-NEXT: pandn %xmm3, %xmm4
3970 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
3971 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,5,5,5,5]
3972 ; SSE-NEXT: pand %xmm1, %xmm0
3973 ; SSE-NEXT: por %xmm4, %xmm0
3974 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,3,2,3]
3975 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm0[0,2,2,3]
3976 ; SSE-NEXT: punpckldq {{.*#+}} xmm6 = xmm6[0],xmm2[0],xmm6[1],xmm2[1]
3977 ; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
3978 ; SSE-NEXT: # xmm9 = xmm9[8],mem[8],xmm9[9],mem[9],xmm9[10],mem[10],xmm9[11],mem[11],xmm9[12],mem[12],xmm9[13],mem[13],xmm9[14],mem[14],xmm9[15],mem[15]
3979 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[0,0,2,1,4,5,6,7]
3980 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
3981 ; SSE-NEXT: movdqa %xmm13, %xmm2
3982 ; SSE-NEXT: pandn %xmm0, %xmm2
3983 ; SSE-NEXT: punpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
3984 ; SSE-NEXT: # xmm8 = xmm8[8],mem[8],xmm8[9],mem[9],xmm8[10],mem[10],xmm8[11],mem[11],xmm8[12],mem[12],xmm8[13],mem[13],xmm8[14],mem[14],xmm8[15],mem[15]
3985 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm8[0,1,1,3,4,5,6,7]
3986 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
3987 ; SSE-NEXT: pand %xmm13, %xmm0
3988 ; SSE-NEXT: por %xmm2, %xmm0
3989 ; SSE-NEXT: punpckhbw {{.*#+}} xmm7 = xmm7[8],xmm12[8],xmm7[9],xmm12[9],xmm7[10],xmm12[10],xmm7[11],xmm12[11],xmm7[12],xmm12[12],xmm7[13],xmm12[13],xmm7[14],xmm12[14],xmm7[15],xmm12[15]
3990 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm7[0,0,2,1,4,5,6,7]
3991 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
3992 ; SSE-NEXT: movdqa %xmm1, %xmm3
3993 ; SSE-NEXT: pandn %xmm2, %xmm3
3994 ; SSE-NEXT: punpckhbw {{.*#+}} xmm5 = xmm5[8],xmm11[8],xmm5[9],xmm11[9],xmm5[10],xmm11[10],xmm5[11],xmm11[11],xmm5[12],xmm11[12],xmm5[13],xmm11[13],xmm5[14],xmm11[14],xmm5[15],xmm11[15]
3995 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm5[0,0,0,0]
3996 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,5,5,5,5]
3997 ; SSE-NEXT: pand %xmm1, %xmm2
3998 ; SSE-NEXT: por %xmm3, %xmm2
3999 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,3]
4000 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[0,2,2,3]
4001 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
4002 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[0,2,2,3,4,5,6,7]
4003 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
4004 ; SSE-NEXT: movdqa %xmm13, %xmm2
4005 ; SSE-NEXT: pandn %xmm0, %xmm2
4006 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm8[2,1,3,3,4,5,6,7]
4007 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,2,1]
4008 ; SSE-NEXT: pand %xmm13, %xmm0
4009 ; SSE-NEXT: por %xmm2, %xmm0
4010 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm7[0,2,2,3,4,5,6,7]
4011 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
4012 ; SSE-NEXT: movdqa %xmm1, %xmm3
4013 ; SSE-NEXT: pandn %xmm2, %xmm3
4014 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm5[1,1,1,1]
4015 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,5,5,5,5]
4016 ; SSE-NEXT: pand %xmm1, %xmm2
4017 ; SSE-NEXT: por %xmm3, %xmm2
4018 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,3,2,3]
4019 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm2[0,2,2,3]
4020 ; SSE-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1]
4021 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm9[0,1,2,3,4,4,6,5]
4022 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
4023 ; SSE-NEXT: movdqa %xmm13, %xmm3
4024 ; SSE-NEXT: pandn %xmm2, %xmm3
4025 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm8[0,1,2,3,4,5,5,7]
4026 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
4027 ; SSE-NEXT: pand %xmm13, %xmm2
4028 ; SSE-NEXT: por %xmm3, %xmm2
4029 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm7[0,1,2,3,4,4,6,5]
4030 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[2,1,3,3]
4031 ; SSE-NEXT: movdqa %xmm1, %xmm11
4032 ; SSE-NEXT: pandn %xmm3, %xmm11
4033 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm5[2,2,2,2]
4034 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,5,5,5,5]
4035 ; SSE-NEXT: pand %xmm1, %xmm3
4036 ; SSE-NEXT: por %xmm11, %xmm3
4037 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,3,2,3]
4038 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
4039 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
4040 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm9[0,1,2,3,4,6,6,7]
4041 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
4042 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,6,5,7,7]
4043 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,2,2,3]
4044 ; SSE-NEXT: pand %xmm13, %xmm8
4045 ; SSE-NEXT: pandn %xmm2, %xmm13
4046 ; SSE-NEXT: por %xmm8, %xmm13
4047 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm7[0,1,2,3,4,6,6,7]
4048 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[2,1,3,3]
4049 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[3,3,3,3]
4050 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,5,5,5,5]
4051 ; SSE-NEXT: pand %xmm1, %xmm5
4052 ; SSE-NEXT: pandn %xmm2, %xmm1
4053 ; SSE-NEXT: por %xmm5, %xmm1
4054 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm13[1,3,2,3]
4055 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
4056 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
4057 ; SSE-NEXT: movq {{[0-9]+}}(%rsp), %rax
4058 ; SSE-NEXT: movdqa %xmm1, 496(%rax)
4059 ; SSE-NEXT: movdqa %xmm3, 480(%rax)
4060 ; SSE-NEXT: movdqa %xmm0, 464(%rax)
4061 ; SSE-NEXT: movdqa %xmm4, 448(%rax)
4062 ; SSE-NEXT: movdqa %xmm6, 432(%rax)
4063 ; SSE-NEXT: movdqa %xmm10, 416(%rax)
4064 ; SSE-NEXT: movdqa %xmm15, 400(%rax)
4065 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4066 ; SSE-NEXT: movaps %xmm0, 384(%rax)
4067 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4068 ; SSE-NEXT: movaps %xmm0, 368(%rax)
4069 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4070 ; SSE-NEXT: movaps %xmm0, 352(%rax)
4071 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
4072 ; SSE-NEXT: movaps %xmm0, 336(%rax)
4073 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4074 ; SSE-NEXT: movaps %xmm0, 320(%rax)
4075 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4076 ; SSE-NEXT: movaps %xmm0, 304(%rax)
4077 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4078 ; SSE-NEXT: movaps %xmm0, 288(%rax)
4079 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4080 ; SSE-NEXT: movaps %xmm0, 272(%rax)
4081 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4082 ; SSE-NEXT: movaps %xmm0, 256(%rax)
4083 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4084 ; SSE-NEXT: movaps %xmm0, 240(%rax)
4085 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4086 ; SSE-NEXT: movaps %xmm0, 224(%rax)
4087 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4088 ; SSE-NEXT: movaps %xmm0, 208(%rax)
4089 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4090 ; SSE-NEXT: movaps %xmm0, 192(%rax)
4091 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4092 ; SSE-NEXT: movaps %xmm0, 176(%rax)
4093 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4094 ; SSE-NEXT: movaps %xmm0, 160(%rax)
4095 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4096 ; SSE-NEXT: movaps %xmm0, 144(%rax)
4097 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4098 ; SSE-NEXT: movaps %xmm0, 128(%rax)
4099 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4100 ; SSE-NEXT: movaps %xmm0, 112(%rax)
4101 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4102 ; SSE-NEXT: movaps %xmm0, 96(%rax)
4103 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4104 ; SSE-NEXT: movaps %xmm0, 80(%rax)
4105 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4106 ; SSE-NEXT: movaps %xmm0, 64(%rax)
4107 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4108 ; SSE-NEXT: movaps %xmm0, 48(%rax)
4109 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4110 ; SSE-NEXT: movaps %xmm0, 32(%rax)
4111 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4112 ; SSE-NEXT: movaps %xmm0, 16(%rax)
4113 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4114 ; SSE-NEXT: movaps %xmm0, (%rax)
4115 ; SSE-NEXT: addq $312, %rsp # imm = 0x138
4118 ; AVX1-ONLY-LABEL: store_i8_stride8_vf64:
4119 ; AVX1-ONLY: # %bb.0:
4120 ; AVX1-ONLY-NEXT: subq $360, %rsp # imm = 0x168
4121 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
4122 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %r10
4123 ; AVX1-ONLY-NEXT: vmovdqa (%r10), %xmm0
4124 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4125 ; AVX1-ONLY-NEXT: vmovdqa (%rax), %xmm1
4126 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4127 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
4128 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm0 = xmm1[0,1,2,3,4,4,6,5]
4129 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm1[0,1,2,3,4,6,6,7]
4130 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
4131 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
4132 ; AVX1-ONLY-NEXT: vbroadcastsd {{.*#+}} ymm13 = [65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0]
4133 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm13, %ymm0
4134 ; AVX1-ONLY-NEXT: vmovdqa (%r9), %xmm2
4135 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4136 ; AVX1-ONLY-NEXT: vmovdqa (%r8), %xmm3
4137 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4138 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
4139 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm4[0,1,2,3,4,5,5,7]
4140 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm4[0,1,2,3,6,5,7,7]
4141 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
4142 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
4143 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm13, %ymm2
4144 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm2, %ymm0
4145 ; AVX1-ONLY-NEXT: vmovdqa (%rcx), %xmm2
4146 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4147 ; AVX1-ONLY-NEXT: vmovdqa (%rdx), %xmm3
4148 ; AVX1-ONLY-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4149 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
4150 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,4,4,6,5]
4151 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm3[0,1,2,3,4,6,6,7]
4152 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm2, %ymm2
4153 ; AVX1-ONLY-NEXT: vmovdqa (%rsi), %xmm5
4154 ; AVX1-ONLY-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4155 ; AVX1-ONLY-NEXT: vmovdqa (%rdi), %xmm6
4156 ; AVX1-ONLY-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4157 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm6 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3],xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
4158 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[2,3,2,3]
4159 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
4160 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm8 = xmm6[3,3,3,3]
4161 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm8[0],zero,zero,zero,xmm8[1],zero,zero,zero
4162 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm7, %ymm7
4163 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm8 = ymm2[2,1,3,3,6,5,7,7]
4164 ; AVX1-ONLY-NEXT: vbroadcastsd {{.*#+}} ymm5 = [65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535]
4165 ; AVX1-ONLY-NEXT: vandnps %ymm8, %ymm5, %ymm8
4166 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm7, %ymm7
4167 ; AVX1-ONLY-NEXT: vorps %ymm7, %ymm8, %ymm7
4168 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm7[0],ymm0[1],ymm7[2],ymm0[3],ymm7[4],ymm0[5],ymm7[6],ymm0[7]
4169 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4170 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[0,0,2,1,4,5,6,7]
4171 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,2,2,3,4,5,6,7]
4172 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
4173 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm4[0,1,1,3,4,5,6,7]
4174 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,1,3,3,4,5,6,7]
4175 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm4
4176 ; AVX1-ONLY-NEXT: vmovdqa 48(%r10), %xmm1
4177 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4178 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
4179 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm13, %ymm0
4180 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
4181 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm13, %ymm4
4182 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm4, %ymm0
4183 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
4184 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[1,1,1,1]
4185 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
4186 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm4
4187 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm3[0,0,2,1,4,5,6,7]
4188 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
4189 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,2,2,3,4,5,6,7]
4190 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero
4191 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm6, %ymm6
4192 ; AVX1-ONLY-NEXT: vmovdqa 48(%rax), %xmm2
4193 ; AVX1-ONLY-NEXT: vmovdqa %xmm2, (%rsp) # 16-byte Spill
4194 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm4, %ymm4
4195 ; AVX1-ONLY-NEXT: vandnps %ymm6, %ymm5, %ymm6
4196 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm4, %ymm4
4197 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm4[0],ymm0[1],ymm4[2],ymm0[3],ymm4[4],ymm0[5],ymm4[6],ymm0[7]
4198 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4199 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
4200 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm11[0,0,2,1,4,5,6,7]
4201 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm11[0,2,2,3,4,5,6,7]
4202 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm6
4203 ; AVX1-ONLY-NEXT: vmovdqa 48(%r9), %xmm4
4204 ; AVX1-ONLY-NEXT: vmovdqa 48(%r8), %xmm0
4205 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm12 = xmm0[8],xmm4[8],xmm0[9],xmm4[9],xmm0[10],xmm4[10],xmm0[11],xmm4[11],xmm0[12],xmm4[12],xmm0[13],xmm4[13],xmm0[14],xmm4[14],xmm0[15],xmm4[15]
4206 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm12[0,1,1,3,4,5,6,7]
4207 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm8 = xmm12[2,1,3,3,4,5,6,7]
4208 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm7, %ymm7
4209 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm6 = ymm6[0,0,2,1,4,4,6,5]
4210 ; AVX1-ONLY-NEXT: vandnps %ymm6, %ymm13, %ymm6
4211 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm7[0,0,2,1,4,4,6,5]
4212 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm13, %ymm7
4213 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm7, %ymm2
4214 ; AVX1-ONLY-NEXT: vmovdqa 48(%rsi), %xmm6
4215 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdi), %xmm8
4216 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm8[8],xmm6[8],xmm8[9],xmm6[9],xmm8[10],xmm6[10],xmm8[11],xmm6[11],xmm8[12],xmm6[12],xmm8[13],xmm6[13],xmm8[14],xmm6[14],xmm8[15],xmm6[15]
4217 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm10[1,1,1,1]
4218 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
4219 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm9 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
4220 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm9, %ymm14
4221 ; AVX1-ONLY-NEXT: vmovdqa 48(%rcx), %xmm7
4222 ; AVX1-ONLY-NEXT: vmovdqa 48(%rdx), %xmm9
4223 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm15 = xmm9[8],xmm7[8],xmm9[9],xmm7[9],xmm9[10],xmm7[10],xmm9[11],xmm7[11],xmm9[12],xmm7[12],xmm9[13],xmm7[13],xmm9[14],xmm7[14],xmm9[15],xmm7[15]
4224 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm15[0,0,2,1,4,5,6,7]
4225 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
4226 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm15[0,2,2,3,4,5,6,7]
4227 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero
4228 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
4229 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm14, %ymm3
4230 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm5, %ymm1
4231 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm3, %ymm1
4232 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2],ymm2[3],ymm1[4],ymm2[5],ymm1[6],ymm2[7]
4233 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4234 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm11[0,1,2,3,4,4,6,5]
4235 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm11[0,1,2,3,4,6,6,7]
4236 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
4237 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm12[0,1,2,3,4,5,5,7]
4238 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm11 = xmm12[0,1,2,3,6,5,7,7]
4239 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm3, %ymm3
4240 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
4241 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm13, %ymm1
4242 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[0,2,2,3,4,6,6,7]
4243 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm13, %ymm3
4244 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm3, %ymm1
4245 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm15[0,1,2,3,4,4,6,5]
4246 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm11 = xmm15[0,1,2,3,4,6,6,7]
4247 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm3, %ymm3
4248 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm10[2,3,2,3]
4249 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm11 = xmm11[0],zero,zero,zero,xmm11[1],zero,zero,zero
4250 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[3,3,3,3]
4251 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
4252 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm11, %ymm10
4253 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,1,3,3,6,5,7,7]
4254 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm5, %ymm3
4255 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm10, %ymm10
4256 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm10, %ymm3
4257 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2],ymm1[3],ymm3[4],ymm1[5],ymm3[6],ymm1[7]
4258 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4259 ; AVX1-ONLY-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
4260 ; AVX1-ONLY-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
4261 ; AVX1-ONLY-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3],xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
4262 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm1[0,0,2,1,4,5,6,7]
4263 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm10 = xmm1[0,2,2,3,4,5,6,7]
4264 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm3, %ymm10
4265 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm0[0],xmm4[0],xmm0[1],xmm4[1],xmm0[2],xmm4[2],xmm0[3],xmm4[3],xmm0[4],xmm4[4],xmm0[5],xmm4[5],xmm0[6],xmm4[6],xmm0[7],xmm4[7]
4266 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm3[0,1,1,3,4,5,6,7]
4267 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm3[2,1,3,3,4,5,6,7]
4268 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm0
4269 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm10[0,0,2,1,4,4,6,5]
4270 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm13, %ymm4
4271 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
4272 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm13, %ymm0
4273 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm0, %ymm4
4274 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3],xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
4275 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[1,1,1,1]
4276 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
4277 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
4278 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm6
4279 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
4280 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm8 = xmm7[0,0,2,1,4,5,6,7]
4281 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm8[0],zero,xmm8[1],zero
4282 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm9 = xmm7[0,2,2,3,4,5,6,7]
4283 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm9[0],zero,xmm9[1],zero
4284 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm8, %ymm8
4285 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm6, %ymm6
4286 ; AVX1-ONLY-NEXT: vandnps %ymm8, %ymm5, %ymm8
4287 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm8, %ymm6
4288 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm6[0],ymm4[1],ymm6[2],ymm4[3],ymm6[4],ymm4[5],ymm6[6],ymm4[7]
4289 ; AVX1-ONLY-NEXT: vmovups %ymm2, (%rsp) # 32-byte Spill
4290 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm1[0,1,2,3,4,4,6,5]
4291 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
4292 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm4, %ymm1
4293 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5,5,7]
4294 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,6,5,7,7]
4295 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
4296 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
4297 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm13, %ymm1
4298 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[0,2,2,3,4,6,6,7]
4299 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm13, %ymm3
4300 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm3, %ymm1
4301 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm7[0,1,2,3,4,4,6,5]
4302 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm7[0,1,2,3,4,6,6,7]
4303 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
4304 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[2,3,2,3]
4305 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
4306 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
4307 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
4308 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm0
4309 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,1,3,3,6,5,7,7]
4310 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm5, %ymm3
4311 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm0, %ymm0
4312 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm0, %ymm0
4313 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
4314 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4315 ; AVX1-ONLY-NEXT: vmovdqa 32(%r10), %xmm0
4316 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4317 ; AVX1-ONLY-NEXT: vmovdqa 32(%rax), %xmm4
4318 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm11 = xmm4[8],xmm0[8],xmm4[9],xmm0[9],xmm4[10],xmm0[10],xmm4[11],xmm0[11],xmm4[12],xmm0[12],xmm4[13],xmm0[13],xmm4[14],xmm0[14],xmm4[15],xmm0[15]
4319 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm11[0,0,2,1,4,5,6,7]
4320 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm11[0,2,2,3,4,5,6,7]
4321 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm6
4322 ; AVX1-ONLY-NEXT: vmovdqa 32(%r9), %xmm1
4323 ; AVX1-ONLY-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4324 ; AVX1-ONLY-NEXT: vmovdqa 32(%r8), %xmm0
4325 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm12 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
4326 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm12[0,1,1,3,4,5,6,7]
4327 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm8 = xmm12[2,1,3,3,4,5,6,7]
4328 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm8, %ymm7, %ymm7
4329 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm6 = ymm6[0,0,2,1,4,4,6,5]
4330 ; AVX1-ONLY-NEXT: vandnps %ymm6, %ymm13, %ymm6
4331 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm7 = ymm7[0,0,2,1,4,4,6,5]
4332 ; AVX1-ONLY-NEXT: vandps %ymm7, %ymm13, %ymm7
4333 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm7, %ymm2
4334 ; AVX1-ONLY-NEXT: vmovdqa 32(%rsi), %xmm6
4335 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdi), %xmm8
4336 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm8[8],xmm6[8],xmm8[9],xmm6[9],xmm8[10],xmm6[10],xmm8[11],xmm6[11],xmm8[12],xmm6[12],xmm8[13],xmm6[13],xmm8[14],xmm6[14],xmm8[15],xmm6[15]
4337 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm10[1,1,1,1]
4338 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
4339 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm9 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
4340 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm9, %ymm14
4341 ; AVX1-ONLY-NEXT: vmovdqa 32(%rcx), %xmm7
4342 ; AVX1-ONLY-NEXT: vmovdqa 32(%rdx), %xmm9
4343 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm15 = xmm9[8],xmm7[8],xmm9[9],xmm7[9],xmm9[10],xmm7[10],xmm9[11],xmm7[11],xmm9[12],xmm7[12],xmm9[13],xmm7[13],xmm9[14],xmm7[14],xmm9[15],xmm7[15]
4344 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm15[0,0,2,1,4,5,6,7]
4345 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero
4346 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm15[0,2,2,3,4,5,6,7]
4347 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero
4348 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
4349 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm14, %ymm3
4350 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm5, %ymm1
4351 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm3, %ymm1
4352 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2],ymm2[3],ymm1[4],ymm2[5],ymm1[6],ymm2[7]
4353 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4354 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm11[0,1,2,3,4,4,6,5]
4355 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm11[0,1,2,3,4,6,6,7]
4356 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
4357 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm12[0,1,2,3,4,5,5,7]
4358 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm11 = xmm12[0,1,2,3,6,5,7,7]
4359 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm3, %ymm3
4360 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
4361 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm13, %ymm1
4362 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[0,2,2,3,4,6,6,7]
4363 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm13, %ymm3
4364 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm3, %ymm1
4365 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm15[0,1,2,3,4,4,6,5]
4366 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm11 = xmm15[0,1,2,3,4,6,6,7]
4367 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm3, %ymm3
4368 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm11 = xmm10[2,3,2,3]
4369 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm11 = xmm11[0],zero,zero,zero,xmm11[1],zero,zero,zero
4370 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[3,3,3,3]
4371 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
4372 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm11, %ymm10
4373 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,1,3,3,6,5,7,7]
4374 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm5, %ymm3
4375 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm10, %ymm10
4376 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm10, %ymm3
4377 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0],ymm1[1],ymm3[2],ymm1[3],ymm3[4],ymm1[5],ymm3[6],ymm1[7]
4378 ; AVX1-ONLY-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4379 ; AVX1-ONLY-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm1 # 16-byte Folded Reload
4380 ; AVX1-ONLY-NEXT: # xmm1 = xmm4[0],mem[0],xmm4[1],mem[1],xmm4[2],mem[2],xmm4[3],mem[3],xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
4381 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm1[0,0,2,1,4,5,6,7]
4382 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm1[0,2,2,3,4,5,6,7]
4383 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm4
4384 ; AVX1-ONLY-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm3 # 16-byte Folded Reload
4385 ; AVX1-ONLY-NEXT: # xmm3 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
4386 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm3[0,1,1,3,4,5,6,7]
4387 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm10 = xmm3[2,1,3,3,4,5,6,7]
4388 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm10, %ymm0, %ymm0
4389 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
4390 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm13, %ymm4
4391 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
4392 ; AVX1-ONLY-NEXT: vandps %ymm0, %ymm13, %ymm0
4393 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm0, %ymm4
4394 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm8[0],xmm6[0],xmm8[1],xmm6[1],xmm8[2],xmm6[2],xmm8[3],xmm6[3],xmm8[4],xmm6[4],xmm8[5],xmm6[5],xmm8[6],xmm6[6],xmm8[7],xmm6[7]
4395 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[1,1,1,1]
4396 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
4397 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
4398 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm8, %ymm6
4399 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
4400 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm8 = xmm7[0,0,2,1,4,5,6,7]
4401 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm8 = xmm8[0],zero,xmm8[1],zero
4402 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm9 = xmm7[0,2,2,3,4,5,6,7]
4403 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm9[0],zero,xmm9[1],zero
4404 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm9, %ymm8, %ymm8
4405 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm6, %ymm6
4406 ; AVX1-ONLY-NEXT: vandnps %ymm8, %ymm5, %ymm8
4407 ; AVX1-ONLY-NEXT: vorps %ymm6, %ymm8, %ymm6
4408 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm2 = ymm6[0],ymm4[1],ymm6[2],ymm4[3],ymm6[4],ymm4[5],ymm6[6],ymm4[7]
4409 ; AVX1-ONLY-NEXT: vmovups %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4410 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm1[0,1,2,3,4,4,6,5]
4411 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
4412 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm4, %ymm1
4413 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5,5,7]
4414 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,6,5,7,7]
4415 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
4416 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
4417 ; AVX1-ONLY-NEXT: vandnps %ymm1, %ymm13, %ymm1
4418 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[0,2,2,3,4,6,6,7]
4419 ; AVX1-ONLY-NEXT: vandps %ymm3, %ymm13, %ymm3
4420 ; AVX1-ONLY-NEXT: vorps %ymm1, %ymm3, %ymm1
4421 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm7[0,1,2,3,4,4,6,5]
4422 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm7[0,1,2,3,4,6,6,7]
4423 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm3, %ymm3
4424 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[2,3,2,3]
4425 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
4426 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
4427 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
4428 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm0
4429 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,1,3,3,6,5,7,7]
4430 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm5, %ymm3
4431 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm0, %ymm0
4432 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm0, %ymm0
4433 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
4434 ; AVX1-ONLY-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4435 ; AVX1-ONLY-NEXT: vmovdqa 16(%r10), %xmm0
4436 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4437 ; AVX1-ONLY-NEXT: vmovdqa 16(%rax), %xmm8
4438 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm8[8],xmm0[8],xmm8[9],xmm0[9],xmm8[10],xmm0[10],xmm8[11],xmm0[11],xmm8[12],xmm0[12],xmm8[13],xmm0[13],xmm8[14],xmm0[14],xmm8[15],xmm0[15]
4439 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm2[0,0,2,1,4,5,6,7]
4440 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm3 = xmm2[0,2,2,3,4,5,6,7]
4441 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm3
4442 ; AVX1-ONLY-NEXT: vmovdqa 16(%r9), %xmm0
4443 ; AVX1-ONLY-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4444 ; AVX1-ONLY-NEXT: vmovdqa 16(%r8), %xmm9
4445 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm9[8],xmm0[8],xmm9[9],xmm0[9],xmm9[10],xmm0[10],xmm9[11],xmm0[11],xmm9[12],xmm0[12],xmm9[13],xmm0[13],xmm9[14],xmm0[14],xmm9[15],xmm0[15]
4446 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm1[0,1,1,3,4,5,6,7]
4447 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm1[2,1,3,3,4,5,6,7]
4448 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm4
4449 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
4450 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm13, %ymm3
4451 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
4452 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm13, %ymm4
4453 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm4, %ymm15
4454 ; AVX1-ONLY-NEXT: vmovdqa 16(%rsi), %xmm11
4455 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdi), %xmm6
4456 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm6[8],xmm11[8],xmm6[9],xmm11[9],xmm6[10],xmm11[10],xmm6[11],xmm11[11],xmm6[12],xmm11[12],xmm6[13],xmm11[13],xmm6[14],xmm11[14],xmm6[15],xmm11[15]
4457 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm10[1,1,1,1]
4458 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
4459 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
4460 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
4461 ; AVX1-ONLY-NEXT: vmovdqa 16(%rcx), %xmm7
4462 ; AVX1-ONLY-NEXT: vmovdqa 16(%rdx), %xmm4
4463 ; AVX1-ONLY-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm4[8],xmm7[8],xmm4[9],xmm7[9],xmm4[10],xmm7[10],xmm4[11],xmm7[11],xmm4[12],xmm7[12],xmm4[13],xmm7[13],xmm4[14],xmm7[14],xmm4[15],xmm7[15]
4464 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm12 = xmm0[0,0,2,1,4,5,6,7]
4465 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm12 = xmm12[0],zero,xmm12[1],zero
4466 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm14 = xmm0[0,2,2,3,4,5,6,7]
4467 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm14[0],zero,xmm14[1],zero
4468 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm14, %ymm12, %ymm12
4469 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm3, %ymm3
4470 ; AVX1-ONLY-NEXT: vandnps %ymm12, %ymm5, %ymm12
4471 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm12, %ymm3
4472 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm15 = ymm3[0],ymm15[1],ymm3[2],ymm15[3],ymm3[4],ymm15[5],ymm3[6],ymm15[7]
4473 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm2[0,1,2,3,4,4,6,5]
4474 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,6,6,7]
4475 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
4476 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm1[0,1,2,3,4,5,5,7]
4477 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,7,7]
4478 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm1
4479 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
4480 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm13, %ymm2
4481 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
4482 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm13, %ymm1
4483 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm1, %ymm1
4484 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm0[0,1,2,3,4,4,6,5]
4485 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
4486 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm2, %ymm0
4487 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm10[2,3,2,3]
4488 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
4489 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm3 = xmm10[3,3,3,3]
4490 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
4491 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
4492 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
4493 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm5, %ymm0
4494 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm2, %ymm2
4495 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm2, %ymm0
4496 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm10 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
4497 ; AVX1-ONLY-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm8, %xmm8 # 16-byte Folded Reload
4498 ; AVX1-ONLY-NEXT: # xmm8 = xmm8[0],mem[0],xmm8[1],mem[1],xmm8[2],mem[2],xmm8[3],mem[3],xmm8[4],mem[4],xmm8[5],mem[5],xmm8[6],mem[6],xmm8[7],mem[7]
4499 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm8[0,0,2,1,4,5,6,7]
4500 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm8[0,2,2,3,4,5,6,7]
4501 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0
4502 ; AVX1-ONLY-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm9 # 16-byte Folded Reload
4503 ; AVX1-ONLY-NEXT: # xmm9 = xmm9[0],mem[0],xmm9[1],mem[1],xmm9[2],mem[2],xmm9[3],mem[3],xmm9[4],mem[4],xmm9[5],mem[5],xmm9[6],mem[6],xmm9[7],mem[7]
4504 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm1 = xmm9[0,1,1,3,4,5,6,7]
4505 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm9[2,1,3,3,4,5,6,7]
4506 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
4507 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
4508 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm13, %ymm0
4509 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
4510 ; AVX1-ONLY-NEXT: vandps %ymm1, %ymm13, %ymm1
4511 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm1, %ymm1
4512 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm6[0],xmm11[0],xmm6[1],xmm11[1],xmm6[2],xmm11[2],xmm6[3],xmm11[3],xmm6[4],xmm11[4],xmm6[5],xmm11[5],xmm6[6],xmm11[6],xmm6[7],xmm11[7]
4513 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[1,1,1,1]
4514 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
4515 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
4516 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
4517 ; AVX1-ONLY-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm4[0],xmm7[0],xmm4[1],xmm7[1],xmm4[2],xmm7[2],xmm4[3],xmm7[3],xmm4[4],xmm7[4],xmm4[5],xmm7[5],xmm4[6],xmm7[6],xmm4[7],xmm7[7]
4518 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm4 = xmm3[0,0,2,1,4,5,6,7]
4519 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm4 = xmm4[0],zero,xmm4[1],zero
4520 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm6 = xmm3[0,2,2,3,4,5,6,7]
4521 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
4522 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm4
4523 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm2, %ymm2
4524 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm5, %ymm4
4525 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm2, %ymm2
4526 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
4527 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm8[0,1,2,3,4,4,6,5]
4528 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm8[0,1,2,3,4,6,6,7]
4529 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
4530 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm9[0,1,2,3,4,5,5,7]
4531 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm9[0,1,2,3,6,5,7,7]
4532 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm4
4533 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
4534 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm13, %ymm2
4535 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[0,2,2,3,4,6,6,7]
4536 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm13, %ymm4
4537 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm4, %ymm2
4538 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,4,6,5]
4539 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
4540 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
4541 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[2,3,2,3]
4542 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
4543 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
4544 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
4545 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm4, %ymm0
4546 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm3 = ymm3[2,1,3,3,6,5,7,7]
4547 ; AVX1-ONLY-NEXT: vandnps %ymm3, %ymm5, %ymm3
4548 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm0, %ymm0
4549 ; AVX1-ONLY-NEXT: vorps %ymm3, %ymm0, %ymm0
4550 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm3 = ymm0[0],ymm2[1],ymm0[2],ymm2[3],ymm0[4],ymm2[5],ymm0[6],ymm2[7]
4551 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4552 ; AVX1-ONLY-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm4 # 16-byte Folded Reload
4553 ; AVX1-ONLY-NEXT: # xmm4 = xmm0[8],mem[8],xmm0[9],mem[9],xmm0[10],mem[10],xmm0[11],mem[11],xmm0[12],mem[12],xmm0[13],mem[13],xmm0[14],mem[14],xmm0[15],mem[15]
4554 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm0 = xmm4[0,0,2,1,4,5,6,7]
4555 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm4[0,2,2,3,4,5,6,7]
4556 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm2, %ymm0, %ymm0
4557 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
4558 ; AVX1-ONLY-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm6 # 16-byte Folded Reload
4559 ; AVX1-ONLY-NEXT: # xmm6 = xmm2[8],mem[8],xmm2[9],mem[9],xmm2[10],mem[10],xmm2[11],mem[11],xmm2[12],mem[12],xmm2[13],mem[13],xmm2[14],mem[14],xmm2[15],mem[15]
4560 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm2 = xmm6[0,1,1,3,4,5,6,7]
4561 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm7 = xmm6[2,1,3,3,4,5,6,7]
4562 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm2, %ymm2
4563 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
4564 ; AVX1-ONLY-NEXT: vandnps %ymm0, %ymm13, %ymm0
4565 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[0,0,2,1,4,4,6,5]
4566 ; AVX1-ONLY-NEXT: vandps %ymm2, %ymm13, %ymm2
4567 ; AVX1-ONLY-NEXT: vorps %ymm0, %ymm2, %ymm2
4568 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4569 ; AVX1-ONLY-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
4570 ; AVX1-ONLY-NEXT: # xmm0 = xmm0[8],mem[8],xmm0[9],mem[9],xmm0[10],mem[10],xmm0[11],mem[11],xmm0[12],mem[12],xmm0[13],mem[13],xmm0[14],mem[14],xmm0[15],mem[15]
4571 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm7 = xmm0[1,1,1,1]
4572 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
4573 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
4574 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm7, %ymm8, %ymm7
4575 ; AVX1-ONLY-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
4576 ; AVX1-ONLY-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm8, %xmm8 # 16-byte Folded Reload
4577 ; AVX1-ONLY-NEXT: # xmm8 = xmm8[8],mem[8],xmm8[9],mem[9],xmm8[10],mem[10],xmm8[11],mem[11],xmm8[12],mem[12],xmm8[13],mem[13],xmm8[14],mem[14],xmm8[15],mem[15]
4578 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm9 = xmm8[0,0,2,1,4,5,6,7]
4579 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm9[0],zero,xmm9[1],zero
4580 ; AVX1-ONLY-NEXT: vpshuflw {{.*#+}} xmm11 = xmm8[0,2,2,3,4,5,6,7]
4581 ; AVX1-ONLY-NEXT: vpmovzxdq {{.*#+}} xmm11 = xmm11[0],zero,xmm11[1],zero
4582 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm11, %ymm9, %ymm9
4583 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm7, %ymm7
4584 ; AVX1-ONLY-NEXT: vandnps %ymm9, %ymm5, %ymm9
4585 ; AVX1-ONLY-NEXT: vorps %ymm7, %ymm9, %ymm7
4586 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm7 = ymm7[0],ymm2[1],ymm7[2],ymm2[3],ymm7[4],ymm2[5],ymm7[6],ymm2[7]
4587 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm2 = xmm4[0,1,2,3,4,4,6,5]
4588 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,6,6,7]
4589 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm4, %ymm2, %ymm2
4590 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm6[0,1,2,3,4,5,5,7]
4591 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,6,5,7,7]
4592 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm4
4593 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
4594 ; AVX1-ONLY-NEXT: vandnps %ymm2, %ymm13, %ymm2
4595 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[0,2,2,3,4,6,6,7]
4596 ; AVX1-ONLY-NEXT: vandps %ymm4, %ymm13, %ymm4
4597 ; AVX1-ONLY-NEXT: vorps %ymm2, %ymm4, %ymm2
4598 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm4 = xmm8[0,1,2,3,4,4,6,5]
4599 ; AVX1-ONLY-NEXT: vpshufhw {{.*#+}} xmm6 = xmm8[0,1,2,3,4,6,6,7]
4600 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm6, %ymm4, %ymm4
4601 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[2,3,2,3]
4602 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
4603 ; AVX1-ONLY-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[3,3,3,3]
4604 ; AVX1-ONLY-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
4605 ; AVX1-ONLY-NEXT: vinsertf128 $1, %xmm0, %ymm6, %ymm0
4606 ; AVX1-ONLY-NEXT: vshufps {{.*#+}} ymm4 = ymm4[2,1,3,3,6,5,7,7]
4607 ; AVX1-ONLY-NEXT: vandnps %ymm4, %ymm5, %ymm4
4608 ; AVX1-ONLY-NEXT: vandps %ymm5, %ymm0, %ymm0
4609 ; AVX1-ONLY-NEXT: vorps %ymm4, %ymm0, %ymm0
4610 ; AVX1-ONLY-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2],ymm2[3],ymm0[4],ymm2[5],ymm0[6],ymm2[7]
4611 ; AVX1-ONLY-NEXT: movq {{[0-9]+}}(%rsp), %rax
4612 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 96(%rax)
4613 ; AVX1-ONLY-NEXT: vmovaps %ymm7, 64(%rax)
4614 ; AVX1-ONLY-NEXT: vmovaps %ymm3, 160(%rax)
4615 ; AVX1-ONLY-NEXT: vmovaps %ymm1, 128(%rax)
4616 ; AVX1-ONLY-NEXT: vmovaps %ymm10, 224(%rax)
4617 ; AVX1-ONLY-NEXT: vmovaps %ymm15, 192(%rax)
4618 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4619 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 288(%rax)
4620 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4621 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 256(%rax)
4622 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4623 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 352(%rax)
4624 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4625 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 320(%rax)
4626 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4627 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 416(%rax)
4628 ; AVX1-ONLY-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
4629 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 384(%rax)
4630 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4631 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 480(%rax)
4632 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4633 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 448(%rax)
4634 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4635 ; AVX1-ONLY-NEXT: vmovaps %ymm0, (%rax)
4636 ; AVX1-ONLY-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4637 ; AVX1-ONLY-NEXT: vmovaps %ymm0, 32(%rax)
4638 ; AVX1-ONLY-NEXT: addq $360, %rsp # imm = 0x168
4639 ; AVX1-ONLY-NEXT: vzeroupper
4640 ; AVX1-ONLY-NEXT: retq
4641 ;
4642 ; AVX2-SLOW-LABEL: store_i8_stride8_vf64:
4643 ; AVX2-SLOW: # %bb.0:
4644 ; AVX2-SLOW-NEXT: subq $328, %rsp # imm = 0x148
4645 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
4646 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
4647 ; AVX2-SLOW-NEXT: vmovdqa (%r10), %xmm0
4648 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4649 ; AVX2-SLOW-NEXT: vmovdqa (%rax), %xmm1
4650 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4651 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
4652 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm2[0,1,2,3,4,4,6,5]
4653 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,6,6,7]
4654 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
4655 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
4656 ; AVX2-SLOW-NEXT: vmovdqa (%r9), %xmm1
4657 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4658 ; AVX2-SLOW-NEXT: vmovdqa (%r8), %xmm3
4659 ; AVX2-SLOW-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4660 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
4661 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm3[0,1,2,3,4,5,5,7]
4662 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,6,5,7,7]
4663 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
4664 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
4665 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm5 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
4666 ; AVX2-SLOW-NEXT: vmovdqa (%rsi), %xmm0
4667 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4668 ; AVX2-SLOW-NEXT: vmovdqa (%rdi), %xmm1
4669 ; AVX2-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4670 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
4671 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm4[2,3,2,3]
4672 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
4673 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm4[3,3,3,3]
4674 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
4675 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
4676 ; AVX2-SLOW-NEXT: vmovdqa (%rcx), %xmm0
4677 ; AVX2-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4678 ; AVX2-SLOW-NEXT: vmovdqa (%rdx), %xmm6
4679 ; AVX2-SLOW-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4680 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3],xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
4681 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm6[0,1,2,3,4,4,6,5]
4682 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm6[0,1,2,3,4,6,6,7]
4683 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
4684 ; AVX2-SLOW-NEXT: vmovdqa 48(%r10), %xmm0
4685 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm7[2,1,3,3,6,5,7,7]
4686 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm7 = ymm1[0],ymm7[1],ymm1[2,3,4],ymm7[5],ymm1[6,7,8],ymm7[9],ymm1[10,11,12],ymm7[13],ymm1[14,15]
4687 ; AVX2-SLOW-NEXT: vmovdqa 48(%rax), %xmm1
4688 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0],ymm5[1],ymm7[2],ymm5[3],ymm7[4],ymm5[5],ymm7[6],ymm5[7]
4689 ; AVX2-SLOW-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4690 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm2[0,0,2,1,4,5,6,7]
4691 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,2,2,3,4,5,6,7]
4692 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm5, %ymm5
4693 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm3[0,1,1,3,4,5,6,7]
4694 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,3,3,4,5,6,7]
4695 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm3
4696 ; AVX2-SLOW-NEXT: vmovdqa 48(%r9), %xmm2
4697 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,0,2,1,4,4,6,5]
4698 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
4699 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1,2],ymm5[3],ymm3[4,5,6],ymm5[7],ymm3[8,9,10],ymm5[11],ymm3[12,13,14],ymm5[15]
4700 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
4701 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[1,1,1,1]
4702 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
4703 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
4704 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm6[0,0,2,1,4,5,6,7]
4705 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm5[0],zero,xmm5[1],zero
4706 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,2,2,3,4,5,6,7]
4707 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
4708 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm5, %ymm5
4709 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm5[1],ymm4[2,3,4],ymm5[5],ymm4[6,7,8],ymm5[9],ymm4[10,11,12],ymm5[13],ymm4[14,15]
4710 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2],ymm3[3],ymm4[4],ymm3[5],ymm4[6],ymm3[7]
4711 ; AVX2-SLOW-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4712 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
4713 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm8[0,0,2,1,4,5,6,7]
4714 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm8[0,2,2,3,4,5,6,7]
4715 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm4
4716 ; AVX2-SLOW-NEXT: vmovdqa 48(%r8), %xmm3
4717 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
4718 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm9[0,1,1,3,4,5,6,7]
4719 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm9[2,1,3,3,4,5,6,7]
4720 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm5, %ymm5
4721 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
4722 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,0,2,1,4,4,6,5]
4723 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm11 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7],ymm5[8,9,10],ymm4[11],ymm5[12,13,14],ymm4[15]
4724 ; AVX2-SLOW-NEXT: vmovdqa 48(%rsi), %xmm4
4725 ; AVX2-SLOW-NEXT: vmovdqa 48(%rdi), %xmm5
4726 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
4727 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm10[1,1,1,1]
4728 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
4729 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
4730 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm12
4731 ; AVX2-SLOW-NEXT: vmovdqa 48(%rcx), %xmm6
4732 ; AVX2-SLOW-NEXT: vmovdqa 48(%rdx), %xmm7
4733 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm13 = xmm7[8],xmm6[8],xmm7[9],xmm6[9],xmm7[10],xmm6[10],xmm7[11],xmm6[11],xmm7[12],xmm6[12],xmm7[13],xmm6[13],xmm7[14],xmm6[14],xmm7[15],xmm6[15]
4734 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm13[0,0,2,1,4,5,6,7]
4735 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm14[0],zero,xmm14[1],zero
4736 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm13[0,2,2,3,4,5,6,7]
4737 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm15[0],zero,xmm15[1],zero
4738 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm14, %ymm14
4739 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0],ymm14[1],ymm12[2,3,4],ymm14[5],ymm12[6,7,8],ymm14[9],ymm12[10,11,12],ymm14[13],ymm12[14,15]
4740 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm11 = ymm12[0],ymm11[1],ymm12[2],ymm11[3],ymm12[4],ymm11[5],ymm12[6],ymm11[7]
4741 ; AVX2-SLOW-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4742 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm8[0,1,2,3,4,4,6,5]
4743 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,6,6,7]
4744 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm11, %ymm8
4745 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm9[0,1,2,3,4,5,5,7]
4746 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,6,5,7,7]
4747 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm11, %ymm9
4748 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[0,2,2,3,4,6,6,7]
4749 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[0,2,2,3,4,6,6,7]
4750 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7],ymm9[8,9,10],ymm8[11],ymm9[12,13,14],ymm8[15]
4751 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm10[2,3,2,3]
4752 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm9 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
4753 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[3,3,3,3]
4754 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
4755 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm9, %ymm9
4756 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm10 = xmm13[0,1,2,3,4,4,6,5]
4757 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm13[0,1,2,3,4,6,6,7]
4758 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm10, %ymm10
4759 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm10[2,1,3,3,6,5,7,7]
4760 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm9 = ymm9[0],ymm10[1],ymm9[2,3,4],ymm10[5],ymm9[6,7,8],ymm10[9],ymm9[10,11,12],ymm10[13],ymm9[14,15]
4761 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2],ymm8[3],ymm9[4],ymm8[5],ymm9[6],ymm8[7]
4762 ; AVX2-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4763 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
4764 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,2,1,4,5,6,7]
4765 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm8 = xmm0[0,2,2,3,4,5,6,7]
4766 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm1, %ymm1
4767 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
4768 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm2[0,1,1,3,4,5,6,7]
4769 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm8 = xmm2[2,1,3,3,4,5,6,7]
4770 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm3, %ymm3
4771 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
4772 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
4773 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7],ymm3[8,9,10],ymm1[11],ymm3[12,13,14],ymm1[15]
4774 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
4775 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[1,1,1,1]
4776 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
4777 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
4778 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
4779 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
4780 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm5[0,0,2,1,4,5,6,7]
4781 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
4782 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm5[0,2,2,3,4,5,6,7]
4783 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm7[0],zero,xmm7[1],zero
4784 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm6, %ymm6
4785 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3,4],ymm6[5],ymm4[6,7,8],ymm6[9],ymm4[10,11,12],ymm6[13],ymm4[14,15]
4786 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0],ymm1[1],ymm4[2],ymm1[3],ymm4[4],ymm1[5],ymm4[6],ymm1[7]
4787 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4788 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
4789 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
4790 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
4791 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,5,5,7]
4792 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,5,7,7]
4793 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
4794 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
4795 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
4796 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
4797 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm3[2,3,2,3]
4798 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
4799 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[3,3,3,3]
4800 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
4801 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
4802 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm5[0,1,2,3,4,4,6,5]
4803 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm5[0,1,2,3,4,6,6,7]
4804 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
4805 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,3,3,6,5,7,7]
4806 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm0[0],ymm2[1],ymm0[2,3,4],ymm2[5],ymm0[6,7,8],ymm2[9],ymm0[10,11,12],ymm2[13],ymm0[14,15]
4807 ; AVX2-SLOW-NEXT: vmovdqa 32(%r10), %xmm0
4808 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
4809 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4810 ; AVX2-SLOW-NEXT: vmovdqa 32(%rax), %xmm1
4811 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
4812 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm8[0,0,2,1,4,5,6,7]
4813 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm8[0,2,2,3,4,5,6,7]
4814 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm4
4815 ; AVX2-SLOW-NEXT: vmovdqa 32(%r9), %xmm2
4816 ; AVX2-SLOW-NEXT: vmovdqa 32(%r8), %xmm3
4817 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
4818 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm9[0,1,1,3,4,5,6,7]
4819 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm9[2,1,3,3,4,5,6,7]
4820 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm5, %ymm5
4821 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
4822 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,0,2,1,4,4,6,5]
4823 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm11 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7],ymm5[8,9,10],ymm4[11],ymm5[12,13,14],ymm4[15]
4824 ; AVX2-SLOW-NEXT: vmovdqa 32(%rsi), %xmm4
4825 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdi), %xmm5
4826 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
4827 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm10[1,1,1,1]
4828 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
4829 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
4830 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm12
4831 ; AVX2-SLOW-NEXT: vmovdqa 32(%rcx), %xmm6
4832 ; AVX2-SLOW-NEXT: vmovdqa 32(%rdx), %xmm7
4833 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm13 = xmm7[8],xmm6[8],xmm7[9],xmm6[9],xmm7[10],xmm6[10],xmm7[11],xmm6[11],xmm7[12],xmm6[12],xmm7[13],xmm6[13],xmm7[14],xmm6[14],xmm7[15],xmm6[15]
4834 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm13[0,0,2,1,4,5,6,7]
4835 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm14[0],zero,xmm14[1],zero
4836 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm13[0,2,2,3,4,5,6,7]
4837 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm15[0],zero,xmm15[1],zero
4838 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm15, %ymm14, %ymm14
4839 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm12 = ymm12[0],ymm14[1],ymm12[2,3,4],ymm14[5],ymm12[6,7,8],ymm14[9],ymm12[10,11,12],ymm14[13],ymm12[14,15]
4840 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm11 = ymm12[0],ymm11[1],ymm12[2],ymm11[3],ymm12[4],ymm11[5],ymm12[6],ymm11[7]
4841 ; AVX2-SLOW-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4842 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm8[0,1,2,3,4,4,6,5]
4843 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,6,6,7]
4844 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm11, %ymm8
4845 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm9[0,1,2,3,4,5,5,7]
4846 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,6,5,7,7]
4847 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm11, %ymm9
4848 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[0,2,2,3,4,6,6,7]
4849 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm9 = ymm9[0,2,2,3,4,6,6,7]
4850 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1,2],ymm8[3],ymm9[4,5,6],ymm8[7],ymm9[8,9,10],ymm8[11],ymm9[12,13,14],ymm8[15]
4851 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm10[2,3,2,3]
4852 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm9 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
4853 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[3,3,3,3]
4854 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
4855 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm9, %ymm9
4856 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm10 = xmm13[0,1,2,3,4,4,6,5]
4857 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm11 = xmm13[0,1,2,3,4,6,6,7]
4858 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm10, %ymm10
4859 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm10[2,1,3,3,6,5,7,7]
4860 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm9 = ymm9[0],ymm10[1],ymm9[2,3,4],ymm10[5],ymm9[6,7,8],ymm10[9],ymm9[10,11,12],ymm10[13],ymm9[14,15]
4861 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2],ymm8[3],ymm9[4],ymm8[5],ymm9[6],ymm8[7]
4862 ; AVX2-SLOW-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4863 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
4864 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,2,1,4,5,6,7]
4865 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm8 = xmm0[0,2,2,3,4,5,6,7]
4866 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm1, %ymm1
4867 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
4868 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm2[0,1,1,3,4,5,6,7]
4869 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm8 = xmm2[2,1,3,3,4,5,6,7]
4870 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm3, %ymm3
4871 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
4872 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
4873 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5,6],ymm1[7],ymm3[8,9,10],ymm1[11],ymm3[12,13,14],ymm1[15]
4874 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
4875 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[1,1,1,1]
4876 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
4877 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
4878 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
4879 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
4880 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm5[0,0,2,1,4,5,6,7]
4881 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm6 = xmm6[0],zero,xmm6[1],zero
4882 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm5[0,2,2,3,4,5,6,7]
4883 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm7[0],zero,xmm7[1],zero
4884 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm6, %ymm6
4885 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3,4],ymm6[5],ymm4[6,7,8],ymm6[9],ymm4[10,11,12],ymm6[13],ymm4[14,15]
4886 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm4[0],ymm1[1],ymm4[2],ymm1[3],ymm4[4],ymm1[5],ymm4[6],ymm1[7]
4887 ; AVX2-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4888 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
4889 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
4890 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
4891 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm2[0,1,2,3,4,5,5,7]
4892 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,5,7,7]
4893 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
4894 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,2,2,3,4,6,6,7]
4895 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
4896 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
4897 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[2,3,2,3]
4898 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
4899 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[3,3,3,3]
4900 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
4901 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
4902 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm5[0,1,2,3,4,4,6,5]
4903 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm5[0,1,2,3,4,6,6,7]
4904 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
4905 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[2,1,3,3,6,5,7,7]
4906 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
4907 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
4908 ; AVX2-SLOW-NEXT: vmovdqu %ymm0, (%rsp) # 32-byte Spill
4909 ; AVX2-SLOW-NEXT: vmovdqa 16(%r10), %xmm13
4910 ; AVX2-SLOW-NEXT: vmovdqa 16(%rax), %xmm12
4911 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm12[8],xmm13[8],xmm12[9],xmm13[9],xmm12[10],xmm13[10],xmm12[11],xmm13[11],xmm12[12],xmm13[12],xmm12[13],xmm13[13],xmm12[14],xmm13[14],xmm12[15],xmm13[15]
4912 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm8[0,0,2,1,4,5,6,7]
4913 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm8[0,2,2,3,4,5,6,7]
4914 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
4915 ; AVX2-SLOW-NEXT: vmovdqa 16(%r9), %xmm10
4916 ; AVX2-SLOW-NEXT: vmovdqa 16(%r8), %xmm9
4917 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm4 = xmm9[8],xmm10[8],xmm9[9],xmm10[9],xmm9[10],xmm10[10],xmm9[11],xmm10[11],xmm9[12],xmm10[12],xmm9[13],xmm10[13],xmm9[14],xmm10[14],xmm9[15],xmm10[15]
4918 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm4[0,1,1,3,4,5,6,7]
4919 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm4[2,1,3,3,4,5,6,7]
4920 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
4921 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
4922 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,0,2,1,4,4,6,5]
4923 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm11 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7],ymm2[8,9,10],ymm0[11],ymm2[12,13,14],ymm0[15]
4924 ; AVX2-SLOW-NEXT: vmovdqa 16(%rsi), %xmm7
4925 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdi), %xmm6
4926 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm6[8],xmm7[8],xmm6[9],xmm7[9],xmm6[10],xmm7[10],xmm6[11],xmm7[11],xmm6[12],xmm7[12],xmm6[13],xmm7[13],xmm6[14],xmm7[14],xmm6[15],xmm7[15]
4927 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[1,1,1,1]
4928 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
4929 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
4930 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm3, %ymm2
4931 ; AVX2-SLOW-NEXT: vmovdqa 16(%rcx), %xmm5
4932 ; AVX2-SLOW-NEXT: vmovdqa 16(%rdx), %xmm3
4933 ; AVX2-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm3[8],xmm5[8],xmm3[9],xmm5[9],xmm3[10],xmm5[10],xmm3[11],xmm5[11],xmm3[12],xmm5[12],xmm3[13],xmm5[13],xmm3[14],xmm5[14],xmm3[15],xmm5[15]
4934 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm15 = xmm0[0,0,2,1,4,5,6,7]
4935 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm15 = xmm15[0],zero,xmm15[1],zero
4936 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm0[0,2,2,3,4,5,6,7]
4937 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm14 = xmm14[0],zero,xmm14[1],zero
4938 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm15, %ymm14
4939 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm14[1],ymm2[2,3,4],ymm14[5],ymm2[6,7,8],ymm14[9],ymm2[10,11,12],ymm14[13],ymm2[14,15]
4940 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm11 = ymm2[0],ymm11[1],ymm2[2],ymm11[3],ymm2[4],ymm11[5],ymm2[6],ymm11[7]
4941 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm8[0,1,2,3,4,4,6,5]
4942 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,6,6,7]
4943 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm2, %ymm2
4944 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm8 = xmm4[0,1,2,3,4,5,5,7]
4945 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,6,5,7,7]
4946 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm8, %ymm4
4947 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
4948 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,2,2,3,4,6,6,7]
4949 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7],ymm4[8,9,10],ymm2[11],ymm4[12,13,14],ymm2[15]
4950 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm1[2,3,2,3]
4951 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
4952 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[3,3,3,3]
4953 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
4954 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm4, %ymm1
4955 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm0[0,1,2,3,4,4,6,5]
4956 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
4957 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm4, %ymm0
4958 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,1,3,3,6,5,7,7]
4959 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3,4],ymm0[5],ymm1[6,7,8],ymm0[9],ymm1[10,11,12],ymm0[13],ymm1[14,15]
4960 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm8 = ymm0[0],ymm2[1],ymm0[2],ymm2[3],ymm0[4],ymm2[5],ymm0[6],ymm2[7]
4961 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3],xmm12[4],xmm13[4],xmm12[5],xmm13[5],xmm12[6],xmm13[6],xmm12[7],xmm13[7]
4962 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[0,0,2,1,4,5,6,7]
4963 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm1[0,2,2,3,4,5,6,7]
4964 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
4965 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3],xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
4966 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm2[0,1,1,3,4,5,6,7]
4967 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm2[2,1,3,3,4,5,6,7]
4968 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm4, %ymm4
4969 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
4970 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
4971 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm0 = ymm4[0,1,2],ymm0[3],ymm4[4,5,6],ymm0[7],ymm4[8,9,10],ymm0[11],ymm4[12,13,14],ymm0[15]
4972 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3],xmm6[4],xmm7[4],xmm6[5],xmm7[5],xmm6[6],xmm7[6],xmm6[7],xmm7[7]
4973 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[1,1,1,1]
4974 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
4975 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
4976 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
4977 ; AVX2-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3],xmm3[4],xmm5[4],xmm3[5],xmm5[5],xmm3[6],xmm5[6],xmm3[7],xmm5[7]
4978 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm3[0,0,2,1,4,5,6,7]
4979 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm5 = xmm5[0],zero,xmm5[1],zero
4980 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm3[0,2,2,3,4,5,6,7]
4981 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm7 = xmm7[0],zero,xmm7[1],zero
4982 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm5, %ymm5
4983 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm5 = ymm6[0],ymm5[1],ymm6[2,3,4],ymm5[5],ymm6[6,7,8],ymm5[9],ymm6[10,11,12],ymm5[13],ymm6[14,15]
4984 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0],ymm0[1],ymm5[2],ymm0[3],ymm5[4],ymm0[5],ymm5[6],ymm0[7]
4985 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm1[0,1,2,3,4,4,6,5]
4986 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
4987 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm5, %ymm1
4988 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm2[0,1,2,3,4,5,5,7]
4989 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,5,7,7]
4990 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm5, %ymm2
4991 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
4992 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
4993 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7],ymm2[8,9,10],ymm1[11],ymm2[12,13,14],ymm1[15]
4994 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[2,3,2,3]
4995 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
4996 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[3,3,3,3]
4997 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
4998 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm2, %ymm2
4999 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,4,6,5]
5000 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
5001 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm4, %ymm3
5002 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[2,1,3,3,6,5,7,7]
5003 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2,3,4],ymm3[5],ymm2[6,7,8],ymm3[9],ymm2[10,11,12],ymm3[13],ymm2[14,15]
5004 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
5005 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5006 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
5007 ; AVX2-SLOW-NEXT: # xmm2 = xmm2[8],mem[8],xmm2[9],mem[9],xmm2[10],mem[10],xmm2[11],mem[11],xmm2[12],mem[12],xmm2[13],mem[13],xmm2[14],mem[14],xmm2[15],mem[15]
5008 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm2[0,0,2,1,4,5,6,7]
5009 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm2[0,2,2,3,4,5,6,7]
5010 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
5011 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
5012 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm4 # 16-byte Folded Reload
5013 ; AVX2-SLOW-NEXT: # xmm4 = xmm4[8],mem[8],xmm4[9],mem[9],xmm4[10],mem[10],xmm4[11],mem[11],xmm4[12],mem[12],xmm4[13],mem[13],xmm4[14],mem[14],xmm4[15],mem[15]
5014 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm4[0,1,1,3,4,5,6,7]
5015 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm4[2,1,3,3,4,5,6,7]
5016 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm5, %ymm5
5017 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm3[0,0,2,1,4,4,6,5]
5018 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[0,0,2,1,4,4,6,5]
5019 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0,1,2],ymm3[3],ymm5[4,5,6],ymm3[7],ymm5[8,9,10],ymm3[11],ymm5[12,13,14],ymm3[15]
5020 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
5021 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
5022 ; AVX2-SLOW-NEXT: # xmm5 = xmm5[8],mem[8],xmm5[9],mem[9],xmm5[10],mem[10],xmm5[11],mem[11],xmm5[12],mem[12],xmm5[13],mem[13],xmm5[14],mem[14],xmm5[15],mem[15]
5023 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[1,1,1,1]
5024 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
5025 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm5[0],zero,zero,zero,xmm5[1],zero,zero,zero
5026 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm7, %ymm6
5027 ; AVX2-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
5028 ; AVX2-SLOW-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm7, %xmm7 # 16-byte Folded Reload
5029 ; AVX2-SLOW-NEXT: # xmm7 = xmm7[8],mem[8],xmm7[9],mem[9],xmm7[10],mem[10],xmm7[11],mem[11],xmm7[12],mem[12],xmm7[13],mem[13],xmm7[14],mem[14],xmm7[15],mem[15]
5030 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm7[0,0,2,1,4,5,6,7]
5031 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm9 = xmm9[0],zero,xmm9[1],zero
5032 ; AVX2-SLOW-NEXT: vpshuflw {{.*#+}} xmm10 = xmm7[0,2,2,3,4,5,6,7]
5033 ; AVX2-SLOW-NEXT: vpmovzxdq {{.*#+}} xmm10 = xmm10[0],zero,xmm10[1],zero
5034 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm9, %ymm9
5035 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm6 = ymm6[0],ymm9[1],ymm6[2,3,4],ymm9[5],ymm6[6,7,8],ymm9[9],ymm6[10,11,12],ymm9[13],ymm6[14,15]
5036 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2],ymm3[3],ymm6[4],ymm3[5],ymm6[6],ymm3[7]
5037 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm6 = xmm2[0,1,2,3,4,4,6,5]
5038 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,6,6,7]
5039 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm6, %ymm2
5040 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm6 = xmm4[0,1,2,3,4,5,5,7]
5041 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,6,5,7,7]
5042 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm6, %ymm4
5043 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm2[0,2,2,3,4,6,6,7]
5044 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm4[0,2,2,3,4,6,6,7]
5045 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3],ymm4[4,5,6],ymm2[7],ymm4[8,9,10],ymm2[11],ymm4[12,13,14],ymm2[15]
5046 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[2,3,2,3]
5047 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
5048 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[3,3,3,3]
5049 ; AVX2-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm5[0],zero,zero,zero,xmm5[1],zero,zero,zero
5050 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm4, %ymm4
5051 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm7[0,1,2,3,4,4,6,5]
5052 ; AVX2-SLOW-NEXT: vpshufhw {{.*#+}} xmm6 = xmm7[0,1,2,3,4,6,6,7]
5053 ; AVX2-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm5, %ymm5
5054 ; AVX2-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm5[2,1,3,3,6,5,7,7]
5055 ; AVX2-SLOW-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm5[1],ymm4[2,3,4],ymm5[5],ymm4[6,7,8],ymm5[9],ymm4[10,11,12],ymm5[13],ymm4[14,15]
5056 ; AVX2-SLOW-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2],ymm2[3],ymm4[4],ymm2[5],ymm4[6],ymm2[7]
5057 ; AVX2-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
5058 ; AVX2-SLOW-NEXT: vmovdqa %ymm2, 96(%rax)
5059 ; AVX2-SLOW-NEXT: vmovdqa %ymm3, 64(%rax)
5060 ; AVX2-SLOW-NEXT: vmovdqa %ymm1, 160(%rax)
5061 ; AVX2-SLOW-NEXT: vmovdqa %ymm0, 128(%rax)
5062 ; AVX2-SLOW-NEXT: vmovdqa %ymm8, 224(%rax)
5063 ; AVX2-SLOW-NEXT: vmovdqa %ymm11, 192(%rax)
5064 ; AVX2-SLOW-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
5065 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 288(%rax)
5066 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5067 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 256(%rax)
5068 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5069 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 352(%rax)
5070 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5071 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 320(%rax)
5072 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5073 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 416(%rax)
5074 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5075 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 384(%rax)
5076 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5077 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 480(%rax)
5078 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5079 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 448(%rax)
5080 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5081 ; AVX2-SLOW-NEXT: vmovaps %ymm0, (%rax)
5082 ; AVX2-SLOW-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5083 ; AVX2-SLOW-NEXT: vmovaps %ymm0, 32(%rax)
5084 ; AVX2-SLOW-NEXT: addq $328, %rsp # imm = 0x148
5085 ; AVX2-SLOW-NEXT: vzeroupper
5086 ; AVX2-SLOW-NEXT: retq
5087 ;
5088 ; AVX2-FAST-LABEL: store_i8_stride8_vf64:
5089 ; AVX2-FAST: # %bb.0:
5090 ; AVX2-FAST-NEXT: subq $392, %rsp # imm = 0x188
5091 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
5092 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
5093 ; AVX2-FAST-NEXT: vmovdqa (%r10), %xmm1
5094 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5095 ; AVX2-FAST-NEXT: vmovdqa (%rax), %xmm0
5096 ; AVX2-FAST-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5097 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
5098 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
5099 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm2 = ymm0[0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,16,17,18,19,24,25,28,29,24,25,28,29,28,29,30,31]
5100 ; AVX2-FAST-NEXT: vmovdqa (%r9), %xmm3
5101 ; AVX2-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5102 ; AVX2-FAST-NEXT: vmovdqa (%r8), %xmm1
5103 ; AVX2-FAST-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5104 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3],xmm1[4],xmm3[4],xmm1[5],xmm3[5],xmm1[6],xmm3[6],xmm1[7],xmm3[7]
5105 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
5106 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm3 = ymm1[0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,16,17,18,19,28,29,26,27,28,29,26,27,30,31,30,31]
5107 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7],ymm3[8,9,10],ymm2[11],ymm3[12,13,14],ymm2[15]
5108 ; AVX2-FAST-NEXT: vmovdqa (%rcx), %xmm4
5109 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5110 ; AVX2-FAST-NEXT: vmovdqa (%rdx), %xmm2
5111 ; AVX2-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5112 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3],xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
5113 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
5114 ; AVX2-FAST-NEXT: vmovdqa (%rsi), %xmm5
5115 ; AVX2-FAST-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5116 ; AVX2-FAST-NEXT: vmovdqa (%rdi), %xmm4
5117 ; AVX2-FAST-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5118 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3],xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
5119 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm5
5120 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,28,29,u,u,u,u,u,u,30,31,u,u,u,u,u,u]
5121 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm6 = ymm2[8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,24,25,28,29,20,21,22,23,28,29,30,31,28,29,30,31]
5122 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7,8],ymm6[9],ymm5[10,11,12],ymm6[13],ymm5[14,15]
5123 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2],ymm3[3],ymm5[4],ymm3[5],ymm5[6],ymm3[7]
5124 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5125 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
5126 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm0, %ymm0
5127 ; AVX2-FAST-NEXT: vmovdqa %ymm3, %ymm9
5128 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
5129 ; AVX2-FAST-NEXT: vpshufb %ymm3, %ymm1, %ymm1
5130 ; AVX2-FAST-NEXT: vmovdqa %ymm3, %ymm10
5131 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
5132 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm3 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
5133 ; AVX2-FAST-NEXT: vpshufb %xmm3, %xmm4, %xmm1
5134 ; AVX2-FAST-NEXT: vmovdqa %xmm3, %xmm13
5135 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
5136 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm3, %ymm1
5137 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
5138 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm2, %ymm2
5139 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
5140 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
5141 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5142 ; AVX2-FAST-NEXT: vmovdqa 48(%r10), %xmm8
5143 ; AVX2-FAST-NEXT: vmovdqa 48(%rax), %xmm3
5144 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm3[8],xmm8[8],xmm3[9],xmm8[9],xmm3[10],xmm8[10],xmm3[11],xmm8[11],xmm3[12],xmm8[12],xmm3[13],xmm8[13],xmm3[14],xmm8[14],xmm3[15],xmm8[15]
5145 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm6
5146 ; AVX2-FAST-NEXT: vmovdqa 48(%r9), %xmm4
5147 ; AVX2-FAST-NEXT: vmovdqa 48(%r8), %xmm5
5148 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
5149 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm7
5150 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm6, %ymm0
5151 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm7, %ymm2
5152 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm11 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7],ymm2[8,9,10],ymm0[11],ymm2[12,13,14],ymm0[15]
5153 ; AVX2-FAST-NEXT: vmovdqa 48(%rsi), %xmm10
5154 ; AVX2-FAST-NEXT: vmovdqa 48(%rdi), %xmm9
5155 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm9[8],xmm10[8],xmm9[9],xmm10[9],xmm9[10],xmm10[10],xmm9[11],xmm10[11],xmm9[12],xmm10[12],xmm9[13],xmm10[13],xmm9[14],xmm10[14],xmm9[15],xmm10[15]
5156 ; AVX2-FAST-NEXT: vpshufb %xmm13, %xmm0, %xmm1
5157 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm13 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
5158 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm13, %ymm1
5159 ; AVX2-FAST-NEXT: vmovdqa 48(%rcx), %xmm13
5160 ; AVX2-FAST-NEXT: vmovdqa 48(%rdx), %xmm2
5161 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm15 = xmm2[8],xmm13[8],xmm2[9],xmm13[9],xmm2[10],xmm13[10],xmm2[11],xmm13[11],xmm2[12],xmm13[12],xmm2[13],xmm13[13],xmm2[14],xmm13[14],xmm2[15],xmm13[15]
5162 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm15, %ymm15, %ymm15
5163 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm15, %ymm14
5164 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm14[1],ymm1[2,3,4],ymm14[5],ymm1[6,7,8],ymm14[9],ymm1[10,11,12],ymm14[13],ymm1[14,15]
5165 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm11[1],ymm1[2],ymm11[3],ymm1[4],ymm11[5],ymm1[6],ymm11[7]
5166 ; AVX2-FAST-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5167 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
5168 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm6, %ymm1
5169 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
5170 ; AVX2-FAST-NEXT: vpshufb %ymm11, %ymm7, %ymm6
5171 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm6[0,1,2],ymm1[3],ymm6[4,5,6],ymm1[7],ymm6[8,9,10],ymm1[11],ymm6[12,13,14],ymm1[15]
5172 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
5173 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm15, %ymm6
5174 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
5175 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
5176 ; AVX2-FAST-NEXT: vpshufb %ymm14, %ymm0, %ymm0
5177 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm6[1],ymm0[2,3,4],ymm6[5],ymm0[6,7,8],ymm6[9],ymm0[10,11,12],ymm6[13],ymm0[14,15]
5178 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
5179 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5180 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm3[0],xmm8[0],xmm3[1],xmm8[1],xmm3[2],xmm8[2],xmm3[3],xmm8[3],xmm3[4],xmm8[4],xmm3[5],xmm8[5],xmm3[6],xmm8[6],xmm3[7],xmm8[7]
5181 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
5182 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
5183 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
5184 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
5185 ; AVX2-FAST-NEXT: vpshufb %ymm15, %ymm0, %ymm3
5186 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
5187 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm1, %ymm4
5188 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7],ymm4[8,9,10],ymm3[11],ymm4[12,13,14],ymm3[15]
5189 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm13[0],xmm2[1],xmm13[1],xmm2[2],xmm13[2],xmm2[3],xmm13[3],xmm2[4],xmm13[4],xmm2[5],xmm13[5],xmm2[6],xmm13[6],xmm2[7],xmm13[7]
5190 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3],xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
5191 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm10 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
5192 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm4, %xmm5
5193 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
5194 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
5195 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
5196 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm6 = ymm2[0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,16,17,20,21,20,21,22,23,20,21,22,23,28,29,30,31]
5197 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7,8],ymm6[9],ymm5[10,11,12],ymm6[13],ymm5[14,15]
5198 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2],ymm3[3],ymm5[4],ymm3[5],ymm5[6],ymm3[7]
5199 ; AVX2-FAST-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5200 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm0, %ymm0
5201 ; AVX2-FAST-NEXT: vpshufb %ymm11, %ymm1, %ymm1
5202 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
5203 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm1
5204 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm2
5205 ; AVX2-FAST-NEXT: vpshufb %ymm14, %ymm2, %ymm2
5206 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7,8],ymm1[9],ymm2[10,11,12],ymm1[13],ymm2[14,15]
5207 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
5208 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5209 ; AVX2-FAST-NEXT: vmovdqa 32(%r10), %xmm1
5210 ; AVX2-FAST-NEXT: vmovdqa 32(%rax), %xmm3
5211 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
5212 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm6
5213 ; AVX2-FAST-NEXT: vmovdqa 32(%r9), %xmm4
5214 ; AVX2-FAST-NEXT: vmovdqa 32(%r8), %xmm5
5215 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
5216 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm7
5217 ; AVX2-FAST-NEXT: vpshufb %ymm15, %ymm6, %ymm0
5218 ; AVX2-FAST-NEXT: vpshufb %ymm8, %ymm7, %ymm2
5219 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm8 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7],ymm2[8,9,10],ymm0[11],ymm2[12,13,14],ymm0[15]
5220 ; AVX2-FAST-NEXT: vmovdqa 32(%rsi), %xmm0
5221 ; AVX2-FAST-NEXT: vmovdqa 32(%rdi), %xmm2
5222 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm2[8],xmm0[8],xmm2[9],xmm0[9],xmm2[10],xmm0[10],xmm2[11],xmm0[11],xmm2[12],xmm0[12],xmm2[13],xmm0[13],xmm2[14],xmm0[14],xmm2[15],xmm0[15]
5223 ; AVX2-FAST-NEXT: vpshufb %xmm10, %xmm9, %xmm10
5224 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm11 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
5225 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm10, %ymm11, %ymm10
5226 ; AVX2-FAST-NEXT: vmovdqa 32(%rcx), %xmm11
5227 ; AVX2-FAST-NEXT: vmovdqa 32(%rdx), %xmm13
5228 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm13[8],xmm11[8],xmm13[9],xmm11[9],xmm13[10],xmm11[10],xmm13[11],xmm11[11],xmm13[12],xmm11[12],xmm13[13],xmm11[13],xmm13[14],xmm11[14],xmm13[15],xmm11[15]
5229 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm14, %ymm14, %ymm14
5230 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} ymm15 = ymm14[0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,16,17,20,21,20,21,22,23,20,21,22,23,28,29,30,31]
5231 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm10 = ymm10[0],ymm15[1],ymm10[2,3,4],ymm15[5],ymm10[6,7,8],ymm15[9],ymm10[10,11,12],ymm15[13],ymm10[14,15]
5232 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm8 = ymm10[0],ymm8[1],ymm10[2],ymm8[3],ymm10[4],ymm8[5],ymm10[6],ymm8[7]
5233 ; AVX2-FAST-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5234 ; AVX2-FAST-NEXT: vmovdqa %ymm12, %ymm10
5235 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm6, %ymm6
5236 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm15 = [0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
5237 ; AVX2-FAST-NEXT: vpshufb %ymm15, %ymm7, %ymm7
5238 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm7[0,1,2],ymm6[3],ymm7[4,5,6],ymm6[7],ymm7[8,9,10],ymm6[11],ymm7[12,13,14],ymm6[15]
5239 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
5240 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm14, %ymm7
5241 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm9, %ymm9, %ymm8
5242 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm14 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
5243 ; AVX2-FAST-NEXT: vpshufb %ymm14, %ymm8, %ymm8
5244 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm8[0],ymm7[1],ymm8[2,3,4],ymm7[5],ymm8[6,7,8],ymm7[9],ymm8[10,11,12],ymm7[13],ymm8[14,15]
5245 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0],ymm6[1],ymm7[2],ymm6[3],ymm7[4],ymm6[5],ymm7[6],ymm6[7]
5246 ; AVX2-FAST-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5247 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
5248 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
5249 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
5250 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm3
5251 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
5252 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm1, %ymm4
5253 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
5254 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm3, %ymm5
5255 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7],ymm5[8,9,10],ymm4[11],ymm5[12,13,14],ymm4[15]
5256 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3],xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
5257 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3],xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
5258 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm11 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
5259 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm0, %xmm2
5260 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
5261 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm2, %ymm6, %ymm2
5262 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm5, %ymm5, %ymm5
5263 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
5264 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm5, %ymm6
5265 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm6[1],ymm2[2,3,4],ymm6[5],ymm2[6,7,8],ymm6[9],ymm2[10,11,12],ymm6[13],ymm2[14,15]
5266 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[3],ymm2[4],ymm4[5],ymm2[6],ymm4[7]
5267 ; AVX2-FAST-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5268 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm1, %ymm1
5269 ; AVX2-FAST-NEXT: vpshufb %ymm15, %ymm3, %ymm2
5270 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7],ymm2[8,9,10],ymm1[11],ymm2[12,13,14],ymm1[15]
5271 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm5, %ymm2
5272 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
5273 ; AVX2-FAST-NEXT: vpshufb %ymm14, %ymm0, %ymm0
5274 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3,4],ymm2[5],ymm0[6,7,8],ymm2[9],ymm0[10,11,12],ymm2[13],ymm0[14,15]
5275 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
5276 ; AVX2-FAST-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5277 ; AVX2-FAST-NEXT: vmovdqa 16(%r10), %xmm4
5278 ; AVX2-FAST-NEXT: vmovdqa 16(%rax), %xmm2
5279 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm4[8],xmm2[9],xmm4[9],xmm2[10],xmm4[10],xmm2[11],xmm4[11],xmm2[12],xmm4[12],xmm2[13],xmm4[13],xmm2[14],xmm4[14],xmm2[15],xmm4[15]
5280 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm6
5281 ; AVX2-FAST-NEXT: vmovdqa 16(%r9), %xmm1
5282 ; AVX2-FAST-NEXT: vmovdqa 16(%r8), %xmm0
5283 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
5284 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm8
5285 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm6, %ymm3
5286 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm8, %ymm5
5287 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm7 = ymm5[0,1,2],ymm3[3],ymm5[4,5,6],ymm3[7],ymm5[8,9,10],ymm3[11],ymm5[12,13,14],ymm3[15]
5288 ; AVX2-FAST-NEXT: vmovdqa 16(%rsi), %xmm5
5289 ; AVX2-FAST-NEXT: vmovdqa 16(%rdi), %xmm3
5290 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm3[8],xmm5[8],xmm3[9],xmm5[9],xmm3[10],xmm5[10],xmm3[11],xmm5[11],xmm3[12],xmm5[12],xmm3[13],xmm5[13],xmm3[14],xmm5[14],xmm3[15],xmm5[15]
5291 ; AVX2-FAST-NEXT: vpshufb %xmm11, %xmm9, %xmm10
5292 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm13 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
5293 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm10, %ymm13, %ymm10
5294 ; AVX2-FAST-NEXT: vmovdqa 16(%rcx), %xmm13
5295 ; AVX2-FAST-NEXT: vmovdqa 16(%rdx), %xmm14
5296 ; AVX2-FAST-NEXT: vpunpckhbw {{.*#+}} xmm15 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
5297 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm15, %ymm15, %ymm15
5298 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
5299 ; AVX2-FAST-NEXT: vpshufb %ymm11, %ymm15, %ymm11
5300 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm10 = ymm10[0],ymm11[1],ymm10[2,3,4],ymm11[5],ymm10[6,7,8],ymm11[9],ymm10[10,11,12],ymm11[13],ymm10[14,15]
5301 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm7 = ymm10[0],ymm7[1],ymm10[2],ymm7[3],ymm10[4],ymm7[5],ymm10[6],ymm7[7]
5302 ; AVX2-FAST-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5303 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = [0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
5304 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm6, %ymm6
5305 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm7 = [0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
5306 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm8, %ymm8
5307 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm6 = ymm8[0,1,2],ymm6[3],ymm8[4,5,6],ymm6[7],ymm8[8,9,10],ymm6[11],ymm8[12,13,14],ymm6[15]
5308 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
5309 ; AVX2-FAST-NEXT: vpshufb %ymm11, %ymm15, %ymm8
5310 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm9, %ymm9, %ymm9
5311 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
5312 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm9, %ymm9
5313 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2,3,4],ymm8[5],ymm9[6,7,8],ymm8[9],ymm9[10,11,12],ymm8[13],ymm9[14,15]
5314 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0],ymm6[1],ymm8[2],ymm6[3],ymm8[4],ymm6[5],ymm8[6],ymm6[7]
5315 ; AVX2-FAST-NEXT: vmovdqu %ymm6, (%rsp) # 32-byte Spill
5316 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3],xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
5317 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
5318 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm1
5319 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm2
5320 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
5321 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm1, %ymm0
5322 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm9 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
5323 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm4
5324 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm0 = ymm4[0,1,2],ymm0[3],ymm4[4,5,6],ymm0[7],ymm4[8,9,10],ymm0[11],ymm4[12,13,14],ymm0[15]
5325 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
5326 ; AVX2-FAST-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3],xmm3[4],xmm5[4],xmm3[5],xmm5[5],xmm3[6],xmm5[6],xmm3[7],xmm5[7]
5327 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} xmm14 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
5328 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm3, %xmm5
5329 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
5330 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm5, %ymm8, %ymm5
5331 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm4
5332 ; AVX2-FAST-NEXT: vmovdqa {{.*#+}} ymm13 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
5333 ; AVX2-FAST-NEXT: vpshufb %ymm13, %ymm4, %ymm8
5334 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0],ymm8[1],ymm5[2,3,4],ymm8[5],ymm5[6,7,8],ymm8[9],ymm5[10,11,12],ymm8[13],ymm5[14,15]
5335 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0],ymm0[1],ymm5[2],ymm0[3],ymm5[4],ymm0[5],ymm5[6],ymm0[7]
5336 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm1, %ymm1
5337 ; AVX2-FAST-NEXT: vmovdqa %ymm7, %ymm15
5338 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm2, %ymm2
5339 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7],ymm2[8,9,10],ymm1[11],ymm2[12,13,14],ymm1[15]
5340 ; AVX2-FAST-NEXT: vpshufb %ymm11, %ymm4, %ymm2
5341 ; AVX2-FAST-NEXT: vmovdqa %ymm11, %ymm7
5342 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm3
5343 ; AVX2-FAST-NEXT: vpshufb %ymm10, %ymm3, %ymm3
5344 ; AVX2-FAST-NEXT: vmovdqa %ymm10, %ymm11
5345 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3,4],ymm2[5],ymm3[6,7,8],ymm2[9],ymm3[10,11,12],ymm2[13],ymm3[14,15]
5346 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
5347 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5348 ; AVX2-FAST-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
5349 ; AVX2-FAST-NEXT: # xmm2 = xmm2[8],mem[8],xmm2[9],mem[9],xmm2[10],mem[10],xmm2[11],mem[11],xmm2[12],mem[12],xmm2[13],mem[13],xmm2[14],mem[14],xmm2[15],mem[15]
5350 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5351 ; AVX2-FAST-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
5352 ; AVX2-FAST-NEXT: # xmm3 = xmm3[8],mem[8],xmm3[9],mem[9],xmm3[10],mem[10],xmm3[11],mem[11],xmm3[12],mem[12],xmm3[13],mem[13],xmm3[14],mem[14],xmm3[15],mem[15]
5353 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
5354 ; AVX2-FAST-NEXT: vpshufb %ymm6, %ymm2, %ymm4
5355 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm3
5356 ; AVX2-FAST-NEXT: vpshufb %ymm9, %ymm3, %ymm5
5357 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7],ymm5[8,9,10],ymm4[11],ymm5[12,13,14],ymm4[15]
5358 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
5359 ; AVX2-FAST-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
5360 ; AVX2-FAST-NEXT: # xmm5 = xmm5[8],mem[8],xmm5[9],mem[9],xmm5[10],mem[10],xmm5[11],mem[11],xmm5[12],mem[12],xmm5[13],mem[13],xmm5[14],mem[14],xmm5[15],mem[15]
5361 ; AVX2-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
5362 ; AVX2-FAST-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm8, %xmm8 # 16-byte Folded Reload
5363 ; AVX2-FAST-NEXT: # xmm8 = xmm8[8],mem[8],xmm8[9],mem[9],xmm8[10],mem[10],xmm8[11],mem[11],xmm8[12],mem[12],xmm8[13],mem[13],xmm8[14],mem[14],xmm8[15],mem[15]
5364 ; AVX2-FAST-NEXT: vpshufb %xmm14, %xmm8, %xmm9
5365 ; AVX2-FAST-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm8[0],zero,zero,zero,xmm8[1],zero,zero,zero
5366 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
5367 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm5, %ymm5, %ymm5
5368 ; AVX2-FAST-NEXT: vpshufb %ymm13, %ymm5, %ymm10
5369 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm9 = ymm9[0],ymm10[1],ymm9[2,3,4],ymm10[5],ymm9[6,7,8],ymm10[9],ymm9[10,11,12],ymm10[13],ymm9[14,15]
5370 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0],ymm4[1],ymm9[2],ymm4[3],ymm9[4],ymm4[5],ymm9[6],ymm4[7]
5371 ; AVX2-FAST-NEXT: vpshufb %ymm12, %ymm2, %ymm2
5372 ; AVX2-FAST-NEXT: vpshufb %ymm15, %ymm3, %ymm3
5373 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7],ymm3[8,9,10],ymm2[11],ymm3[12,13,14],ymm2[15]
5374 ; AVX2-FAST-NEXT: vpshufb %ymm7, %ymm5, %ymm3
5375 ; AVX2-FAST-NEXT: vinserti128 $1, %xmm8, %ymm8, %ymm5
5376 ; AVX2-FAST-NEXT: vpshufb %ymm11, %ymm5, %ymm5
5377 ; AVX2-FAST-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3,4],ymm3[5],ymm5[6,7,8],ymm3[9],ymm5[10,11,12],ymm3[13],ymm5[14,15]
5378 ; AVX2-FAST-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4],ymm2[5],ymm3[6],ymm2[7]
5379 ; AVX2-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
5380 ; AVX2-FAST-NEXT: vmovdqa %ymm2, 96(%rax)
5381 ; AVX2-FAST-NEXT: vmovdqa %ymm4, 64(%rax)
5382 ; AVX2-FAST-NEXT: vmovdqa %ymm1, 160(%rax)
5383 ; AVX2-FAST-NEXT: vmovdqa %ymm0, 128(%rax)
5384 ; AVX2-FAST-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
5385 ; AVX2-FAST-NEXT: vmovaps %ymm0, 224(%rax)
5386 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5387 ; AVX2-FAST-NEXT: vmovaps %ymm0, 192(%rax)
5388 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5389 ; AVX2-FAST-NEXT: vmovaps %ymm0, 288(%rax)
5390 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5391 ; AVX2-FAST-NEXT: vmovaps %ymm0, 256(%rax)
5392 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5393 ; AVX2-FAST-NEXT: vmovaps %ymm0, 352(%rax)
5394 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5395 ; AVX2-FAST-NEXT: vmovaps %ymm0, 320(%rax)
5396 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5397 ; AVX2-FAST-NEXT: vmovaps %ymm0, 416(%rax)
5398 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5399 ; AVX2-FAST-NEXT: vmovaps %ymm0, 384(%rax)
5400 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5401 ; AVX2-FAST-NEXT: vmovaps %ymm0, 480(%rax)
5402 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5403 ; AVX2-FAST-NEXT: vmovaps %ymm0, 448(%rax)
5404 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5405 ; AVX2-FAST-NEXT: vmovaps %ymm0, (%rax)
5406 ; AVX2-FAST-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5407 ; AVX2-FAST-NEXT: vmovaps %ymm0, 32(%rax)
5408 ; AVX2-FAST-NEXT: addq $392, %rsp # imm = 0x188
5409 ; AVX2-FAST-NEXT: vzeroupper
5410 ; AVX2-FAST-NEXT: retq
5411 ;
5412 ; AVX2-FAST-PERLANE-LABEL: store_i8_stride8_vf64:
5413 ; AVX2-FAST-PERLANE: # %bb.0:
5414 ; AVX2-FAST-PERLANE-NEXT: subq $392, %rsp # imm = 0x188
5415 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
5416 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %r10
5417 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r10), %xmm1
5418 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5419 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rax), %xmm0
5420 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5421 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
5422 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
5423 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm2 = ymm0[0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,16,17,18,19,24,25,28,29,24,25,28,29,28,29,30,31]
5424 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r9), %xmm3
5425 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5426 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%r8), %xmm1
5427 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5428 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3],xmm1[4],xmm3[4],xmm1[5],xmm3[5],xmm1[6],xmm3[6],xmm1[7],xmm3[7]
5429 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
5430 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm3 = ymm1[0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,16,17,18,19,28,29,26,27,28,29,26,27,30,31,30,31]
5431 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7],ymm3[8,9,10],ymm2[11],ymm3[12,13,14],ymm2[15]
5432 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rcx), %xmm4
5433 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5434 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdx), %xmm2
5435 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5436 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3],xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
5437 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
5438 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rsi), %xmm5
5439 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5440 ; AVX2-FAST-PERLANE-NEXT: vmovdqa (%rdi), %xmm4
5441 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5442 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3],xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
5443 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm5
5444 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,28,29,u,u,u,u,u,u,30,31,u,u,u,u,u,u]
5445 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm6 = ymm2[8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,24,25,28,29,20,21,22,23,28,29,30,31,28,29,30,31]
5446 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7,8],ymm6[9],ymm5[10,11,12],ymm6[13],ymm5[14,15]
5447 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2],ymm3[3],ymm5[4],ymm3[5],ymm5[6],ymm3[7]
5448 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5449 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
5450 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm3, %ymm0, %ymm0
5451 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm3, %ymm9
5452 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
5453 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm3, %ymm1, %ymm1
5454 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm3, %ymm10
5455 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
5456 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm3 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
5457 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm3, %xmm4, %xmm1
5458 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %xmm3, %xmm13
5459 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
5460 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm3, %ymm1
5461 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
5462 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm2, %ymm2
5463 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3,4],ymm2[5],ymm1[6,7,8],ymm2[9],ymm1[10,11,12],ymm2[13],ymm1[14,15]
5464 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
5465 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5466 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%r10), %xmm8
5467 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rax), %xmm3
5468 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm3[8],xmm8[8],xmm3[9],xmm8[9],xmm3[10],xmm8[10],xmm3[11],xmm8[11],xmm3[12],xmm8[12],xmm3[13],xmm8[13],xmm3[14],xmm8[14],xmm3[15],xmm8[15]
5469 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm6
5470 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%r9), %xmm4
5471 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%r8), %xmm5
5472 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
5473 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm7
5474 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm6, %ymm0
5475 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm7, %ymm2
5476 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm11 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7],ymm2[8,9,10],ymm0[11],ymm2[12,13,14],ymm0[15]
5477 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rsi), %xmm10
5478 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rdi), %xmm9
5479 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm9[8],xmm10[8],xmm9[9],xmm10[9],xmm9[10],xmm10[10],xmm9[11],xmm10[11],xmm9[12],xmm10[12],xmm9[13],xmm10[13],xmm9[14],xmm10[14],xmm9[15],xmm10[15]
5480 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm13, %xmm0, %xmm1
5481 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm13 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
5482 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm13, %ymm1
5483 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rcx), %xmm13
5484 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 48(%rdx), %xmm2
5485 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm15 = xmm2[8],xmm13[8],xmm2[9],xmm13[9],xmm2[10],xmm13[10],xmm2[11],xmm13[11],xmm2[12],xmm13[12],xmm2[13],xmm13[13],xmm2[14],xmm13[14],xmm2[15],xmm13[15]
5486 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm15, %ymm15, %ymm15
5487 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm15, %ymm14
5488 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm14[1],ymm1[2,3,4],ymm14[5],ymm1[6,7,8],ymm14[9],ymm1[10,11,12],ymm14[13],ymm1[14,15]
5489 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0],ymm11[1],ymm1[2],ymm11[3],ymm1[4],ymm11[5],ymm1[6],ymm11[7]
5490 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5491 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = [0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
5492 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm6, %ymm1
5493 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = [0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
5494 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm7, %ymm6
5495 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm6[0,1,2],ymm1[3],ymm6[4,5,6],ymm1[7],ymm6[8,9,10],ymm1[11],ymm6[12,13,14],ymm1[15]
5496 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = [8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
5497 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm15, %ymm6
5498 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
5499 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm14 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
5500 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm14, %ymm0, %ymm0
5501 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm6[1],ymm0[2,3,4],ymm6[5],ymm0[6,7,8],ymm6[9],ymm0[10,11,12],ymm6[13],ymm0[14,15]
5502 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
5503 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5504 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm3[0],xmm8[0],xmm3[1],xmm8[1],xmm3[2],xmm8[2],xmm3[3],xmm8[3],xmm3[4],xmm8[4],xmm3[5],xmm8[5],xmm3[6],xmm8[6],xmm3[7],xmm8[7]
5505 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
5506 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
5507 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
5508 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
5509 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm15, %ymm0, %ymm3
5510 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm8 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
5511 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm8, %ymm1, %ymm4
5512 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3],ymm4[4,5,6],ymm3[7],ymm4[8,9,10],ymm3[11],ymm4[12,13,14],ymm3[15]
5513 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm13[0],xmm2[1],xmm13[1],xmm2[2],xmm13[2],xmm2[3],xmm13[3],xmm2[4],xmm13[4],xmm2[5],xmm13[5],xmm2[6],xmm13[6],xmm2[7],xmm13[7]
5514 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3],xmm9[4],xmm10[4],xmm9[5],xmm10[5],xmm9[6],xmm10[6],xmm9[7],xmm10[7]
5515 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm10 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
5516 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm10, %xmm4, %xmm5
5517 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
5518 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm6, %ymm5
5519 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
5520 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm6 = ymm2[0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,16,17,20,21,20,21,22,23,20,21,22,23,28,29,30,31]
5521 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0],ymm6[1],ymm5[2,3,4],ymm6[5],ymm5[6,7,8],ymm6[9],ymm5[10,11,12],ymm6[13],ymm5[14,15]
5522 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2],ymm3[3],ymm5[4],ymm3[5],ymm5[6],ymm3[7]
5523 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5524 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm0, %ymm0
5525 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm1, %ymm1
5526 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5,6],ymm0[7],ymm1[8,9,10],ymm0[11],ymm1[12,13,14],ymm0[15]
5527 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm2, %ymm1
5528 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm2
5529 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm14, %ymm2, %ymm2
5530 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3,4],ymm1[5],ymm2[6,7,8],ymm1[9],ymm2[10,11,12],ymm1[13],ymm2[14,15]
5531 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
5532 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5533 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r10), %xmm1
5534 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rax), %xmm3
5535 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
5536 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm6
5537 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r9), %xmm4
5538 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%r8), %xmm5
5539 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
5540 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm7
5541 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm15, %ymm6, %ymm0
5542 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm8, %ymm7, %ymm2
5543 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm8 = ymm2[0,1,2],ymm0[3],ymm2[4,5,6],ymm0[7],ymm2[8,9,10],ymm0[11],ymm2[12,13,14],ymm0[15]
5544 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rsi), %xmm0
5545 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdi), %xmm2
5546 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm2[8],xmm0[8],xmm2[9],xmm0[9],xmm2[10],xmm0[10],xmm2[11],xmm0[11],xmm2[12],xmm0[12],xmm2[13],xmm0[13],xmm2[14],xmm0[14],xmm2[15],xmm0[15]
5547 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm10, %xmm9, %xmm10
5548 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm11 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
5549 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm10, %ymm11, %ymm10
5550 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rcx), %xmm11
5551 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 32(%rdx), %xmm13
5552 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm13[8],xmm11[8],xmm13[9],xmm11[9],xmm13[10],xmm11[10],xmm13[11],xmm11[11],xmm13[12],xmm11[12],xmm13[13],xmm11[13],xmm13[14],xmm11[14],xmm13[15],xmm11[15]
5553 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm14, %ymm14, %ymm14
5554 ; AVX2-FAST-PERLANE-NEXT: vpshufb {{.*#+}} ymm15 = ymm14[0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,16,17,20,21,20,21,22,23,20,21,22,23,28,29,30,31]
5555 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm10 = ymm10[0],ymm15[1],ymm10[2,3,4],ymm15[5],ymm10[6,7,8],ymm15[9],ymm10[10,11,12],ymm15[13],ymm10[14,15]
5556 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm8 = ymm10[0],ymm8[1],ymm10[2],ymm8[3],ymm10[4],ymm8[5],ymm10[6],ymm8[7]
5557 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5558 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm12, %ymm10
5559 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm6, %ymm6
5560 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm15 = [0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
5561 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm15, %ymm7, %ymm7
5562 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm6 = ymm7[0,1,2],ymm6[3],ymm7[4,5,6],ymm6[7],ymm7[8,9,10],ymm6[11],ymm7[12,13,14],ymm6[15]
5563 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = [8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
5564 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm14, %ymm7
5565 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm9, %ymm9, %ymm8
5566 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm14 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
5567 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm14, %ymm8, %ymm8
5568 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm7 = ymm8[0],ymm7[1],ymm8[2,3,4],ymm7[5],ymm8[6,7,8],ymm7[9],ymm8[10,11,12],ymm7[13],ymm8[14,15]
5569 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm7[0],ymm6[1],ymm7[2],ymm6[3],ymm7[4],ymm6[5],ymm7[6],ymm6[7]
5570 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5571 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
5572 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
5573 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
5574 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm3
5575 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
5576 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm1, %ymm4
5577 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
5578 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm3, %ymm5
5579 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7],ymm5[8,9,10],ymm4[11],ymm5[12,13,14],ymm4[15]
5580 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm13[0],xmm11[0],xmm13[1],xmm11[1],xmm13[2],xmm11[2],xmm13[3],xmm11[3],xmm13[4],xmm11[4],xmm13[5],xmm11[5],xmm13[6],xmm11[6],xmm13[7],xmm11[7]
5581 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3],xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
5582 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm11 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
5583 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm11, %xmm0, %xmm2
5584 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
5585 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm6, %ymm2
5586 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm5, %ymm5
5587 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
5588 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm5, %ymm6
5589 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm6[1],ymm2[2,3,4],ymm6[5],ymm2[6,7,8],ymm6[9],ymm2[10,11,12],ymm6[13],ymm2[14,15]
5590 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm4[1],ymm2[2],ymm4[3],ymm2[4],ymm4[5],ymm2[6],ymm4[7]
5591 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5592 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm1, %ymm1
5593 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm15, %ymm3, %ymm2
5594 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7],ymm2[8,9,10],ymm1[11],ymm2[12,13,14],ymm1[15]
5595 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm5, %ymm2
5596 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
5597 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm14, %ymm0, %ymm0
5598 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm2[1],ymm0[2,3,4],ymm2[5],ymm0[6,7,8],ymm2[9],ymm0[10,11,12],ymm2[13],ymm0[14,15]
5599 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2],ymm1[3],ymm0[4],ymm1[5],ymm0[6],ymm1[7]
5600 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5601 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%r10), %xmm4
5602 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rax), %xmm2
5603 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm4[8],xmm2[9],xmm4[9],xmm2[10],xmm4[10],xmm2[11],xmm4[11],xmm2[12],xmm4[12],xmm2[13],xmm4[13],xmm2[14],xmm4[14],xmm2[15],xmm4[15]
5604 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm6
5605 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%r9), %xmm1
5606 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%r8), %xmm0
5607 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
5608 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm8
5609 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm6, %ymm3
5610 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm8, %ymm5
5611 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm7 = ymm5[0,1,2],ymm3[3],ymm5[4,5,6],ymm3[7],ymm5[8,9,10],ymm3[11],ymm5[12,13,14],ymm3[15]
5612 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rsi), %xmm5
5613 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdi), %xmm3
5614 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm9 = xmm3[8],xmm5[8],xmm3[9],xmm5[9],xmm3[10],xmm5[10],xmm3[11],xmm5[11],xmm3[12],xmm5[12],xmm3[13],xmm5[13],xmm3[14],xmm5[14],xmm3[15],xmm5[15]
5615 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm11, %xmm9, %xmm10
5616 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm13 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
5617 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm10, %ymm13, %ymm10
5618 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rcx), %xmm13
5619 ; AVX2-FAST-PERLANE-NEXT: vmovdqa 16(%rdx), %xmm14
5620 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{.*#+}} xmm15 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
5621 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm15, %ymm15, %ymm15
5622 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
5623 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm15, %ymm11
5624 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm10 = ymm10[0],ymm11[1],ymm10[2,3,4],ymm11[5],ymm10[6,7,8],ymm11[9],ymm10[10,11,12],ymm11[13],ymm10[14,15]
5625 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm7 = ymm10[0],ymm7[1],ymm10[2],ymm7[3],ymm10[4],ymm7[5],ymm10[6],ymm7[7]
5626 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5627 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm12 = [0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
5628 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm6, %ymm6
5629 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm7 = [0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
5630 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm8, %ymm8
5631 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm6 = ymm8[0,1,2],ymm6[3],ymm8[4,5,6],ymm6[7],ymm8[8,9,10],ymm6[11],ymm8[12,13,14],ymm6[15]
5632 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm11 = [8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
5633 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm15, %ymm8
5634 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm9, %ymm9, %ymm9
5635 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm10 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
5636 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm9, %ymm9
5637 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0],ymm8[1],ymm9[2,3,4],ymm8[5],ymm9[6,7,8],ymm8[9],ymm9[10,11,12],ymm8[13],ymm9[14,15]
5638 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm6 = ymm8[0],ymm6[1],ymm8[2],ymm6[3],ymm8[4],ymm6[5],ymm8[6],ymm6[7]
5639 ; AVX2-FAST-PERLANE-NEXT: vmovdqu %ymm6, (%rsp) # 32-byte Spill
5640 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3],xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
5641 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
5642 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm1
5643 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm2
5644 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
5645 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm1, %ymm0
5646 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm9 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
5647 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm2, %ymm4
5648 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm0 = ymm4[0,1,2],ymm0[3],ymm4[4,5,6],ymm0[7],ymm4[8,9,10],ymm0[11],ymm4[12,13,14],ymm0[15]
5649 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
5650 ; AVX2-FAST-PERLANE-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3],xmm3[4],xmm5[4],xmm3[5],xmm5[5],xmm3[6],xmm5[6],xmm3[7],xmm5[7]
5651 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} xmm14 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
5652 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm14, %xmm3, %xmm5
5653 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
5654 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm8, %ymm5
5655 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm4
5656 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{.*#+}} ymm13 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
5657 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm13, %ymm4, %ymm8
5658 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm5 = ymm5[0],ymm8[1],ymm5[2,3,4],ymm8[5],ymm5[6,7,8],ymm8[9],ymm5[10,11,12],ymm8[13],ymm5[14,15]
5659 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0],ymm0[1],ymm5[2],ymm0[3],ymm5[4],ymm0[5],ymm5[6],ymm0[7]
5660 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm1, %ymm1
5661 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm7, %ymm15
5662 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm2, %ymm2
5663 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5,6],ymm1[7],ymm2[8,9,10],ymm1[11],ymm2[12,13,14],ymm1[15]
5664 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm4, %ymm2
5665 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm11, %ymm7
5666 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm3
5667 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm10, %ymm3, %ymm3
5668 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm10, %ymm11
5669 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3,4],ymm2[5],ymm3[6,7,8],ymm2[9],ymm3[10,11,12],ymm2[13],ymm3[14,15]
5670 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
5671 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5672 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
5673 ; AVX2-FAST-PERLANE-NEXT: # xmm2 = xmm2[8],mem[8],xmm2[9],mem[9],xmm2[10],mem[10],xmm2[11],mem[11],xmm2[12],mem[12],xmm2[13],mem[13],xmm2[14],mem[14],xmm2[15],mem[15]
5674 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5675 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
5676 ; AVX2-FAST-PERLANE-NEXT: # xmm3 = xmm3[8],mem[8],xmm3[9],mem[9],xmm3[10],mem[10],xmm3[11],mem[11],xmm3[12],mem[12],xmm3[13],mem[13],xmm3[14],mem[14],xmm3[15],mem[15]
5677 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
5678 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm6, %ymm2, %ymm4
5679 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm3, %ymm3, %ymm3
5680 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm9, %ymm3, %ymm5
5681 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3],ymm5[4,5,6],ymm4[7],ymm5[8,9,10],ymm4[11],ymm5[12,13,14],ymm4[15]
5682 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
5683 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
5684 ; AVX2-FAST-PERLANE-NEXT: # xmm5 = xmm5[8],mem[8],xmm5[9],mem[9],xmm5[10],mem[10],xmm5[11],mem[11],xmm5[12],mem[12],xmm5[13],mem[13],xmm5[14],mem[14],xmm5[15],mem[15]
5685 ; AVX2-FAST-PERLANE-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
5686 ; AVX2-FAST-PERLANE-NEXT: vpunpckhbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm8, %xmm8 # 16-byte Folded Reload
5687 ; AVX2-FAST-PERLANE-NEXT: # xmm8 = xmm8[8],mem[8],xmm8[9],mem[9],xmm8[10],mem[10],xmm8[11],mem[11],xmm8[12],mem[12],xmm8[13],mem[13],xmm8[14],mem[14],xmm8[15],mem[15]
5688 ; AVX2-FAST-PERLANE-NEXT: vpshufb %xmm14, %xmm8, %xmm9
5689 ; AVX2-FAST-PERLANE-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm8[0],zero,zero,zero,xmm8[1],zero,zero,zero
5690 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm9, %ymm10, %ymm9
5691 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm5, %ymm5, %ymm5
5692 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm13, %ymm5, %ymm10
5693 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm9 = ymm9[0],ymm10[1],ymm9[2,3,4],ymm10[5],ymm9[6,7,8],ymm10[9],ymm9[10,11,12],ymm10[13],ymm9[14,15]
5694 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm4 = ymm9[0],ymm4[1],ymm9[2],ymm4[3],ymm9[4],ymm4[5],ymm9[6],ymm4[7]
5695 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm12, %ymm2, %ymm2
5696 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm15, %ymm3, %ymm3
5697 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7],ymm3[8,9,10],ymm2[11],ymm3[12,13,14],ymm2[15]
5698 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm7, %ymm5, %ymm3
5699 ; AVX2-FAST-PERLANE-NEXT: vinserti128 $1, %xmm8, %ymm8, %ymm5
5700 ; AVX2-FAST-PERLANE-NEXT: vpshufb %ymm11, %ymm5, %ymm5
5701 ; AVX2-FAST-PERLANE-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3,4],ymm3[5],ymm5[6,7,8],ymm3[9],ymm5[10,11,12],ymm3[13],ymm5[14,15]
5702 ; AVX2-FAST-PERLANE-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4],ymm2[5],ymm3[6],ymm2[7]
5703 ; AVX2-FAST-PERLANE-NEXT: movq {{[0-9]+}}(%rsp), %rax
5704 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm2, 96(%rax)
5705 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm4, 64(%rax)
5706 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm1, 160(%rax)
5707 ; AVX2-FAST-PERLANE-NEXT: vmovdqa %ymm0, 128(%rax)
5708 ; AVX2-FAST-PERLANE-NEXT: vmovups (%rsp), %ymm0 # 32-byte Reload
5709 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 224(%rax)
5710 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5711 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 192(%rax)
5712 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5713 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 288(%rax)
5714 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5715 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 256(%rax)
5716 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5717 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 352(%rax)
5718 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5719 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 320(%rax)
5720 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5721 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 416(%rax)
5722 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5723 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 384(%rax)
5724 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5725 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 480(%rax)
5726 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5727 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 448(%rax)
5728 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5729 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, (%rax)
5730 ; AVX2-FAST-PERLANE-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5731 ; AVX2-FAST-PERLANE-NEXT: vmovaps %ymm0, 32(%rax)
5732 ; AVX2-FAST-PERLANE-NEXT: addq $392, %rsp # imm = 0x188
5733 ; AVX2-FAST-PERLANE-NEXT: vzeroupper
5734 ; AVX2-FAST-PERLANE-NEXT: retq
5735 ;
5736 ; AVX512F-SLOW-LABEL: store_i8_stride8_vf64:
5737 ; AVX512F-SLOW: # %bb.0:
5738 ; AVX512F-SLOW-NEXT: subq $648, %rsp # imm = 0x288
5739 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
5740 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
5741 ; AVX512F-SLOW-NEXT: vmovdqa (%rcx), %xmm1
5742 ; AVX512F-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5743 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rcx), %xmm10
5744 ; AVX512F-SLOW-NEXT: vmovdqa 48(%rcx), %xmm2
5745 ; AVX512F-SLOW-NEXT: vmovdqa (%rdx), %xmm0
5746 ; AVX512F-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5747 ; AVX512F-SLOW-NEXT: vmovdqa 48(%rdx), %xmm3
5748 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
5749 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,2,1,4,5,6,7]
5750 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm0[0,2,2,3,4,5,6,7]
5751 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
5752 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5753 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
5754 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
5755 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
5756 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5757 ; AVX512F-SLOW-NEXT: vmovdqa (%r10), %xmm1
5758 ; AVX512F-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5759 ; AVX512F-SLOW-NEXT: vmovdqa 48(%r10), %xmm4
5760 ; AVX512F-SLOW-NEXT: vmovdqa (%rax), %xmm0
5761 ; AVX512F-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5762 ; AVX512F-SLOW-NEXT: vmovdqa 48(%rax), %xmm5
5763 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
5764 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,2,1,4,5,6,7]
5765 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm8 = xmm0[0,2,2,3,4,5,6,7]
5766 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm1, %ymm1
5767 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5768 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
5769 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
5770 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
5771 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5772 ; AVX512F-SLOW-NEXT: vmovdqa (%r9), %xmm1
5773 ; AVX512F-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5774 ; AVX512F-SLOW-NEXT: vmovdqa 48(%r9), %xmm12
5775 ; AVX512F-SLOW-NEXT: vmovdqa (%r8), %xmm0
5776 ; AVX512F-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5777 ; AVX512F-SLOW-NEXT: vmovdqa 48(%r8), %xmm13
5778 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
5779 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,1,1,3,4,5,6,7]
5780 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm8 = xmm0[2,1,3,3,4,5,6,7]
5781 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm1, %ymm1
5782 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5783 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,5,5,7]
5784 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,6,5,7,7]
5785 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm8
5786 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
5787 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,2,1,4,5,6,7]
5788 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm9 = xmm0[0,2,2,3,4,5,6,7]
5789 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm1, %ymm1
5790 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5791 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
5792 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
5793 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
5794 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5795 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
5796 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,2,1,4,5,6,7]
5797 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm11 = xmm0[0,2,2,3,4,5,6,7]
5798 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm11, %ymm1, %ymm1
5799 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5800 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
5801 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
5802 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
5803 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5804 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3],xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
5805 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,1,1,3,4,5,6,7]
5806 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm0[2,1,3,3,4,5,6,7]
5807 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm1, %ymm1
5808 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, (%rsp) # 32-byte Spill
5809 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdx), %xmm1
5810 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm14 = xmm0[0,1,2,3,4,5,5,7]
5811 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,6,5,7,7]
5812 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm0, %ymm14, %ymm28
5813 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r10), %xmm0
5814 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
5815 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm3[0,0,2,1,4,5,6,7]
5816 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm3[0,2,2,3,4,5,6,7]
5817 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm14, %ymm2, %ymm11
5818 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rax), %xmm2
5819 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm14 = xmm3[0,1,2,3,4,4,6,5]
5820 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
5821 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm3, %ymm14, %ymm31
5822 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
5823 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm3[0,0,2,1,4,5,6,7]
5824 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm3[0,2,2,3,4,5,6,7]
5825 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm5, %ymm4, %ymm24
5826 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,4,6,5]
5827 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
5828 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm3, %ymm4, %ymm22
5829 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm3 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
5830 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm3[0,1,1,3,4,5,6,7]
5831 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm3[2,1,3,3,4,5,6,7]
5832 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm5, %ymm4, %ymm20
5833 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm4 = xmm3[0,1,2,3,4,5,5,7]
5834 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,6,5,7,7]
5835 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm3, %ymm4, %ymm18
5836 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm1[0],xmm10[0],xmm1[1],xmm10[1],xmm1[2],xmm10[2],xmm1[3],xmm10[3],xmm1[4],xmm10[4],xmm1[5],xmm10[5],xmm1[6],xmm10[6],xmm1[7],xmm10[7]
5837 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm3[0,0,2,1,4,5,6,7]
5838 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm12 = xmm3[0,2,2,3,4,5,6,7]
5839 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm12, %ymm5, %ymm30
5840 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm3[0,1,2,3,4,4,6,5]
5841 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
5842 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm3, %ymm5, %ymm29
5843 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3],xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
5844 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm3[0,0,2,1,4,5,6,7]
5845 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm12 = xmm3[0,2,2,3,4,5,6,7]
5846 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm12, %ymm5, %ymm23
5847 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm3[0,1,2,3,4,4,6,5]
5848 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
5849 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm3, %ymm5, %ymm21
5850 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r9), %xmm3
5851 ; AVX512F-SLOW-NEXT: vmovdqa 32(%r8), %xmm5
5852 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3],xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
5853 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm13 = xmm12[0,1,1,3,4,5,6,7]
5854 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm14 = xmm12[2,1,3,3,4,5,6,7]
5855 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm14, %ymm13, %ymm19
5856 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm13 = xmm12[0,1,2,3,4,5,5,7]
5857 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,6,5,7,7]
5858 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm12, %ymm13, %ymm17
5859 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm10[8],xmm1[9],xmm10[9],xmm1[10],xmm10[10],xmm1[11],xmm10[11],xmm1[12],xmm10[12],xmm1[13],xmm10[13],xmm1[14],xmm10[14],xmm1[15],xmm10[15]
5860 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm10 = xmm1[0,0,2,1,4,5,6,7]
5861 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm12 = xmm1[0,2,2,3,4,5,6,7]
5862 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm12, %ymm10, %ymm4
5863 ; AVX512F-SLOW-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5864 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm10 = xmm1[0,1,2,3,4,4,6,5]
5865 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,6,6,7]
5866 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm10, %ymm1
5867 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5868 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm0[8],xmm2[9],xmm0[9],xmm2[10],xmm0[10],xmm2[11],xmm0[11],xmm2[12],xmm0[12],xmm2[13],xmm0[13],xmm2[14],xmm0[14],xmm2[15],xmm0[15]
5869 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,2,1,4,5,6,7]
5870 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm0[0,2,2,3,4,5,6,7]
5871 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
5872 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5873 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
5874 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
5875 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
5876 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5877 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm5[8],xmm3[8],xmm5[9],xmm3[9],xmm5[10],xmm3[10],xmm5[11],xmm3[11],xmm5[12],xmm3[12],xmm5[13],xmm3[13],xmm5[14],xmm3[14],xmm5[15],xmm3[15]
5878 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,1,1,3,4,5,6,7]
5879 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm0[2,1,3,3,4,5,6,7]
5880 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
5881 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5882 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,5,5,7]
5883 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,6,5,7,7]
5884 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
5885 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5886 ; AVX512F-SLOW-NEXT: vmovdqa 16(%rcx), %xmm13
5887 ; AVX512F-SLOW-NEXT: vmovdqa 16(%rdx), %xmm12
5888 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3],xmm12[4],xmm13[4],xmm12[5],xmm13[5],xmm12[6],xmm13[6],xmm12[7],xmm13[7]
5889 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,2,1,4,5,6,7]
5890 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm0[0,2,2,3,4,5,6,7]
5891 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm1, %ymm1
5892 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5893 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
5894 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
5895 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
5896 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5897 ; AVX512F-SLOW-NEXT: vmovdqa 16(%r10), %xmm1
5898 ; AVX512F-SLOW-NEXT: vmovdqa 16(%rax), %xmm2
5899 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3],xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
5900 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm2, %xmm26
5901 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm1, %xmm16
5902 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm0[0,0,2,1,4,5,6,7]
5903 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm0[0,2,2,3,4,5,6,7]
5904 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm1, %ymm1
5905 ; AVX512F-SLOW-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5906 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,4,6,5]
5907 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
5908 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
5909 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5910 ; AVX512F-SLOW-NEXT: vmovdqa 16(%r9), %xmm15
5911 ; AVX512F-SLOW-NEXT: vmovdqa 16(%r8), %xmm14
5912 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm14[0],xmm15[0],xmm14[1],xmm15[1],xmm14[2],xmm15[2],xmm14[3],xmm15[3],xmm14[4],xmm15[4],xmm14[5],xmm15[5],xmm14[6],xmm15[6],xmm14[7],xmm15[7]
5913 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm7[0,1,1,3,4,5,6,7]
5914 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm6 = xmm7[2,1,3,3,4,5,6,7]
5915 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm0
5916 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5917 ; AVX512F-SLOW-NEXT: vmovdqa (%rsi), %xmm1
5918 ; AVX512F-SLOW-NEXT: vmovdqa (%rdi), %xmm2
5919 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm1[8],xmm2[9],xmm1[9],xmm2[10],xmm1[10],xmm2[11],xmm1[11],xmm2[12],xmm1[12],xmm2[13],xmm1[13],xmm2[14],xmm1[14],xmm2[15],xmm1[15]
5920 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm2, %xmm25
5921 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm1, %xmm27
5922 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[2,3,2,3]
5923 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[3,3,3,3]
5924 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
5925 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[1,1,1,1]
5926 ; AVX512F-SLOW-NEXT: vmovdqa 48(%rsi), %xmm2
5927 ; AVX512F-SLOW-NEXT: vmovdqa 48(%rdi), %xmm1
5928 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3],xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
5929 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm2[8],xmm1[9],xmm2[9],xmm1[10],xmm2[10],xmm1[11],xmm2[11],xmm1[12],xmm2[12],xmm1[13],xmm2[13],xmm1[14],xmm2[14],xmm1[15],xmm2[15]
5930 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[2,3,2,3]
5931 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm0[3,3,3,3]
5932 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
5933 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
5934 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
5935 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,xmm3[1],zero,zero,zero
5936 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm3, %ymm6, %ymm3
5937 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
5938 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
5939 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm4
5940 ; AVX512F-SLOW-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
5941 ; AVX512F-SLOW-NEXT: # ymm3 = mem[0,1,1,3,4,5,5,7]
5942 ; AVX512F-SLOW-NEXT: vpshufd $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
5943 ; AVX512F-SLOW-NEXT: # ymm5 = mem[2,1,3,3,6,5,7,7]
5944 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm3, %zmm6
5945 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} zmm3 = [65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535]
5946 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm4, %zmm3, %zmm6
5947 ; AVX512F-SLOW-NEXT: vpshufd $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
5948 ; AVX512F-SLOW-NEXT: # ymm4 = mem[0,0,2,1,4,4,6,5]
5949 ; AVX512F-SLOW-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
5950 ; AVX512F-SLOW-NEXT: # ymm5 = mem[0,2,2,3,4,6,6,7]
5951 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
5952 ; AVX512F-SLOW-NEXT: vpshufd $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
5953 ; AVX512F-SLOW-NEXT: # ymm5 = mem[0,0,2,1,4,4,6,5]
5954 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm8 = ymm8[0,2,2,3,4,6,6,7]
5955 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm8, %zmm5, %zmm5
5956 ; AVX512F-SLOW-NEXT: vpbroadcastq {{.*#+}} zmm8 = [65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0]
5957 ; AVX512F-SLOW-NEXT: vpandnq %zmm4, %zmm8, %zmm4
5958 ; AVX512F-SLOW-NEXT: vpandq %zmm8, %zmm5, %zmm5
5959 ; AVX512F-SLOW-NEXT: movw $-21846, %ax # imm = 0xAAAA
5960 ; AVX512F-SLOW-NEXT: kmovw %eax, %k1
5961 ; AVX512F-SLOW-NEXT: vpord %zmm4, %zmm5, %zmm6 {%k1}
5962 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
5963 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
5964 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm2, %ymm2
5965 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
5966 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm10, %ymm0
5967 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
5968 ; AVX512F-SLOW-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
5969 ; AVX512F-SLOW-NEXT: # ymm2 = mem[0,1,1,3,4,5,5,7]
5970 ; AVX512F-SLOW-NEXT: vpshufd $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
5971 ; AVX512F-SLOW-NEXT: # ymm4 = mem[2,1,3,3,6,5,7,7]
5972 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm2, %zmm9
5973 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm3, %zmm9
5974 ; AVX512F-SLOW-NEXT: vpshufd $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
5975 ; AVX512F-SLOW-NEXT: # ymm0 = mem[0,0,2,1,4,4,6,5]
5976 ; AVX512F-SLOW-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
5977 ; AVX512F-SLOW-NEXT: # ymm2 = mem[0,2,2,3,4,6,6,7]
5978 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
5979 ; AVX512F-SLOW-NEXT: vpshufd $96, (%rsp), %ymm2 # 32-byte Folded Reload
5980 ; AVX512F-SLOW-NEXT: # ymm2 = mem[0,0,2,1,4,4,6,5]
5981 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm28[0,2,2,3,4,6,6,7]
5982 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm4, %zmm2, %zmm2
5983 ; AVX512F-SLOW-NEXT: vpandnq %zmm0, %zmm8, %zmm0
5984 ; AVX512F-SLOW-NEXT: vpandq %zmm8, %zmm2, %zmm2
5985 ; AVX512F-SLOW-NEXT: vpord %zmm0, %zmm2, %zmm9 {%k1}
5986 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm1[2,3,2,3]
5987 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
5988 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm1[3,3,3,3]
5989 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
5990 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
5991 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
5992 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
5993 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
5994 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
5995 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
5996 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm11[0,1,1,3,4,5,5,7]
5997 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm31[2,1,3,3,6,5,7,7]
5998 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm11
5999 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm0, %zmm3, %zmm11
6000 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm24[0,0,2,1,4,4,6,5]
6001 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm22[0,2,2,3,4,6,6,7]
6002 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
6003 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm20[0,0,2,1,4,4,6,5]
6004 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm18[0,2,2,3,4,6,6,7]
6005 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
6006 ; AVX512F-SLOW-NEXT: vpandnq %zmm0, %zmm8, %zmm0
6007 ; AVX512F-SLOW-NEXT: vpandq %zmm8, %zmm1, %zmm1
6008 ; AVX512F-SLOW-NEXT: vpord %zmm0, %zmm1, %zmm11 {%k1}
6009 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm0 = xmm7[0,1,2,3,4,5,5,7]
6010 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm7[0,1,2,3,6,5,7,7]
6011 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
6012 ; AVX512F-SLOW-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6013 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm12[8],xmm13[8],xmm12[9],xmm13[9],xmm12[10],xmm13[10],xmm12[11],xmm13[11],xmm12[12],xmm13[12],xmm12[13],xmm13[13],xmm12[14],xmm13[14],xmm12[15],xmm13[15]
6014 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm2[0,0,2,1,4,5,6,7]
6015 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm1 = xmm2[0,2,2,3,4,5,6,7]
6016 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm1, %ymm0, %ymm28
6017 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rsi), %xmm0
6018 ; AVX512F-SLOW-NEXT: vmovdqa 32(%rdi), %xmm1
6019 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
6020 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm4[2,3,2,3]
6021 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm5[0],zero,zero,zero,xmm5[1],zero,zero,zero
6022 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm4[3,3,3,3]
6023 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
6024 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm5, %ymm5
6025 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
6026 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[1,1,1,1]
6027 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
6028 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm4, %ymm7, %ymm4
6029 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm5
6030 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm4 = ymm30[0,1,1,3,4,5,5,7]
6031 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm29[2,1,3,3,6,5,7,7]
6032 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm4, %zmm4
6033 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm5, %zmm3, %zmm4
6034 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm5 = ymm23[0,0,2,1,4,4,6,5]
6035 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm21[0,2,2,3,4,6,6,7]
6036 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm5, %zmm5
6037 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm19[0,0,2,1,4,4,6,5]
6038 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm17[0,2,2,3,4,6,6,7]
6039 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm7, %zmm7
6040 ; AVX512F-SLOW-NEXT: vpandnq %zmm5, %zmm8, %zmm5
6041 ; AVX512F-SLOW-NEXT: vpandq %zmm8, %zmm7, %zmm7
6042 ; AVX512F-SLOW-NEXT: vpord %zmm5, %zmm7, %zmm4 {%k1}
6043 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm2[0,1,2,3,4,4,6,5]
6044 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,6,6,7]
6045 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm2, %ymm5, %ymm18
6046 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm26, %xmm2
6047 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm16, %xmm5
6048 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm2[8],xmm5[8],xmm2[9],xmm5[9],xmm2[10],xmm5[10],xmm2[11],xmm5[11],xmm2[12],xmm5[12],xmm2[13],xmm5[13],xmm2[14],xmm5[14],xmm2[15],xmm5[15]
6049 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm5 = xmm2[0,0,2,1,4,5,6,7]
6050 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm2[0,2,2,3,4,5,6,7]
6051 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm7, %ymm5, %ymm21
6052 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm2[0,1,2,3,4,4,6,5]
6053 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,6,6,7]
6054 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm2, %ymm5, %ymm22
6055 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm14[8],xmm15[8],xmm14[9],xmm15[9],xmm14[10],xmm15[10],xmm14[11],xmm15[11],xmm14[12],xmm15[12],xmm14[13],xmm15[13],xmm14[14],xmm15[14],xmm14[15],xmm15[15]
6056 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm5[0,1,1,3,4,5,6,7]
6057 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm7 = xmm5[2,1,3,3,4,5,6,7]
6058 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm7, %ymm2, %ymm26
6059 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm7 = xmm5[0,1,2,3,4,5,5,7]
6060 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,6,5,7,7]
6061 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm5, %ymm7, %ymm19
6062 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6063 ; AVX512F-SLOW-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm5 # 16-byte Folded Reload
6064 ; AVX512F-SLOW-NEXT: # xmm5 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
6065 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm10 = xmm5[0,0,2,1,4,5,6,7]
6066 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm13 = xmm5[0,2,2,3,4,5,6,7]
6067 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm13, %ymm10, %ymm14
6068 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm10 = xmm5[0,1,2,3,4,4,6,5]
6069 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,6,6,7]
6070 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm5, %ymm10, %ymm15
6071 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6072 ; AVX512F-SLOW-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm5 # 16-byte Folded Reload
6073 ; AVX512F-SLOW-NEXT: # xmm5 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
6074 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm10 = xmm5[0,0,2,1,4,5,6,7]
6075 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm13 = xmm5[0,2,2,3,4,5,6,7]
6076 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm13, %ymm10, %ymm17
6077 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm10 = xmm5[0,1,2,3,4,4,6,5]
6078 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,6,6,7]
6079 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm5, %ymm10, %ymm20
6080 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
6081 ; AVX512F-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6082 ; AVX512F-SLOW-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm1 # 16-byte Folded Reload
6083 ; AVX512F-SLOW-NEXT: # xmm1 = xmm0[0],mem[0],xmm0[1],mem[1],xmm0[2],mem[2],xmm0[3],mem[3],xmm0[4],mem[4],xmm0[5],mem[5],xmm0[6],mem[6],xmm0[7],mem[7]
6084 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[0,1,1,3,4,5,6,7]
6085 ; AVX512F-SLOW-NEXT: vpshuflw {{.*#+}} xmm10 = xmm1[2,1,3,3,4,5,6,7]
6086 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm0
6087 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm10 = xmm1[0,1,2,3,4,5,5,7]
6088 ; AVX512F-SLOW-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,7,7]
6089 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm10, %ymm1
6090 ; AVX512F-SLOW-NEXT: vmovdqa 16(%rsi), %xmm10
6091 ; AVX512F-SLOW-NEXT: vmovdqa 16(%rdi), %xmm13
6092 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm12 = xmm13[0],xmm10[0],xmm13[1],xmm10[1],xmm13[2],xmm10[2],xmm13[3],xmm10[3],xmm13[4],xmm10[4],xmm13[5],xmm10[5],xmm13[6],xmm10[6],xmm13[7],xmm10[7]
6093 ; AVX512F-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm10 = xmm13[8],xmm10[8],xmm13[9],xmm10[9],xmm13[10],xmm10[10],xmm13[11],xmm10[11],xmm13[12],xmm10[12],xmm13[13],xmm10[13],xmm13[14],xmm10[14],xmm13[15],xmm10[15]
6094 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm5[2,3,2,3]
6095 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm13 = xmm13[0],zero,zero,zero,xmm13[1],zero,zero,zero
6096 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm23 = xmm5[3,3,3,3]
6097 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm23 = xmm23[0],zero,zero,zero,xmm23[1],zero,zero,zero
6098 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm23, %ymm13, %ymm13
6099 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm23 = xmm5[0],zero,zero,zero,xmm5[1],zero,zero,zero
6100 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm5 = xmm5[1,1,1,1]
6101 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm5 = xmm5[0],zero,zero,zero,xmm5[1],zero,zero,zero
6102 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm5, %ymm23, %ymm5
6103 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm5, %zmm13
6104 ; AVX512F-SLOW-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
6105 ; AVX512F-SLOW-NEXT: # ymm5 = mem[0,1,1,3,4,5,5,7]
6106 ; AVX512F-SLOW-NEXT: vpshufd $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Folded Reload
6107 ; AVX512F-SLOW-NEXT: # ymm23 = mem[2,1,3,3,6,5,7,7]
6108 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm23, %zmm5, %zmm5
6109 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm13, %zmm3, %zmm5
6110 ; AVX512F-SLOW-NEXT: vpshufd $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Folded Reload
6111 ; AVX512F-SLOW-NEXT: # ymm13 = mem[0,0,2,1,4,4,6,5]
6112 ; AVX512F-SLOW-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Folded Reload
6113 ; AVX512F-SLOW-NEXT: # ymm23 = mem[0,2,2,3,4,6,6,7]
6114 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm23, %zmm13, %zmm13
6115 ; AVX512F-SLOW-NEXT: vpandnq %zmm13, %zmm8, %zmm13
6116 ; AVX512F-SLOW-NEXT: vpshufd $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Folded Reload
6117 ; AVX512F-SLOW-NEXT: # ymm23 = mem[0,0,2,1,4,4,6,5]
6118 ; AVX512F-SLOW-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm24 # 32-byte Folded Reload
6119 ; AVX512F-SLOW-NEXT: # ymm24 = mem[0,2,2,3,4,6,6,7]
6120 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm24, %zmm23, %zmm23
6121 ; AVX512F-SLOW-NEXT: vpandq %zmm8, %zmm23, %zmm23
6122 ; AVX512F-SLOW-NEXT: vpord %zmm13, %zmm23, %zmm5 {%k1}
6123 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm13 = xmm12[2,3,2,3]
6124 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm23 = xmm12[3,3,3,3]
6125 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm24 = xmm12[0],zero,zero,zero,xmm12[1],zero,zero,zero
6126 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm12[1,1,1,1]
6127 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm25, %xmm2
6128 ; AVX512F-SLOW-NEXT: vmovdqa64 %xmm27, %xmm7
6129 ; AVX512F-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm2[0],xmm7[0],xmm2[1],xmm7[1],xmm2[2],xmm7[2],xmm2[3],xmm7[3],xmm2[4],xmm7[4],xmm2[5],xmm7[5],xmm2[6],xmm7[6],xmm2[7],xmm7[7]
6130 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm13 = xmm13[0],zero,zero,zero,xmm13[1],zero,zero,zero
6131 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm23 = xmm23[0],zero,zero,zero,xmm23[1],zero,zero,zero
6132 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm23, %ymm13, %ymm13
6133 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm12 = xmm12[0],zero,zero,zero,xmm12[1],zero,zero,zero
6134 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm12, %ymm24, %ymm12
6135 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm23 = xmm10[2,3,2,3]
6136 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm24 = xmm10[3,3,3,3]
6137 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm29 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
6138 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm10 = xmm10[1,1,1,1]
6139 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm13, %zmm12, %zmm12
6140 ; AVX512F-SLOW-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Folded Reload
6141 ; AVX512F-SLOW-NEXT: # ymm13 = mem[0,1,1,3,4,5,5,7]
6142 ; AVX512F-SLOW-NEXT: vpshufd $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm30 # 32-byte Folded Reload
6143 ; AVX512F-SLOW-NEXT: # ymm30 = mem[2,1,3,3,6,5,7,7]
6144 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm30, %zmm13, %zmm13
6145 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm12, %zmm3, %zmm13
6146 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm12 = xmm2[2,3,2,3]
6147 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm30 = xmm2[3,3,3,3]
6148 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm31 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
6149 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,1,1]
6150 ; AVX512F-SLOW-NEXT: vpshufd $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm25 # 32-byte Folded Reload
6151 ; AVX512F-SLOW-NEXT: # ymm25 = mem[0,0,2,1,4,4,6,5]
6152 ; AVX512F-SLOW-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm27 # 32-byte Folded Reload
6153 ; AVX512F-SLOW-NEXT: # ymm27 = mem[0,2,2,3,4,6,6,7]
6154 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm27, %zmm25, %zmm25
6155 ; AVX512F-SLOW-NEXT: vpshufd $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm27 # 32-byte Folded Reload
6156 ; AVX512F-SLOW-NEXT: # ymm27 = mem[0,0,2,1,4,4,6,5]
6157 ; AVX512F-SLOW-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Folded Reload
6158 ; AVX512F-SLOW-NEXT: # ymm16 = mem[0,2,2,3,4,6,6,7]
6159 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm27, %zmm16
6160 ; AVX512F-SLOW-NEXT: vpandnq %zmm25, %zmm8, %zmm25
6161 ; AVX512F-SLOW-NEXT: vpandq %zmm8, %zmm16, %zmm16
6162 ; AVX512F-SLOW-NEXT: vpord %zmm25, %zmm16, %zmm13 {%k1}
6163 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm16 = xmm23[0],zero,zero,zero,xmm23[1],zero,zero,zero
6164 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm23 = xmm24[0],zero,zero,zero,xmm24[1],zero,zero,zero
6165 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm23, %ymm16, %ymm16
6166 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm10[0],zero,zero,zero,xmm10[1],zero,zero,zero
6167 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm10, %ymm29, %ymm10
6168 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm16, %zmm10, %zmm10
6169 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm16 = ymm28[0,1,1,3,4,5,5,7]
6170 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm18 = ymm18[2,1,3,3,6,5,7,7]
6171 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm18, %zmm16, %zmm16
6172 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm10, %zmm3, %zmm16
6173 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm21[0,0,2,1,4,4,6,5]
6174 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm18 = ymm22[0,2,2,3,4,6,6,7]
6175 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm18, %zmm10, %zmm10
6176 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm18 = ymm26[0,0,2,1,4,4,6,5]
6177 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm19[0,2,2,3,4,6,6,7]
6178 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm18, %zmm7
6179 ; AVX512F-SLOW-NEXT: vpandnq %zmm10, %zmm8, %zmm10
6180 ; AVX512F-SLOW-NEXT: vpandq %zmm8, %zmm7, %zmm7
6181 ; AVX512F-SLOW-NEXT: vpord %zmm10, %zmm7, %zmm16 {%k1}
6182 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm12[0],zero,zero,zero,xmm12[1],zero,zero,zero
6183 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm30[0],zero,zero,zero,xmm30[1],zero,zero,zero
6184 ; AVX512F-SLOW-NEXT: vinserti128 $1, %xmm10, %ymm7, %ymm7
6185 ; AVX512F-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
6186 ; AVX512F-SLOW-NEXT: vinserti32x4 $1, %xmm2, %ymm31, %ymm2
6187 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm2, %zmm2
6188 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm7 = ymm14[0,1,1,3,4,5,5,7]
6189 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm10 = ymm15[2,1,3,3,6,5,7,7]
6190 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm10, %zmm7, %zmm7
6191 ; AVX512F-SLOW-NEXT: vpternlogq $184, %zmm2, %zmm3, %zmm7
6192 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm2 = ymm17[0,0,2,1,4,4,6,5]
6193 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm3 = ymm20[0,2,2,3,4,6,6,7]
6194 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
6195 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
6196 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm1 = ymm1[0,2,2,3,4,6,6,7]
6197 ; AVX512F-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
6198 ; AVX512F-SLOW-NEXT: vpandnq %zmm2, %zmm8, %zmm1
6199 ; AVX512F-SLOW-NEXT: vpandq %zmm8, %zmm0, %zmm0
6200 ; AVX512F-SLOW-NEXT: vpord %zmm1, %zmm0, %zmm7 {%k1}
6201 ; AVX512F-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6202 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm7, (%rax)
6203 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm16, 192(%rax)
6204 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm13, 128(%rax)
6205 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm5, 320(%rax)
6206 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm4, 256(%rax)
6207 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm11, 448(%rax)
6208 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm9, 384(%rax)
6209 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm6, 64(%rax)
6210 ; AVX512F-SLOW-NEXT: addq $648, %rsp # imm = 0x288
6211 ; AVX512F-SLOW-NEXT: vzeroupper
6212 ; AVX512F-SLOW-NEXT: retq
6213 ;
6214 ; AVX512F-FAST-LABEL: store_i8_stride8_vf64:
6215 ; AVX512F-FAST: # %bb.0:
6216 ; AVX512F-FAST-NEXT: subq $392, %rsp # imm = 0x188
6217 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6218 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
6219 ; AVX512F-FAST-NEXT: vmovdqa (%rcx), %xmm2
6220 ; AVX512F-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6221 ; AVX512F-FAST-NEXT: vmovdqa 48(%rcx), %xmm0
6222 ; AVX512F-FAST-NEXT: vmovdqa (%rdx), %xmm15
6223 ; AVX512F-FAST-NEXT: vmovdqa 48(%rdx), %xmm1
6224 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm15[8],xmm2[8],xmm15[9],xmm2[9],xmm15[10],xmm2[10],xmm15[11],xmm2[11],xmm15[12],xmm2[12],xmm15[13],xmm2[13],xmm15[14],xmm2[14],xmm15[15],xmm2[15]
6225 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
6226 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm6 = [8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
6227 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm2, %ymm3
6228 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm8 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15]
6229 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm2, %ymm2
6230 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
6231 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6232 ; AVX512F-FAST-NEXT: vmovdqa (%r10), %xmm2
6233 ; AVX512F-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6234 ; AVX512F-FAST-NEXT: vmovdqa (%rax), %xmm14
6235 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm14[8],xmm2[8],xmm14[9],xmm2[9],xmm14[10],xmm2[10],xmm14[11],xmm2[11],xmm14[12],xmm2[12],xmm14[13],xmm2[13],xmm14[14],xmm2[14],xmm14[15],xmm2[15]
6236 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
6237 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm11 = [0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
6238 ; AVX512F-FAST-NEXT: vpshufb %ymm11, %ymm2, %ymm3
6239 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm10 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7]
6240 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm2, %ymm2
6241 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm19
6242 ; AVX512F-FAST-NEXT: vmovdqa (%r9), %xmm3
6243 ; AVX512F-FAST-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6244 ; AVX512F-FAST-NEXT: vmovdqa (%r8), %xmm2
6245 ; AVX512F-FAST-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6246 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm2[8],xmm3[8],xmm2[9],xmm3[9],xmm2[10],xmm3[10],xmm2[11],xmm3[11],xmm2[12],xmm3[12],xmm2[13],xmm3[13],xmm2[14],xmm3[14],xmm2[15],xmm3[15]
6247 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
6248 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
6249 ; AVX512F-FAST-NEXT: vpshufb %ymm4, %ymm2, %ymm3
6250 ; AVX512F-FAST-NEXT: vmovdqa %ymm4, %ymm9
6251 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7]
6252 ; AVX512F-FAST-NEXT: vpshufb %ymm4, %ymm2, %ymm2
6253 ; AVX512F-FAST-NEXT: vmovdqa %ymm4, %ymm12
6254 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm21
6255 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
6256 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
6257 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm2, %ymm3
6258 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm2, %ymm2
6259 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
6260 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6261 ; AVX512F-FAST-NEXT: vmovdqa 48(%r10), %xmm2
6262 ; AVX512F-FAST-NEXT: vmovdqa 48(%rax), %xmm3
6263 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
6264 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm4
6265 ; AVX512F-FAST-NEXT: vpshufb %ymm11, %ymm4, %ymm5
6266 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm4, %ymm4
6267 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm4
6268 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6269 ; AVX512F-FAST-NEXT: vmovdqa 48(%r9), %xmm4
6270 ; AVX512F-FAST-NEXT: vmovdqa 48(%r8), %xmm5
6271 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
6272 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm7, %ymm7, %ymm7
6273 ; AVX512F-FAST-NEXT: vpshufb %ymm9, %ymm7, %ymm13
6274 ; AVX512F-FAST-NEXT: vpshufb %ymm12, %ymm7, %ymm7
6275 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm13, %zmm7, %zmm7
6276 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6277 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
6278 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6279 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm0, %ymm1
6280 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm0
6281 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
6282 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6283 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
6284 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6285 ; AVX512F-FAST-NEXT: vpshufb %ymm11, %ymm0, %ymm1
6286 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm0, %ymm0
6287 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
6288 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
6289 ; AVX512F-FAST-NEXT: vmovdqa 32(%rcx), %xmm0
6290 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdx), %xmm1
6291 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
6292 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
6293 ; AVX512F-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm3
6294 ; AVX512F-FAST-NEXT: vpshufb %ymm12, %ymm2, %ymm2
6295 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
6296 ; AVX512F-FAST-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
6297 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
6298 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
6299 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm2, %ymm3
6300 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm2, %ymm2
6301 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm20
6302 ; AVX512F-FAST-NEXT: vmovdqa 32(%r10), %xmm2
6303 ; AVX512F-FAST-NEXT: vmovdqa 32(%rax), %xmm3
6304 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
6305 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm4
6306 ; AVX512F-FAST-NEXT: vpshufb %ymm11, %ymm4, %ymm5
6307 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm4, %ymm4
6308 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm22
6309 ; AVX512F-FAST-NEXT: vmovdqa 32(%r9), %xmm4
6310 ; AVX512F-FAST-NEXT: vmovdqa 32(%r8), %xmm5
6311 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
6312 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm7, %ymm7, %ymm7
6313 ; AVX512F-FAST-NEXT: vpshufb %ymm9, %ymm7, %ymm13
6314 ; AVX512F-FAST-NEXT: vpshufb %ymm12, %ymm7, %ymm7
6315 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm13, %zmm7, %zmm23
6316 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
6317 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6318 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm0, %ymm1
6319 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm0
6320 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm24
6321 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
6322 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6323 ; AVX512F-FAST-NEXT: vpshufb %ymm11, %ymm0, %ymm1
6324 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm0, %ymm0
6325 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm25
6326 ; AVX512F-FAST-NEXT: vmovdqa 16(%rcx), %xmm0
6327 ; AVX512F-FAST-NEXT: vmovdqa 16(%rdx), %xmm1
6328 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
6329 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
6330 ; AVX512F-FAST-NEXT: vpshufb %ymm9, %ymm2, %ymm3
6331 ; AVX512F-FAST-NEXT: vpshufb %ymm12, %ymm2, %ymm2
6332 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm26
6333 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
6334 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
6335 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm2, %ymm3
6336 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm2, %ymm2
6337 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm27
6338 ; AVX512F-FAST-NEXT: vmovdqa 16(%r10), %xmm2
6339 ; AVX512F-FAST-NEXT: vmovdqa 16(%rax), %xmm3
6340 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm4 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
6341 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm4, %ymm4, %ymm4
6342 ; AVX512F-FAST-NEXT: vpshufb %ymm11, %ymm4, %ymm5
6343 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm4, %ymm4
6344 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm4, %zmm28
6345 ; AVX512F-FAST-NEXT: vmovdqa 16(%r9), %xmm4
6346 ; AVX512F-FAST-NEXT: vmovdqa 16(%r8), %xmm5
6347 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm7 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3],xmm5[4],xmm4[4],xmm5[5],xmm4[5],xmm5[6],xmm4[6],xmm5[7],xmm4[7]
6348 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm7, %ymm7, %ymm7
6349 ; AVX512F-FAST-NEXT: vpshufb %ymm9, %ymm7, %ymm13
6350 ; AVX512F-FAST-NEXT: vpshufb %ymm12, %ymm7, %ymm7
6351 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm13, %zmm7, %zmm29
6352 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
6353 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6354 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm0, %ymm1
6355 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm0, %ymm0
6356 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm30
6357 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
6358 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6359 ; AVX512F-FAST-NEXT: vpshufb %ymm11, %ymm0, %ymm1
6360 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm0, %ymm0
6361 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm31
6362 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm5[8],xmm4[8],xmm5[9],xmm4[9],xmm5[10],xmm4[10],xmm5[11],xmm4[11],xmm5[12],xmm4[12],xmm5[13],xmm4[13],xmm5[14],xmm4[14],xmm5[15],xmm4[15]
6363 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6364 ; AVX512F-FAST-NEXT: vpshufb %ymm9, %ymm0, %ymm1
6365 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm9, %ymm17
6366 ; AVX512F-FAST-NEXT: vpshufb %ymm12, %ymm0, %ymm0
6367 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm12, %ymm18
6368 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm16
6369 ; AVX512F-FAST-NEXT: vmovdqa (%rsi), %xmm12
6370 ; AVX512F-FAST-NEXT: vmovdqa (%rdi), %xmm9
6371 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm9[8],xmm12[8],xmm9[9],xmm12[9],xmm9[10],xmm12[10],xmm9[11],xmm12[11],xmm9[12],xmm12[12],xmm9[13],xmm12[13],xmm9[14],xmm12[14],xmm9[15],xmm12[15]
6372 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} xmm7 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
6373 ; AVX512F-FAST-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
6374 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm0, %xmm2
6375 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
6376 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6377 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm5 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
6378 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm0, %ymm0
6379 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm4
6380 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} zmm3 = [65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535]
6381 ; AVX512F-FAST-NEXT: vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm4 # 64-byte Folded Reload
6382 ; AVX512F-FAST-NEXT: vpbroadcastq {{.*#+}} zmm0 = [65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0,65535,65535,65535,0]
6383 ; AVX512F-FAST-NEXT: vpandnq %zmm19, %zmm0, %zmm19
6384 ; AVX512F-FAST-NEXT: vpandq %zmm0, %zmm21, %zmm21
6385 ; AVX512F-FAST-NEXT: movw $-21846, %ax # imm = 0xAAAA
6386 ; AVX512F-FAST-NEXT: kmovw %eax, %k1
6387 ; AVX512F-FAST-NEXT: vpord %zmm19, %zmm21, %zmm4 {%k1}
6388 ; AVX512F-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm15 # 16-byte Folded Reload
6389 ; AVX512F-FAST-NEXT: # xmm15 = xmm15[0],mem[0],xmm15[1],mem[1],xmm15[2],mem[2],xmm15[3],mem[3],xmm15[4],mem[4],xmm15[5],mem[5],xmm15[6],mem[6],xmm15[7],mem[7]
6390 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm15, %ymm15, %ymm15
6391 ; AVX512F-FAST-NEXT: vpshufb %ymm6, %ymm15, %ymm6
6392 ; AVX512F-FAST-NEXT: vpshufb %ymm8, %ymm15, %ymm8
6393 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm8, %zmm19
6394 ; AVX512F-FAST-NEXT: vmovdqa 48(%rsi), %xmm15
6395 ; AVX512F-FAST-NEXT: vmovdqa 48(%rdi), %xmm8
6396 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm6 = xmm8[0],xmm15[0],xmm8[1],xmm15[1],xmm8[2],xmm15[2],xmm8[3],xmm15[3],xmm8[4],xmm15[4],xmm8[5],xmm15[5],xmm8[6],xmm15[6],xmm8[7],xmm15[7]
6397 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm6, %xmm1
6398 ; AVX512F-FAST-NEXT: vpmovzxwq {{.*#+}} xmm21 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
6399 ; AVX512F-FAST-NEXT: vinserti32x4 $1, %xmm1, %ymm21, %ymm1
6400 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm6, %ymm6, %ymm6
6401 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm6, %ymm6
6402 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm6, %zmm1, %zmm6
6403 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm8[8],xmm15[8],xmm8[9],xmm15[9],xmm8[10],xmm15[10],xmm8[11],xmm15[11],xmm8[12],xmm15[12],xmm8[13],xmm15[13],xmm8[14],xmm15[14],xmm8[15],xmm15[15]
6404 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm1, %xmm8
6405 ; AVX512F-FAST-NEXT: vpmovzxwq {{.*#+}} xmm15 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
6406 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm8, %ymm15, %ymm8
6407 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
6408 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm1, %ymm1
6409 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm8, %zmm15
6410 ; AVX512F-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm1 # 16-byte Folded Reload
6411 ; AVX512F-FAST-NEXT: # xmm1 = xmm14[0],mem[0],xmm14[1],mem[1],xmm14[2],mem[2],xmm14[3],mem[3],xmm14[4],mem[4],xmm14[5],mem[5],xmm14[6],mem[6],xmm14[7],mem[7]
6412 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
6413 ; AVX512F-FAST-NEXT: vpshufb %ymm11, %ymm1, %ymm8
6414 ; AVX512F-FAST-NEXT: vpshufb %ymm10, %ymm1, %ymm1
6415 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm1, %zmm13
6416 ; AVX512F-FAST-NEXT: vmovdqa 32(%rsi), %xmm1
6417 ; AVX512F-FAST-NEXT: vmovdqa 32(%rdi), %xmm8
6418 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm11 = xmm8[0],xmm1[0],xmm8[1],xmm1[1],xmm8[2],xmm1[2],xmm8[3],xmm1[3],xmm8[4],xmm1[4],xmm8[5],xmm1[5],xmm8[6],xmm1[6],xmm8[7],xmm1[7]
6419 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm11, %xmm14
6420 ; AVX512F-FAST-NEXT: vpmovzxwq {{.*#+}} xmm21 = xmm11[0],zero,zero,zero,xmm11[1],zero,zero,zero
6421 ; AVX512F-FAST-NEXT: vinserti32x4 $1, %xmm14, %ymm21, %ymm14
6422 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm11, %ymm11, %ymm11
6423 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm11, %ymm11
6424 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm11, %zmm14, %zmm11
6425 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm8[8],xmm1[8],xmm8[9],xmm1[9],xmm8[10],xmm1[10],xmm8[11],xmm1[11],xmm8[12],xmm1[12],xmm8[13],xmm1[13],xmm8[14],xmm1[14],xmm8[15],xmm1[15]
6426 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm1, %xmm8
6427 ; AVX512F-FAST-NEXT: vpmovzxwq {{.*#+}} xmm14 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
6428 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm8, %ymm14, %ymm8
6429 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
6430 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm1, %ymm1
6431 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm8, %zmm14
6432 ; AVX512F-FAST-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6433 ; AVX512F-FAST-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
6434 ; AVX512F-FAST-NEXT: # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3],xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
6435 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
6436 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm17, %ymm2
6437 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm1, %ymm8
6438 ; AVX512F-FAST-NEXT: vmovdqa64 %ymm18, %ymm2
6439 ; AVX512F-FAST-NEXT: vpshufb %ymm2, %ymm1, %ymm1
6440 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm1, %zmm21
6441 ; AVX512F-FAST-NEXT: vmovdqa 16(%rsi), %xmm8
6442 ; AVX512F-FAST-NEXT: vmovdqa 16(%rdi), %xmm10
6443 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3],xmm10[4],xmm8[4],xmm10[5],xmm8[5],xmm10[6],xmm8[6],xmm10[7],xmm8[7]
6444 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm1, %xmm2
6445 ; AVX512F-FAST-NEXT: vpmovzxwq {{.*#+}} xmm17 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
6446 ; AVX512F-FAST-NEXT: vinserti32x4 $1, %xmm2, %ymm17, %ymm2
6447 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
6448 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm1, %ymm1
6449 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
6450 ; AVX512F-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm10[8],xmm8[8],xmm10[9],xmm8[9],xmm10[10],xmm8[10],xmm10[11],xmm8[11],xmm10[12],xmm8[12],xmm10[13],xmm8[13],xmm10[14],xmm8[14],xmm10[15],xmm8[15]
6451 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm2, %xmm8
6452 ; AVX512F-FAST-NEXT: vpmovzxwq {{.*#+}} xmm10 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
6453 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm8, %ymm10, %ymm8
6454 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
6455 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm2, %ymm2
6456 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm8, %zmm2
6457 ; AVX512F-FAST-NEXT: vpunpcklbw {{.*#+}} xmm8 = xmm9[0],xmm12[0],xmm9[1],xmm12[1],xmm9[2],xmm12[2],xmm9[3],xmm12[3],xmm9[4],xmm12[4],xmm9[5],xmm12[5],xmm9[6],xmm12[6],xmm9[7],xmm12[7]
6458 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm8, %ymm8, %ymm9
6459 ; AVX512F-FAST-NEXT: vpshufb %ymm5, %ymm9, %ymm5
6460 ; AVX512F-FAST-NEXT: vpshufb %xmm7, %xmm8, %xmm7
6461 ; AVX512F-FAST-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm8[0],zero,zero,zero,xmm8[1],zero,zero,zero
6462 ; AVX512F-FAST-NEXT: vinserti128 $1, %xmm7, %ymm8, %ymm7
6463 ; AVX512F-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm7, %zmm5
6464 ; AVX512F-FAST-NEXT: vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm6 # 64-byte Folded Reload
6465 ; AVX512F-FAST-NEXT: vpandnq {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm7 # 64-byte Folded Reload
6466 ; AVX512F-FAST-NEXT: vpandq {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm8 # 64-byte Folded Reload
6467 ; AVX512F-FAST-NEXT: vpord %zmm7, %zmm8, %zmm6 {%k1}
6468 ; AVX512F-FAST-NEXT: vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm15 # 64-byte Folded Reload
6469 ; AVX512F-FAST-NEXT: vpandnq (%rsp), %zmm0, %zmm7 # 64-byte Folded Reload
6470 ; AVX512F-FAST-NEXT: vpandq {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm8 # 64-byte Folded Reload
6471 ; AVX512F-FAST-NEXT: vpord %zmm7, %zmm8, %zmm15 {%k1}
6472 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm20, %zmm3, %zmm11
6473 ; AVX512F-FAST-NEXT: vpandnq %zmm22, %zmm0, %zmm7
6474 ; AVX512F-FAST-NEXT: vpandq %zmm0, %zmm23, %zmm8
6475 ; AVX512F-FAST-NEXT: vpord %zmm7, %zmm8, %zmm11 {%k1}
6476 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm24, %zmm3, %zmm14
6477 ; AVX512F-FAST-NEXT: vpandnq %zmm25, %zmm0, %zmm7
6478 ; AVX512F-FAST-NEXT: vpandq %zmm0, %zmm26, %zmm8
6479 ; AVX512F-FAST-NEXT: vpord %zmm7, %zmm8, %zmm14 {%k1}
6480 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm27, %zmm3, %zmm1
6481 ; AVX512F-FAST-NEXT: vpandnq %zmm28, %zmm0, %zmm7
6482 ; AVX512F-FAST-NEXT: vpandq %zmm0, %zmm29, %zmm8
6483 ; AVX512F-FAST-NEXT: vpord %zmm7, %zmm8, %zmm1 {%k1}
6484 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm30, %zmm3, %zmm2
6485 ; AVX512F-FAST-NEXT: vpandnq %zmm31, %zmm0, %zmm7
6486 ; AVX512F-FAST-NEXT: vpandq %zmm0, %zmm16, %zmm8
6487 ; AVX512F-FAST-NEXT: vpord %zmm7, %zmm8, %zmm2 {%k1}
6488 ; AVX512F-FAST-NEXT: vpternlogq $226, %zmm19, %zmm3, %zmm5
6489 ; AVX512F-FAST-NEXT: vpandnq %zmm13, %zmm0, %zmm3
6490 ; AVX512F-FAST-NEXT: vpandq %zmm0, %zmm21, %zmm0
6491 ; AVX512F-FAST-NEXT: vpord %zmm3, %zmm0, %zmm5 {%k1}
6492 ; AVX512F-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6493 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm5, (%rax)
6494 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm2, 192(%rax)
6495 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, 128(%rax)
6496 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm14, 320(%rax)
6497 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm11, 256(%rax)
6498 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm15, 448(%rax)
6499 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm6, 384(%rax)
6500 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm4, 64(%rax)
6501 ; AVX512F-FAST-NEXT: addq $392, %rsp # imm = 0x188
6502 ; AVX512F-FAST-NEXT: vzeroupper
6503 ; AVX512F-FAST-NEXT: retq
6504 ;
6505 ; AVX512BW-SLOW-LABEL: store_i8_stride8_vf64:
6506 ; AVX512BW-SLOW: # %bb.0:
6507 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6508 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %r10
6509 ; AVX512BW-SLOW-NEXT: vmovdqa (%r10), %xmm1
6510 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6511 ; AVX512BW-SLOW-NEXT: vmovdqa 16(%r10), %xmm13
6512 ; AVX512BW-SLOW-NEXT: vmovdqa64 32(%r10), %xmm22
6513 ; AVX512BW-SLOW-NEXT: vmovdqa64 48(%r10), %xmm19
6514 ; AVX512BW-SLOW-NEXT: vmovdqa (%rax), %xmm0
6515 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6516 ; AVX512BW-SLOW-NEXT: vmovdqa64 16(%rax), %xmm16
6517 ; AVX512BW-SLOW-NEXT: vmovdqa64 32(%rax), %xmm23
6518 ; AVX512BW-SLOW-NEXT: vmovdqa64 48(%rax), %xmm20
6519 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
6520 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
6521 ; AVX512BW-SLOW-NEXT: vmovdqa (%r9), %xmm5
6522 ; AVX512BW-SLOW-NEXT: vmovdqa64 16(%r9), %xmm17
6523 ; AVX512BW-SLOW-NEXT: vmovdqa64 32(%r9), %xmm25
6524 ; AVX512BW-SLOW-NEXT: vmovdqa64 48(%r9), %xmm21
6525 ; AVX512BW-SLOW-NEXT: vmovdqa (%r8), %xmm7
6526 ; AVX512BW-SLOW-NEXT: vmovdqa64 16(%r8), %xmm18
6527 ; AVX512BW-SLOW-NEXT: vmovdqa64 32(%r8), %xmm26
6528 ; AVX512BW-SLOW-NEXT: vmovdqa64 48(%r8), %xmm24
6529 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm7[8],xmm5[8],xmm7[9],xmm5[9],xmm7[10],xmm5[10],xmm7[11],xmm5[11],xmm7[12],xmm5[12],xmm7[13],xmm5[13],xmm7[14],xmm5[14],xmm7[15],xmm5[15]
6530 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm1
6531 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm10 = [0,1,0,1,4,5,1,3,2,1,2,1,4,5,3,3,16,17,20,21,20,21,21,23,16,17,22,21,22,21,23,23]
6532 ; AVX512BW-SLOW-NEXT: vpermw %zmm1, %zmm10, %zmm4
6533 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm9 = [0,0,0,0,4,5,2,1,0,2,0,2,4,5,2,3,16,17,20,20,20,20,22,21,16,17,20,22,20,22,22,23]
6534 ; AVX512BW-SLOW-NEXT: movl $-2004318072, %eax # imm = 0x88888888
6535 ; AVX512BW-SLOW-NEXT: kmovd %eax, %k1
6536 ; AVX512BW-SLOW-NEXT: vpermw %zmm0, %zmm9, %zmm4 {%k1}
6537 ; AVX512BW-SLOW-NEXT: vmovdqa (%rsi), %xmm0
6538 ; AVX512BW-SLOW-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6539 ; AVX512BW-SLOW-NEXT: vmovdqa64 48(%rsi), %xmm28
6540 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdi), %xmm6
6541 ; AVX512BW-SLOW-NEXT: vmovdqa64 48(%rdi), %xmm30
6542 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm6[8],xmm0[8],xmm6[9],xmm0[9],xmm6[10],xmm0[10],xmm6[11],xmm0[11],xmm6[12],xmm0[12],xmm6[13],xmm0[13],xmm6[14],xmm0[14],xmm6[15],xmm0[15]
6543 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,2,3]
6544 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
6545 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm8 = xmm0[3,3,3,3]
6546 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm8[0],zero,zero,zero,xmm8[1],zero,zero,zero
6547 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm8, %ymm1, %ymm1
6548 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
6549 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
6550 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
6551 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm8, %ymm0
6552 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm3
6553 ; AVX512BW-SLOW-NEXT: vmovdqa (%rcx), %xmm8
6554 ; AVX512BW-SLOW-NEXT: vmovdqa 48(%rcx), %xmm1
6555 ; AVX512BW-SLOW-NEXT: vmovdqa (%rdx), %xmm11
6556 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm12 = xmm11[8],xmm8[8],xmm11[9],xmm8[9],xmm11[10],xmm8[10],xmm11[11],xmm8[11],xmm11[12],xmm8[12],xmm11[13],xmm8[13],xmm11[14],xmm8[14],xmm11[15],xmm8[15]
6557 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm12, %zmm12, %zmm14
6558 ; AVX512BW-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm12 = [0,0,2,1,2,1,6,7,0,2,2,3,2,3,6,7,20,20,18,19,22,21,22,21,20,22,18,19,22,23,22,23]
6559 ; AVX512BW-SLOW-NEXT: movl $572662306, %eax # imm = 0x22222222
6560 ; AVX512BW-SLOW-NEXT: kmovd %eax, %k2
6561 ; AVX512BW-SLOW-NEXT: vpermw %zmm14, %zmm12, %zmm3 {%k2}
6562 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm20[0],xmm19[0],xmm20[1],xmm19[1],xmm20[2],xmm19[2],xmm20[3],xmm19[3],xmm20[4],xmm19[4],xmm20[5],xmm19[5],xmm20[6],xmm19[6],xmm20[7],xmm19[7]
6563 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm14, %zmm14, %zmm15
6564 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm14 = xmm24[0],xmm21[0],xmm24[1],xmm21[1],xmm24[2],xmm21[2],xmm24[3],xmm21[3],xmm24[4],xmm21[4],xmm24[5],xmm21[5],xmm24[6],xmm21[6],xmm24[7],xmm21[7]
6565 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm14, %zmm14, %zmm14
6566 ; AVX512BW-SLOW-NEXT: vpermw %zmm14, %zmm10, %zmm14
6567 ; AVX512BW-SLOW-NEXT: vpermw %zmm15, %zmm9, %zmm14 {%k1}
6568 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm15 = xmm30[0],xmm28[0],xmm30[1],xmm28[1],xmm30[2],xmm28[2],xmm30[3],xmm28[3],xmm30[4],xmm28[4],xmm30[5],xmm28[5],xmm30[6],xmm28[6],xmm30[7],xmm28[7]
6569 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm27 = xmm15[2,3,2,3]
6570 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm27 = xmm27[0],zero,zero,zero,xmm27[1],zero,zero,zero
6571 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm29 = xmm15[3,3,3,3]
6572 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm29 = xmm29[0],zero,zero,zero,xmm29[1],zero,zero,zero
6573 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm29, %ymm27, %ymm27
6574 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm29 = xmm15[0],zero,zero,zero,xmm15[1],zero,zero,zero
6575 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm15 = xmm15[1,1,1,1]
6576 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm15 = xmm15[0],zero,zero,zero,xmm15[1],zero,zero,zero
6577 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm15, %ymm29, %ymm15
6578 ; AVX512BW-SLOW-NEXT: vmovdqa 48(%rdx), %xmm0
6579 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm27, %zmm15, %zmm15
6580 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm27 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
6581 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm27, %zmm27, %zmm27
6582 ; AVX512BW-SLOW-NEXT: vpermw %zmm27, %zmm12, %zmm15 {%k2}
6583 ; AVX512BW-SLOW-NEXT: vmovdqa64 32(%rsi), %xmm29
6584 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm19 = xmm20[8],xmm19[8],xmm20[9],xmm19[9],xmm20[10],xmm19[10],xmm20[11],xmm19[11],xmm20[12],xmm19[12],xmm20[13],xmm19[13],xmm20[14],xmm19[14],xmm20[15],xmm19[15]
6585 ; AVX512BW-SLOW-NEXT: vmovdqa64 32(%rdi), %xmm31
6586 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm24[8],xmm21[8],xmm24[9],xmm21[9],xmm24[10],xmm21[10],xmm24[11],xmm21[11],xmm24[12],xmm21[12],xmm24[13],xmm21[13],xmm24[14],xmm21[14],xmm24[15],xmm21[15]
6587 ; AVX512BW-SLOW-NEXT: vmovdqa64 32(%rcx), %xmm27
6588 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm19, %zmm19, %zmm21
6589 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm20, %zmm20, %zmm19
6590 ; AVX512BW-SLOW-NEXT: vpermw %zmm19, %zmm10, %zmm19
6591 ; AVX512BW-SLOW-NEXT: vpermw %zmm21, %zmm9, %zmm19 {%k1}
6592 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm30[8],xmm28[8],xmm30[9],xmm28[9],xmm30[10],xmm28[10],xmm30[11],xmm28[11],xmm30[12],xmm28[12],xmm30[13],xmm28[13],xmm30[14],xmm28[14],xmm30[15],xmm28[15]
6593 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm21 = xmm20[2,3,2,3]
6594 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm21 = xmm21[0],zero,zero,zero,xmm21[1],zero,zero,zero
6595 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm24 = xmm20[3,3,3,3]
6596 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm24 = xmm24[0],zero,zero,zero,xmm24[1],zero,zero,zero
6597 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm24, %ymm21, %ymm21
6598 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm24 = xmm20[0],zero,zero,zero,xmm20[1],zero,zero,zero
6599 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm20 = xmm20[1,1,1,1]
6600 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm20 = xmm20[0],zero,zero,zero,xmm20[1],zero,zero,zero
6601 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm20, %ymm24, %ymm20
6602 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm21, %zmm20, %zmm20
6603 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm1[8],xmm0[9],xmm1[9],xmm0[10],xmm1[10],xmm0[11],xmm1[11],xmm0[12],xmm1[12],xmm0[13],xmm1[13],xmm0[14],xmm1[14],xmm0[15],xmm1[15]
6604 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
6605 ; AVX512BW-SLOW-NEXT: vpermw %zmm0, %zmm12, %zmm20 {%k2}
6606 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm23[0],xmm22[0],xmm23[1],xmm22[1],xmm23[2],xmm22[2],xmm23[3],xmm22[3],xmm23[4],xmm22[4],xmm23[5],xmm22[5],xmm23[6],xmm22[6],xmm23[7],xmm22[7]
6607 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
6608 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm26[0],xmm25[0],xmm26[1],xmm25[1],xmm26[2],xmm25[2],xmm26[3],xmm25[3],xmm26[4],xmm25[4],xmm26[5],xmm25[5],xmm26[6],xmm25[6],xmm26[7],xmm25[7]
6609 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm1
6610 ; AVX512BW-SLOW-NEXT: vpermw %zmm1, %zmm10, %zmm21
6611 ; AVX512BW-SLOW-NEXT: vpermw %zmm0, %zmm9, %zmm21 {%k1}
6612 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm31[0],xmm29[0],xmm31[1],xmm29[1],xmm31[2],xmm29[2],xmm31[3],xmm29[3],xmm31[4],xmm29[4],xmm31[5],xmm29[5],xmm31[6],xmm29[6],xmm31[7],xmm29[7]
6613 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,2,3]
6614 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
6615 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm24 = xmm0[3,3,3,3]
6616 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm24 = xmm24[0],zero,zero,zero,xmm24[1],zero,zero,zero
6617 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm24, %ymm1, %ymm1
6618 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm24 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
6619 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
6620 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
6621 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm0, %ymm24, %ymm0
6622 ; AVX512BW-SLOW-NEXT: vmovdqa 32(%rdx), %xmm2
6623 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm24
6624 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm27[0],xmm2[1],xmm27[1],xmm2[2],xmm27[2],xmm2[3],xmm27[3],xmm2[4],xmm27[4],xmm2[5],xmm27[5],xmm2[6],xmm27[6],xmm2[7],xmm27[7]
6625 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
6626 ; AVX512BW-SLOW-NEXT: vpermw %zmm0, %zmm12, %zmm24 {%k2}
6627 ; AVX512BW-SLOW-NEXT: vmovdqa64 16(%rsi), %xmm28
6628 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm23[8],xmm22[8],xmm23[9],xmm22[9],xmm23[10],xmm22[10],xmm23[11],xmm22[11],xmm23[12],xmm22[12],xmm23[13],xmm22[13],xmm23[14],xmm22[14],xmm23[15],xmm22[15]
6629 ; AVX512BW-SLOW-NEXT: vmovdqa64 16(%rdi), %xmm30
6630 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm26[8],xmm25[8],xmm26[9],xmm25[9],xmm26[10],xmm25[10],xmm26[11],xmm25[11],xmm26[12],xmm25[12],xmm26[13],xmm25[13],xmm26[14],xmm25[14],xmm26[15],xmm25[15]
6631 ; AVX512BW-SLOW-NEXT: vmovdqa64 16(%rcx), %xmm25
6632 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
6633 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm1
6634 ; AVX512BW-SLOW-NEXT: vpermw %zmm1, %zmm10, %zmm22
6635 ; AVX512BW-SLOW-NEXT: vpermw %zmm0, %zmm9, %zmm22 {%k1}
6636 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm31[8],xmm29[8],xmm31[9],xmm29[9],xmm31[10],xmm29[10],xmm31[11],xmm29[11],xmm31[12],xmm29[12],xmm31[13],xmm29[13],xmm31[14],xmm29[14],xmm31[15],xmm29[15]
6637 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,2,3]
6638 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
6639 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm23 = xmm0[3,3,3,3]
6640 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm23 = xmm23[0],zero,zero,zero,xmm23[1],zero,zero,zero
6641 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm23, %ymm1, %ymm1
6642 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm23 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
6643 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
6644 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
6645 ; AVX512BW-SLOW-NEXT: vinserti32x4 $1, %xmm0, %ymm23, %ymm0
6646 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm23
6647 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm27[8],xmm2[9],xmm27[9],xmm2[10],xmm27[10],xmm2[11],xmm27[11],xmm2[12],xmm27[12],xmm2[13],xmm27[13],xmm2[14],xmm27[14],xmm2[15],xmm27[15]
6648 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
6649 ; AVX512BW-SLOW-NEXT: vpermw %zmm0, %zmm12, %zmm23 {%k2}
6650 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm16[0],xmm13[0],xmm16[1],xmm13[1],xmm16[2],xmm13[2],xmm16[3],xmm13[3],xmm16[4],xmm13[4],xmm16[5],xmm13[5],xmm16[6],xmm13[6],xmm16[7],xmm13[7]
6651 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
6652 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm18[0],xmm17[0],xmm18[1],xmm17[1],xmm18[2],xmm17[2],xmm18[3],xmm17[3],xmm18[4],xmm17[4],xmm18[5],xmm17[5],xmm18[6],xmm17[6],xmm18[7],xmm17[7]
6653 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm1
6654 ; AVX512BW-SLOW-NEXT: vpermw %zmm1, %zmm10, %zmm26
6655 ; AVX512BW-SLOW-NEXT: vpermw %zmm0, %zmm9, %zmm26 {%k1}
6656 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm30[0],xmm28[0],xmm30[1],xmm28[1],xmm30[2],xmm28[2],xmm30[3],xmm28[3],xmm30[4],xmm28[4],xmm30[5],xmm28[5],xmm30[6],xmm28[6],xmm30[7],xmm28[7]
6657 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,3,2,3]
6658 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
6659 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[3,3,3,3]
6660 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
6661 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
6662 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
6663 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[1,1,1,1]
6664 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
6665 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
6666 ; AVX512BW-SLOW-NEXT: vmovdqa 16(%rdx), %xmm2
6667 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm27
6668 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm2[0],xmm25[0],xmm2[1],xmm25[1],xmm2[2],xmm25[2],xmm2[3],xmm25[3],xmm2[4],xmm25[4],xmm2[5],xmm25[5],xmm2[6],xmm25[6],xmm2[7],xmm25[7]
6669 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm0
6670 ; AVX512BW-SLOW-NEXT: vpermw %zmm0, %zmm12, %zmm27 {%k2}
6671 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm0 = xmm16[8],xmm13[8],xmm16[9],xmm13[9],xmm16[10],xmm13[10],xmm16[11],xmm13[11],xmm16[12],xmm13[12],xmm16[13],xmm13[13],xmm16[14],xmm13[14],xmm16[15],xmm13[15]
6672 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm18[8],xmm17[8],xmm18[9],xmm17[9],xmm18[10],xmm17[10],xmm18[11],xmm17[11],xmm18[12],xmm17[12],xmm18[13],xmm17[13],xmm18[14],xmm17[14],xmm18[15],xmm17[15]
6673 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm0, %zmm0, %zmm13
6674 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm1, %zmm1, %zmm0
6675 ; AVX512BW-SLOW-NEXT: vpermw %zmm0, %zmm10, %zmm0
6676 ; AVX512BW-SLOW-NEXT: vpermw %zmm13, %zmm9, %zmm0 {%k1}
6677 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm1 = xmm30[8],xmm28[8],xmm30[9],xmm28[9],xmm30[10],xmm28[10],xmm30[11],xmm28[11],xmm30[12],xmm28[12],xmm30[13],xmm28[13],xmm30[14],xmm28[14],xmm30[15],xmm28[15]
6678 ; AVX512BW-SLOW-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
6679 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm13 # 16-byte Folded Reload
6680 ; AVX512BW-SLOW-NEXT: # xmm13 = xmm13[0],mem[0],xmm13[1],mem[1],xmm13[2],mem[2],xmm13[3],mem[3],xmm13[4],mem[4],xmm13[5],mem[5],xmm13[6],mem[6],xmm13[7],mem[7]
6681 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm5 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3],xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
6682 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm5, %zmm5, %zmm5
6683 ; AVX512BW-SLOW-NEXT: vpermw %zmm5, %zmm10, %zmm5
6684 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm13, %zmm13, %zmm7
6685 ; AVX512BW-SLOW-NEXT: vpermw %zmm7, %zmm9, %zmm5 {%k1}
6686 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm1[2,3,2,3]
6687 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
6688 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm9 = xmm1[3,3,3,3]
6689 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm9 = xmm9[0],zero,zero,zero,xmm9[1],zero,zero,zero
6690 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm9, %ymm7, %ymm7
6691 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm9 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
6692 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[1,1,1,1]
6693 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero
6694 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm1, %ymm9, %ymm1
6695 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm7, %zmm1, %zmm1
6696 ; AVX512BW-SLOW-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm2[8],xmm25[8],xmm2[9],xmm25[9],xmm2[10],xmm25[10],xmm2[11],xmm25[11],xmm2[12],xmm25[12],xmm2[13],xmm25[13],xmm2[14],xmm25[14],xmm2[15],xmm25[15]
6697 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm2, %zmm2, %zmm2
6698 ; AVX512BW-SLOW-NEXT: vpermw %zmm2, %zmm12, %zmm1 {%k2}
6699 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm2 # 16-byte Folded Reload
6700 ; AVX512BW-SLOW-NEXT: # xmm2 = xmm6[0],mem[0],xmm6[1],mem[1],xmm6[2],mem[2],xmm6[3],mem[3],xmm6[4],mem[4],xmm6[5],mem[5],xmm6[6],mem[6],xmm6[7],mem[7]
6701 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm6 = xmm2[2,3,2,3]
6702 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm6 = xmm6[0],zero,zero,zero,xmm6[1],zero,zero,zero
6703 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm7 = xmm2[3,3,3,3]
6704 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
6705 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm7, %ymm6, %ymm6
6706 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm7 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
6707 ; AVX512BW-SLOW-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,1,1]
6708 ; AVX512BW-SLOW-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
6709 ; AVX512BW-SLOW-NEXT: vinserti128 $1, %xmm2, %ymm7, %ymm2
6710 ; AVX512BW-SLOW-NEXT: vinserti64x4 $1, %ymm6, %zmm2, %zmm2
6711 ; AVX512BW-SLOW-NEXT: vpunpcklbw {{.*#+}} xmm6 = xmm11[0],xmm8[0],xmm11[1],xmm8[1],xmm11[2],xmm8[2],xmm11[3],xmm8[3],xmm11[4],xmm8[4],xmm11[5],xmm8[5],xmm11[6],xmm8[6],xmm11[7],xmm8[7]
6712 ; AVX512BW-SLOW-NEXT: vinserti32x4 $2, %xmm6, %zmm6, %zmm6
6713 ; AVX512BW-SLOW-NEXT: vpermw %zmm6, %zmm12, %zmm2 {%k2}
6714 ; AVX512BW-SLOW-NEXT: movw $-21846, %ax # imm = 0xAAAA
6715 ; AVX512BW-SLOW-NEXT: kmovd %eax, %k1
6716 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm4, %zmm3 {%k1}
6717 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm14, %zmm15 {%k1}
6718 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm19, %zmm20 {%k1}
6719 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm21, %zmm24 {%k1}
6720 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm22, %zmm23 {%k1}
6721 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm26, %zmm27 {%k1}
6722 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
6723 ; AVX512BW-SLOW-NEXT: vmovdqa32 %zmm5, %zmm2 {%k1}
6724 ; AVX512BW-SLOW-NEXT: movq {{[0-9]+}}(%rsp), %rax
6725 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm2, (%rax)
6726 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm1, 192(%rax)
6727 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm27, 128(%rax)
6728 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm23, 320(%rax)
6729 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm24, 256(%rax)
6730 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm20, 448(%rax)
6731 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm15, 384(%rax)
6732 ; AVX512BW-SLOW-NEXT: vmovdqa64 %zmm3, 64(%rax)
6733 ; AVX512BW-SLOW-NEXT: vzeroupper
6734 ; AVX512BW-SLOW-NEXT: retq
;
6736 ; AVX512BW-FAST-LABEL: store_i8_stride8_vf64:
6737 ; AVX512BW-FAST: # %bb.0:
6738 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6739 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %r10
6740 ; AVX512BW-FAST-NEXT: vmovdqa (%r10), %xmm0
6741 ; AVX512BW-FAST-NEXT: vmovdqa 16(%r10), %xmm14
6742 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%r10), %xmm18
6743 ; AVX512BW-FAST-NEXT: vmovdqa64 48(%r10), %xmm17
6744 ; AVX512BW-FAST-NEXT: vmovdqa (%rax), %xmm1
6745 ; AVX512BW-FAST-NEXT: vmovdqa 16(%rax), %xmm15
6746 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rax), %xmm19
6747 ; AVX512BW-FAST-NEXT: vmovdqa64 48(%rax), %xmm20
6748 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
6749 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm2
6750 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm2, %zmm2, %zmm3
6751 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7,0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
6752 ; AVX512BW-FAST-NEXT: vpshufb %zmm2, %zmm3, %zmm6
6753 ; AVX512BW-FAST-NEXT: vmovdqa (%r9), %xmm3
6754 ; AVX512BW-FAST-NEXT: vmovdqa64 48(%r9), %xmm21
6755 ; AVX512BW-FAST-NEXT: vmovdqa (%r8), %xmm4
6756 ; AVX512BW-FAST-NEXT: vmovdqa64 48(%r8), %xmm22
6757 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
6758 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm5, %ymm5, %ymm5
6759 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm5, %zmm5, %zmm7
6760 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7,0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
6761 ; AVX512BW-FAST-NEXT: vpshufb %zmm5, %zmm7, %zmm16
6762 ; AVX512BW-FAST-NEXT: movl $-2004318072, %eax # imm = 0x88888888
6763 ; AVX512BW-FAST-NEXT: kmovd %eax, %k1
6764 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm6, %zmm16 {%k1}
6765 ; AVX512BW-FAST-NEXT: vmovdqa (%rcx), %xmm6
6766 ; AVX512BW-FAST-NEXT: vmovdqa64 48(%rcx), %xmm23
6767 ; AVX512BW-FAST-NEXT: vmovdqa (%rdx), %xmm7
6768 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm7[8],xmm6[8],xmm7[9],xmm6[9],xmm7[10],xmm6[10],xmm7[11],xmm6[11],xmm7[12],xmm6[12],xmm7[13],xmm6[13],xmm7[14],xmm6[14],xmm7[15],xmm6[15]
6769 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm8, %ymm8, %ymm8
6770 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm8, %zmm8, %zmm8
6771 ; AVX512BW-FAST-NEXT: vmovdqa64 {{.*#+}} zmm9 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15,8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
6772 ; AVX512BW-FAST-NEXT: vpshufb %zmm9, %zmm8, %zmm24
6773 ; AVX512BW-FAST-NEXT: vmovdqa (%rsi), %xmm10
6774 ; AVX512BW-FAST-NEXT: vmovdqa64 48(%rsi), %xmm25
6775 ; AVX512BW-FAST-NEXT: vmovdqa (%rdi), %xmm11
6776 ; AVX512BW-FAST-NEXT: vmovdqa64 48(%rdi), %xmm28
6777 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm8 = xmm11[8],xmm10[8],xmm11[9],xmm10[9],xmm11[10],xmm10[10],xmm11[11],xmm10[11],xmm11[12],xmm10[12],xmm11[13],xmm10[13],xmm11[14],xmm10[14],xmm11[15],xmm10[15]
6778 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm8, %ymm8, %ymm13
6779 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm12 = <8,9,u,u,u,u,u,u,10,11,u,u,u,u,u,u,12,13,u,u,u,u,u,u,14,15,u,u,u,u,u,u>
6780 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm13, %ymm26
6781 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} xmm13 = <4,5,u,u,u,u,u,u,6,7,u,u,u,u,u,u>
6782 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm8, %xmm27
6783 ; AVX512BW-FAST-NEXT: vpmovzxwq {{.*#+}} xmm8 = xmm8[0],zero,zero,zero,xmm8[1],zero,zero,zero
6784 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm27, %ymm8, %ymm8
6785 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm26, %zmm8, %zmm8
6786 ; AVX512BW-FAST-NEXT: movl $572662306, %eax # imm = 0x22222222
6787 ; AVX512BW-FAST-NEXT: kmovd %eax, %k2
6788 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm24, %zmm8 {%k2}
6789 ; AVX512BW-FAST-NEXT: movw $-21846, %ax # imm = 0xAAAA
6790 ; AVX512BW-FAST-NEXT: kmovd %eax, %k3
6791 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm16, %zmm8 {%k3}
6792 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm16 = xmm20[0],xmm17[0],xmm20[1],xmm17[1],xmm20[2],xmm17[2],xmm20[3],xmm17[3],xmm20[4],xmm17[4],xmm20[5],xmm17[5],xmm20[6],xmm17[6],xmm20[7],xmm17[7]
6793 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm16, %ymm16, %ymm16
6794 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm16, %zmm16, %zmm16
6795 ; AVX512BW-FAST-NEXT: vpshufb %zmm2, %zmm16, %zmm16
6796 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm24 = xmm22[0],xmm21[0],xmm22[1],xmm21[1],xmm22[2],xmm21[2],xmm22[3],xmm21[3],xmm22[4],xmm21[4],xmm22[5],xmm21[5],xmm22[6],xmm21[6],xmm22[7],xmm21[7]
6797 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm24, %ymm24, %ymm24
6798 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm24, %zmm24, %zmm24
6799 ; AVX512BW-FAST-NEXT: vpshufb %zmm5, %zmm24, %zmm24
6800 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm16, %zmm24 {%k1}
6801 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm16 = xmm28[0],xmm25[0],xmm28[1],xmm25[1],xmm28[2],xmm25[2],xmm28[3],xmm25[3],xmm28[4],xmm25[4],xmm28[5],xmm25[5],xmm28[6],xmm25[6],xmm28[7],xmm25[7]
6802 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm16, %xmm26
6803 ; AVX512BW-FAST-NEXT: vpmovzxwq {{.*#+}} xmm27 = xmm16[0],zero,zero,zero,xmm16[1],zero,zero,zero
6804 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm26, %ymm27, %ymm26
6805 ; AVX512BW-FAST-NEXT: vmovdqa64 48(%rdx), %xmm30
6806 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm16, %ymm16, %ymm16
6807 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm16, %ymm16
6808 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm16, %zmm26, %zmm16
6809 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm26 = xmm30[0],xmm23[0],xmm30[1],xmm23[1],xmm30[2],xmm23[2],xmm30[3],xmm23[3],xmm30[4],xmm23[4],xmm30[5],xmm23[5],xmm30[6],xmm23[6],xmm30[7],xmm23[7]
6810 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm26, %ymm26, %ymm26
6811 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm26, %zmm26, %zmm26
6812 ; AVX512BW-FAST-NEXT: vpshufb %zmm9, %zmm26, %zmm26
6813 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm26, %zmm16 {%k2}
6814 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%r9), %xmm26
6815 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm24, %zmm16 {%k3}
6816 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%r8), %xmm27
6817 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm17 = xmm20[8],xmm17[8],xmm20[9],xmm17[9],xmm20[10],xmm17[10],xmm20[11],xmm17[11],xmm20[12],xmm17[12],xmm20[13],xmm17[13],xmm20[14],xmm17[14],xmm20[15],xmm17[15]
6818 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rcx), %xmm24
6819 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm20 = xmm22[8],xmm21[8],xmm22[9],xmm21[9],xmm22[10],xmm21[10],xmm22[11],xmm21[11],xmm22[12],xmm21[12],xmm22[13],xmm21[13],xmm22[14],xmm21[14],xmm22[15],xmm21[15]
6820 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rsi), %xmm21
6821 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm17, %ymm17, %ymm17
6822 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm17, %zmm17, %zmm17
6823 ; AVX512BW-FAST-NEXT: vpshufb %zmm2, %zmm17, %zmm17
6824 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm20, %ymm20, %ymm20
6825 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm20, %zmm20, %zmm20
6826 ; AVX512BW-FAST-NEXT: vpshufb %zmm5, %zmm20, %zmm20
6827 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm17, %zmm20 {%k1}
6828 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rdi), %xmm29
6829 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm17 = xmm30[8],xmm23[8],xmm30[9],xmm23[9],xmm30[10],xmm23[10],xmm30[11],xmm23[11],xmm30[12],xmm23[12],xmm30[13],xmm23[13],xmm30[14],xmm23[14],xmm30[15],xmm23[15]
6830 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm17, %ymm17, %ymm17
6831 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm17, %zmm17, %zmm17
6832 ; AVX512BW-FAST-NEXT: vpshufb %zmm9, %zmm17, %zmm22
6833 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm17 = xmm28[8],xmm25[8],xmm28[9],xmm25[9],xmm28[10],xmm25[10],xmm28[11],xmm25[11],xmm28[12],xmm25[12],xmm28[13],xmm25[13],xmm28[14],xmm25[14],xmm28[15],xmm25[15]
6834 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm17, %ymm17, %ymm23
6835 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm23, %ymm23
6836 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm17, %xmm25
6837 ; AVX512BW-FAST-NEXT: vpmovzxwq {{.*#+}} xmm17 = xmm17[0],zero,zero,zero,xmm17[1],zero,zero,zero
6838 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm25, %ymm17, %ymm17
6839 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm23, %zmm17, %zmm17
6840 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm22, %zmm17 {%k2}
6841 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm20, %zmm17 {%k3}
6842 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm20 = xmm19[0],xmm18[0],xmm19[1],xmm18[1],xmm19[2],xmm18[2],xmm19[3],xmm18[3],xmm19[4],xmm18[4],xmm19[5],xmm18[5],xmm19[6],xmm18[6],xmm19[7],xmm18[7]
6843 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm20, %ymm20, %ymm20
6844 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm20, %zmm20, %zmm20
6845 ; AVX512BW-FAST-NEXT: vpshufb %zmm2, %zmm20, %zmm20
6846 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm22 = xmm27[0],xmm26[0],xmm27[1],xmm26[1],xmm27[2],xmm26[2],xmm27[3],xmm26[3],xmm27[4],xmm26[4],xmm27[5],xmm26[5],xmm27[6],xmm26[6],xmm27[7],xmm26[7]
6847 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm22, %ymm22, %ymm22
6848 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm22, %zmm22, %zmm22
6849 ; AVX512BW-FAST-NEXT: vpshufb %zmm5, %zmm22, %zmm22
6850 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm20, %zmm22 {%k1}
6851 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm20 = xmm29[0],xmm21[0],xmm29[1],xmm21[1],xmm29[2],xmm21[2],xmm29[3],xmm21[3],xmm29[4],xmm21[4],xmm29[5],xmm21[5],xmm29[6],xmm21[6],xmm29[7],xmm21[7]
6852 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm20, %xmm23
6853 ; AVX512BW-FAST-NEXT: vpmovzxwq {{.*#+}} xmm25 = xmm20[0],zero,zero,zero,xmm20[1],zero,zero,zero
6854 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm23, %ymm25, %ymm23
6855 ; AVX512BW-FAST-NEXT: vmovdqa64 32(%rdx), %xmm28
6856 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm20, %ymm20, %ymm20
6857 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm20, %ymm20
6858 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm20, %zmm23, %zmm20
6859 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm23 = xmm28[0],xmm24[0],xmm28[1],xmm24[1],xmm28[2],xmm24[2],xmm28[3],xmm24[3],xmm28[4],xmm24[4],xmm28[5],xmm24[5],xmm28[6],xmm24[6],xmm28[7],xmm24[7]
6860 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm23, %ymm23, %ymm23
6861 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm23, %zmm23, %zmm23
6862 ; AVX512BW-FAST-NEXT: vpshufb %zmm9, %zmm23, %zmm23
6863 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm23, %zmm20 {%k2}
6864 ; AVX512BW-FAST-NEXT: vmovdqa64 16(%r9), %xmm23
6865 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm22, %zmm20 {%k3}
6866 ; AVX512BW-FAST-NEXT: vmovdqa64 16(%r8), %xmm25
6867 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm18 = xmm19[8],xmm18[8],xmm19[9],xmm18[9],xmm19[10],xmm18[10],xmm19[11],xmm18[11],xmm19[12],xmm18[12],xmm19[13],xmm18[13],xmm19[14],xmm18[14],xmm19[15],xmm18[15]
6868 ; AVX512BW-FAST-NEXT: vmovdqa64 16(%rcx), %xmm19
6869 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm26 = xmm27[8],xmm26[8],xmm27[9],xmm26[9],xmm27[10],xmm26[10],xmm27[11],xmm26[11],xmm27[12],xmm26[12],xmm27[13],xmm26[13],xmm27[14],xmm26[14],xmm27[15],xmm26[15]
6870 ; AVX512BW-FAST-NEXT: vmovdqa64 16(%rsi), %xmm22
6871 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm18, %ymm18, %ymm18
6872 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm18, %zmm18, %zmm18
6873 ; AVX512BW-FAST-NEXT: vpshufb %zmm2, %zmm18, %zmm18
6874 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm26, %ymm26, %ymm26
6875 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm26, %zmm26, %zmm26
6876 ; AVX512BW-FAST-NEXT: vpshufb %zmm5, %zmm26, %zmm27
6877 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm18, %zmm27 {%k1}
6878 ; AVX512BW-FAST-NEXT: vmovdqa64 16(%rdi), %xmm26
6879 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm18 = xmm28[8],xmm24[8],xmm28[9],xmm24[9],xmm28[10],xmm24[10],xmm28[11],xmm24[11],xmm28[12],xmm24[12],xmm28[13],xmm24[13],xmm28[14],xmm24[14],xmm28[15],xmm24[15]
6880 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm18, %ymm18, %ymm18
6881 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm18, %zmm18, %zmm18
6882 ; AVX512BW-FAST-NEXT: vpshufb %zmm9, %zmm18, %zmm24
6883 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm18 = xmm29[8],xmm21[8],xmm29[9],xmm21[9],xmm29[10],xmm21[10],xmm29[11],xmm21[11],xmm29[12],xmm21[12],xmm29[13],xmm21[13],xmm29[14],xmm21[14],xmm29[15],xmm21[15]
6884 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm18, %ymm18, %ymm21
6885 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm21, %ymm21
6886 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm18, %xmm28
6887 ; AVX512BW-FAST-NEXT: vpmovzxwq {{.*#+}} xmm18 = xmm18[0],zero,zero,zero,xmm18[1],zero,zero,zero
6888 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm28, %ymm18, %ymm18
6889 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm21, %zmm18, %zmm18
6890 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm24, %zmm18 {%k2}
6891 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm27, %zmm18 {%k3}
6892 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm21 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3],xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
6893 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm21, %ymm21, %ymm21
6894 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm21, %zmm21, %zmm21
6895 ; AVX512BW-FAST-NEXT: vpshufb %zmm2, %zmm21, %zmm21
6896 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm24 = xmm25[0],xmm23[0],xmm25[1],xmm23[1],xmm25[2],xmm23[2],xmm25[3],xmm23[3],xmm25[4],xmm23[4],xmm25[5],xmm23[5],xmm25[6],xmm23[6],xmm25[7],xmm23[7]
6897 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm24, %ymm24, %ymm24
6898 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm24, %zmm24, %zmm24
6899 ; AVX512BW-FAST-NEXT: vpshufb %zmm5, %zmm24, %zmm24
6900 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm21, %zmm24 {%k1}
6901 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm21 = xmm26[0],xmm22[0],xmm26[1],xmm22[1],xmm26[2],xmm22[2],xmm26[3],xmm22[3],xmm26[4],xmm22[4],xmm26[5],xmm22[5],xmm26[6],xmm22[6],xmm26[7],xmm22[7]
6902 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm21, %xmm27
6903 ; AVX512BW-FAST-NEXT: vpmovzxwq {{.*#+}} xmm28 = xmm21[0],zero,zero,zero,xmm21[1],zero,zero,zero
6904 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm27, %ymm28, %ymm27
6905 ; AVX512BW-FAST-NEXT: vmovdqa64 16(%rdx), %xmm28
6906 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm21, %ymm21, %ymm21
6907 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm21, %ymm21
6908 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm21, %zmm27, %zmm21
6909 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm27 = xmm28[0],xmm19[0],xmm28[1],xmm19[1],xmm28[2],xmm19[2],xmm28[3],xmm19[3],xmm28[4],xmm19[4],xmm28[5],xmm19[5],xmm28[6],xmm19[6],xmm28[7],xmm19[7]
6910 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm27, %ymm27, %ymm27
6911 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm27, %zmm27, %zmm27
6912 ; AVX512BW-FAST-NEXT: vpshufb %zmm9, %zmm27, %zmm27
6913 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm27, %zmm21 {%k2}
6914 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm24, %zmm21 {%k3}
6915 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm15[8],xmm14[8],xmm15[9],xmm14[9],xmm15[10],xmm14[10],xmm15[11],xmm14[11],xmm15[12],xmm14[12],xmm15[13],xmm14[13],xmm15[14],xmm14[14],xmm15[15],xmm14[15]
6916 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm15 = xmm25[8],xmm23[8],xmm25[9],xmm23[9],xmm25[10],xmm23[10],xmm25[11],xmm23[11],xmm25[12],xmm23[12],xmm25[13],xmm23[13],xmm25[14],xmm23[14],xmm25[15],xmm23[15]
6917 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm14, %ymm14, %ymm14
6918 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm14, %zmm14, %zmm14
6919 ; AVX512BW-FAST-NEXT: vpshufb %zmm2, %zmm14, %zmm14
6920 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm15, %ymm15, %ymm15
6921 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm15, %zmm15, %zmm15
6922 ; AVX512BW-FAST-NEXT: vpshufb %zmm5, %zmm15, %zmm15
6923 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm14, %zmm15 {%k1}
6924 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm14 = xmm28[8],xmm19[8],xmm28[9],xmm19[9],xmm28[10],xmm19[10],xmm28[11],xmm19[11],xmm28[12],xmm19[12],xmm28[13],xmm19[13],xmm28[14],xmm19[14],xmm28[15],xmm19[15]
6925 ; AVX512BW-FAST-NEXT: vpunpckhbw {{.*#+}} xmm19 = xmm26[8],xmm22[8],xmm26[9],xmm22[9],xmm26[10],xmm22[10],xmm26[11],xmm22[11],xmm26[12],xmm22[12],xmm26[13],xmm22[13],xmm26[14],xmm22[14],xmm26[15],xmm22[15]
6926 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm19, %xmm22
6927 ; AVX512BW-FAST-NEXT: vpmovzxwq {{.*#+}} xmm23 = xmm19[0],zero,zero,zero,xmm19[1],zero,zero,zero
6928 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm22, %ymm23, %ymm22
6929 ; AVX512BW-FAST-NEXT: vinserti32x4 $1, %xmm19, %ymm19, %ymm19
6930 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm19, %ymm19
6931 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm19, %zmm22, %zmm19
6932 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm14, %ymm14, %ymm14
6933 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm14, %zmm14, %zmm14
6934 ; AVX512BW-FAST-NEXT: vpshufb %zmm9, %zmm14, %zmm14
6935 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm14, %zmm19 {%k2}
6936 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm15, %zmm19 {%k3}
6937 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
6938 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6939 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
6940 ; AVX512BW-FAST-NEXT: vpshufb %zmm2, %zmm0, %zmm0
6941 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm1 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3],xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
6942 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm1, %ymm1, %ymm1
6943 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm1, %zmm1, %zmm1
6944 ; AVX512BW-FAST-NEXT: vpshufb %zmm5, %zmm1, %zmm1
6945 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm0, %zmm1 {%k1}
6946 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm0 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
6947 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6948 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm0, %zmm0, %zmm0
6949 ; AVX512BW-FAST-NEXT: vpshufb %zmm9, %zmm0, %zmm0
6950 ; AVX512BW-FAST-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3],xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
6951 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm2, %ymm2, %ymm3
6952 ; AVX512BW-FAST-NEXT: vpshufb %ymm12, %ymm3, %ymm3
6953 ; AVX512BW-FAST-NEXT: vpshufb %xmm13, %xmm2, %xmm4
6954 ; AVX512BW-FAST-NEXT: vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
6955 ; AVX512BW-FAST-NEXT: vinserti128 $1, %xmm4, %ymm2, %ymm2
6956 ; AVX512BW-FAST-NEXT: vinserti64x4 $1, %ymm3, %zmm2, %zmm2
6957 ; AVX512BW-FAST-NEXT: vmovdqu16 %zmm0, %zmm2 {%k2}
6958 ; AVX512BW-FAST-NEXT: vmovdqa32 %zmm1, %zmm2 {%k3}
6959 ; AVX512BW-FAST-NEXT: movq {{[0-9]+}}(%rsp), %rax
6960 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm2, (%rax)
6961 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm19, 192(%rax)
6962 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm21, 128(%rax)
6963 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm18, 320(%rax)
6964 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm20, 256(%rax)
6965 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm17, 448(%rax)
6966 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm16, 384(%rax)
6967 ; AVX512BW-FAST-NEXT: vmovdqa64 %zmm8, 64(%rax)
6968 ; AVX512BW-FAST-NEXT: vzeroupper
6969 ; AVX512BW-FAST-NEXT: retq
6970 %in.vec0 = load <64 x i8>, ptr %in.vecptr0, align 64
6971 %in.vec1 = load <64 x i8>, ptr %in.vecptr1, align 64
6972 %in.vec2 = load <64 x i8>, ptr %in.vecptr2, align 64
6973 %in.vec3 = load <64 x i8>, ptr %in.vecptr3, align 64
6974 %in.vec4 = load <64 x i8>, ptr %in.vecptr4, align 64
6975 %in.vec5 = load <64 x i8>, ptr %in.vecptr5, align 64
6976 %in.vec6 = load <64 x i8>, ptr %in.vecptr6, align 64
6977 %in.vec7 = load <64 x i8>, ptr %in.vecptr7, align 64
6978 %1 = shufflevector <64 x i8> %in.vec0, <64 x i8> %in.vec1, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
6979 %2 = shufflevector <64 x i8> %in.vec2, <64 x i8> %in.vec3, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
6980 %3 = shufflevector <64 x i8> %in.vec4, <64 x i8> %in.vec5, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
6981 %4 = shufflevector <64 x i8> %in.vec6, <64 x i8> %in.vec7, <128 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127>
6982 %5 = shufflevector <128 x i8> %1, <128 x i8> %2, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
6983 %6 = shufflevector <128 x i8> %3, <128 x i8> %4, <256 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255>
6984 %7 = shufflevector <256 x i8> %5, <256 x i8> %6, <512 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63, i32 64, i32 65, i32 66, i32 67, i32 68, i32 69, i32 70, i32 71, i32 72, i32 73, i32 74, i32 75, i32 76, i32 77, i32 78, i32 79, i32 80, i32 81, i32 82, i32 83, i32 84, i32 85, i32 86, i32 87, i32 88, i32 89, i32 90, i32 91, i32 92, i32 93, i32 94, i32 95, i32 96, i32 97, i32 98, i32 99, i32 100, i32 101, i32 102, i32 103, i32 104, i32 105, i32 106, i32 107, i32 108, i32 109, i32 110, i32 111, i32 112, i32 113, i32 114, i32 115, i32 116, i32 117, i32 118, i32 119, i32 120, i32 121, i32 122, i32 123, i32 124, i32 125, i32 126, i32 127, i32 128, i32 129, i32 130, i32 131, i32 132, i32 133, i32 134, i32 135, i32 136, i32 137, i32 138, i32 139, i32 140, i32 141, i32 142, i32 143, i32 144, i32 145, i32 146, i32 147, i32 148, i32 149, i32 150, i32 151, i32 152, i32 153, i32 154, i32 155, i32 156, i32 157, i32 158, i32 159, i32 160, i32 161, i32 162, i32 163, i32 164, i32 165, i32 166, i32 167, i32 168, i32 169, i32 170, i32 171, i32 172, i32 173, i32 174, i32 175, i32 176, i32 177, i32 178, i32 179, i32 180, i32 181, i32 182, i32 183, i32 184, i32 185, i32 186, i32 187, i32 188, i32 189, i32 190, i32 191, i32 192, i32 193, i32 194, i32 195, i32 196, i32 197, i32 198, i32 199, i32 200, i32 201, i32 202, i32 203, i32 204, i32 205, i32 206, i32 207, i32 208, i32 209, i32 210, i32 211, i32 212, i32 213, i32 214, i32 215, i32 216, i32 217, i32 218, i32 219, i32 220, i32 221, i32 222, i32 223, i32 224, i32 225, i32 226, i32 227, i32 228, i32 229, i32 230, i32 231, i32 232, i32 233, i32 234, i32 235, i32 236, i32 237, i32 238, i32 239, i32 240, i32 241, i32 242, i32 243, i32 244, i32 245, i32 246, i32 247, i32 248, i32 249, i32 250, i32 251, i32 252, i32 253, i32 254, i32 255, i32 256, i32 257, i32 258, i32 259, i32 260, i32 261, i32 262, i32 263, i32 264, i32 265, i32 266, i32 267, i32 268, i32 269, i32 270, i32 271, i32 272, i32 273, i32 274, i32 275, i32 276, i32 277, i32 278, i32 279, i32 280, i32 281, i32 282, i32 283, i32 284, i32 285, i32 286, i32 287, i32 288, i32 289, i32 290, i32 291, i32 292, i32 293, i32 294, i32 295, i32 296, i32 297, i32 298, i32 299, i32 300, i32 301, i32 302, i32 303, i32 304, i32 305, i32 306, i32 307, i32 308, i32 309, i32 310, i32 311, i32 312, i32 313, i32 314, i32 315, i32 316, i32 317, i32 318, i32 319, i32 320, i32 321, i32 322, i32 323, i32 324, i32 325, i32 326, i32 327, i32 328, i32 329, i32 330, i32 331, i32 332, i32 333, i32 334, i32 335, i32 336, i32 337, i32 338, i32 339, i32 340, i32 341, i32 342, i32 343, i32 344, i32 345, i32 346, i32 347, i32 348, i32 349, i32 350, i32 351, i32 352, i32 353, i32 354, i32 355, i32 356, i32 357, i32 358, i32 359, i32 360, i32 361, i32 362, i32 363, i32 364, i32 365, i32 366, i32 367, i32 368, i32 369, i32 370, i32 371, i32 372, i32 373, i32 374, i32 375, i32 376, i32 377, i32 378, i32 379, i32 380, i32 381, i32 382, i32 383, i32 384, i32 385, i32 386, i32 387, i32 388, i32 389, i32 390, i32 391, i32 392, i32 393, i32 394, i32 395, i32 396, i32 397, i32 398, i32 399, i32 400, i32 401, i32 402, i32 403, i32 404, i32 405, i32 406, i32 407, i32 408, i32 409, i32 410, i32 411, i32 412, i32 413, i32 414, i32 415, i32 416, i32 417, i32 418, i32 419, i32 420, i32 421, i32 422, i32 423, i32 424, i32 425, i32 426, i32 427, i32 428, i32 429, i32 430, i32 431, i32 432, i32 433, i32 434, i32 435, i32 436, i32 437, i32 438, i32 439, i32 440, i32 441, i32 442, i32 443, i32 444, i32 445, i32 446, i32 447, i32 448, i32 449, i32 450, i32 451, i32 452, i32 453, i32 454, i32 455, i32 456, i32 457, i32 458, i32 459, i32 460, i32 461, i32 462, i32 463, i32 464, i32 465, i32 466, i32 467, i32 468, i32 469, i32 470, i32 471, i32 472, i32 473, i32 474, i32 475, i32 476, i32 477, i32 478, i32 479, i32 480, i32 481, i32 482, i32 483, i32 484, i32 485, i32 486, i32 487, i32 488, i32 489, i32 490, i32 491, i32 492, i32 493, i32 494, i32 495, i32 496, i32 497, i32 498, i32 499, i32 500, i32 501, i32 502, i32 503, i32 504, i32 505, i32 506, i32 507, i32 508, i32 509, i32 510, i32 511>
6985 %interleaved.vec = shufflevector <512 x i8> %7, <512 x i8> poison, <512 x i32> <i32 0, i32 64, i32 128, i32 192, i32 256, i32 320, i32 384, i32 448, i32 1, i32 65, i32 129, i32 193, i32 257, i32 321, i32 385, i32 449, i32 2, i32 66, i32 130, i32 194, i32 258, i32 322, i32 386, i32 450, i32 3, i32 67, i32 131, i32 195, i32 259, i32 323, i32 387, i32 451, i32 4, i32 68, i32 132, i32 196, i32 260, i32 324, i32 388, i32 452, i32 5, i32 69, i32 133, i32 197, i32 261, i32 325, i32 389, i32 453, i32 6, i32 70, i32 134, i32 198, i32 262, i32 326, i32 390, i32 454, i32 7, i32 71, i32 135, i32 199, i32 263, i32 327, i32 391, i32 455, i32 8, i32 72, i32 136, i32 200, i32 264, i32 328, i32 392, i32 456, i32 9, i32 73, i32 137, i32 201, i32 265, i32 329, i32 393, i32 457, i32 10, i32 74, i32 138, i32 202, i32 266, i32 330, i32 394, i32 458, i32 11, i32 75, i32 139, i32 203, i32 267, i32 331, i32 395, i32 459, i32 12, i32 76, i32 140, i32 204, i32 268, i32 332, i32 396, i32 460, i32 13, i32 77, i32 141, i32 205, i32 269, i32 333, i32 397, i32 461, i32 14, i32 78, i32 142, i32 206, i32 270, i32 334, i32 398, i32 462, i32 15, i32 79, i32 143, i32 207, i32 271, i32 335, i32 399, i32 463, i32 16, i32 80, i32 144, i32 208, i32 272, i32 336, i32 400, i32 464, i32 17, i32 81, i32 145, i32 209, i32 273, i32 337, i32 401, i32 465, i32 18, i32 82, i32 146, i32 210, i32 274, i32 338, i32 402, i32 466, i32 19, i32 83, i32 147, i32 211, i32 275, i32 339, i32 403, i32 467, i32 20, i32 84, i32 148, i32 212, i32 276, i32 340, i32 404, i32 468, i32 21, i32 85, i32 149, i32 213, i32 277, i32 341, i32 405, i32 469, i32 22, i32 86, i32 150, i32 214, i32 278, i32 342, i32 406, i32 470, i32 23, i32 87, i32 151, i32 215, i32 279, i32 343, i32 407, i32 471, i32 24, i32 88, i32 152, i32 216, i32 280, i32 344, i32 408, i32 472, i32 25, i32 89, i32 153, i32 217, i32 281, i32 345, i32 409, i32 473, i32 26, i32 90, i32 154, i32 218, i32 282, i32 346, i32 410, i32 474, i32 27, i32 91, i32 155, i32 219, i32 283, i32 347, i32 411, i32 475, i32 28, i32 92, i32 156, i32 220, i32 284, i32 348, i32 412, i32 476, i32 29, i32 93, i32 157, i32 221, i32 285, i32 349, i32 413, i32 477, i32 30, i32 94, i32 158, i32 222, i32 286, i32 350, i32 414, i32 478, i32 31, i32 95, i32 159, i32 223, i32 287, i32 351, i32 415, i32 479, i32 32, i32 96, i32 160, i32 224, i32 288, i32 352, i32 416, i32 480, i32 33, i32 97, i32 161, i32 225, i32 289, i32 353, i32 417, i32 481, i32 34, i32 98, i32 162, i32 226, i32 290, i32 354, i32 418, i32 482, i32 35, i32 99, i32 163, i32 227, i32 291, i32 355, i32 419, i32 483, i32 36, i32 100, i32 164, i32 228, i32 292, i32 356, i32 420, i32 484, i32 37, i32 101, i32 165, i32 229, i32 293, i32 357, i32 421, i32 485, i32 38, i32 102, i32 166, i32 230, i32 294, i32 358, i32 422, i32 486, i32 39, i32 103, i32 167, i32 231, i32 295, i32 359, i32 423, i32 487, i32 40, i32 104, i32 168, i32 232, i32 296, i32 360, i32 424, i32 488, i32 41, i32 105, i32 169, i32 233, i32 297, i32 361, i32 425, i32 489, i32 42, i32 106, i32 170, i32 234, i32 298, i32 362, i32 426, i32 490, i32 43, i32 107, i32 171, i32 235, i32 299, i32 363, i32 427, i32 491, i32 44, i32 108, i32 172, i32 236, i32 300, i32 364, i32 428, i32 492, i32 45, i32 109, i32 173, i32 237, i32 301, i32 365, i32 429, i32 493, i32 46, i32 110, i32 174, i32 238, i32 302, i32 366, i32 430, i32 494, i32 47, i32 111, i32 175, i32 239, i32 303, i32 367, i32 431, i32 495, i32 48, i32 112, i32 176, i32 240, i32 304, i32 368, i32 432, i32 496, i32 49, i32 113, i32 177, i32 241, i32 305, i32 369, i32 433, i32 497, i32 50, i32 114, i32 178, i32 242, i32 306, i32 370, i32 434, i32 498, i32 51, i32 115, i32 179, i32 243, i32 307, i32 371, i32 435, i32 499, i32 52, i32 116, i32 180, i32 244, i32 308, i32 372, i32 436, i32 500, i32 53, i32 117, i32 181, i32 245, i32 309, i32 373, i32 437, i32 501, i32 54, i32 118, i32 182, i32 246, i32 310, i32 374, i32 438, i32 502, i32 55, i32 119, i32 183, i32 247, i32 311, i32 375, i32 439, i32 503, i32 56, i32 120, i32 184, i32 248, i32 312, i32 376, i32 440, i32 504, i32 57, i32 121, i32 185, i32 249, i32 313, i32 377, i32 441, i32 505, i32 58, i32 122, i32 186, i32 250, i32 314, i32 378, i32 442, i32 506, i32 59, i32 123, i32 187, i32 251, i32 315, i32 379, i32 443, i32 507, i32 60, i32 124, i32 188, i32 252, i32 316, i32 380, i32 444, i32 508, i32 61, i32 125, i32 189, i32 253, i32 317, i32 381, i32 445, i32 509, i32 62, i32 126, i32 190, i32 254, i32 318, i32 382, i32 446, i32 510, i32 63, i32 127, i32 191, i32 255, i32 319, i32 383, i32 447, i32 511>
6986 store <512 x i8> %interleaved.vec, ptr %out.vec, align 64
  ret void
}
6989 ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
6991 ; AVX512-SLOW: {{.*}}
6993 ; AVX512BW-ONLY-FAST: {{.*}}
6994 ; AVX512BW-ONLY-SLOW: {{.*}}
6995 ; AVX512DQBW-FAST: {{.*}}
6996 ; AVX512DQBW-SLOW: {{.*}}
7000 ; FALLBACK10: {{.*}}
7001 ; FALLBACK11: {{.*}}
7002 ; FALLBACK12: {{.*}}