; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx | FileCheck %s --check-prefixes=AVX
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2 | FileCheck %s --check-prefixes=AVX2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx2,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX2-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX512
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX512DQ
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX512BW
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512BW-FCP
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefixes=AVX512DQ-BW
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512DQ-BW-FCP

; These patterns are produced by LoopVectorizer for interleaved loads.
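; Each function below has the same shape: one wide load of 5*VF i16 elements from
; %in.vec, followed by one shufflevector per field picking elements i, i+5, i+10, ...
; For example, at VF=2 this is (mirroring the body of @load_i16_stride5_vf2 below):
;   %wide.vec = load <10 x i16>, ptr %in.vec, align 64
;   %strided.vec0 = shufflevector <10 x i16> %wide.vec, <10 x i16> poison, <2 x i32> <i32 0, i32 5>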

define void @load_i16_stride5_vf2(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4) nounwind {
; SSE-LABEL: load_i16_stride5_vf2:
; SSE: # %bb.0:
; SSE-NEXT: movdqa (%rdi), %xmm0
; SSE-NEXT: movdqa 16(%rdi), %xmm1
; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm0[0,2,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,3,2,3,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm0[0,3,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm0[3,1,2,3]
; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[2,1,2,3,4,5,6,7]
; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm0[2,3,2,3]
; SSE-NEXT: psrlq $48, %xmm0
; SSE-NEXT: punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; SSE-NEXT: psrld $16, %xmm1
; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
; SSE-NEXT: movd %xmm2, (%rsi)
; SSE-NEXT: movd %xmm3, (%rdx)
; SSE-NEXT: movd %xmm4, (%rcx)
; SSE-NEXT: movd %xmm0, (%r8)
; SSE-NEXT: movd %xmm5, (%r9)
; SSE-NEXT: retq
;
; AVX-LABEL: load_i16_stride5_vf2:
; AVX: # %bb.0:
; AVX-NEXT: vmovdqa (%rdi), %xmm0
; AVX-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,2,2,3]
; AVX-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,3,2,3,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,3,2,3]
; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[3,1,2,3]
; AVX-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,1,2,3,4,5,6,7]
; AVX-NEXT: vpsrlq $48, %xmm0, %xmm5
; AVX-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2,3,4,5,6,7]
; AVX-NEXT: vmovd %xmm2, (%rsi)
; AVX-NEXT: vmovd %xmm3, (%rdx)
; AVX-NEXT: vmovd %xmm4, (%rcx)
; AVX-NEXT: vmovd %xmm5, (%r8)
; AVX-NEXT: vmovd %xmm0, (%r9)
; AVX-NEXT: retq
;
; AVX2-LABEL: load_i16_stride5_vf2:
; AVX2: # %bb.0:
; AVX2-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX2-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,2,2,3]
; AVX2-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,3,2,3,4,5,6,7]
; AVX2-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,3,2,3]
; AVX2-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
; AVX2-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[3,1,2,3]
; AVX2-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,1,2,3,4,5,6,7]
; AVX2-NEXT: vpsrlq $48, %xmm0, %xmm0
; AVX2-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-NEXT: vpbroadcastw 8(%rdi), %xmm5
; AVX2-NEXT: vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3,4,5,6,7]
; AVX2-NEXT: vmovd %xmm2, (%rsi)
; AVX2-NEXT: vmovd %xmm3, (%rdx)
; AVX2-NEXT: vmovd %xmm4, (%rcx)
; AVX2-NEXT: vmovd %xmm0, (%r8)
; AVX2-NEXT: vmovd %xmm1, (%r9)
; AVX2-NEXT: retq
;
; AVX2-FP-LABEL: load_i16_stride5_vf2:
; AVX2-FP: # %bb.0:
; AVX2-FP-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FP-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,10,11,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[2,3,12,13,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[4,5,14,15,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX2-FP-NEXT: vpsrlq $48, %xmm0, %xmm0
; AVX2-FP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-FP-NEXT: vpbroadcastw 8(%rdi), %xmm5
; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3,4,5,6,7]
; AVX2-FP-NEXT: vmovd %xmm2, (%rsi)
; AVX2-FP-NEXT: vmovd %xmm3, (%rdx)
; AVX2-FP-NEXT: vmovd %xmm4, (%rcx)
; AVX2-FP-NEXT: vmovd %xmm0, (%r8)
; AVX2-FP-NEXT: vmovd %xmm1, (%r9)
; AVX2-FP-NEXT: retq
;
; AVX2-FCP-LABEL: load_i16_stride5_vf2:
; AVX2-FCP: # %bb.0:
; AVX2-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX2-FCP-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,10,11,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[2,3,12,13,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[4,5,14,15,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX2-FCP-NEXT: vpsrlq $48, %xmm0, %xmm0
; AVX2-FCP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX2-FCP-NEXT: vpbroadcastw 8(%rdi), %xmm5
; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3,4,5,6,7]
; AVX2-FCP-NEXT: vmovd %xmm2, (%rsi)
; AVX2-FCP-NEXT: vmovd %xmm3, (%rdx)
; AVX2-FCP-NEXT: vmovd %xmm4, (%rcx)
; AVX2-FCP-NEXT: vmovd %xmm0, (%r8)
; AVX2-FCP-NEXT: vmovd %xmm1, (%r9)
; AVX2-FCP-NEXT: retq
;
; AVX512-LABEL: load_i16_stride5_vf2:
; AVX512: # %bb.0:
; AVX512-NEXT: vmovdqa (%rdi), %xmm0
; AVX512-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX512-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,2,2,3]
; AVX512-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,3,2,3,4,5,6,7]
; AVX512-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,3,2,3]
; AVX512-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
; AVX512-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[3,1,2,3]
; AVX512-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,1,2,3,4,5,6,7]
; AVX512-NEXT: vpsrlq $48, %xmm0, %xmm0
; AVX512-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX512-NEXT: vpbroadcastw 8(%rdi), %xmm5
; AVX512-NEXT: vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3,4,5,6,7]
; AVX512-NEXT: vmovd %xmm2, (%rsi)
; AVX512-NEXT: vmovd %xmm3, (%rdx)
; AVX512-NEXT: vmovd %xmm4, (%rcx)
; AVX512-NEXT: vmovd %xmm0, (%r8)
; AVX512-NEXT: vmovd %xmm1, (%r9)
; AVX512-NEXT: retq
;
; AVX512-FCP-LABEL: load_i16_stride5_vf2:
; AVX512-FCP: # %bb.0:
; AVX512-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512-FCP-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,10,11,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[2,3,12,13,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[4,5,14,15,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512-FCP-NEXT: vpsrlq $48, %xmm0, %xmm0
; AVX512-FCP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX512-FCP-NEXT: vpbroadcastw 8(%rdi), %xmm5
; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3,4,5,6,7]
; AVX512-FCP-NEXT: vmovd %xmm2, (%rsi)
; AVX512-FCP-NEXT: vmovd %xmm3, (%rdx)
; AVX512-FCP-NEXT: vmovd %xmm4, (%rcx)
; AVX512-FCP-NEXT: vmovd %xmm0, (%r8)
; AVX512-FCP-NEXT: vmovd %xmm1, (%r9)
; AVX512-FCP-NEXT: retq
;
; AVX512DQ-LABEL: load_i16_stride5_vf2:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,2,2,3]
; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,3,2,3,4,5,6,7]
; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,3,2,3]
; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[3,1,2,3]
; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,1,2,3,4,5,6,7]
; AVX512DQ-NEXT: vpsrlq $48, %xmm0, %xmm0
; AVX512DQ-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX512DQ-NEXT: vpbroadcastw 8(%rdi), %xmm5
; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3,4,5,6,7]
; AVX512DQ-NEXT: vmovd %xmm2, (%rsi)
; AVX512DQ-NEXT: vmovd %xmm3, (%rdx)
; AVX512DQ-NEXT: vmovd %xmm4, (%rcx)
; AVX512DQ-NEXT: vmovd %xmm0, (%r8)
; AVX512DQ-NEXT: vmovd %xmm1, (%r9)
; AVX512DQ-NEXT: retq
;
; AVX512DQ-FCP-LABEL: load_i16_stride5_vf2:
; AVX512DQ-FCP: # %bb.0:
; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-FCP-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,10,11,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[2,3,12,13,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[4,5,14,15,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512DQ-FCP-NEXT: vpsrlq $48, %xmm0, %xmm0
; AVX512DQ-FCP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX512DQ-FCP-NEXT: vpbroadcastw 8(%rdi), %xmm5
; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3,4,5,6,7]
; AVX512DQ-FCP-NEXT: vmovd %xmm2, (%rsi)
; AVX512DQ-FCP-NEXT: vmovd %xmm3, (%rdx)
; AVX512DQ-FCP-NEXT: vmovd %xmm4, (%rcx)
; AVX512DQ-FCP-NEXT: vmovd %xmm0, (%r8)
; AVX512DQ-FCP-NEXT: vmovd %xmm1, (%r9)
; AVX512DQ-FCP-NEXT: retq
;
; AVX512BW-LABEL: load_i16_stride5_vf2:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX512BW-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,2,2,3]
; AVX512BW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,3,2,3,4,5,6,7]
; AVX512BW-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,3,2,3]
; AVX512BW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
; AVX512BW-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[3,1,2,3]
; AVX512BW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,1,2,3,4,5,6,7]
; AVX512BW-NEXT: vpsrlq $48, %xmm0, %xmm0
; AVX512BW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX512BW-NEXT: vpbroadcastw 8(%rdi), %xmm5
; AVX512BW-NEXT: vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3,4,5,6,7]
; AVX512BW-NEXT: vmovd %xmm2, (%rsi)
; AVX512BW-NEXT: vmovd %xmm3, (%rdx)
; AVX512BW-NEXT: vmovd %xmm4, (%rcx)
; AVX512BW-NEXT: vmovd %xmm0, (%r8)
; AVX512BW-NEXT: vmovd %xmm1, (%r9)
; AVX512BW-NEXT: retq
;
; AVX512BW-FCP-LABEL: load_i16_stride5_vf2:
; AVX512BW-FCP: # %bb.0:
; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512BW-FCP-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,10,11,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[2,3,12,13,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512BW-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[4,5,14,15,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512BW-FCP-NEXT: vpsrlq $48, %xmm0, %xmm0
; AVX512BW-FCP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX512BW-FCP-NEXT: vpbroadcastw 8(%rdi), %xmm5
; AVX512BW-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3,4,5,6,7]
; AVX512BW-FCP-NEXT: vmovd %xmm2, (%rsi)
; AVX512BW-FCP-NEXT: vmovd %xmm3, (%rdx)
; AVX512BW-FCP-NEXT: vmovd %xmm4, (%rcx)
; AVX512BW-FCP-NEXT: vmovd %xmm0, (%r8)
; AVX512BW-FCP-NEXT: vmovd %xmm1, (%r9)
; AVX512BW-FCP-NEXT: retq
;
; AVX512DQ-BW-LABEL: load_i16_stride5_vf2:
; AVX512DQ-BW: # %bb.0:
; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-BW-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX512DQ-BW-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,2,2,3]
; AVX512DQ-BW-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,3,2,3,4,5,6,7]
; AVX512DQ-BW-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,3,2,3]
; AVX512DQ-BW-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
; AVX512DQ-BW-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[3,1,2,3]
; AVX512DQ-BW-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[2,1,2,3,4,5,6,7]
; AVX512DQ-BW-NEXT: vpsrlq $48, %xmm0, %xmm0
; AVX512DQ-BW-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX512DQ-BW-NEXT: vpbroadcastw 8(%rdi), %xmm5
; AVX512DQ-BW-NEXT: vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3,4,5,6,7]
; AVX512DQ-BW-NEXT: vmovd %xmm2, (%rsi)
; AVX512DQ-BW-NEXT: vmovd %xmm3, (%rdx)
; AVX512DQ-BW-NEXT: vmovd %xmm4, (%rcx)
; AVX512DQ-BW-NEXT: vmovd %xmm0, (%r8)
; AVX512DQ-BW-NEXT: vmovd %xmm1, (%r9)
; AVX512DQ-BW-NEXT: retq
;
; AVX512DQ-BW-FCP-LABEL: load_i16_stride5_vf2:
; AVX512DQ-BW-FCP: # %bb.0:
; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %xmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqa 16(%rdi), %xmm1
; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,10,11,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[2,3,12,13,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512DQ-BW-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[4,5,14,15,u,u,u,u,u,u,u,u,u,u,u,u]
; AVX512DQ-BW-FCP-NEXT: vpsrlq $48, %xmm0, %xmm0
; AVX512DQ-BW-FCP-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
; AVX512DQ-BW-FCP-NEXT: vpbroadcastw 8(%rdi), %xmm5
; AVX512DQ-BW-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3,4,5,6,7]
; AVX512DQ-BW-FCP-NEXT: vmovd %xmm2, (%rsi)
; AVX512DQ-BW-FCP-NEXT: vmovd %xmm3, (%rdx)
; AVX512DQ-BW-FCP-NEXT: vmovd %xmm4, (%rcx)
; AVX512DQ-BW-FCP-NEXT: vmovd %xmm0, (%r8)
; AVX512DQ-BW-FCP-NEXT: vmovd %xmm1, (%r9)
; AVX512DQ-BW-FCP-NEXT: retq
%wide.vec = load <10 x i16>, ptr %in.vec, align 64
%strided.vec0 = shufflevector <10 x i16> %wide.vec, <10 x i16> poison, <2 x i32> <i32 0, i32 5>
%strided.vec1 = shufflevector <10 x i16> %wide.vec, <10 x i16> poison, <2 x i32> <i32 1, i32 6>
%strided.vec2 = shufflevector <10 x i16> %wide.vec, <10 x i16> poison, <2 x i32> <i32 2, i32 7>
%strided.vec3 = shufflevector <10 x i16> %wide.vec, <10 x i16> poison, <2 x i32> <i32 3, i32 8>
%strided.vec4 = shufflevector <10 x i16> %wide.vec, <10 x i16> poison, <2 x i32> <i32 4, i32 9>
store <2 x i16> %strided.vec0, ptr %out.vec0, align 64
store <2 x i16> %strided.vec1, ptr %out.vec1, align 64
store <2 x i16> %strided.vec2, ptr %out.vec2, align 64
store <2 x i16> %strided.vec3, ptr %out.vec3, align 64
store <2 x i16> %strided.vec4, ptr %out.vec4, align 64
ret void
}

288 define void @load_i16_stride5_vf4(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4) nounwind {
289 ; SSE-LABEL: load_i16_stride5_vf4:
291 ; SSE-NEXT: movdqa (%rdi), %xmm2
292 ; SSE-NEXT: movdqa 16(%rdi), %xmm3
293 ; SSE-NEXT: movdqa 32(%rdi), %xmm0
294 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[3,1,2,3]
295 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm1[2,1,2,3,4,5,6,7]
296 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
297 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,3,2,3,4,5,6,7]
298 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
299 ; SSE-NEXT: movdqa %xmm3, %xmm4
300 ; SSE-NEXT: psrlq $48, %xmm4
301 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,3,2,3]
302 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm5[1,2,2,3,4,5,6,7]
303 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
304 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
305 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm5[0,3,2,1]
306 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,6,6,7]
307 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,2,2,3]
308 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,3,2,1,4,5,6,7]
309 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,1,1,3]
310 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,7,6,7]
311 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm3[2],xmm5[3],xmm3[3]
312 ; SSE-NEXT: punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
313 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[3,1,2,0]
314 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,6,6,7]
315 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm5[2,1,2,3]
316 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm5[1,2,0,3,4,5,6,7]
317 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm2[1,1,1,1]
318 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm3[0,2,2,3]
319 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm6[0],xmm7[1],xmm6[1]
320 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm7[0,3,2,3,4,5,6,7]
321 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
322 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm6[1,0,3,3,4,5,6,7]
323 ; SSE-NEXT: punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
324 ; SSE-NEXT: pshuflw {{.*#+}} xmm6 = xmm6[0,2,2,3,4,5,6,7]
325 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
326 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [65535,65535,65535,0,65535,65535,65535,65535]
327 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,0],xmm3[3,0]
328 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[0,2]
329 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,4,6,6,7]
330 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
331 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[2,1,3,3,4,5,6,7]
332 ; SSE-NEXT: pand %xmm7, %xmm2
333 ; SSE-NEXT: pandn %xmm0, %xmm7
334 ; SSE-NEXT: por %xmm2, %xmm7
335 ; SSE-NEXT: movq %xmm1, (%rsi)
336 ; SSE-NEXT: movq %xmm4, (%rdx)
337 ; SSE-NEXT: movq %xmm5, (%rcx)
338 ; SSE-NEXT: movq %xmm6, (%r8)
339 ; SSE-NEXT: movq %xmm7, (%r9)
342 ; AVX-LABEL: load_i16_stride5_vf4:
344 ; AVX-NEXT: vpshufd {{.*#+}} xmm0 = mem[3,1,2,3]
345 ; AVX-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[2,1,2,3,4,5,6,7]
346 ; AVX-NEXT: vmovdqa (%rdi), %xmm1
347 ; AVX-NEXT: vmovdqa 16(%rdi), %xmm2
348 ; AVX-NEXT: vmovdqa 32(%rdi), %xmm3
349 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm1[0,2,2,3]
350 ; AVX-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
351 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1]
352 ; AVX-NEXT: vpsrlq $48, %xmm2, %xmm4
353 ; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm1[0,3,2,3]
354 ; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[1,2,2,3,4,5,6,7]
355 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
356 ; AVX-NEXT: vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
357 ; AVX-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[0,1,4,5,8,9,2,3,u,u,u,u,u,u,u,u]
358 ; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm1[0,1,1,3]
359 ; AVX-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,7,6,7]
360 ; AVX-NEXT: vpunpckhdq {{.*#+}} xmm5 = xmm5[2],xmm2[2],xmm5[3],xmm2[3]
361 ; AVX-NEXT: vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1],xmm5[2],xmm3[2],xmm5[3],xmm3[3]
362 ; AVX-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[0,1,4,5,8,9,6,7,u,u,u,u,u,u,u,u]
363 ; AVX-NEXT: vpblendw {{.*#+}} xmm6 = xmm2[0,1],xmm1[2,3],xmm2[4,5,6,7]
364 ; AVX-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,10,11,u,u,u,u,u,u,u,u]
365 ; AVX-NEXT: vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm3[0],xmm6[1],xmm3[1],xmm6[2],xmm3[2],xmm6[3],xmm3[3]
366 ; AVX-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,2,2,3,4,5,6,7]
367 ; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm6[0,2,2,3]
368 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3],xmm1[4,5],xmm2[6,7]
369 ; AVX-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
370 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm3[3],xmm1[4,5,6,7]
371 ; AVX-NEXT: vmovq %xmm0, (%rsi)
372 ; AVX-NEXT: vmovq %xmm4, (%rdx)
373 ; AVX-NEXT: vmovq %xmm5, (%rcx)
374 ; AVX-NEXT: vmovq %xmm6, (%r8)
375 ; AVX-NEXT: vmovq %xmm1, (%r9)
378 ; AVX2-LABEL: load_i16_stride5_vf4:
380 ; AVX2-NEXT: vmovdqa (%rdi), %xmm0
381 ; AVX2-NEXT: vmovdqa 16(%rdi), %xmm1
382 ; AVX2-NEXT: vmovdqa 32(%rdi), %xmm2
383 ; AVX2-NEXT: vpshufd {{.*#+}} xmm3 = xmm1[3,1,2,3]
384 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,2,3,4,5,6,7]
385 ; AVX2-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[0,2,2,3]
386 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
387 ; AVX2-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
388 ; AVX2-NEXT: vpblendw {{.*#+}} xmm4 = xmm2[0],xmm0[1,2,3,4,5,6,7]
389 ; AVX2-NEXT: vpblendd {{.*#+}} xmm4 = xmm4[0],xmm1[1],xmm4[2,3]
390 ; AVX2-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[2,3,12,13,6,7,0,1,u,u,u,u,u,u,u,u]
391 ; AVX2-NEXT: vpblendd {{.*#+}} xmm5 = xmm2[0],xmm0[1,2,3]
392 ; AVX2-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1],xmm1[2],xmm5[3]
393 ; AVX2-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[4,5,14,15,8,9,2,3,u,u,u,u,u,u,u,u]
394 ; AVX2-NEXT: vpblendw {{.*#+}} xmm6 = xmm0[0,1],xmm2[2],xmm0[3,4,5,6,7]
395 ; AVX2-NEXT: vpblendd {{.*#+}} xmm6 = xmm1[0],xmm6[1],xmm1[2,3]
396 ; AVX2-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,u,u,u,u,u,u,u,u]
397 ; AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2],xmm1[3]
398 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
399 ; AVX2-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3],xmm0[4,5,6,7]
400 ; AVX2-NEXT: vmovq %xmm3, (%rsi)
401 ; AVX2-NEXT: vmovq %xmm4, (%rdx)
402 ; AVX2-NEXT: vmovq %xmm5, (%rcx)
403 ; AVX2-NEXT: vmovq %xmm6, (%r8)
404 ; AVX2-NEXT: vmovq %xmm0, (%r9)
407 ; AVX2-FP-LABEL: load_i16_stride5_vf4:
409 ; AVX2-FP-NEXT: vmovdqa (%rdi), %xmm0
410 ; AVX2-FP-NEXT: vmovdqa 16(%rdi), %xmm1
411 ; AVX2-FP-NEXT: vmovdqa 32(%rdi), %xmm2
412 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm3 = xmm1[4,5,14,15,u,u,u,u,u,u,u,u,u,u,u,u]
413 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[0,1,10,11,u,u,u,u,u,u,u,u,u,u,u,u]
414 ; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
415 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm4 = xmm2[0],xmm0[1,2,3,4,5,6,7]
416 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm4 = xmm4[0],xmm1[1],xmm4[2,3]
417 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[2,3,12,13,6,7,0,1,u,u,u,u,u,u,u,u]
418 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm5 = xmm2[0],xmm0[1,2,3]
419 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1],xmm1[2],xmm5[3]
420 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[4,5,14,15,8,9,2,3,u,u,u,u,u,u,u,u]
421 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm6 = xmm0[0,1],xmm2[2],xmm0[3,4,5,6,7]
422 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm6 = xmm1[0],xmm6[1],xmm1[2,3]
423 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,u,u,u,u,u,u,u,u]
424 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2],xmm1[3]
425 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
426 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3],xmm0[4,5,6,7]
427 ; AVX2-FP-NEXT: vmovq %xmm3, (%rsi)
428 ; AVX2-FP-NEXT: vmovq %xmm4, (%rdx)
429 ; AVX2-FP-NEXT: vmovq %xmm5, (%rcx)
430 ; AVX2-FP-NEXT: vmovq %xmm6, (%r8)
431 ; AVX2-FP-NEXT: vmovq %xmm0, (%r9)
434 ; AVX2-FCP-LABEL: load_i16_stride5_vf4:
436 ; AVX2-FCP-NEXT: vmovdqa (%rdi), %xmm0
437 ; AVX2-FCP-NEXT: vmovdqa 16(%rdi), %xmm1
438 ; AVX2-FCP-NEXT: vmovdqa 32(%rdi), %xmm2
439 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm1[4,5,14,15,u,u,u,u,u,u,u,u,u,u,u,u]
440 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm0[0,1,10,11,u,u,u,u,u,u,u,u,u,u,u,u]
441 ; AVX2-FCP-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
442 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm2[0],xmm0[1,2,3,4,5,6,7]
443 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm4 = xmm4[0],xmm1[1],xmm4[2,3]
444 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[2,3,12,13,6,7,0,1,u,u,u,u,u,u,u,u]
445 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm5 = xmm2[0],xmm0[1,2,3]
446 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1],xmm1[2],xmm5[3]
447 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[4,5,14,15,8,9,2,3,u,u,u,u,u,u,u,u]
448 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm0[0,1],xmm2[2],xmm0[3,4,5,6,7]
449 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm6 = xmm1[0],xmm6[1],xmm1[2,3]
450 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,u,u,u,u,u,u,u,u]
451 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0,1],xmm0[2],xmm1[3]
452 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
453 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3],xmm0[4,5,6,7]
454 ; AVX2-FCP-NEXT: vmovq %xmm3, (%rsi)
455 ; AVX2-FCP-NEXT: vmovq %xmm4, (%rdx)
456 ; AVX2-FCP-NEXT: vmovq %xmm5, (%rcx)
457 ; AVX2-FCP-NEXT: vmovq %xmm6, (%r8)
458 ; AVX2-FCP-NEXT: vmovq %xmm0, (%r9)
459 ; AVX2-FCP-NEXT: retq
461 ; AVX512-LABEL: load_i16_stride5_vf4:
463 ; AVX512-NEXT: vmovdqa (%rdi), %xmm0
464 ; AVX512-NEXT: vmovdqa 16(%rdi), %xmm1
465 ; AVX512-NEXT: vmovdqa 32(%rdi), %xmm2
466 ; AVX512-NEXT: vpextrw $5, %xmm0, %eax
467 ; AVX512-NEXT: vpinsrw $1, %eax, %xmm0, %xmm3
468 ; AVX512-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm1[2],xmm3[3,4,5,6,7]
469 ; AVX512-NEXT: vpextrw $7, %xmm1, %eax
470 ; AVX512-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3
471 ; AVX512-NEXT: vpextrw $6, %xmm0, %eax
472 ; AVX512-NEXT: vpextrw $1, %xmm0, %r10d
473 ; AVX512-NEXT: vmovd %r10d, %xmm4
474 ; AVX512-NEXT: vpinsrw $1, %eax, %xmm4, %xmm4
475 ; AVX512-NEXT: vpextrw $3, %xmm1, %eax
476 ; AVX512-NEXT: vpinsrw $2, %eax, %xmm4, %xmm1
477 ; AVX512-NEXT: vmovd %xmm2, %eax
478 ; AVX512-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
479 ; AVX512-NEXT: vmovdqa 16(%rdi), %xmm4
480 ; AVX512-NEXT: vpblendd {{.*#+}} xmm5 = xmm2[0],xmm0[1,2,3]
481 ; AVX512-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1],xmm4[2],xmm5[3]
482 ; AVX512-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[4,5,14,15,8,9,2,3,u,u,u,u,u,u,u,u]
483 ; AVX512-NEXT: vpblendw {{.*#+}} xmm6 = xmm0[0,1],xmm2[2],xmm0[3,4,5,6,7]
484 ; AVX512-NEXT: vpblendd {{.*#+}} xmm6 = xmm4[0],xmm6[1],xmm4[2,3]
485 ; AVX512-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,u,u,u,u,u,u,u,u]
486 ; AVX512-NEXT: vpblendd {{.*#+}} xmm0 = xmm4[0,1],xmm0[2],xmm4[3]
487 ; AVX512-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
488 ; AVX512-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3],xmm0[4,5,6,7]
489 ; AVX512-NEXT: vmovq %xmm3, (%rsi)
490 ; AVX512-NEXT: vmovq %xmm1, (%rdx)
491 ; AVX512-NEXT: vmovq %xmm5, (%rcx)
492 ; AVX512-NEXT: vmovq %xmm6, (%r8)
493 ; AVX512-NEXT: vmovq %xmm0, (%r9)
496 ; AVX512-FCP-LABEL: load_i16_stride5_vf4:
497 ; AVX512-FCP: # %bb.0:
498 ; AVX512-FCP-NEXT: vmovdqa (%rdi), %xmm0
499 ; AVX512-FCP-NEXT: vmovdqa 16(%rdi), %xmm1
500 ; AVX512-FCP-NEXT: vmovdqa 32(%rdi), %xmm2
501 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[0,1,10,11,u,u,u,u,u,u,u,u,u,u,u,u]
502 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm1[2],xmm3[3,4,5,6,7]
503 ; AVX512-FCP-NEXT: vpextrw $7, %xmm1, %eax
504 ; AVX512-FCP-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3
505 ; AVX512-FCP-NEXT: vpextrw $3, %xmm1, %eax
506 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm0[2,3,12,13,u,u,u,u,u,u,u,u,u,u,u,u]
507 ; AVX512-FCP-NEXT: vpinsrw $2, %eax, %xmm1, %xmm1
508 ; AVX512-FCP-NEXT: vmovd %xmm2, %eax
509 ; AVX512-FCP-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
510 ; AVX512-FCP-NEXT: vmovdqa 16(%rdi), %xmm4
511 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm5 = xmm2[0],xmm0[1,2,3]
512 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1],xmm4[2],xmm5[3]
513 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[4,5,14,15,8,9,2,3,u,u,u,u,u,u,u,u]
514 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm0[0,1],xmm2[2],xmm0[3,4,5,6,7]
515 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm6 = xmm4[0],xmm6[1],xmm4[2,3]
516 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,u,u,u,u,u,u,u,u]
517 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm4[0,1],xmm0[2],xmm4[3]
518 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
519 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3],xmm0[4,5,6,7]
520 ; AVX512-FCP-NEXT: vmovq %xmm3, (%rsi)
521 ; AVX512-FCP-NEXT: vmovq %xmm1, (%rdx)
522 ; AVX512-FCP-NEXT: vmovq %xmm5, (%rcx)
523 ; AVX512-FCP-NEXT: vmovq %xmm6, (%r8)
524 ; AVX512-FCP-NEXT: vmovq %xmm0, (%r9)
525 ; AVX512-FCP-NEXT: retq
527 ; AVX512DQ-LABEL: load_i16_stride5_vf4:
529 ; AVX512DQ-NEXT: vmovdqa (%rdi), %xmm0
530 ; AVX512DQ-NEXT: vmovdqa 16(%rdi), %xmm1
531 ; AVX512DQ-NEXT: vmovdqa 32(%rdi), %xmm2
532 ; AVX512DQ-NEXT: vpextrw $5, %xmm0, %eax
533 ; AVX512DQ-NEXT: vpinsrw $1, %eax, %xmm0, %xmm3
534 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm1[2],xmm3[3,4,5,6,7]
535 ; AVX512DQ-NEXT: vpextrw $7, %xmm1, %eax
536 ; AVX512DQ-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3
537 ; AVX512DQ-NEXT: vpextrw $6, %xmm0, %eax
538 ; AVX512DQ-NEXT: vpextrw $1, %xmm0, %r10d
539 ; AVX512DQ-NEXT: vmovd %r10d, %xmm4
540 ; AVX512DQ-NEXT: vpinsrw $1, %eax, %xmm4, %xmm4
541 ; AVX512DQ-NEXT: vpextrw $3, %xmm1, %eax
542 ; AVX512DQ-NEXT: vpinsrw $2, %eax, %xmm4, %xmm1
543 ; AVX512DQ-NEXT: vmovd %xmm2, %eax
544 ; AVX512DQ-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
545 ; AVX512DQ-NEXT: vmovdqa 16(%rdi), %xmm4
546 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm5 = xmm2[0],xmm0[1,2,3]
547 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1],xmm4[2],xmm5[3]
548 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[4,5,14,15,8,9,2,3,u,u,u,u,u,u,u,u]
549 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm6 = xmm0[0,1],xmm2[2],xmm0[3,4,5,6,7]
550 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm6 = xmm4[0],xmm6[1],xmm4[2,3]
551 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,u,u,u,u,u,u,u,u]
552 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm0 = xmm4[0,1],xmm0[2],xmm4[3]
553 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
554 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3],xmm0[4,5,6,7]
555 ; AVX512DQ-NEXT: vmovq %xmm3, (%rsi)
556 ; AVX512DQ-NEXT: vmovq %xmm1, (%rdx)
557 ; AVX512DQ-NEXT: vmovq %xmm5, (%rcx)
558 ; AVX512DQ-NEXT: vmovq %xmm6, (%r8)
559 ; AVX512DQ-NEXT: vmovq %xmm0, (%r9)
560 ; AVX512DQ-NEXT: retq
562 ; AVX512DQ-FCP-LABEL: load_i16_stride5_vf4:
563 ; AVX512DQ-FCP: # %bb.0:
564 ; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %xmm0
565 ; AVX512DQ-FCP-NEXT: vmovdqa 16(%rdi), %xmm1
566 ; AVX512DQ-FCP-NEXT: vmovdqa 32(%rdi), %xmm2
567 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm0[0,1,10,11,u,u,u,u,u,u,u,u,u,u,u,u]
568 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm1[2],xmm3[3,4,5,6,7]
569 ; AVX512DQ-FCP-NEXT: vpextrw $7, %xmm1, %eax
570 ; AVX512DQ-FCP-NEXT: vpinsrw $3, %eax, %xmm3, %xmm3
571 ; AVX512DQ-FCP-NEXT: vpextrw $3, %xmm1, %eax
572 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm0[2,3,12,13,u,u,u,u,u,u,u,u,u,u,u,u]
573 ; AVX512DQ-FCP-NEXT: vpinsrw $2, %eax, %xmm1, %xmm1
574 ; AVX512DQ-FCP-NEXT: vmovd %xmm2, %eax
575 ; AVX512DQ-FCP-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
576 ; AVX512DQ-FCP-NEXT: vmovdqa 16(%rdi), %xmm4
577 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm5 = xmm2[0],xmm0[1,2,3]
578 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1],xmm4[2],xmm5[3]
579 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[4,5,14,15,8,9,2,3,u,u,u,u,u,u,u,u]
580 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm0[0,1],xmm2[2],xmm0[3,4,5,6,7]
581 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm6 = xmm4[0],xmm6[1],xmm4[2,3]
582 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,u,u,u,u,u,u,u,u]
583 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm4[0,1],xmm0[2],xmm4[3]
584 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
585 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3],xmm0[4,5,6,7]
586 ; AVX512DQ-FCP-NEXT: vmovq %xmm3, (%rsi)
587 ; AVX512DQ-FCP-NEXT: vmovq %xmm1, (%rdx)
588 ; AVX512DQ-FCP-NEXT: vmovq %xmm5, (%rcx)
589 ; AVX512DQ-FCP-NEXT: vmovq %xmm6, (%r8)
590 ; AVX512DQ-FCP-NEXT: vmovq %xmm0, (%r9)
591 ; AVX512DQ-FCP-NEXT: retq
593 ; AVX512BW-LABEL: load_i16_stride5_vf4:
595 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm0 = [1,6,11,0,0,0,0,0]
596 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm1
597 ; AVX512BW-NEXT: vpermw %zmm1, %zmm0, %zmm0
598 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm2 = [0,5,10,0,0,0,0,0]
599 ; AVX512BW-NEXT: vpermw %zmm1, %zmm2, %zmm1
600 ; AVX512BW-NEXT: vmovdqa 16(%rdi), %xmm2
601 ; AVX512BW-NEXT: vpextrw $7, %xmm2, %eax
602 ; AVX512BW-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
603 ; AVX512BW-NEXT: vpinsrw $3, 32(%rdi), %xmm0, %xmm0
604 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm2 = [2,7,12,17,0,0,0,0]
605 ; AVX512BW-NEXT: vmovdqa 32(%rdi), %ymm3
606 ; AVX512BW-NEXT: vmovdqa (%rdi), %ymm4
607 ; AVX512BW-NEXT: vpermi2w %ymm3, %ymm4, %ymm2
608 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm5 = [3,8,13,18,0,0,0,0]
609 ; AVX512BW-NEXT: vpermi2w %ymm3, %ymm4, %ymm5
610 ; AVX512BW-NEXT: vmovq {{.*#+}} xmm6 = [4,9,14,19,0,0,0,0]
611 ; AVX512BW-NEXT: vpermi2w %ymm3, %ymm4, %ymm6
612 ; AVX512BW-NEXT: vmovq %xmm1, (%rsi)
613 ; AVX512BW-NEXT: vmovq %xmm0, (%rdx)
614 ; AVX512BW-NEXT: vmovq %xmm2, (%rcx)
615 ; AVX512BW-NEXT: vmovq %xmm5, (%r8)
616 ; AVX512BW-NEXT: vmovq %xmm6, (%r9)
617 ; AVX512BW-NEXT: vzeroupper
618 ; AVX512BW-NEXT: retq
620 ; AVX512BW-FCP-LABEL: load_i16_stride5_vf4:
621 ; AVX512BW-FCP: # %bb.0:
622 ; AVX512BW-FCP-NEXT: vmovq {{.*#+}} xmm0 = [1,6,11,0,0,0,0,0]
623 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm1
624 ; AVX512BW-FCP-NEXT: vpermw %zmm1, %zmm0, %zmm0
625 ; AVX512BW-FCP-NEXT: vmovq {{.*#+}} xmm2 = [0,5,10,0,0,0,0,0]
626 ; AVX512BW-FCP-NEXT: vpermw %zmm1, %zmm2, %zmm1
627 ; AVX512BW-FCP-NEXT: vmovdqa 16(%rdi), %xmm2
628 ; AVX512BW-FCP-NEXT: vpextrw $7, %xmm2, %eax
629 ; AVX512BW-FCP-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
630 ; AVX512BW-FCP-NEXT: vpinsrw $3, 32(%rdi), %xmm0, %xmm0
631 ; AVX512BW-FCP-NEXT: vmovq {{.*#+}} xmm2 = [2,7,12,17,0,0,0,0]
632 ; AVX512BW-FCP-NEXT: vmovdqa 32(%rdi), %ymm3
633 ; AVX512BW-FCP-NEXT: vmovdqa (%rdi), %ymm4
634 ; AVX512BW-FCP-NEXT: vpermi2w %ymm3, %ymm4, %ymm2
635 ; AVX512BW-FCP-NEXT: vmovq {{.*#+}} xmm5 = [3,8,13,18,0,0,0,0]
636 ; AVX512BW-FCP-NEXT: vpermi2w %ymm3, %ymm4, %ymm5
637 ; AVX512BW-FCP-NEXT: vmovq {{.*#+}} xmm6 = [4,9,14,19,0,0,0,0]
638 ; AVX512BW-FCP-NEXT: vpermi2w %ymm3, %ymm4, %ymm6
639 ; AVX512BW-FCP-NEXT: vmovq %xmm1, (%rsi)
640 ; AVX512BW-FCP-NEXT: vmovq %xmm0, (%rdx)
641 ; AVX512BW-FCP-NEXT: vmovq %xmm2, (%rcx)
642 ; AVX512BW-FCP-NEXT: vmovq %xmm5, (%r8)
643 ; AVX512BW-FCP-NEXT: vmovq %xmm6, (%r9)
644 ; AVX512BW-FCP-NEXT: vzeroupper
645 ; AVX512BW-FCP-NEXT: retq
647 ; AVX512DQ-BW-LABEL: load_i16_stride5_vf4:
648 ; AVX512DQ-BW: # %bb.0:
649 ; AVX512DQ-BW-NEXT: vmovq {{.*#+}} xmm0 = [1,6,11,0,0,0,0,0]
650 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm1
651 ; AVX512DQ-BW-NEXT: vpermw %zmm1, %zmm0, %zmm0
652 ; AVX512DQ-BW-NEXT: vmovq {{.*#+}} xmm2 = [0,5,10,0,0,0,0,0]
653 ; AVX512DQ-BW-NEXT: vpermw %zmm1, %zmm2, %zmm1
654 ; AVX512DQ-BW-NEXT: vmovdqa 16(%rdi), %xmm2
655 ; AVX512DQ-BW-NEXT: vpextrw $7, %xmm2, %eax
656 ; AVX512DQ-BW-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
657 ; AVX512DQ-BW-NEXT: vpinsrw $3, 32(%rdi), %xmm0, %xmm0
658 ; AVX512DQ-BW-NEXT: vmovq {{.*#+}} xmm2 = [2,7,12,17,0,0,0,0]
659 ; AVX512DQ-BW-NEXT: vmovdqa 32(%rdi), %ymm3
660 ; AVX512DQ-BW-NEXT: vmovdqa (%rdi), %ymm4
661 ; AVX512DQ-BW-NEXT: vpermi2w %ymm3, %ymm4, %ymm2
662 ; AVX512DQ-BW-NEXT: vmovq {{.*#+}} xmm5 = [3,8,13,18,0,0,0,0]
663 ; AVX512DQ-BW-NEXT: vpermi2w %ymm3, %ymm4, %ymm5
664 ; AVX512DQ-BW-NEXT: vmovq {{.*#+}} xmm6 = [4,9,14,19,0,0,0,0]
665 ; AVX512DQ-BW-NEXT: vpermi2w %ymm3, %ymm4, %ymm6
666 ; AVX512DQ-BW-NEXT: vmovq %xmm1, (%rsi)
667 ; AVX512DQ-BW-NEXT: vmovq %xmm0, (%rdx)
668 ; AVX512DQ-BW-NEXT: vmovq %xmm2, (%rcx)
669 ; AVX512DQ-BW-NEXT: vmovq %xmm5, (%r8)
670 ; AVX512DQ-BW-NEXT: vmovq %xmm6, (%r9)
671 ; AVX512DQ-BW-NEXT: vzeroupper
672 ; AVX512DQ-BW-NEXT: retq
674 ; AVX512DQ-BW-FCP-LABEL: load_i16_stride5_vf4:
675 ; AVX512DQ-BW-FCP: # %bb.0:
676 ; AVX512DQ-BW-FCP-NEXT: vmovq {{.*#+}} xmm0 = [1,6,11,0,0,0,0,0]
677 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm1
678 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm1, %zmm0, %zmm0
679 ; AVX512DQ-BW-FCP-NEXT: vmovq {{.*#+}} xmm2 = [0,5,10,0,0,0,0,0]
680 ; AVX512DQ-BW-FCP-NEXT: vpermw %zmm1, %zmm2, %zmm1
681 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 16(%rdi), %xmm2
682 ; AVX512DQ-BW-FCP-NEXT: vpextrw $7, %xmm2, %eax
683 ; AVX512DQ-BW-FCP-NEXT: vpinsrw $3, %eax, %xmm1, %xmm1
684 ; AVX512DQ-BW-FCP-NEXT: vpinsrw $3, 32(%rdi), %xmm0, %xmm0
685 ; AVX512DQ-BW-FCP-NEXT: vmovq {{.*#+}} xmm2 = [2,7,12,17,0,0,0,0]
686 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 32(%rdi), %ymm3
687 ; AVX512DQ-BW-FCP-NEXT: vmovdqa (%rdi), %ymm4
688 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %ymm3, %ymm4, %ymm2
689 ; AVX512DQ-BW-FCP-NEXT: vmovq {{.*#+}} xmm5 = [3,8,13,18,0,0,0,0]
690 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %ymm3, %ymm4, %ymm5
691 ; AVX512DQ-BW-FCP-NEXT: vmovq {{.*#+}} xmm6 = [4,9,14,19,0,0,0,0]
692 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %ymm3, %ymm4, %ymm6
693 ; AVX512DQ-BW-FCP-NEXT: vmovq %xmm1, (%rsi)
694 ; AVX512DQ-BW-FCP-NEXT: vmovq %xmm0, (%rdx)
695 ; AVX512DQ-BW-FCP-NEXT: vmovq %xmm2, (%rcx)
696 ; AVX512DQ-BW-FCP-NEXT: vmovq %xmm5, (%r8)
697 ; AVX512DQ-BW-FCP-NEXT: vmovq %xmm6, (%r9)
698 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
699 ; AVX512DQ-BW-FCP-NEXT: retq
700 %wide.vec = load <20 x i16>, ptr %in.vec, align 64
701 %strided.vec0 = shufflevector <20 x i16> %wide.vec, <20 x i16> poison, <4 x i32> <i32 0, i32 5, i32 10, i32 15>
702 %strided.vec1 = shufflevector <20 x i16> %wide.vec, <20 x i16> poison, <4 x i32> <i32 1, i32 6, i32 11, i32 16>
703 %strided.vec2 = shufflevector <20 x i16> %wide.vec, <20 x i16> poison, <4 x i32> <i32 2, i32 7, i32 12, i32 17>
704 %strided.vec3 = shufflevector <20 x i16> %wide.vec, <20 x i16> poison, <4 x i32> <i32 3, i32 8, i32 13, i32 18>
705 %strided.vec4 = shufflevector <20 x i16> %wide.vec, <20 x i16> poison, <4 x i32> <i32 4, i32 9, i32 14, i32 19>
706 store <4 x i16> %strided.vec0, ptr %out.vec0, align 64
707 store <4 x i16> %strided.vec1, ptr %out.vec1, align 64
708 store <4 x i16> %strided.vec2, ptr %out.vec2, align 64
709 store <4 x i16> %strided.vec3, ptr %out.vec3, align 64
710 store <4 x i16> %strided.vec4, ptr %out.vec4, align 64
714 define void @load_i16_stride5_vf8(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4) nounwind {
715 ; SSE-LABEL: load_i16_stride5_vf8:
717 ; SSE-NEXT: movdqa 64(%rdi), %xmm6
718 ; SSE-NEXT: movdqa (%rdi), %xmm4
719 ; SSE-NEXT: movdqa 16(%rdi), %xmm3
720 ; SSE-NEXT: movdqa 32(%rdi), %xmm0
721 ; SSE-NEXT: movdqa 48(%rdi), %xmm5
722 ; SSE-NEXT: movdqa {{.*#+}} xmm1 = [65535,65535,65535,65535,0,65535,65535,65535]
723 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm5[0,1,0,3]
724 ; SSE-NEXT: pand %xmm1, %xmm2
725 ; SSE-NEXT: pandn %xmm0, %xmm1
726 ; SSE-NEXT: por %xmm2, %xmm1
727 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[3,1,2,3]
728 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[2,1,2,3,4,5,6,7]
729 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm4[0,2,2,3]
730 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm7[0,3,2,3,4,5,6,7]
731 ; SSE-NEXT: punpckldq {{.*#+}} xmm7 = xmm7[0],xmm2[0],xmm7[1],xmm2[1]
732 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,1],xmm1[2,3]
733 ; SSE-NEXT: movaps {{.*#+}} xmm1 = [65535,65535,65535,65535,65535,65535,65535,0]
734 ; SSE-NEXT: andps %xmm1, %xmm7
735 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm6[0,1,0,1]
736 ; SSE-NEXT: movaps %xmm1, %xmm2
737 ; SSE-NEXT: pandn %xmm8, %xmm2
738 ; SSE-NEXT: por %xmm7, %xmm2
739 ; SSE-NEXT: movdqa %xmm3, %xmm7
740 ; SSE-NEXT: psrlq $48, %xmm7
741 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm4[0,3,2,3]
742 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm8[1,2,2,3,4,5,6,7]
743 ; SSE-NEXT: punpckldq {{.*#+}} xmm8 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
744 ; SSE-NEXT: movdqa {{.*#+}} xmm7 = [0,0,0,65535,65535,65535,65535,65535]
745 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm5[1,3,2,3]
746 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm0[0,2,2,3]
747 ; SSE-NEXT: punpckldq {{.*#+}} xmm10 = xmm10[0],xmm9[0],xmm10[1],xmm9[1]
748 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm10[0,1,2,3,7,5,6,7]
749 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[0,1,2,1]
750 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm9[0,0,0,0,4,5,6,7]
751 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,5,6,4,7]
752 ; SSE-NEXT: pand %xmm7, %xmm9
753 ; SSE-NEXT: pandn %xmm8, %xmm7
754 ; SSE-NEXT: por %xmm9, %xmm7
755 ; SSE-NEXT: pand %xmm1, %xmm7
756 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm6[0,1,2,0]
757 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm6[0,1,0,3]
758 ; SSE-NEXT: pshufd {{.*#+}} xmm10 = xmm6[0,1,1,3]
759 ; SSE-NEXT: psllq $48, %xmm6
760 ; SSE-NEXT: pandn %xmm6, %xmm1
761 ; SSE-NEXT: por %xmm7, %xmm1
762 ; SSE-NEXT: movdqa %xmm5, %xmm7
763 ; SSE-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,2,2,3]
764 ; SSE-NEXT: movdqa %xmm5, %xmm12
765 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[1,0],xmm0[0,0]
766 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,0],xmm0[2,3]
767 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,0,1,3]
768 ; SSE-NEXT: movdqa {{.*#+}} xmm13 = [65535,65535,65535,0,0,0,65535,65535]
769 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,1,1,3]
770 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,7,6,7]
771 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm3[2],xmm5[3],xmm3[3]
772 ; SSE-NEXT: pand %xmm13, %xmm5
773 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm4[1,1,1,1]
774 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm3[0,2,2,3]
775 ; SSE-NEXT: punpckldq {{.*#+}} xmm14 = xmm14[0],xmm11[0],xmm14[1],xmm11[1]
776 ; SSE-NEXT: movdqa %xmm13, %xmm15
777 ; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm14[0,3,2,3,4,5,6,7]
778 ; SSE-NEXT: pshufd {{.*#+}} xmm11 = xmm11[0,2,2,3]
779 ; SSE-NEXT: pshuflw {{.*#+}} xmm11 = xmm11[1,0,3,3,4,5,6,7]
780 ; SSE-NEXT: pand %xmm13, %xmm11
781 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,0],xmm3[3,0]
782 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm4[0,2]
783 ; SSE-NEXT: movdqa %xmm13, %xmm4
784 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,6,6,7]
785 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
786 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[2,1,3,3,4,5,6,7]
787 ; SSE-NEXT: pand %xmm13, %xmm3
788 ; SSE-NEXT: pandn %xmm12, %xmm13
789 ; SSE-NEXT: por %xmm13, %xmm5
790 ; SSE-NEXT: pshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,6,5,6,7]
791 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,6,5]
792 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[3,1],xmm12[2,3]
793 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm8[2,0]
794 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm0[2,2,2,2,4,5,6,7]
795 ; SSE-NEXT: pandn %xmm8, %xmm15
796 ; SSE-NEXT: por %xmm15, %xmm11
797 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[2,0],xmm0[3,0]
798 ; SSE-NEXT: pandn %xmm0, %xmm4
799 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm7[0,2]
800 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,7,4,6,7]
801 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm9[0,1,2,3,4,5,5,6]
802 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[3,1],xmm0[2,3]
803 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[0,1],xmm7[2,0]
804 ; SSE-NEXT: por %xmm4, %xmm3
805 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm6[0,1,0,3,4,5,6,7]
806 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm10[0,1,2,3,4,5,4,7]
807 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm0[1,3]
808 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,0]
809 ; SSE-NEXT: movdqa %xmm2, (%rsi)
810 ; SSE-NEXT: movdqa %xmm1, (%rdx)
811 ; SSE-NEXT: movaps %xmm5, (%rcx)
812 ; SSE-NEXT: movaps %xmm11, (%r8)
813 ; SSE-NEXT: movaps %xmm3, (%r9)
816 ; AVX-LABEL: load_i16_stride5_vf8:
818 ; AVX-NEXT: vmovdqa (%rdi), %xmm0
819 ; AVX-NEXT: vmovdqa 16(%rdi), %xmm1
820 ; AVX-NEXT: vmovdqa 32(%rdi), %xmm2
821 ; AVX-NEXT: vmovdqa 48(%rdi), %xmm3
822 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[0,1,0,3]
823 ; AVX-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2,3],xmm2[4],xmm4[5,6,7]
824 ; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm1[3,1,2,3]
825 ; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[2,1,2,3,4,5,6,7]
826 ; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm0[0,2,2,3]
827 ; AVX-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,3,2,3,4,5,6,7]
828 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
829 ; AVX-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2,3],xmm4[4,5,6,7]
830 ; AVX-NEXT: vmovdqa 64(%rdi), %xmm5
831 ; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm5[0,1,0,1]
832 ; AVX-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6],xmm6[7]
833 ; AVX-NEXT: vpsrlq $48, %xmm1, %xmm6
834 ; AVX-NEXT: vpshufd {{.*#+}} xmm7 = xmm0[0,3,2,3]
835 ; AVX-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[1,2,2,3,4,5,6,7]
836 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1]
837 ; AVX-NEXT: vpblendw {{.*#+}} xmm7 = xmm2[0,1],xmm3[2,3],xmm2[4,5],xmm3[6,7]
838 ; AVX-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[u,u,u,u,u,u,0,1,10,11,4,5,14,15,u,u]
839 ; AVX-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2],xmm7[3,4,5,6,7]
840 ; AVX-NEXT: vpsllq $48, %xmm5, %xmm7
841 ; AVX-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,5,6],xmm7[7]
842 ; AVX-NEXT: vpshufd {{.*#+}} xmm7 = xmm0[0,1,1,3]
843 ; AVX-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,7,6,7]
844 ; AVX-NEXT: vpunpckhdq {{.*#+}} xmm7 = xmm7[2],xmm1[2],xmm7[3],xmm1[3]
845 ; AVX-NEXT: vpblendw {{.*#+}} xmm8 = xmm2[0,1],xmm3[2,3],xmm2[4,5,6,7]
846 ; AVX-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[u,u,u,u,u,u,2,3,12,13,6,7,u,u,u,u]
847 ; AVX-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm8[3,4,5],xmm7[6,7]
848 ; AVX-NEXT: vpshufd {{.*#+}} xmm8 = xmm5[0,1,2,0]
849 ; AVX-NEXT: vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,6,5]
850 ; AVX-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5],xmm8[6,7]
851 ; AVX-NEXT: vpblendw {{.*#+}} xmm8 = xmm1[0,1],xmm0[2,3],xmm1[4,5,6,7]
852 ; AVX-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[6,7,0,1,10,11,u,u,u,u,u,u,12,13,14,15]
853 ; AVX-NEXT: vpblendw {{.*#+}} xmm9 = xmm2[0,1,2,3],xmm3[4,5],xmm2[6,7]
854 ; AVX-NEXT: vpshuflw {{.*#+}} xmm9 = xmm9[2,2,2,2,4,5,6,7]
855 ; AVX-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,7,4,6,7]
856 ; AVX-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1,2],xmm9[3,4,5],xmm8[6,7]
857 ; AVX-NEXT: vpshufd {{.*#+}} xmm9 = xmm5[0,1,0,3]
858 ; AVX-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,5,6]
859 ; AVX-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5],xmm9[6,7]
860 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[1,1,1,1]
861 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
862 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,1,0,3,4,5,6,7]
863 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
864 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3],xmm0[4,5],xmm1[6,7]
865 ; AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,2,3,12,13,u,u,u,u,u,u,12,13,14,15]
866 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3,4,5],xmm0[6,7]
867 ; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm5[0,1,1,3]
868 ; AVX-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,4,7]
869 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5],xmm1[6,7]
870 ; AVX-NEXT: vmovdqa %xmm4, (%rsi)
871 ; AVX-NEXT: vmovdqa %xmm6, (%rdx)
872 ; AVX-NEXT: vmovdqa %xmm7, (%rcx)
873 ; AVX-NEXT: vmovdqa %xmm8, (%r8)
874 ; AVX-NEXT: vmovdqa %xmm0, (%r9)
877 ; AVX2-LABEL: load_i16_stride5_vf8:
879 ; AVX2-NEXT: vmovdqa (%rdi), %ymm0
880 ; AVX2-NEXT: vmovdqa 32(%rdi), %ymm2
881 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0],ymm2[1],ymm0[2,3],ymm2[4],ymm0[5],ymm2[6],ymm0[7,8],ymm2[9],ymm0[10,11],ymm2[12],ymm0[13],ymm2[14],ymm0[15]
882 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm3
883 ; AVX2-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm3[1,2,3],xmm1[4,5],xmm3[6,7]
884 ; AVX2-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0,1,10,11,4,5,14,15,8,9,2,3,12,13,u,u]
885 ; AVX2-NEXT: vpbroadcastw 70(%rdi), %xmm3
886 ; AVX2-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6],xmm3[7]
887 ; AVX2-NEXT: vpblendw {{.*#+}} ymm3 = ymm2[0],ymm0[1],ymm2[2],ymm0[3],ymm2[4,5],ymm0[6],ymm2[7,8],ymm0[9],ymm2[10],ymm0[11],ymm2[12,13],ymm0[14],ymm2[15]
888 ; AVX2-NEXT: vextracti128 $1, %ymm3, %xmm4
889 ; AVX2-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3],xmm3[4,5,6],xmm4[7]
890 ; AVX2-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[2,3,12,13,6,7,0,1,10,11,4,5,14,15,u,u]
891 ; AVX2-NEXT: vmovdqa 64(%rdi), %xmm4
892 ; AVX2-NEXT: vpsllq $48, %xmm4, %xmm5
893 ; AVX2-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,6],xmm5[7]
894 ; AVX2-NEXT: vpblendw {{.*#+}} ymm5 = ymm2[0,1],ymm0[2],ymm2[3],ymm0[4],ymm2[5,6],ymm0[7],ymm2[8,9],ymm0[10],ymm2[11],ymm0[12],ymm2[13,14],ymm0[15]
895 ; AVX2-NEXT: vextracti128 $1, %ymm5, %xmm6
896 ; AVX2-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3,4],xmm5[5,6,7]
897 ; AVX2-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
898 ; AVX2-NEXT: vpshufd {{.*#+}} xmm6 = xmm4[0,1,2,0]
899 ; AVX2-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,5,6,5]
900 ; AVX2-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3]
901 ; AVX2-NEXT: vpblendw {{.*#+}} ymm6 = ymm0[0],ymm2[1,2],ymm0[3],ymm2[4],ymm0[5],ymm2[6,7],ymm0[8],ymm2[9,10],ymm0[11],ymm2[12],ymm0[13],ymm2[14,15]
902 ; AVX2-NEXT: vextracti128 $1, %ymm6, %xmm7
903 ; AVX2-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0],xmm6[1],xmm7[2],xmm6[3]
904 ; AVX2-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
905 ; AVX2-NEXT: vpshufd {{.*#+}} xmm7 = xmm4[0,1,0,3]
906 ; AVX2-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5,5,6]
907 ; AVX2-NEXT: vpblendd {{.*#+}} xmm6 = xmm6[0,1,2],xmm7[3]
908 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3],ymm0[4],ymm2[5],ymm0[6],ymm2[7,8],ymm0[9],ymm2[10,11],ymm0[12],ymm2[13],ymm0[14],ymm2[15]
909 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm2
910 ; AVX2-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3,4],xmm2[5,6,7]
911 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
912 ; AVX2-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[0,1,1,3]
913 ; AVX2-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,7]
914 ; AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3]
915 ; AVX2-NEXT: vmovdqa %xmm1, (%rsi)
916 ; AVX2-NEXT: vmovdqa %xmm3, (%rdx)
917 ; AVX2-NEXT: vmovdqa %xmm5, (%rcx)
918 ; AVX2-NEXT: vmovdqa %xmm6, (%r8)
919 ; AVX2-NEXT: vmovdqa %xmm0, (%r9)
920 ; AVX2-NEXT: vzeroupper
923 ; AVX2-FP-LABEL: load_i16_stride5_vf8:
925 ; AVX2-FP-NEXT: vmovdqa (%rdi), %ymm0
926 ; AVX2-FP-NEXT: vmovdqa 32(%rdi), %ymm1
927 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
928 ; AVX2-FP-NEXT: vextracti128 $1, %ymm2, %xmm3
929 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3],xmm2[4,5],xmm3[6,7]
930 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0,1,10,11,4,5,14,15,8,9,2,3,12,13,u,u]
931 ; AVX2-FP-NEXT: vpbroadcastw 70(%rdi), %xmm3
932 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,6],xmm3[7]
933 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm3 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
934 ; AVX2-FP-NEXT: vextracti128 $1, %ymm3, %xmm4
935 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3],xmm3[4,5,6],xmm4[7]
936 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[2,3,12,13,6,7,0,1,10,11,4,5,14,15,u,u]
937 ; AVX2-FP-NEXT: vmovdqa 64(%rdi), %xmm4
938 ; AVX2-FP-NEXT: vpsllq $48, %xmm4, %xmm5
939 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,6],xmm5[7]
940 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm5 = [4,5,14,15,8,9,2,3,12,13,6,7,0,1,10,11]
941 ; AVX2-FP-NEXT: vpshufb %xmm5, %xmm4, %xmm6
942 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm7 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
943 ; AVX2-FP-NEXT: vextracti128 $1, %ymm7, %xmm8
944 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm8[3,4],xmm7[5,6,7]
945 ; AVX2-FP-NEXT: vpshufb %xmm5, %xmm7, %xmm5
946 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3]
947 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm6 = [6,7,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
948 ; AVX2-FP-NEXT: vpshufb %xmm6, %xmm4, %xmm7
949 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
950 ; AVX2-FP-NEXT: vextracti128 $1, %ymm8, %xmm9
951 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0],xmm8[1],xmm9[2],xmm8[3]
952 ; AVX2-FP-NEXT: vpshufb %xmm6, %xmm8, %xmm6
953 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm6 = xmm6[0,1,2],xmm7[3]
954 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm7 = [8,9,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
955 ; AVX2-FP-NEXT: vpshufb %xmm7, %xmm4, %xmm4
956 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
957 ; AVX2-FP-NEXT: vextracti128 $1, %ymm0, %xmm1
958 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3,4],xmm1[5,6,7]
959 ; AVX2-FP-NEXT: vpshufb %xmm7, %xmm0, %xmm0
960 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm4[3]
961 ; AVX2-FP-NEXT: vmovdqa %xmm2, (%rsi)
962 ; AVX2-FP-NEXT: vmovdqa %xmm3, (%rdx)
963 ; AVX2-FP-NEXT: vmovdqa %xmm5, (%rcx)
964 ; AVX2-FP-NEXT: vmovdqa %xmm6, (%r8)
965 ; AVX2-FP-NEXT: vmovdqa %xmm0, (%r9)
966 ; AVX2-FP-NEXT: vzeroupper
969 ; AVX2-FCP-LABEL: load_i16_stride5_vf8:
971 ; AVX2-FCP-NEXT: vmovdqa (%rdi), %ymm0
972 ; AVX2-FCP-NEXT: vmovdqa 32(%rdi), %ymm1
973 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
974 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm2, %xmm3
975 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3],xmm2[4,5],xmm3[6,7]
976 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0,1,10,11,4,5,14,15,8,9,2,3,12,13,u,u]
977 ; AVX2-FCP-NEXT: vpbroadcastw 70(%rdi), %xmm3
978 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,6],xmm3[7]
979 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
980 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm3, %xmm4
981 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3],xmm3[4,5,6],xmm4[7]
982 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[2,3,12,13,6,7,0,1,10,11,4,5,14,15,u,u]
983 ; AVX2-FCP-NEXT: vmovdqa 64(%rdi), %xmm4
984 ; AVX2-FCP-NEXT: vpsllq $48, %xmm4, %xmm5
985 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,6],xmm5[7]
986 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm5 = [4,5,14,15,8,9,2,3,12,13,6,7,0,1,10,11]
987 ; AVX2-FCP-NEXT: vpshufb %xmm5, %xmm4, %xmm6
988 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
989 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm7, %xmm8
990 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm8[3,4],xmm7[5,6,7]
991 ; AVX2-FCP-NEXT: vpshufb %xmm5, %xmm7, %xmm5
992 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3]
993 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm6 = [6,7,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
994 ; AVX2-FCP-NEXT: vpshufb %xmm6, %xmm4, %xmm7
995 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
996 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm8, %xmm9
997 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0],xmm8[1],xmm9[2],xmm8[3]
998 ; AVX2-FCP-NEXT: vpshufb %xmm6, %xmm8, %xmm6
999 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm6 = xmm6[0,1,2],xmm7[3]
1000 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm7 = [8,9,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
1001 ; AVX2-FCP-NEXT: vpshufb %xmm7, %xmm4, %xmm4
1002 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
1003 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
1004 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3,4],xmm1[5,6,7]
1005 ; AVX2-FCP-NEXT: vpshufb %xmm7, %xmm0, %xmm0
1006 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm4[3]
1007 ; AVX2-FCP-NEXT: vmovdqa %xmm2, (%rsi)
1008 ; AVX2-FCP-NEXT: vmovdqa %xmm3, (%rdx)
1009 ; AVX2-FCP-NEXT: vmovdqa %xmm5, (%rcx)
1010 ; AVX2-FCP-NEXT: vmovdqa %xmm6, (%r8)
1011 ; AVX2-FCP-NEXT: vmovdqa %xmm0, (%r9)
1012 ; AVX2-FCP-NEXT: vzeroupper
1013 ; AVX2-FCP-NEXT: retq
1015 ; AVX512-LABEL: load_i16_stride5_vf8:
1017 ; AVX512-NEXT: vmovdqa (%rdi), %ymm0
1018 ; AVX512-NEXT: vmovdqa 32(%rdi), %ymm2
1019 ; AVX512-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0],ymm2[1],ymm0[2,3],ymm2[4],ymm0[5],ymm2[6],ymm0[7,8],ymm2[9],ymm0[10,11],ymm2[12],ymm0[13],ymm2[14],ymm0[15]
1020 ; AVX512-NEXT: vextracti128 $1, %ymm1, %xmm3
1021 ; AVX512-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm3[1,2,3],xmm1[4,5],xmm3[6,7]
1022 ; AVX512-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0,1,10,11,4,5,14,15,8,9,2,3,12,13,u,u]
1023 ; AVX512-NEXT: vpbroadcastw 70(%rdi), %xmm3
1024 ; AVX512-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6],xmm3[7]
1025 ; AVX512-NEXT: vmovdqa 64(%rdi), %xmm3
1026 ; AVX512-NEXT: vpblendw {{.*#+}} ymm4 = ymm2[0],ymm0[1],ymm2[2],ymm0[3],ymm2[4,5],ymm0[6],ymm2[7,8],ymm0[9],ymm2[10],ymm0[11],ymm2[12,13],ymm0[14],ymm2[15]
1027 ; AVX512-NEXT: vextracti128 $1, %ymm4, %xmm5
1028 ; AVX512-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1],xmm5[2,3],xmm4[4,5,6],xmm5[7]
1029 ; AVX512-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[2,3,12,13,6,7,0,1,10,11,4,5,14,15,u,u]
1030 ; AVX512-NEXT: vpsllq $48, %xmm3, %xmm5
1031 ; AVX512-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6],xmm5[7]
1032 ; AVX512-NEXT: vpblendw {{.*#+}} ymm5 = ymm2[0,1],ymm0[2],ymm2[3],ymm0[4],ymm2[5,6],ymm0[7],ymm2[8,9],ymm0[10],ymm2[11],ymm0[12],ymm2[13,14],ymm0[15]
1033 ; AVX512-NEXT: vextracti128 $1, %ymm5, %xmm6
1034 ; AVX512-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3,4],xmm5[5,6,7]
1035 ; AVX512-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
1036 ; AVX512-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[0,1,2,0]
1037 ; AVX512-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,5,6,5]
1038 ; AVX512-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3]
1039 ; AVX512-NEXT: vpblendw {{.*#+}} ymm6 = ymm0[0],ymm2[1,2],ymm0[3],ymm2[4],ymm0[5],ymm2[6,7],ymm0[8],ymm2[9,10],ymm0[11],ymm2[12],ymm0[13],ymm2[14,15]
1040 ; AVX512-NEXT: vextracti128 $1, %ymm6, %xmm7
1041 ; AVX512-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0],xmm6[1],xmm7[2],xmm6[3]
1042 ; AVX512-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
1043 ; AVX512-NEXT: vpshufd {{.*#+}} xmm7 = xmm3[0,1,0,3]
1044 ; AVX512-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5,5,6]
1045 ; AVX512-NEXT: vpblendd {{.*#+}} xmm6 = xmm6[0,1,2],xmm7[3]
1046 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3],ymm0[4],ymm2[5],ymm0[6],ymm2[7,8],ymm0[9],ymm2[10,11],ymm0[12],ymm2[13],ymm0[14],ymm2[15]
1047 ; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm2
1048 ; AVX512-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3,4],xmm2[5,6,7]
1049 ; AVX512-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
1050 ; AVX512-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[0,1,1,3]
1051 ; AVX512-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,7]
1052 ; AVX512-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3]
1053 ; AVX512-NEXT: vmovdqa %xmm1, (%rsi)
1054 ; AVX512-NEXT: vmovdqa %xmm4, (%rdx)
1055 ; AVX512-NEXT: vmovdqa %xmm5, (%rcx)
1056 ; AVX512-NEXT: vmovdqa %xmm6, (%r8)
1057 ; AVX512-NEXT: vmovdqa %xmm0, (%r9)
1058 ; AVX512-NEXT: vzeroupper
1059 ; AVX512-NEXT: retq
1061 ; AVX512-FCP-LABEL: load_i16_stride5_vf8:
1062 ; AVX512-FCP: # %bb.0:
1063 ; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm0
1064 ; AVX512-FCP-NEXT: vmovdqa 32(%rdi), %ymm1
1065 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
1066 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm2, %xmm3
1067 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3],xmm2[4,5],xmm3[6,7]
1068 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0,1,10,11,4,5,14,15,8,9,2,3,12,13,u,u]
1069 ; AVX512-FCP-NEXT: vpbroadcastw 70(%rdi), %xmm3
1070 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,6],xmm3[7]
1071 ; AVX512-FCP-NEXT: vmovdqa 64(%rdi), %xmm3
1072 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
1073 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm4, %xmm5
1074 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1],xmm5[2,3],xmm4[4,5,6],xmm5[7]
1075 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[2,3,12,13,6,7,0,1,10,11,4,5,14,15,u,u]
1076 ; AVX512-FCP-NEXT: vpsllq $48, %xmm3, %xmm5
1077 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6],xmm5[7]
1078 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm5 = [4,5,14,15,8,9,2,3,12,13,6,7,0,1,10,11]
1079 ; AVX512-FCP-NEXT: vpshufb %xmm5, %xmm3, %xmm6
1080 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
1081 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm7, %xmm8
1082 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm8[3,4],xmm7[5,6,7]
1083 ; AVX512-FCP-NEXT: vpshufb %xmm5, %xmm7, %xmm5
1084 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3]
1085 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm6 = [6,7,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
1086 ; AVX512-FCP-NEXT: vpshufb %xmm6, %xmm3, %xmm7
1087 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
1088 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm8, %xmm9
1089 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0],xmm8[1],xmm9[2],xmm8[3]
1090 ; AVX512-FCP-NEXT: vpshufb %xmm6, %xmm8, %xmm6
1091 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm6 = xmm6[0,1,2],xmm7[3]
1092 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm7 = [8,9,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
1093 ; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm3, %xmm3
1094 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
1095 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
1096 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3,4],xmm1[5,6,7]
1097 ; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm0, %xmm0
1098 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm3[3]
1099 ; AVX512-FCP-NEXT: vmovdqa %xmm2, (%rsi)
1100 ; AVX512-FCP-NEXT: vmovdqa %xmm4, (%rdx)
1101 ; AVX512-FCP-NEXT: vmovdqa %xmm5, (%rcx)
1102 ; AVX512-FCP-NEXT: vmovdqa %xmm6, (%r8)
1103 ; AVX512-FCP-NEXT: vmovdqa %xmm0, (%r9)
1104 ; AVX512-FCP-NEXT: vzeroupper
1105 ; AVX512-FCP-NEXT: retq
1107 ; AVX512DQ-LABEL: load_i16_stride5_vf8:
1108 ; AVX512DQ: # %bb.0:
1109 ; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm0
1110 ; AVX512DQ-NEXT: vmovdqa 32(%rdi), %ymm2
1111 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0],ymm2[1],ymm0[2,3],ymm2[4],ymm0[5],ymm2[6],ymm0[7,8],ymm2[9],ymm0[10,11],ymm2[12],ymm0[13],ymm2[14],ymm0[15]
1112 ; AVX512DQ-NEXT: vextracti128 $1, %ymm1, %xmm3
1113 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm3[1,2,3],xmm1[4,5],xmm3[6,7]
1114 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0,1,10,11,4,5,14,15,8,9,2,3,12,13,u,u]
1115 ; AVX512DQ-NEXT: vpbroadcastw 70(%rdi), %xmm3
1116 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6],xmm3[7]
1117 ; AVX512DQ-NEXT: vmovdqa 64(%rdi), %xmm3
1118 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm4 = ymm2[0],ymm0[1],ymm2[2],ymm0[3],ymm2[4,5],ymm0[6],ymm2[7,8],ymm0[9],ymm2[10],ymm0[11],ymm2[12,13],ymm0[14],ymm2[15]
1119 ; AVX512DQ-NEXT: vextracti128 $1, %ymm4, %xmm5
1120 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1],xmm5[2,3],xmm4[4,5,6],xmm5[7]
1121 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[2,3,12,13,6,7,0,1,10,11,4,5,14,15,u,u]
1122 ; AVX512DQ-NEXT: vpsllq $48, %xmm3, %xmm5
1123 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6],xmm5[7]
1124 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm5 = ymm2[0,1],ymm0[2],ymm2[3],ymm0[4],ymm2[5,6],ymm0[7],ymm2[8,9],ymm0[10],ymm2[11],ymm0[12],ymm2[13,14],ymm0[15]
1125 ; AVX512DQ-NEXT: vextracti128 $1, %ymm5, %xmm6
1126 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3,4],xmm5[5,6,7]
1127 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
1128 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm6 = xmm3[0,1,2,0]
1129 ; AVX512DQ-NEXT: vpshufhw {{.*#+}} xmm6 = xmm6[0,1,2,3,4,5,6,5]
1130 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3]
1131 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm6 = ymm0[0],ymm2[1,2],ymm0[3],ymm2[4],ymm0[5],ymm2[6,7],ymm0[8],ymm2[9,10],ymm0[11],ymm2[12],ymm0[13],ymm2[14,15]
1132 ; AVX512DQ-NEXT: vextracti128 $1, %ymm6, %xmm7
1133 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0],xmm6[1],xmm7[2],xmm6[3]
1134 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
1135 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm7 = xmm3[0,1,0,3]
1136 ; AVX512DQ-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,5,5,6]
1137 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm6 = xmm6[0,1,2],xmm7[3]
1138 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0],ymm0[1],ymm2[2,3],ymm0[4],ymm2[5],ymm0[6],ymm2[7,8],ymm0[9],ymm2[10,11],ymm0[12],ymm2[13],ymm0[14],ymm2[15]
1139 ; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm2
1140 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3,4],xmm2[5,6,7]
1141 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
1142 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm2 = xmm3[0,1,1,3]
1143 ; AVX512DQ-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,7]
1144 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm2[3]
1145 ; AVX512DQ-NEXT: vmovdqa %xmm1, (%rsi)
1146 ; AVX512DQ-NEXT: vmovdqa %xmm4, (%rdx)
1147 ; AVX512DQ-NEXT: vmovdqa %xmm5, (%rcx)
1148 ; AVX512DQ-NEXT: vmovdqa %xmm6, (%r8)
1149 ; AVX512DQ-NEXT: vmovdqa %xmm0, (%r9)
1150 ; AVX512DQ-NEXT: vzeroupper
1151 ; AVX512DQ-NEXT: retq
1153 ; AVX512DQ-FCP-LABEL: load_i16_stride5_vf8:
1154 ; AVX512DQ-FCP: # %bb.0:
1155 ; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm0
1156 ; AVX512DQ-FCP-NEXT: vmovdqa 32(%rdi), %ymm1
1157 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
1158 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm2, %xmm3
1159 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3],xmm2[4,5],xmm3[6,7]
1160 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[0,1,10,11,4,5,14,15,8,9,2,3,12,13,u,u]
1161 ; AVX512DQ-FCP-NEXT: vpbroadcastw 70(%rdi), %xmm3
1162 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,6],xmm3[7]
1163 ; AVX512DQ-FCP-NEXT: vmovdqa 64(%rdi), %xmm3
1164 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
1165 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm4, %xmm5
1166 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1],xmm5[2,3],xmm4[4,5,6],xmm5[7]
1167 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[2,3,12,13,6,7,0,1,10,11,4,5,14,15,u,u]
1168 ; AVX512DQ-FCP-NEXT: vpsllq $48, %xmm3, %xmm5
1169 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6],xmm5[7]
1170 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm5 = [4,5,14,15,8,9,2,3,12,13,6,7,0,1,10,11]
1171 ; AVX512DQ-FCP-NEXT: vpshufb %xmm5, %xmm3, %xmm6
1172 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
1173 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm7, %xmm8
1174 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm8[3,4],xmm7[5,6,7]
1175 ; AVX512DQ-FCP-NEXT: vpshufb %xmm5, %xmm7, %xmm5
1176 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm5 = xmm5[0,1,2],xmm6[3]
1177 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm6 = [6,7,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
1178 ; AVX512DQ-FCP-NEXT: vpshufb %xmm6, %xmm3, %xmm7
1179 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
1180 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm8, %xmm9
1181 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0],xmm8[1],xmm9[2],xmm8[3]
1182 ; AVX512DQ-FCP-NEXT: vpshufb %xmm6, %xmm8, %xmm6
1183 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm6 = xmm6[0,1,2],xmm7[3]
1184 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm7 = [8,9,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
1185 ; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm3, %xmm3
1186 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
1187 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
1188 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3,4],xmm1[5,6,7]
1189 ; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm0, %xmm0
1190 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0,1,2],xmm3[3]
1191 ; AVX512DQ-FCP-NEXT: vmovdqa %xmm2, (%rsi)
1192 ; AVX512DQ-FCP-NEXT: vmovdqa %xmm4, (%rdx)
1193 ; AVX512DQ-FCP-NEXT: vmovdqa %xmm5, (%rcx)
1194 ; AVX512DQ-FCP-NEXT: vmovdqa %xmm6, (%r8)
1195 ; AVX512DQ-FCP-NEXT: vmovdqa %xmm0, (%r9)
1196 ; AVX512DQ-FCP-NEXT: vzeroupper
1197 ; AVX512DQ-FCP-NEXT: retq
1199 ; AVX512BW-LABEL: load_i16_stride5_vf8:
1200 ; AVX512BW: # %bb.0:
1201 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm0
1202 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
1203 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} xmm2 = [0,5,10,15,20,25,30,35]
1204 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
1205 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} xmm3 = [1,6,11,16,21,26,31,36]
1206 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm3
1207 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} xmm4 = [2,7,12,17,22,27,32,37]
1208 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm4
1209 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} xmm5 = [3,8,13,18,23,28,33,38]
1210 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm5
1211 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} xmm6 = [4,9,14,19,24,29,34,39]
1212 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm6
1213 ; AVX512BW-NEXT: vmovdqa %xmm2, (%rsi)
1214 ; AVX512BW-NEXT: vmovdqa %xmm3, (%rdx)
1215 ; AVX512BW-NEXT: vmovdqa %xmm4, (%rcx)
1216 ; AVX512BW-NEXT: vmovdqa %xmm5, (%r8)
1217 ; AVX512BW-NEXT: vmovdqa %xmm6, (%r9)
1218 ; AVX512BW-NEXT: vzeroupper
1219 ; AVX512BW-NEXT: retq
1221 ; AVX512BW-FCP-LABEL: load_i16_stride5_vf8:
1222 ; AVX512BW-FCP: # %bb.0:
1223 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1224 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
1225 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm2 = [0,5,10,15,20,25,30,35]
1226 ; AVX512BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
1227 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm3 = [1,6,11,16,21,26,31,36]
1228 ; AVX512BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm3
1229 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm4 = [2,7,12,17,22,27,32,37]
1230 ; AVX512BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm4
1231 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm5 = [3,8,13,18,23,28,33,38]
1232 ; AVX512BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm5
1233 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm6 = [4,9,14,19,24,29,34,39]
1234 ; AVX512BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm6
1235 ; AVX512BW-FCP-NEXT: vmovdqa %xmm2, (%rsi)
1236 ; AVX512BW-FCP-NEXT: vmovdqa %xmm3, (%rdx)
1237 ; AVX512BW-FCP-NEXT: vmovdqa %xmm4, (%rcx)
1238 ; AVX512BW-FCP-NEXT: vmovdqa %xmm5, (%r8)
1239 ; AVX512BW-FCP-NEXT: vmovdqa %xmm6, (%r9)
1240 ; AVX512BW-FCP-NEXT: vzeroupper
1241 ; AVX512BW-FCP-NEXT: retq
1243 ; AVX512DQ-BW-LABEL: load_i16_stride5_vf8:
1244 ; AVX512DQ-BW: # %bb.0:
1245 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm0
1246 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm1
1247 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} xmm2 = [0,5,10,15,20,25,30,35]
1248 ; AVX512DQ-BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
1249 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} xmm3 = [1,6,11,16,21,26,31,36]
1250 ; AVX512DQ-BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm3
1251 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} xmm4 = [2,7,12,17,22,27,32,37]
1252 ; AVX512DQ-BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm4
1253 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} xmm5 = [3,8,13,18,23,28,33,38]
1254 ; AVX512DQ-BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm5
1255 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} xmm6 = [4,9,14,19,24,29,34,39]
1256 ; AVX512DQ-BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm6
1257 ; AVX512DQ-BW-NEXT: vmovdqa %xmm2, (%rsi)
1258 ; AVX512DQ-BW-NEXT: vmovdqa %xmm3, (%rdx)
1259 ; AVX512DQ-BW-NEXT: vmovdqa %xmm4, (%rcx)
1260 ; AVX512DQ-BW-NEXT: vmovdqa %xmm5, (%r8)
1261 ; AVX512DQ-BW-NEXT: vmovdqa %xmm6, (%r9)
1262 ; AVX512DQ-BW-NEXT: vzeroupper
1263 ; AVX512DQ-BW-NEXT: retq
1265 ; AVX512DQ-BW-FCP-LABEL: load_i16_stride5_vf8:
1266 ; AVX512DQ-BW-FCP: # %bb.0:
1267 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
1268 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
1269 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm2 = [0,5,10,15,20,25,30,35]
1270 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
1271 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm3 = [1,6,11,16,21,26,31,36]
1272 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm3
1273 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm4 = [2,7,12,17,22,27,32,37]
1274 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm4
1275 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm5 = [3,8,13,18,23,28,33,38]
1276 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm5
1277 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} xmm6 = [4,9,14,19,24,29,34,39]
1278 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm6
1279 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %xmm2, (%rsi)
1280 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %xmm3, (%rdx)
1281 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %xmm4, (%rcx)
1282 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %xmm5, (%r8)
1283 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %xmm6, (%r9)
1284 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
1285 ; AVX512DQ-BW-FCP-NEXT: retq
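; The vf8 IR below performs the stride-5 deinterleave directly: one <40 x i16> load, then five
; shufflevectors that each pick every fifth element (starting offsets 0 through 4), so each
; %strided.vecN collects field N of all eight interleaved groups before being stored separately.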
1286 %wide.vec = load <40 x i16>, ptr %in.vec, align 64
1287 %strided.vec0 = shufflevector <40 x i16> %wide.vec, <40 x i16> poison, <8 x i32> <i32 0, i32 5, i32 10, i32 15, i32 20, i32 25, i32 30, i32 35>
1288 %strided.vec1 = shufflevector <40 x i16> %wide.vec, <40 x i16> poison, <8 x i32> <i32 1, i32 6, i32 11, i32 16, i32 21, i32 26, i32 31, i32 36>
1289 %strided.vec2 = shufflevector <40 x i16> %wide.vec, <40 x i16> poison, <8 x i32> <i32 2, i32 7, i32 12, i32 17, i32 22, i32 27, i32 32, i32 37>
1290 %strided.vec3 = shufflevector <40 x i16> %wide.vec, <40 x i16> poison, <8 x i32> <i32 3, i32 8, i32 13, i32 18, i32 23, i32 28, i32 33, i32 38>
1291 %strided.vec4 = shufflevector <40 x i16> %wide.vec, <40 x i16> poison, <8 x i32> <i32 4, i32 9, i32 14, i32 19, i32 24, i32 29, i32 34, i32 39>
1292 store <8 x i16> %strided.vec0, ptr %out.vec0, align 64
1293 store <8 x i16> %strided.vec1, ptr %out.vec1, align 64
1294 store <8 x i16> %strided.vec2, ptr %out.vec2, align 64
1295 store <8 x i16> %strided.vec3, ptr %out.vec3, align 64
1296 store <8 x i16> %strided.vec4, ptr %out.vec4, align 64
1297 ret void
1298 }
1300 define void @load_i16_stride5_vf16(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4) nounwind {
1301 ; SSE-LABEL: load_i16_stride5_vf16:
1302 ; SSE:       # %bb.0:
1303 ; SSE-NEXT: movdqa 144(%rdi), %xmm14
1304 ; SSE-NEXT: movdqa 80(%rdi), %xmm8
1305 ; SSE-NEXT: movdqa 96(%rdi), %xmm7
1306 ; SSE-NEXT: movdqa 128(%rdi), %xmm15
1307 ; SSE-NEXT: movdqa 112(%rdi), %xmm12
1308 ; SSE-NEXT: movdqa 64(%rdi), %xmm10
1309 ; SSE-NEXT: movdqa (%rdi), %xmm11
1310 ; SSE-NEXT: movdqa 16(%rdi), %xmm9
1311 ; SSE-NEXT: movdqa 32(%rdi), %xmm13
1312 ; SSE-NEXT: movdqa 48(%rdi), %xmm5
1313 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,65535,65535,65535,0,65535,65535,65535]
1314 ; SSE-NEXT: movdqa %xmm0, %xmm1
1315 ; SSE-NEXT: pandn %xmm13, %xmm1
1316 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm5[0,1,0,3]
1317 ; SSE-NEXT: pand %xmm0, %xmm2
1318 ; SSE-NEXT: por %xmm1, %xmm2
1319 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm9[3,1,2,3]
1320 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
1321 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm11[0,2,2,3]
1322 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm3[0,3,2,3,4,5,6,7]
1323 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
1324 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm2[2,3]
1325 ; SSE-NEXT: movaps {{.*#+}} xmm6 = [65535,65535,65535,65535,65535,65535,65535,0]
1326 ; SSE-NEXT: andps %xmm6, %xmm4
1327 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm10[0,1,0,1]
1328 ; SSE-NEXT: movaps %xmm6, %xmm2
1329 ; SSE-NEXT: pandn %xmm1, %xmm2
1330 ; SSE-NEXT: por %xmm4, %xmm2
1331 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1332 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm15[0,1,0,3]
1333 ; SSE-NEXT: pand %xmm0, %xmm1
1334 ; SSE-NEXT: pandn %xmm12, %xmm0
1335 ; SSE-NEXT: por %xmm1, %xmm0
1336 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[3,1,2,3]
1337 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
1338 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm8[0,2,2,3]
1339 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,3,2,3,4,5,6,7]
1340 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1341 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm0[2,3]
1342 ; SSE-NEXT: andps %xmm6, %xmm2
1343 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm14[0,1,0,1]
1344 ; SSE-NEXT: movaps %xmm6, %xmm1
1345 ; SSE-NEXT: andnps %xmm0, %xmm1
1346 ; SSE-NEXT: orps %xmm2, %xmm1
1347 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1348 ; SSE-NEXT: movdqa %xmm9, %xmm0
1349 ; SSE-NEXT: psrlq $48, %xmm0
1350 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm11[0,3,2,3]
1351 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,2,2,3,4,5,6,7]
1352 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
1353 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,0,0,65535,65535,65535,65535,65535]
1354 ; SSE-NEXT: movdqa %xmm0, %xmm2
1355 ; SSE-NEXT: pandn %xmm1, %xmm2
1356 ; SSE-NEXT: movdqa %xmm5, %xmm3
1357 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[1,3,2,3]
1358 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm13[0,2,2,3]
1359 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
1360 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm4[0,1,2,3,7,5,6,7]
1361 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
1362 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,0,0,0,4,5,6,7]
1363 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,4,7]
1364 ; SSE-NEXT: pand %xmm0, %xmm1
1365 ; SSE-NEXT: por %xmm2, %xmm1
1366 ; SSE-NEXT: movdqa %xmm10, %xmm5
1367 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1368 ; SSE-NEXT: movdqa %xmm10, %xmm2
1369 ; SSE-NEXT: psllq $48, %xmm2
1370 ; SSE-NEXT: movaps %xmm6, %xmm4
1371 ; SSE-NEXT: andnps %xmm2, %xmm4
1372 ; SSE-NEXT: pand %xmm6, %xmm1
1373 ; SSE-NEXT: orps %xmm1, %xmm4
1374 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1375 ; SSE-NEXT: movdqa %xmm7, %xmm1
1376 ; SSE-NEXT: psrlq $48, %xmm1
1377 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm8[0,3,2,3]
1378 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[1,2,2,3,4,5,6,7]
1379 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1380 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm15[1,3,2,3]
1381 ; SSE-NEXT: movdqa %xmm15, %xmm10
1382 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm12[0,2,2,3]
1383 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
1384 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm4[0,1,2,3,7,5,6,7]
1385 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
1386 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,0,0,0,4,5,6,7]
1387 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,4,7]
1388 ; SSE-NEXT: pand %xmm0, %xmm1
1389 ; SSE-NEXT: pandn %xmm2, %xmm0
1390 ; SSE-NEXT: por %xmm1, %xmm0
1391 ; SSE-NEXT: pand %xmm6, %xmm0
1392 ; SSE-NEXT: movdqa %xmm14, %xmm4
1393 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1394 ; SSE-NEXT: movdqa %xmm14, %xmm1
1395 ; SSE-NEXT: psllq $48, %xmm1
1396 ; SSE-NEXT: pandn %xmm1, %xmm6
1397 ; SSE-NEXT: por %xmm0, %xmm6
1398 ; SSE-NEXT: movdqa %xmm3, %xmm0
1399 ; SSE-NEXT: movdqa %xmm3, %xmm14
1400 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1401 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,0],xmm13[0,0]
1402 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm13[2,3]
1403 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,0,1,3]
1404 ; SSE-NEXT: movaps {{.*#+}} xmm3 = [65535,65535,65535,0,0,0,65535,65535]
1405 ; SSE-NEXT: movaps %xmm3, %xmm1
1406 ; SSE-NEXT: andnps %xmm0, %xmm1
1407 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm11[0,1,1,3]
1408 ; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm2[0,1,2,3,4,7,6,7]
1409 ; SSE-NEXT: punpckhdq {{.*#+}} xmm15 = xmm15[2],xmm9[2],xmm15[3],xmm9[3]
1410 ; SSE-NEXT: pand %xmm3, %xmm15
1411 ; SSE-NEXT: por %xmm1, %xmm15
1412 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,6,5,6,7]
1413 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[0,1,2,0]
1414 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,5]
1415 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1],xmm0[2,3]
1416 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[0,1],xmm1[2,0]
1417 ; SSE-NEXT: movdqa %xmm10, %xmm5
1418 ; SSE-NEXT: movdqa %xmm10, %xmm1
1419 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,0],xmm12[0,0]
1420 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm12[2,3]
1421 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0,1,3]
1422 ; SSE-NEXT: movaps %xmm3, %xmm2
1423 ; SSE-NEXT: andnps %xmm1, %xmm2
1424 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm8[0,1,1,3]
1425 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,7,6,7]
1426 ; SSE-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm7[2],xmm0[3],xmm7[3]
1427 ; SSE-NEXT: pand %xmm3, %xmm0
1428 ; SSE-NEXT: por %xmm2, %xmm0
1429 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,6,7]
1430 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[0,1,2,0]
1431 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,6,5]
1432 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,1],xmm1[2,3]
1433 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm2[2,0]
1434 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm13[2,2,2,2,4,5,6,7]
1435 ; SSE-NEXT: movdqa %xmm3, %xmm2
1436 ; SSE-NEXT: pandn %xmm1, %xmm2
1437 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm11[1,1,1,1]
1438 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm9[0,2,2,3]
1439 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
1440 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm4[0,3,2,3,4,5,6,7]
1441 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
1442 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,0,3,3,4,5,6,7]
1443 ; SSE-NEXT: pand %xmm3, %xmm1
1444 ; SSE-NEXT: por %xmm2, %xmm1
1445 ; SSE-NEXT: movdqa %xmm14, %xmm4
1446 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,0],xmm13[3,0]
1447 ; SSE-NEXT: movdqa %xmm3, %xmm2
1448 ; SSE-NEXT: pandn %xmm13, %xmm2
1449 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm4[0,2]
1450 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm13[0,1,2,3,7,4,6,7]
1451 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
1452 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm10[0,1,0,3]
1453 ; SSE-NEXT: pshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,5,5,6]
1454 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[3,1],xmm4[2,3]
1455 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,1],xmm13[2,0]
1456 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm12[2,2,2,2,4,5,6,7]
1457 ; SSE-NEXT: movdqa %xmm3, %xmm14
1458 ; SSE-NEXT: pandn %xmm4, %xmm14
1459 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm8[1,1,1,1]
1460 ; SSE-NEXT: pshufd {{.*#+}} xmm13 = xmm7[0,2,2,3]
1461 ; SSE-NEXT: punpckldq {{.*#+}} xmm13 = xmm13[0],xmm4[0],xmm13[1],xmm4[1]
1462 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm13[0,3,2,3,4,5,6,7]
1463 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,2,2,3]
1464 ; SSE-NEXT: pshuflw {{.*#+}} xmm13 = xmm4[1,0,3,3,4,5,6,7]
1465 ; SSE-NEXT: pand %xmm3, %xmm13
1466 ; SSE-NEXT: por %xmm14, %xmm13
1467 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[2,0],xmm9[3,0]
1468 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm11[0,2]
1469 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,0],xmm7[3,0]
1470 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,1],xmm8[0,2]
1471 ; SSE-NEXT: movdqa %xmm5, %xmm11
1472 ; SSE-NEXT: movdqa %xmm5, %xmm4
1473 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[2,0],xmm12[3,0]
1474 ; SSE-NEXT: pshufhw {{.*#+}} xmm8 = xmm9[0,1,2,3,4,6,6,7]
1475 ; SSE-NEXT: pshufd {{.*#+}} xmm8 = xmm8[0,2,2,3]
1476 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm8[2,1,3,3,4,5,6,7]
1477 ; SSE-NEXT: pand %xmm3, %xmm8
1478 ; SSE-NEXT: pshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,6,6,7]
1479 ; SSE-NEXT: pshufd {{.*#+}} xmm7 = xmm7[0,2,2,3]
1480 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm7[2,1,3,3,4,5,6,7]
1481 ; SSE-NEXT: pand %xmm3, %xmm7
1482 ; SSE-NEXT: pandn %xmm12, %xmm3
1483 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm4[0,2]
1484 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm12[0,1,2,3,7,4,6,7]
1485 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
1486 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm9[0,1,0,3]
1487 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,5,6]
1488 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[3,1],xmm4[2,3]
1489 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm5[2,0]
1490 ; SSE-NEXT: por %xmm2, %xmm8
1491 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
1492 ; SSE-NEXT: # xmm2 = mem[0,2,2,3]
1493 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm10[0,1,1,3]
1494 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,1,0,3,4,5,6,7]
1495 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,4,7]
1496 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm2[1,3]
1497 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm4[2,0]
1498 ; SSE-NEXT: por %xmm7, %xmm3
1499 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm11[0,2,2,3]
1500 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm9[0,1,1,3]
1501 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,1,0,3,4,5,6,7]
1502 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,4,7]
1503 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm2[1,3]
1504 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,0]
1505 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1506 ; SSE-NEXT: movaps %xmm2, 16(%rsi)
1507 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1508 ; SSE-NEXT: movaps %xmm2, (%rsi)
1509 ; SSE-NEXT: movdqa %xmm6, 16(%rdx)
1510 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
1511 ; SSE-NEXT: movaps %xmm2, (%rdx)
1512 ; SSE-NEXT: movaps %xmm0, 16(%rcx)
1513 ; SSE-NEXT: movaps %xmm15, (%rcx)
1514 ; SSE-NEXT: movaps %xmm13, 16(%r8)
1515 ; SSE-NEXT: movaps %xmm1, (%r8)
1516 ; SSE-NEXT: movaps %xmm3, 16(%r9)
1517 ; SSE-NEXT: movaps %xmm8, (%r9)
1518 ; SSE-NEXT: retq
1520 ; AVX-LABEL: load_i16_stride5_vf16:
1521 ; AVX:       # %bb.0:
1522 ; AVX-NEXT: vmovdqa 96(%rdi), %xmm0
1523 ; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[0,1,1,3]
1524 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm1[0,1,2,3,4,5,4,7]
1525 ; AVX-NEXT: vmovdqa 112(%rdi), %xmm1
1526 ; AVX-NEXT: vpunpckhqdq {{.*#+}} xmm3 = xmm2[1],xmm1[1]
1527 ; AVX-NEXT: vmovdqa 80(%rdi), %xmm2
1528 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm2[0,2,2,3]
1529 ; AVX-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
1530 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm4[0,1],xmm3[2,3,4,5,6,7]
1531 ; AVX-NEXT: vmovdqa 144(%rdi), %xmm8
1532 ; AVX-NEXT: vmovdqa 128(%rdi), %xmm7
1533 ; AVX-NEXT: vpblendw {{.*#+}} xmm4 = xmm7[0,1],xmm8[2,3],xmm7[4,5,6,7]
1534 ; AVX-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[u,u,u,u,u,u,u,u,u,u,2,3,12,13,6,7]
1535 ; AVX-NEXT: vpblendw {{.*#+}} xmm10 = xmm3[0,1,2,3,4],xmm4[5,6,7]
1536 ; AVX-NEXT: vmovdqa (%rdi), %xmm3
1537 ; AVX-NEXT: vmovdqa 16(%rdi), %xmm4
1538 ; AVX-NEXT: vmovdqa 32(%rdi), %xmm5
1539 ; AVX-NEXT: vmovdqa 48(%rdi), %xmm6
1540 ; AVX-NEXT: vpshufd {{.*#+}} xmm9 = xmm6[0,1,0,3]
1541 ; AVX-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1,2,3],xmm5[4],xmm9[5,6,7]
1542 ; AVX-NEXT: vpshufd {{.*#+}} xmm11 = xmm4[3,1,2,3]
1543 ; AVX-NEXT: vpshuflw {{.*#+}} xmm11 = xmm11[2,1,2,3,4,5,6,7]
1544 ; AVX-NEXT: vpshufd {{.*#+}} xmm12 = xmm3[0,2,2,3]
1545 ; AVX-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[0,3,2,3,4,5,6,7]
1546 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1]
1547 ; AVX-NEXT: vpblendw {{.*#+}} xmm9 = xmm11[0,1,2,3],xmm9[4,5,6,7]
1548 ; AVX-NEXT: vmovaps {{.*#+}} ymm11 = [65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,65535,65535,65535]
1549 ; AVX-NEXT: vandps %ymm11, %ymm9, %ymm12
1550 ; AVX-NEXT: vmovaps 64(%rdi), %xmm9
1551 ; AVX-NEXT: vshufps {{.*#+}} xmm13 = xmm9[0,1,0,1]
1552 ; AVX-NEXT: vandnps %ymm13, %ymm11, %ymm13
1553 ; AVX-NEXT: vorps %ymm13, %ymm12, %ymm12
1554 ; AVX-NEXT: vinsertf128 $1, %xmm10, %ymm12, %ymm10
1555 ; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm1[0,1],xmm0[2,3],xmm1[4,5,6,7]
1556 ; AVX-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[0,1,3,0,4,5,6,7]
1557 ; AVX-NEXT: vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,5,5,5,5]
1558 ; AVX-NEXT: vpshufd {{.*#+}} xmm13 = xmm2[0,3,2,3]
1559 ; AVX-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[1,2,2,3,4,5,6,7]
1560 ; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm13[0,1],xmm12[2,3,4,5,6,7]
1561 ; AVX-NEXT: vpblendw {{.*#+}} xmm13 = xmm7[0,1,2,3],xmm8[4,5],xmm7[6,7]
1562 ; AVX-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[u,u,u,u,u,u,u,u,u,u,4,5,14,15,8,9]
1563 ; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2,3,4],xmm13[5,6,7]
1564 ; AVX-NEXT: vpsllq $48, %xmm9, %xmm13
1565 ; AVX-NEXT: vandnps %ymm13, %ymm11, %ymm13
1566 ; AVX-NEXT: vpsrlq $48, %xmm4, %xmm14
1567 ; AVX-NEXT: vpshufd {{.*#+}} xmm15 = xmm3[0,3,2,3]
1568 ; AVX-NEXT: vpshuflw {{.*#+}} xmm15 = xmm15[1,2,2,3,4,5,6,7]
1569 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1]
1570 ; AVX-NEXT: vpblendw {{.*#+}} xmm15 = xmm5[0,1],xmm6[2,3],xmm5[4,5],xmm6[6,7]
1571 ; AVX-NEXT: vpshufb {{.*#+}} xmm15 = xmm15[u,u,u,u,u,u,0,1,10,11,4,5,14,15,6,7]
1572 ; AVX-NEXT: vpblendw {{.*#+}} xmm14 = xmm14[0,1,2],xmm15[3,4,5,6,7]
1573 ; AVX-NEXT: vandps %ymm11, %ymm14, %ymm11
1574 ; AVX-NEXT: vorps %ymm13, %ymm11, %ymm11
1575 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm11, %ymm11
1576 ; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm1[0,1,2,3],xmm0[4,5],xmm1[6,7]
1577 ; AVX-NEXT: vpshufb {{.*#+}} xmm12 = xmm12[u,u,u,u,8,9,2,3,12,13,12,13,u,u,u,u]
1578 ; AVX-NEXT: vpshufd {{.*#+}} xmm13 = xmm2[3,1,2,3]
1579 ; AVX-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[2,1,2,3,4,5,6,7]
1580 ; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm13[0,1],xmm12[2,3,4,5,6,7]
1581 ; AVX-NEXT: vpblendw {{.*#+}} xmm13 = xmm8[0,1],xmm7[2,3],xmm8[4,5,6,7]
1582 ; AVX-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[u,u,u,u,u,u,u,u,u,u,6,7,0,1,10,11]
1583 ; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2,3,4],xmm13[5,6,7]
1584 ; AVX-NEXT: vpshufd {{.*#+}} xmm13 = xmm3[0,1,1,3]
1585 ; AVX-NEXT: vpshufhw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,7,6,7]
1586 ; AVX-NEXT: vpunpckhdq {{.*#+}} xmm13 = xmm13[2],xmm4[2],xmm13[3],xmm4[3]
1587 ; AVX-NEXT: vpblendw {{.*#+}} xmm14 = xmm5[0,1],xmm6[2,3],xmm5[4,5,6,7]
1588 ; AVX-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[u,u,u,u,u,u,2,3,12,13,6,7,u,u,u,u]
1589 ; AVX-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2],xmm14[3,4,5],xmm13[6,7]
1590 ; AVX-NEXT: vpshufd {{.*#+}} xmm14 = xmm9[0,1,2,0]
1591 ; AVX-NEXT: vpshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,5,6,5]
1592 ; AVX-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2,3,4,5],xmm14[6,7]
1593 ; AVX-NEXT: vinsertf128 $1, %xmm12, %ymm13, %ymm12
1594 ; AVX-NEXT: vpblendw {{.*#+}} xmm13 = xmm0[0,1],xmm1[2,3],xmm0[4,5],xmm1[6,7]
1595 ; AVX-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[u,u,0,1,10,11,4,5,14,15,u,u,u,u,u,u]
1596 ; AVX-NEXT: vpsrlq $48, %xmm2, %xmm14
1597 ; AVX-NEXT: vpblendw {{.*#+}} xmm13 = xmm14[0],xmm13[1,2,3,4,5,6,7]
1598 ; AVX-NEXT: vpblendw {{.*#+}} xmm14 = xmm8[0,1,2,3],xmm7[4,5],xmm8[6,7]
1599 ; AVX-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[u,u,u,u,u,u,u,u,u,u,8,9,2,3,12,13]
1600 ; AVX-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2,3,4],xmm14[5,6,7]
1601 ; AVX-NEXT: vpblendw {{.*#+}} xmm14 = xmm4[0,1],xmm3[2,3],xmm4[4,5,6,7]
1602 ; AVX-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[6,7,0,1,10,11,u,u,u,u,u,u,u,u,u,u]
1603 ; AVX-NEXT: vpblendw {{.*#+}} xmm15 = xmm5[0,1,2,3],xmm6[4,5],xmm5[6,7]
1604 ; AVX-NEXT: vpshuflw {{.*#+}} xmm15 = xmm15[2,2,2,2,4,5,6,7]
1605 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,7,4,6,7]
1606 ; AVX-NEXT: vpblendw {{.*#+}} xmm14 = xmm14[0,1,2],xmm15[3,4,5],xmm14[6,7]
1607 ; AVX-NEXT: vpshufd {{.*#+}} xmm15 = xmm9[0,1,0,3]
1608 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,6]
1609 ; AVX-NEXT: vpblendw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,5],xmm15[6,7]
1610 ; AVX-NEXT: vinsertf128 $1, %xmm13, %ymm14, %ymm13
1611 ; AVX-NEXT: vpshufd {{.*#+}} xmm8 = xmm8[3,1,2,3]
1612 ; AVX-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[0,1,2,1,4,5,6,7]
1613 ; AVX-NEXT: vpshufd {{.*#+}} xmm7 = xmm7[0,2,2,3]
1614 ; AVX-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[0,1,0,3,4,5,6,7]
1615 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1]
1616 ; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,3,2,3]
1617 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm1[3],xmm0[4,5,6,7]
1618 ; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm2[2,3,2,3]
1619 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4,5,6,7]
1620 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm7[4,5,6,7]
1621 ; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm5[1,1,1,1]
1622 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm6[0,2,2,3]
1623 ; AVX-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,0,3,4,5,6,7]
1624 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1625 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm4[0,1,2,3],xmm3[4,5],xmm4[6,7]
1626 ; AVX-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
1627 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3,4,5],xmm2[6,7]
1628 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm9[0,1,1,3]
1629 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,7]
1630 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm2[6,7]
1631 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
1632 ; AVX-NEXT: vmovaps %ymm10, (%rsi)
1633 ; AVX-NEXT: vmovaps %ymm11, (%rdx)
1634 ; AVX-NEXT: vmovaps %ymm12, (%rcx)
1635 ; AVX-NEXT: vmovaps %ymm13, (%r8)
1636 ; AVX-NEXT: vmovaps %ymm0, (%r9)
1637 ; AVX-NEXT: vzeroupper
1638 ; AVX-NEXT: retq
1640 ; AVX2-LABEL: load_i16_stride5_vf16:
1641 ; AVX2:       # %bb.0:
1642 ; AVX2-NEXT: vmovdqa (%rdi), %ymm2
1643 ; AVX2-NEXT: vmovdqa 32(%rdi), %ymm3
1644 ; AVX2-NEXT: vmovdqa 64(%rdi), %ymm0
1645 ; AVX2-NEXT: vmovdqa 96(%rdi), %ymm1
1646 ; AVX2-NEXT: vpblendw {{.*#+}} ymm4 = ymm2[0],ymm3[1],ymm2[2,3],ymm3[4],ymm2[5],ymm3[6],ymm2[7,8],ymm3[9],ymm2[10,11],ymm3[12],ymm2[13],ymm3[14],ymm2[15]
1647 ; AVX2-NEXT: vextracti128 $1, %ymm4, %xmm5
1648 ; AVX2-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm5[1,2,3],xmm4[4,5],xmm5[6,7]
1649 ; AVX2-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[0,1,10,11,4,5,14,15,8,9,2,3,12,13,6,7]
1650 ; AVX2-NEXT: vpblendw {{.*#+}} ymm5 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
1651 ; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm5[2,3,0,1]
1652 ; AVX2-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4],ymm6[5],ymm5[6],ymm6[7]
1653 ; AVX2-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[0,1,10,11,4,5,14,15,8,9,10,11,4,5,6,7,16,17,26,27,20,21,30,31,24,25,26,27,20,21,22,23]
1654 ; AVX2-NEXT: vpmovsxbw {{.*#+}} xmm7 = [65535,65535,65535,65535,65535,65535,65535,0]
1655 ; AVX2-NEXT: vpblendvb %ymm7, %ymm4, %ymm5, %ymm5
1656 ; AVX2-NEXT: vmovdqa 144(%rdi), %xmm6
1657 ; AVX2-NEXT: vmovdqa 128(%rdi), %xmm4
1658 ; AVX2-NEXT: vpblendd {{.*#+}} xmm8 = xmm4[0],xmm6[1],xmm4[2,3]
1659 ; AVX2-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[u,u,u,u,u,u,u,u,u,u,2,3,12,13,6,7]
1660 ; AVX2-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
1661 ; AVX2-NEXT: vpblendw {{.*#+}} ymm8 = ymm5[0,1,2,3,4],ymm8[5,6,7],ymm5[8,9,10,11,12],ymm8[13,14,15]
1662 ; AVX2-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
1663 ; AVX2-NEXT: vpblendw {{.*#+}} ymm8 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4,5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10],ymm2[11],ymm3[12,13],ymm2[14],ymm3[15]
1664 ; AVX2-NEXT: vextracti128 $1, %ymm8, %xmm9
1665 ; AVX2-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm9[2,3],xmm8[4,5,6],xmm9[7]
1666 ; AVX2-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[2,3,12,13,6,7,0,1,10,11,4,5,14,15,10,11]
1667 ; AVX2-NEXT: vpblendw {{.*#+}} ymm9 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
1668 ; AVX2-NEXT: vpermq {{.*#+}} ymm10 = ymm9[2,3,0,1]
1669 ; AVX2-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4],ymm10[5],ymm9[6,7]
1670 ; AVX2-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[2,3,12,13,6,7,0,1,10,11,6,7,8,9,8,9,18,19,28,29,22,23,16,17,26,27,22,23,24,25,24,25]
1671 ; AVX2-NEXT: vpblendvb %ymm7, %ymm8, %ymm9, %ymm7
1672 ; AVX2-NEXT: vpblendd {{.*#+}} xmm8 = xmm4[0,1],xmm6[2],xmm4[3]
1673 ; AVX2-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[u,u,u,u,u,u,u,u,u,u,4,5,14,15,8,9]
1674 ; AVX2-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
1675 ; AVX2-NEXT: vpblendw {{.*#+}} ymm8 = ymm7[0,1,2,3,4],ymm8[5,6,7],ymm7[8,9,10,11,12],ymm8[13,14,15]
1676 ; AVX2-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm8[4,5,6,7]
1677 ; AVX2-NEXT: vpblendw {{.*#+}} ymm8 = ymm3[0,1],ymm2[2],ymm3[3],ymm2[4],ymm3[5,6],ymm2[7],ymm3[8,9],ymm2[10],ymm3[11],ymm2[12],ymm3[13,14],ymm2[15]
1678 ; AVX2-NEXT: vextracti128 $1, %ymm8, %xmm9
1679 ; AVX2-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1,2],xmm9[3,4],xmm8[5,6,7]
1680 ; AVX2-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
1681 ; AVX2-NEXT: vpblendw {{.*#+}} ymm9 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
1682 ; AVX2-NEXT: vpermq {{.*#+}} ymm10 = ymm9[2,3,0,1]
1683 ; AVX2-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],ymm10[6],ymm9[7]
1684 ; AVX2-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
1685 ; AVX2-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3,4,5,6,7]
1686 ; AVX2-NEXT: vpblendd {{.*#+}} xmm9 = xmm6[0],xmm4[1],xmm6[2,3]
1687 ; AVX2-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[u,u,u,u,u,u,u,u,u,u,6,7,0,1,10,11]
1688 ; AVX2-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
1689 ; AVX2-NEXT: vpblendw {{.*#+}} ymm9 = ymm8[0,1,2,3,4],ymm9[5,6,7],ymm8[8,9,10,11,12],ymm9[13,14,15]
1690 ; AVX2-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm9[4,5,6,7]
1691 ; AVX2-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0],ymm3[1,2],ymm2[3],ymm3[4],ymm2[5],ymm3[6,7],ymm2[8],ymm3[9,10],ymm2[11],ymm3[12],ymm2[13],ymm3[14,15]
1692 ; AVX2-NEXT: vextracti128 $1, %ymm9, %xmm10
1693 ; AVX2-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0],xmm9[1],xmm10[2],xmm9[3]
1694 ; AVX2-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
1695 ; AVX2-NEXT: vpblendw {{.*#+}} ymm10 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
1696 ; AVX2-NEXT: vpermq {{.*#+}} ymm11 = ymm10[2,3,0,1]
1697 ; AVX2-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm11[4],ymm10[5],ymm11[6],ymm10[7]
1698 ; AVX2-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
1699 ; AVX2-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3,4,5,6,7]
1700 ; AVX2-NEXT: vpblendd {{.*#+}} xmm10 = xmm6[0,1],xmm4[2],xmm6[3]
1701 ; AVX2-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[u,u,u,u,u,u,u,u,u,u,8,9,2,3,12,13]
1702 ; AVX2-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
1703 ; AVX2-NEXT: vpblendw {{.*#+}} ymm10 = ymm9[0,1,2,3,4],ymm10[5,6,7],ymm9[8,9,10,11,12],ymm10[13,14,15]
1704 ; AVX2-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm10[4,5,6,7]
1705 ; AVX2-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13],ymm2[14],ymm3[15]
1706 ; AVX2-NEXT: vextracti128 $1, %ymm2, %xmm3
1707 ; AVX2-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3,4],xmm3[5,6,7]
1708 ; AVX2-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
1709 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
1710 ; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
1711 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4],ymm0[5,6],ymm1[7]
1712 ; AVX2-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,4,5,14,15,24,25,18,19,28,29,22,23,u,u,u,u,u,u,u,u]
1713 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3,4,5,6,7]
1714 ; AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm6[3,1,2,3]
1715 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,2,1,4,5,6,7]
1716 ; AVX2-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[0,2,2,3]
1717 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,0,3,4,5,6,7]
1718 ; AVX2-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1719 ; AVX2-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
1720 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
1721 ; AVX2-NEXT: vmovdqa %ymm5, (%rsi)
1722 ; AVX2-NEXT: vmovdqa %ymm7, (%rdx)
1723 ; AVX2-NEXT: vmovdqa %ymm8, (%rcx)
1724 ; AVX2-NEXT: vmovdqa %ymm9, (%r8)
1725 ; AVX2-NEXT: vmovdqa %ymm0, (%r9)
1726 ; AVX2-NEXT: vzeroupper
1727 ; AVX2-NEXT: retq
1729 ; AVX2-FP-LABEL: load_i16_stride5_vf16:
1730 ; AVX2-FP:       # %bb.0:
1731 ; AVX2-FP-NEXT: vmovdqa (%rdi), %ymm2
1732 ; AVX2-FP-NEXT: vmovdqa 32(%rdi), %ymm3
1733 ; AVX2-FP-NEXT: vmovdqa 64(%rdi), %ymm0
1734 ; AVX2-FP-NEXT: vmovdqa 96(%rdi), %ymm1
1735 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm4 = ymm2[0],ymm3[1],ymm2[2,3],ymm3[4],ymm2[5],ymm3[6],ymm2[7,8],ymm3[9],ymm2[10,11],ymm3[12],ymm2[13],ymm3[14],ymm2[15]
1736 ; AVX2-FP-NEXT: vextracti128 $1, %ymm4, %xmm5
1737 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm5[1,2,3],xmm4[4,5],xmm5[6,7]
1738 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm4 = xmm4[0,1,10,11,4,5,14,15,8,9,2,3,12,13,6,7]
1739 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm5 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
1740 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm6 = ymm5[2,3,0,1]
1741 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3,4],ymm6[5],ymm5[6],ymm6[7]
1742 ; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[0,1,10,11,4,5,14,15,8,9,10,11,4,5,6,7,16,17,26,27,20,21,30,31,24,25,26,27,20,21,22,23]
1743 ; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} xmm7 = [65535,65535,65535,65535,65535,65535,65535,0]
1744 ; AVX2-FP-NEXT: vpblendvb %ymm7, %ymm4, %ymm5, %ymm6
1745 ; AVX2-FP-NEXT: vmovdqa 144(%rdi), %xmm4
1746 ; AVX2-FP-NEXT: vmovdqa 128(%rdi), %xmm5
1747 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm8 = xmm5[0],xmm4[1],xmm5[2,3]
1748 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[u,u,u,u,u,u,u,u,u,u,2,3,12,13,6,7]
1749 ; AVX2-FP-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
1750 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm6[0,1,2,3,4],ymm8[5,6,7],ymm6[8,9,10,11,12],ymm8[13,14,15]
1751 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
1752 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4,5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10],ymm2[11],ymm3[12,13],ymm2[14],ymm3[15]
1753 ; AVX2-FP-NEXT: vextracti128 $1, %ymm8, %xmm9
1754 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm9[2,3],xmm8[4,5,6],xmm9[7]
1755 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[2,3,12,13,6,7,0,1,10,11,4,5,14,15,10,11]
1756 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
1757 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm10 = ymm9[2,3,0,1]
1758 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4],ymm10[5],ymm9[6,7]
1759 ; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[2,3,12,13,6,7,0,1,10,11,6,7,8,9,8,9,18,19,28,29,22,23,16,17,26,27,22,23,24,25,24,25]
1760 ; AVX2-FP-NEXT: vpblendvb %ymm7, %ymm8, %ymm9, %ymm7
1761 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm8 = xmm5[0,1],xmm4[2],xmm5[3]
1762 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[u,u,u,u,u,u,u,u,u,u,4,5,14,15,8,9]
1763 ; AVX2-FP-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
1764 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm7[0,1,2,3,4],ymm8[5,6,7],ymm7[8,9,10,11,12],ymm8[13,14,15]
1765 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm8[4,5,6,7]
1766 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm3[0,1],ymm2[2],ymm3[3],ymm2[4],ymm3[5,6],ymm2[7],ymm3[8,9],ymm2[10],ymm3[11],ymm2[12],ymm3[13,14],ymm2[15]
1767 ; AVX2-FP-NEXT: vextracti128 $1, %ymm8, %xmm9
1768 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1,2],xmm9[3,4],xmm8[5,6,7]
1769 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
1770 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
1771 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm10 = ymm9[2,3,0,1]
1772 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],ymm10[6],ymm9[7]
1773 ; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
1774 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3,4,5,6,7]
1775 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm9 = xmm4[0],xmm5[1],xmm4[2,3]
1776 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[u,u,u,u,u,u,u,u,u,u,6,7,0,1,10,11]
1777 ; AVX2-FP-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
1778 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm8[0,1,2,3,4],ymm9[5,6,7],ymm8[8,9,10,11,12],ymm9[13,14,15]
1779 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm9[4,5,6,7]
1780 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0],ymm3[1,2],ymm2[3],ymm3[4],ymm2[5],ymm3[6,7],ymm2[8],ymm3[9,10],ymm2[11],ymm3[12],ymm2[13],ymm3[14,15]
1781 ; AVX2-FP-NEXT: vextracti128 $1, %ymm9, %xmm10
1782 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0],xmm9[1],xmm10[2],xmm9[3]
1783 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
1784 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm10 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
1785 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm11 = ymm10[2,3,0,1]
1786 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm11[4],ymm10[5],ymm11[6],ymm10[7]
1787 ; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
1788 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3,4,5,6,7]
1789 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm10 = xmm4[0,1],xmm5[2],xmm4[3]
1790 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[u,u,u,u,u,u,u,u,u,u,8,9,2,3,12,13]
1791 ; AVX2-FP-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
1792 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm10 = ymm9[0,1,2,3,4],ymm10[5,6,7],ymm9[8,9,10,11,12],ymm10[13,14,15]
1793 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm10[4,5,6,7]
1794 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13],ymm2[14],ymm3[15]
1795 ; AVX2-FP-NEXT: vextracti128 $1, %ymm2, %xmm3
1796 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3,4],xmm3[5,6,7]
1797 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
1798 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
1799 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
1800 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4],ymm0[5,6],ymm1[7]
1801 ; AVX2-FP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,4,5,14,15,24,25,18,19,28,29,22,23,u,u,u,u,u,u,u,u]
1802 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3,4,5,6,7]
1803 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm1 = xmm4[12,13,14,15,4,5,14,15,u,u,u,u,u,u,u,u]
1804 ; AVX2-FP-NEXT: vpshufb {{.*#+}} xmm2 = xmm5[0,1,2,3,0,1,10,11,u,u,u,u,u,u,u,u]
1805 ; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1806 ; AVX2-FP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
1807 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
1808 ; AVX2-FP-NEXT: vmovdqa %ymm6, (%rsi)
1809 ; AVX2-FP-NEXT: vmovdqa %ymm7, (%rdx)
1810 ; AVX2-FP-NEXT: vmovdqa %ymm8, (%rcx)
1811 ; AVX2-FP-NEXT: vmovdqa %ymm9, (%r8)
1812 ; AVX2-FP-NEXT: vmovdqa %ymm0, (%r9)
1813 ; AVX2-FP-NEXT: vzeroupper
1814 ; AVX2-FP-NEXT: retq
1816 ; AVX2-FCP-LABEL: load_i16_stride5_vf16:
1817 ; AVX2-FCP: # %bb.0:
1818 ; AVX2-FCP-NEXT: vmovdqa 128(%rdi), %ymm0
1819 ; AVX2-FCP-NEXT: vmovdqa (%rdi), %ymm3
1820 ; AVX2-FCP-NEXT: vmovdqa 32(%rdi), %ymm4
1821 ; AVX2-FCP-NEXT: vmovdqa 64(%rdi), %ymm1
1822 ; AVX2-FCP-NEXT: vmovdqa 96(%rdi), %ymm2
1823 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm5 = ymm3[0],ymm4[1],ymm3[2,3],ymm4[4],ymm3[5],ymm4[6],ymm3[7,8],ymm4[9],ymm3[10,11],ymm4[12],ymm3[13],ymm4[14],ymm3[15]
1824 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm5, %xmm6
1825 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0],xmm6[1,2,3],xmm5[4,5],xmm6[6,7]
1826 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[0,1,10,11,4,5,14,15,8,9,2,3,12,13,6,7]
1827 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm1[0],ymm2[1,2],ymm1[3],ymm2[4],ymm1[5],ymm2[6,7],ymm1[8],ymm2[9,10],ymm1[11],ymm2[12],ymm1[13],ymm2[14,15]
1828 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm7 = [1,3,0,2,4,6,1,3]
1829 ; AVX2-FCP-NEXT: vpermd %ymm6, %ymm7, %ymm6
1830 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm6 = ymm6[0,1,6,7,8,9,14,15,4,5,14,15,4,5,2,3,16,17,22,23,24,25,30,31,20,21,30,31,20,21,18,19]
1831 ; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} xmm8 = [65535,65535,65535,65535,65535,65535,65535,0]
1832 ; AVX2-FCP-NEXT: vpblendvb %ymm8, %ymm5, %ymm6, %ymm5
1833 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm6 = [0,0,0,0,0,3,5,0]
1834 ; AVX2-FCP-NEXT: vpermd %ymm0, %ymm6, %ymm6
1835 ; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm7 = [0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27]
1836 ; AVX2-FCP-NEXT: vpshufb %ymm7, %ymm6, %ymm6
1837 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm5[0,1,2,3,4],ymm6[5,6,7],ymm5[8,9,10,11,12],ymm6[13,14,15]
1838 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
1839 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm4[0],ymm3[1],ymm4[2],ymm3[3],ymm4[4,5],ymm3[6],ymm4[7,8],ymm3[9],ymm4[10],ymm3[11],ymm4[12,13],ymm3[14],ymm4[15]
1840 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm6, %xmm9
1841 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1],xmm9[2,3],xmm6[4,5,6],xmm9[7]
1842 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[2,3,12,13,6,7,0,1,10,11,4,5,14,15,10,11]
1843 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5],ymm1[6],ymm2[7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13],ymm1[14],ymm2[15]
1844 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm10 = [2,0,0,0,4,7,1,6]
1845 ; AVX2-FCP-NEXT: vpermd %ymm9, %ymm10, %ymm9
1846 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[2,3,4,5,10,11,0,1,14,15,2,3,12,13,0,1,18,19,20,21,26,27,16,17,30,31,18,19,28,29,16,17]
1847 ; AVX2-FCP-NEXT: vpblendvb %ymm8, %ymm6, %ymm9, %ymm6
1848 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm8 = [0,0,0,0,1,3,6,0]
1849 ; AVX2-FCP-NEXT: vpermd %ymm0, %ymm8, %ymm9
1850 ; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm8 = [0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25]
1851 ; AVX2-FCP-NEXT: vpshufb %ymm8, %ymm9, %ymm9
1852 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm6[0,1,2,3,4],ymm9[5,6,7],ymm6[8,9,10,11,12],ymm9[13,14,15]
1853 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm9[4,5,6,7]
1854 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm4[0,1],ymm3[2],ymm4[3],ymm3[4],ymm4[5,6],ymm3[7],ymm4[8,9],ymm3[10],ymm4[11],ymm3[12],ymm4[13,14],ymm3[15]
1855 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm9, %xmm10
1856 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1,2],xmm10[3,4],xmm9[5,6,7]
1857 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
1858 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm10 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5],ymm2[6],ymm1[7,8],ymm2[9],ymm1[10,11],ymm2[12],ymm1[13],ymm2[14],ymm1[15]
1859 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm11 = [0,2,0,0,5,7,2,4]
1860 ; AVX2-FCP-NEXT: vpermd %ymm10, %ymm11, %ymm10
1861 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,20,21,22,23,16,17,22,23]
1862 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3,4,5,6,7]
1863 ; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [1,4,6,0,1,4,6,0]
1864 ; AVX2-FCP-NEXT: # ymm10 = mem[0,1,0,1]
1865 ; AVX2-FCP-NEXT: vpermd %ymm0, %ymm10, %ymm10
1866 ; AVX2-FCP-NEXT: vpshufb %ymm7, %ymm10, %ymm7
1867 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm9[0,1,2,3,4],ymm7[5,6,7],ymm9[8,9,10,11,12],ymm7[13,14,15]
1868 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm7 = ymm9[0,1,2,3],ymm7[4,5,6,7]
1869 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm3[0],ymm4[1,2],ymm3[3],ymm4[4],ymm3[5],ymm4[6,7],ymm3[8],ymm4[9,10],ymm3[11],ymm4[12],ymm3[13],ymm4[14,15]
1870 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm9, %xmm10
1871 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0],xmm9[1],xmm10[2],xmm9[3]
1872 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
1873 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm10 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7,8],ymm1[9],ymm2[10],ymm1[11],ymm2[12,13],ymm1[14],ymm2[15]
1874 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm11 = [0,3,0,0,5,0,2,7]
1875 ; AVX2-FCP-NEXT: vpermd %ymm10, %ymm11, %ymm10
1876 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,2,3,4,5,18,19,20,21,26,27,16,17,30,31,30,31,18,19,20,21]
1877 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3,4,5,6,7]
1878 ; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [2,4,7,0,2,4,7,0]
1879 ; AVX2-FCP-NEXT: # ymm10 = mem[0,1,0,1]
1880 ; AVX2-FCP-NEXT: vpermd %ymm0, %ymm10, %ymm10
1881 ; AVX2-FCP-NEXT: vpshufb %ymm8, %ymm10, %ymm8
1882 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1,2,3,4],ymm8[5,6,7],ymm9[8,9,10,11,12],ymm8[13,14,15]
1883 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3],ymm8[4,5,6,7]
1884 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm3[1],ymm4[2,3],ymm3[4],ymm4[5],ymm3[6],ymm4[7,8],ymm3[9],ymm4[10,11],ymm3[12],ymm4[13],ymm3[14],ymm4[15]
1885 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm3, %xmm4
1886 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm4[0,1,2],xmm3[3,4],xmm4[5,6,7]
1887 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
1888 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3],ymm1[4],ymm2[5,6],ymm1[7],ymm2[8,9],ymm1[10],ymm2[11],ymm1[12],ymm2[13,14],ymm1[15]
1889 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm2 = [1,3,0,0,6,0,3,5]
1890 ; AVX2-FCP-NEXT: vpermd %ymm1, %ymm2, %ymm1
1891 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,u,u,u,u,u,u,u,u]
1892 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3,4,5,6,7]
1893 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm2 = [0,0,0,0,0,2,5,7]
1894 ; AVX2-FCP-NEXT: vpermd %ymm0, %ymm2, %ymm0
1895 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,24,25,30,31]
1896 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
1897 ; AVX2-FCP-NEXT: vmovdqa %ymm5, (%rsi)
1898 ; AVX2-FCP-NEXT: vmovdqa %ymm6, (%rdx)
1899 ; AVX2-FCP-NEXT: vmovdqa %ymm7, (%rcx)
1900 ; AVX2-FCP-NEXT: vmovdqa %ymm8, (%r8)
1901 ; AVX2-FCP-NEXT: vmovdqa %ymm0, (%r9)
1902 ; AVX2-FCP-NEXT: vzeroupper
1903 ; AVX2-FCP-NEXT: retq
1905 ; AVX512-LABEL: load_i16_stride5_vf16:
1906 ; AVX512: # %bb.0:
1907 ; AVX512-NEXT: vmovdqa (%rdi), %ymm2
1908 ; AVX512-NEXT: vmovdqa 32(%rdi), %ymm3
1909 ; AVX512-NEXT: vmovdqa 64(%rdi), %ymm0
1910 ; AVX512-NEXT: vmovdqa 96(%rdi), %ymm1
1911 ; AVX512-NEXT: vpblendw {{.*#+}} ymm4 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
1912 ; AVX512-NEXT: vpermq {{.*#+}} ymm5 = ymm4[2,3,0,1]
1913 ; AVX512-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3,4],ymm5[5],ymm4[6],ymm5[7]
1914 ; AVX512-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm4[6,7,16,17,26,27,20,21,30,31,24,25],zero,zero,zero,zero,zero,zero
1915 ; AVX512-NEXT: vpblendw {{.*#+}} ymm5 = ymm2[0],ymm3[1],ymm2[2,3],ymm3[4],ymm2[5],ymm3[6],ymm2[7,8],ymm3[9],ymm2[10,11],ymm3[12],ymm2[13],ymm3[14],ymm2[15]
1916 ; AVX512-NEXT: vextracti128 $1, %ymm5, %xmm6
1917 ; AVX512-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0],xmm6[1,2,3],xmm5[4,5],xmm6[6,7]
1918 ; AVX512-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[0,1,10,11,4,5,14,15,8,9,2,3,12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm5[u,u,u,u,u,u]
1919 ; AVX512-NEXT: vpor %ymm4, %ymm5, %ymm5
1920 ; AVX512-NEXT: vmovdqa 144(%rdi), %xmm6
1921 ; AVX512-NEXT: vmovdqa 128(%rdi), %xmm4
1922 ; AVX512-NEXT: vpblendd {{.*#+}} xmm7 = xmm4[0],xmm6[1],xmm4[2,3]
1923 ; AVX512-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[u,u,u,u,u,u,u,u,u,u,2,3,12,13,6,7]
1924 ; AVX512-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
1925 ; AVX512-NEXT: vpblendw {{.*#+}} ymm7 = ymm5[0,1,2,3,4],ymm7[5,6,7],ymm5[8,9,10,11,12],ymm7[13,14,15]
1926 ; AVX512-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
1927 ; AVX512-NEXT: vpblendw {{.*#+}} ymm7 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
1928 ; AVX512-NEXT: vpermq {{.*#+}} ymm8 = ymm7[2,3,0,1]
1929 ; AVX512-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3,4],ymm8[5],ymm7[6,7]
1930 ; AVX512-NEXT: vpshufb {{.*#+}} ymm7 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm7[8,9,18,19,28,29,22,23,16,17,26,27],zero,zero,zero,zero,zero,zero
1931 ; AVX512-NEXT: vpblendw {{.*#+}} ymm8 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4,5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10],ymm2[11],ymm3[12,13],ymm2[14],ymm3[15]
1932 ; AVX512-NEXT: vextracti128 $1, %ymm8, %xmm9
1933 ; AVX512-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm9[2,3],xmm8[4,5,6],xmm9[7]
1934 ; AVX512-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[2,3,12,13,6,7,0,1,10,11,4,5,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[u,u,u,u,u,u]
1935 ; AVX512-NEXT: vpor %ymm7, %ymm8, %ymm7
1936 ; AVX512-NEXT: vpblendd {{.*#+}} xmm8 = xmm4[0,1],xmm6[2],xmm4[3]
1937 ; AVX512-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[u,u,u,u,u,u,u,u,u,u,4,5,14,15,8,9]
1938 ; AVX512-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
1939 ; AVX512-NEXT: vpblendw {{.*#+}} ymm8 = ymm7[0,1,2,3,4],ymm8[5,6,7],ymm7[8,9,10,11,12],ymm8[13,14,15]
1940 ; AVX512-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm8[4,5,6,7]
1941 ; AVX512-NEXT: vpblendw {{.*#+}} ymm8 = ymm3[0,1],ymm2[2],ymm3[3],ymm2[4],ymm3[5,6],ymm2[7],ymm3[8,9],ymm2[10],ymm3[11],ymm2[12],ymm3[13,14],ymm2[15]
1942 ; AVX512-NEXT: vextracti128 $1, %ymm8, %xmm9
1943 ; AVX512-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1,2],xmm9[3,4],xmm8[5,6,7]
1944 ; AVX512-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
1945 ; AVX512-NEXT: vpblendw {{.*#+}} ymm9 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
1946 ; AVX512-NEXT: vpermq {{.*#+}} ymm10 = ymm9[2,3,0,1]
1947 ; AVX512-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],ymm10[6],ymm9[7]
1948 ; AVX512-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
1949 ; AVX512-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3,4,5,6,7]
1950 ; AVX512-NEXT: vpblendd {{.*#+}} xmm9 = xmm6[0],xmm4[1],xmm6[2,3]
1951 ; AVX512-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[u,u,u,u,u,u,u,u,u,u,6,7,0,1,10,11]
1952 ; AVX512-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
1953 ; AVX512-NEXT: vpblendw {{.*#+}} ymm9 = ymm8[0,1,2,3,4],ymm9[5,6,7],ymm8[8,9,10,11,12],ymm9[13,14,15]
1954 ; AVX512-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm9[4,5,6,7]
1955 ; AVX512-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0],ymm3[1,2],ymm2[3],ymm3[4],ymm2[5],ymm3[6,7],ymm2[8],ymm3[9,10],ymm2[11],ymm3[12],ymm2[13],ymm3[14,15]
1956 ; AVX512-NEXT: vextracti128 $1, %ymm9, %xmm10
1957 ; AVX512-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0],xmm9[1],xmm10[2],xmm9[3]
1958 ; AVX512-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
1959 ; AVX512-NEXT: vpblendw {{.*#+}} ymm10 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
1960 ; AVX512-NEXT: vpermq {{.*#+}} ymm11 = ymm10[2,3,0,1]
1961 ; AVX512-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm11[4],ymm10[5],ymm11[6],ymm10[7]
1962 ; AVX512-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
1963 ; AVX512-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3,4,5,6,7]
1964 ; AVX512-NEXT: vpblendd {{.*#+}} xmm10 = xmm6[0,1],xmm4[2],xmm6[3]
1965 ; AVX512-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[u,u,u,u,u,u,u,u,u,u,8,9,2,3,12,13]
1966 ; AVX512-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
1967 ; AVX512-NEXT: vpblendw {{.*#+}} ymm10 = ymm9[0,1,2,3,4],ymm10[5,6,7],ymm9[8,9,10,11,12],ymm10[13,14,15]
1968 ; AVX512-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm10[4,5,6,7]
1969 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13],ymm2[14],ymm3[15]
1970 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm3
1971 ; AVX512-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3,4],xmm3[5,6,7]
1972 ; AVX512-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
1973 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
1974 ; AVX512-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
1975 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4],ymm0[5,6],ymm1[7]
1976 ; AVX512-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,4,5,14,15,24,25,18,19,28,29,22,23,u,u,u,u,u,u,u,u]
1977 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3,4,5,6,7]
1978 ; AVX512-NEXT: vpshufd {{.*#+}} xmm1 = xmm6[3,1,2,3]
1979 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,2,1,4,5,6,7]
1980 ; AVX512-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[0,2,2,3]
1981 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,0,3,4,5,6,7]
1982 ; AVX512-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
1983 ; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
1984 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
1985 ; AVX512-NEXT: vmovdqa %ymm5, (%rsi)
1986 ; AVX512-NEXT: vmovdqa %ymm7, (%rdx)
1987 ; AVX512-NEXT: vmovdqa %ymm8, (%rcx)
1988 ; AVX512-NEXT: vmovdqa %ymm9, (%r8)
1989 ; AVX512-NEXT: vmovdqa %ymm0, (%r9)
1990 ; AVX512-NEXT: vzeroupper
1991 ; AVX512-NEXT: retq
1993 ; AVX512-FCP-LABEL: load_i16_stride5_vf16:
1994 ; AVX512-FCP: # %bb.0:
1995 ; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm2
1996 ; AVX512-FCP-NEXT: vmovdqa 32(%rdi), %ymm3
1997 ; AVX512-FCP-NEXT: vmovdqa 64(%rdi), %ymm0
1998 ; AVX512-FCP-NEXT: vmovdqa 96(%rdi), %ymm1
1999 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
2000 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm5 = [1,0,0,0,4,6,1,3]
2001 ; AVX512-FCP-NEXT: vpermd %ymm4, %ymm5, %ymm4
2002 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm4[2,3,16,17,22,23,24,25,30,31,20,21],zero,zero,zero,zero,zero,zero
2003 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm5 = ymm2[0],ymm3[1],ymm2[2,3],ymm3[4],ymm2[5],ymm3[6],ymm2[7,8],ymm3[9],ymm2[10,11],ymm3[12],ymm2[13],ymm3[14],ymm2[15]
2004 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm5, %xmm6
2005 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0],xmm6[1,2,3],xmm5[4,5],xmm6[6,7]
2006 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[0,1,10,11,4,5,14,15,8,9,2,3,12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm5[u,u,u,u,u,u]
2007 ; AVX512-FCP-NEXT: vpor %ymm4, %ymm5, %ymm5
2008 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm6 = [0,0,0,0,0,3,5,0]
2009 ; AVX512-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
2010 ; AVX512-FCP-NEXT: vpermd %ymm4, %ymm6, %ymm6
2011 ; AVX512-FCP-NEXT: vpbroadcastq {{.*#+}} ymm7 = [0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27]
2012 ; AVX512-FCP-NEXT: vpshufb %ymm7, %ymm6, %ymm6
2013 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm5[0,1,2,3,4],ymm6[5,6,7],ymm5[8,9,10,11,12],ymm6[13,14,15]
2014 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
2015 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
2016 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm8 = [2,0,0,0,4,7,1,6]
2017 ; AVX512-FCP-NEXT: vpermd %ymm6, %ymm8, %ymm6
2018 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[0,1,18,19,20,21,26,27,16,17,30,31],zero,zero,zero,zero,zero,zero
2019 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4,5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10],ymm2[11],ymm3[12,13],ymm2[14],ymm3[15]
2020 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm8, %xmm9
2021 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm9[2,3],xmm8[4,5,6],xmm9[7]
2022 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[2,3,12,13,6,7,0,1,10,11,4,5,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[u,u,u,u,u,u]
2023 ; AVX512-FCP-NEXT: vpor %ymm6, %ymm8, %ymm6
2024 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm8 = [0,0,0,0,1,3,6,0]
2025 ; AVX512-FCP-NEXT: vpermd %ymm4, %ymm8, %ymm9
2026 ; AVX512-FCP-NEXT: vpbroadcastq {{.*#+}} ymm8 = [0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25]
2027 ; AVX512-FCP-NEXT: vpshufb %ymm8, %ymm9, %ymm9
2028 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm6[0,1,2,3,4],ymm9[5,6,7],ymm6[8,9,10,11,12],ymm9[13,14,15]
2029 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm9[4,5,6,7]
2030 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm3[0,1],ymm2[2],ymm3[3],ymm2[4],ymm3[5,6],ymm2[7],ymm3[8,9],ymm2[10],ymm3[11],ymm2[12],ymm3[13,14],ymm2[15]
2031 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm9, %xmm10
2032 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1,2],xmm10[3,4],xmm9[5,6,7]
2033 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
2034 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm10 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
2035 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm11 = [0,2,0,0,5,7,2,4]
2036 ; AVX512-FCP-NEXT: vpermd %ymm10, %ymm11, %ymm10
2037 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,20,21,22,23,16,17,22,23]
2038 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3,4,5,6,7]
2039 ; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [1,4,6,0,1,4,6,0]
2040 ; AVX512-FCP-NEXT: # ymm10 = mem[0,1,0,1]
2041 ; AVX512-FCP-NEXT: vpermd %ymm4, %ymm10, %ymm10
2042 ; AVX512-FCP-NEXT: vpshufb %ymm7, %ymm10, %ymm7
2043 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm9[0,1,2,3,4],ymm7[5,6,7],ymm9[8,9,10,11,12],ymm7[13,14,15]
2044 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm7 = ymm9[0,1,2,3],ymm7[4,5,6,7]
2045 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0],ymm3[1,2],ymm2[3],ymm3[4],ymm2[5],ymm3[6,7],ymm2[8],ymm3[9,10],ymm2[11],ymm3[12],ymm2[13],ymm3[14,15]
2046 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm9, %xmm10
2047 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0],xmm9[1],xmm10[2],xmm9[3]
2048 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
2049 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm10 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
2050 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm11 = [0,3,0,0,5,0,2,7]
2051 ; AVX512-FCP-NEXT: vpermd %ymm10, %ymm11, %ymm10
2052 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,2,3,4,5,18,19,20,21,26,27,16,17,30,31,30,31,18,19,20,21]
2053 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3,4,5,6,7]
2054 ; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [2,4,7,0,2,4,7,0]
2055 ; AVX512-FCP-NEXT: # ymm10 = mem[0,1,0,1]
2056 ; AVX512-FCP-NEXT: vpermd %ymm4, %ymm10, %ymm10
2057 ; AVX512-FCP-NEXT: vpshufb %ymm8, %ymm10, %ymm8
2058 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1,2,3,4],ymm8[5,6,7],ymm9[8,9,10,11,12],ymm8[13,14,15]
2059 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3],ymm8[4,5,6,7]
2060 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13],ymm2[14],ymm3[15]
2061 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm2, %xmm3
2062 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3,4],xmm3[5,6,7]
2063 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
2064 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
2065 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm1 = [1,3,0,0,6,0,3,5]
2066 ; AVX512-FCP-NEXT: vpermd %ymm0, %ymm1, %ymm0
2067 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,u,u,u,u,u,u,u,u]
2068 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3,4,5,6,7]
2069 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm1 = [0,0,0,0,0,2,5,7]
2070 ; AVX512-FCP-NEXT: vpermd %ymm4, %ymm1, %ymm1
2071 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,u,u,u,u,24,25,30,31,u,u,u,u]
2072 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm2 = [0,1,2,3,4,5,12,14]
2073 ; AVX512-FCP-NEXT: vpermi2d %ymm1, %ymm0, %ymm2
2074 ; AVX512-FCP-NEXT: vmovdqa %ymm5, (%rsi)
2075 ; AVX512-FCP-NEXT: vmovdqa %ymm6, (%rdx)
2076 ; AVX512-FCP-NEXT: vmovdqa %ymm7, (%rcx)
2077 ; AVX512-FCP-NEXT: vmovdqa %ymm8, (%r8)
2078 ; AVX512-FCP-NEXT: vmovdqa %ymm2, (%r9)
2079 ; AVX512-FCP-NEXT: vzeroupper
2080 ; AVX512-FCP-NEXT: retq
2082 ; AVX512DQ-LABEL: load_i16_stride5_vf16:
2083 ; AVX512DQ: # %bb.0:
2084 ; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm2
2085 ; AVX512DQ-NEXT: vmovdqa 32(%rdi), %ymm3
2086 ; AVX512DQ-NEXT: vmovdqa 64(%rdi), %ymm0
2087 ; AVX512DQ-NEXT: vmovdqa 96(%rdi), %ymm1
2088 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm4 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
2089 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm5 = ymm4[2,3,0,1]
2090 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3,4],ymm5[5],ymm4[6],ymm5[7]
2091 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm4[6,7,16,17,26,27,20,21,30,31,24,25],zero,zero,zero,zero,zero,zero
2092 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm5 = ymm2[0],ymm3[1],ymm2[2,3],ymm3[4],ymm2[5],ymm3[6],ymm2[7,8],ymm3[9],ymm2[10,11],ymm3[12],ymm2[13],ymm3[14],ymm2[15]
2093 ; AVX512DQ-NEXT: vextracti128 $1, %ymm5, %xmm6
2094 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0],xmm6[1,2,3],xmm5[4,5],xmm6[6,7]
2095 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[0,1,10,11,4,5,14,15,8,9,2,3,12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm5[u,u,u,u,u,u]
2096 ; AVX512DQ-NEXT: vpor %ymm4, %ymm5, %ymm5
2097 ; AVX512DQ-NEXT: vmovdqa 144(%rdi), %xmm6
2098 ; AVX512DQ-NEXT: vmovdqa 128(%rdi), %xmm4
2099 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm7 = xmm4[0],xmm6[1],xmm4[2,3]
2100 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[u,u,u,u,u,u,u,u,u,u,2,3,12,13,6,7]
2101 ; AVX512DQ-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
2102 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm7 = ymm5[0,1,2,3,4],ymm7[5,6,7],ymm5[8,9,10,11,12],ymm7[13,14,15]
2103 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
2104 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm7 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
2105 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm8 = ymm7[2,3,0,1]
2106 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3,4],ymm8[5],ymm7[6,7]
2107 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm7 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm7[8,9,18,19,28,29,22,23,16,17,26,27],zero,zero,zero,zero,zero,zero
2108 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm8 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4,5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10],ymm2[11],ymm3[12,13],ymm2[14],ymm3[15]
2109 ; AVX512DQ-NEXT: vextracti128 $1, %ymm8, %xmm9
2110 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm9[2,3],xmm8[4,5,6],xmm9[7]
2111 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[2,3,12,13,6,7,0,1,10,11,4,5,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[u,u,u,u,u,u]
2112 ; AVX512DQ-NEXT: vpor %ymm7, %ymm8, %ymm7
2113 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm8 = xmm4[0,1],xmm6[2],xmm4[3]
2114 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[u,u,u,u,u,u,u,u,u,u,4,5,14,15,8,9]
2115 ; AVX512DQ-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
2116 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm8 = ymm7[0,1,2,3,4],ymm8[5,6,7],ymm7[8,9,10,11,12],ymm8[13,14,15]
2117 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm8[4,5,6,7]
2118 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm8 = ymm3[0,1],ymm2[2],ymm3[3],ymm2[4],ymm3[5,6],ymm2[7],ymm3[8,9],ymm2[10],ymm3[11],ymm2[12],ymm3[13,14],ymm2[15]
2119 ; AVX512DQ-NEXT: vextracti128 $1, %ymm8, %xmm9
2120 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1,2],xmm9[3,4],xmm8[5,6,7]
2121 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
2122 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm9 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
2123 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm10 = ymm9[2,3,0,1]
2124 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4,5],ymm10[6],ymm9[7]
2125 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm9 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
2126 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3,4,5,6,7]
2127 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm9 = xmm6[0],xmm4[1],xmm6[2,3]
2128 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[u,u,u,u,u,u,u,u,u,u,6,7,0,1,10,11]
2129 ; AVX512DQ-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
2130 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm9 = ymm8[0,1,2,3,4],ymm9[5,6,7],ymm8[8,9,10,11,12],ymm9[13,14,15]
2131 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm9[4,5,6,7]
2132 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0],ymm3[1,2],ymm2[3],ymm3[4],ymm2[5],ymm3[6,7],ymm2[8],ymm3[9,10],ymm2[11],ymm3[12],ymm2[13],ymm3[14,15]
2133 ; AVX512DQ-NEXT: vextracti128 $1, %ymm9, %xmm10
2134 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0],xmm9[1],xmm10[2],xmm9[3]
2135 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
2136 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm10 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
2137 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm11 = ymm10[2,3,0,1]
2138 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm11[4],ymm10[5],ymm11[6],ymm10[7]
2139 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
2140 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3,4,5,6,7]
2141 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm10 = xmm6[0,1],xmm4[2],xmm6[3]
2142 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[u,u,u,u,u,u,u,u,u,u,8,9,2,3,12,13]
2143 ; AVX512DQ-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
2144 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm10 = ymm9[0,1,2,3,4],ymm10[5,6,7],ymm9[8,9,10,11,12],ymm10[13,14,15]
2145 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm10[4,5,6,7]
2146 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13],ymm2[14],ymm3[15]
2147 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm3
2148 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3,4],xmm3[5,6,7]
2149 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
2150 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
2151 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
2152 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4],ymm0[5,6],ymm1[7]
2153 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,4,5,14,15,24,25,18,19,28,29,22,23,u,u,u,u,u,u,u,u]
2154 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3,4,5,6,7]
2155 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm1 = xmm6[3,1,2,3]
2156 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,2,1,4,5,6,7]
2157 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm2 = xmm4[0,2,2,3]
2158 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,0,3,4,5,6,7]
2159 ; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
2160 ; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
2161 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
2162 ; AVX512DQ-NEXT: vmovdqa %ymm5, (%rsi)
2163 ; AVX512DQ-NEXT: vmovdqa %ymm7, (%rdx)
2164 ; AVX512DQ-NEXT: vmovdqa %ymm8, (%rcx)
2165 ; AVX512DQ-NEXT: vmovdqa %ymm9, (%r8)
2166 ; AVX512DQ-NEXT: vmovdqa %ymm0, (%r9)
2167 ; AVX512DQ-NEXT: vzeroupper
2168 ; AVX512DQ-NEXT: retq
2170 ; AVX512DQ-FCP-LABEL: load_i16_stride5_vf16:
2171 ; AVX512DQ-FCP: # %bb.0:
2172 ; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm2
2173 ; AVX512DQ-FCP-NEXT: vmovdqa 32(%rdi), %ymm3
2174 ; AVX512DQ-FCP-NEXT: vmovdqa 64(%rdi), %ymm0
2175 ; AVX512DQ-FCP-NEXT: vmovdqa 96(%rdi), %ymm1
2176 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
2177 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm5 = [1,0,0,0,4,6,1,3]
2178 ; AVX512DQ-FCP-NEXT: vpermd %ymm4, %ymm5, %ymm4
2179 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm4 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm4[2,3,16,17,22,23,24,25,30,31,20,21],zero,zero,zero,zero,zero,zero
2180 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm5 = ymm2[0],ymm3[1],ymm2[2,3],ymm3[4],ymm2[5],ymm3[6],ymm2[7,8],ymm3[9],ymm2[10,11],ymm3[12],ymm2[13],ymm3[14],ymm2[15]
2181 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm5, %xmm6
2182 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0],xmm6[1,2,3],xmm5[4,5],xmm6[6,7]
2183 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm5 = ymm5[0,1,10,11,4,5,14,15,8,9,2,3,12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm5[u,u,u,u,u,u]
2184 ; AVX512DQ-FCP-NEXT: vpor %ymm4, %ymm5, %ymm5
2185 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm6 = [0,0,0,0,0,3,5,0]
2186 ; AVX512DQ-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
2187 ; AVX512DQ-FCP-NEXT: vpermd %ymm4, %ymm6, %ymm6
2188 ; AVX512DQ-FCP-NEXT: vpbroadcastq {{.*#+}} ymm7 = [0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27]
2189 ; AVX512DQ-FCP-NEXT: vpshufb %ymm7, %ymm6, %ymm6
2190 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm5[0,1,2,3,4],ymm6[5,6,7],ymm5[8,9,10,11,12],ymm6[13,14,15]
2191 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
2192 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
2193 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm8 = [2,0,0,0,4,7,1,6]
2194 ; AVX512DQ-FCP-NEXT: vpermd %ymm6, %ymm8, %ymm6
2195 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[0,1,18,19,20,21,26,27,16,17,30,31],zero,zero,zero,zero,zero,zero
2196 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm3[0],ymm2[1],ymm3[2],ymm2[3],ymm3[4,5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10],ymm2[11],ymm3[12,13],ymm2[14],ymm3[15]
2197 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm8, %xmm9
2198 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm9[2,3],xmm8[4,5,6],xmm9[7]
2199 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[2,3,12,13,6,7,0,1,10,11,4,5,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[u,u,u,u,u,u]
2200 ; AVX512DQ-FCP-NEXT: vpor %ymm6, %ymm8, %ymm6
2201 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm8 = [0,0,0,0,1,3,6,0]
2202 ; AVX512DQ-FCP-NEXT: vpermd %ymm4, %ymm8, %ymm9
2203 ; AVX512DQ-FCP-NEXT: vpbroadcastq {{.*#+}} ymm8 = [0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25]
2204 ; AVX512DQ-FCP-NEXT: vpshufb %ymm8, %ymm9, %ymm9
2205 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm6[0,1,2,3,4],ymm9[5,6,7],ymm6[8,9,10,11,12],ymm9[13,14,15]
2206 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm9[4,5,6,7]
2207 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm3[0,1],ymm2[2],ymm3[3],ymm2[4],ymm3[5,6],ymm2[7],ymm3[8,9],ymm2[10],ymm3[11],ymm2[12],ymm3[13,14],ymm2[15]
2208 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm9, %xmm10
2209 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1,2],xmm10[3,4],xmm9[5,6,7]
2210 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
2211 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm10 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
2212 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm11 = [0,2,0,0,5,7,2,4]
2213 ; AVX512DQ-FCP-NEXT: vpermd %ymm10, %ymm11, %ymm10
2214 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,20,21,22,23,16,17,22,23]
2215 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3,4,5,6,7]
2216 ; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [1,4,6,0,1,4,6,0]
2217 ; AVX512DQ-FCP-NEXT: # ymm10 = mem[0,1,0,1]
2218 ; AVX512DQ-FCP-NEXT: vpermd %ymm4, %ymm10, %ymm10
2219 ; AVX512DQ-FCP-NEXT: vpshufb %ymm7, %ymm10, %ymm7
2220 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm9[0,1,2,3,4],ymm7[5,6,7],ymm9[8,9,10,11,12],ymm7[13,14,15]
2221 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm7 = ymm9[0,1,2,3],ymm7[4,5,6,7]
2222 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0],ymm3[1,2],ymm2[3],ymm3[4],ymm2[5],ymm3[6,7],ymm2[8],ymm3[9,10],ymm2[11],ymm3[12],ymm2[13],ymm3[14,15]
2223 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm9, %xmm10
2224 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm9 = xmm10[0],xmm9[1],xmm10[2],xmm9[3]
2225 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm9 = xmm9[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
2226 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm10 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
2227 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm11 = [0,3,0,0,5,0,2,7]
2228 ; AVX512DQ-FCP-NEXT: vpermd %ymm10, %ymm11, %ymm10
2229 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,2,3,4,5,18,19,20,21,26,27,16,17,30,31,30,31,18,19,20,21]
2230 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2],ymm10[3,4,5,6,7]
2231 ; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm10 = [2,4,7,0,2,4,7,0]
2232 ; AVX512DQ-FCP-NEXT: # ymm10 = mem[0,1,0,1]
2233 ; AVX512DQ-FCP-NEXT: vpermd %ymm4, %ymm10, %ymm10
2234 ; AVX512DQ-FCP-NEXT: vpshufb %ymm8, %ymm10, %ymm8
2235 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1,2,3,4],ymm8[5,6,7],ymm9[8,9,10,11,12],ymm8[13,14,15]
2236 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3],ymm8[4,5,6,7]
2237 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5],ymm2[6],ymm3[7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13],ymm2[14],ymm3[15]
2238 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm2, %xmm3
2239 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3,4],xmm3[5,6,7]
2240 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
2241 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
2242 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm1 = [1,3,0,0,6,0,3,5]
2243 ; AVX512DQ-FCP-NEXT: vpermd %ymm0, %ymm1, %ymm0
2244 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,u,u,u,u,u,u,u,u]
2245 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3,4,5,6,7]
2246 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm1 = [0,0,0,0,0,2,5,7]
2247 ; AVX512DQ-FCP-NEXT: vpermd %ymm4, %ymm1, %ymm1
2248 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,u,u,u,u,24,25,30,31,u,u,u,u]
2249 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm2 = [0,1,2,3,4,5,12,14]
2250 ; AVX512DQ-FCP-NEXT: vpermi2d %ymm1, %ymm0, %ymm2
2251 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm5, (%rsi)
2252 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm6, (%rdx)
2253 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm7, (%rcx)
2254 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm8, (%r8)
2255 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm2, (%r9)
2256 ; AVX512DQ-FCP-NEXT: vzeroupper
2257 ; AVX512DQ-FCP-NEXT: retq
2259 ; AVX512BW-LABEL: load_i16_stride5_vf16:
2260 ; AVX512BW: # %bb.0:
2261 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm0
2262 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm1
2263 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm2 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
2264 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2265 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm3 = [0,1,2,3,4,5,6,7,8,9,10,11,12,17,22,27]
2266 ; AVX512BW-NEXT: vmovdqa 128(%rdi), %ymm4
2267 ; AVX512BW-NEXT: vpermi2w %ymm4, %ymm2, %ymm3
2268 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm2 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
2269 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2270 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm5 = [0,1,2,3,4,5,6,7,8,9,10,11,12,18,23,28]
2271 ; AVX512BW-NEXT: vpermi2w %ymm4, %ymm2, %ymm5
2272 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm2 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
2273 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
2274 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,1,2,3,4,5,6,7,8,9,10,11,12,19,24,29]
2275 ; AVX512BW-NEXT: vpermi2w %ymm4, %ymm2, %ymm6
2276 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm2 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
2277 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
2278 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm7 = [0,1,2,3,4,5,6,7,8,9,10,11,12,20,25,30]
2279 ; AVX512BW-NEXT: vpermi2w %ymm4, %ymm2, %ymm7
2280 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm2 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
2281 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2282 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm0 = [0,1,2,3,4,5,6,7,8,9,10,11,16,21,26,31]
2283 ; AVX512BW-NEXT: vpermi2w %ymm4, %ymm2, %ymm0
2284 ; AVX512BW-NEXT: vmovdqa %ymm3, (%rsi)
2285 ; AVX512BW-NEXT: vmovdqa %ymm5, (%rdx)
2286 ; AVX512BW-NEXT: vmovdqa %ymm6, (%rcx)
2287 ; AVX512BW-NEXT: vmovdqa %ymm7, (%r8)
2288 ; AVX512BW-NEXT: vmovdqa %ymm0, (%r9)
2289 ; AVX512BW-NEXT: vzeroupper
2290 ; AVX512BW-NEXT: retq
2292 ; AVX512BW-FCP-LABEL: load_i16_stride5_vf16:
2293 ; AVX512BW-FCP: # %bb.0:
2294 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
2295 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
2296 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
2297 ; AVX512BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2298 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm3 = [0,1,2,3,4,5,6,7,8,9,10,11,12,17,22,27]
2299 ; AVX512BW-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
2300 ; AVX512BW-FCP-NEXT: vpermi2w %ymm4, %ymm2, %ymm3
2301 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
2302 ; AVX512BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2303 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm5 = [0,1,2,3,4,5,6,7,8,9,10,11,12,18,23,28]
2304 ; AVX512BW-FCP-NEXT: vpermi2w %ymm4, %ymm2, %ymm5
2305 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
2306 ; AVX512BW-FCP-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
2307 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,1,2,3,4,5,6,7,8,9,10,11,12,19,24,29]
2308 ; AVX512BW-FCP-NEXT: vpermi2w %ymm4, %ymm2, %ymm6
2309 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
2310 ; AVX512BW-FCP-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
2311 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm7 = [0,1,2,3,4,5,6,7,8,9,10,11,12,20,25,30]
2312 ; AVX512BW-FCP-NEXT: vpermi2w %ymm4, %ymm2, %ymm7
2313 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
2314 ; AVX512BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2315 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm0 = [0,1,2,3,4,5,6,7,8,9,10,11,16,21,26,31]
2316 ; AVX512BW-FCP-NEXT: vpermi2w %ymm4, %ymm2, %ymm0
2317 ; AVX512BW-FCP-NEXT: vmovdqa %ymm3, (%rsi)
2318 ; AVX512BW-FCP-NEXT: vmovdqa %ymm5, (%rdx)
2319 ; AVX512BW-FCP-NEXT: vmovdqa %ymm6, (%rcx)
2320 ; AVX512BW-FCP-NEXT: vmovdqa %ymm7, (%r8)
2321 ; AVX512BW-FCP-NEXT: vmovdqa %ymm0, (%r9)
2322 ; AVX512BW-FCP-NEXT: vzeroupper
2323 ; AVX512BW-FCP-NEXT: retq
2325 ; AVX512DQ-BW-LABEL: load_i16_stride5_vf16:
2326 ; AVX512DQ-BW: # %bb.0:
2327 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm0
2328 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm1
2329 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm2 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
2330 ; AVX512DQ-BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2331 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm3 = [0,1,2,3,4,5,6,7,8,9,10,11,12,17,22,27]
2332 ; AVX512DQ-BW-NEXT: vmovdqa 128(%rdi), %ymm4
2333 ; AVX512DQ-BW-NEXT: vpermi2w %ymm4, %ymm2, %ymm3
2334 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm2 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
2335 ; AVX512DQ-BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2336 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm5 = [0,1,2,3,4,5,6,7,8,9,10,11,12,18,23,28]
2337 ; AVX512DQ-BW-NEXT: vpermi2w %ymm4, %ymm2, %ymm5
2338 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm2 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
2339 ; AVX512DQ-BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
2340 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,1,2,3,4,5,6,7,8,9,10,11,12,19,24,29]
2341 ; AVX512DQ-BW-NEXT: vpermi2w %ymm4, %ymm2, %ymm6
2342 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm2 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
2343 ; AVX512DQ-BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
2344 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm7 = [0,1,2,3,4,5,6,7,8,9,10,11,12,20,25,30]
2345 ; AVX512DQ-BW-NEXT: vpermi2w %ymm4, %ymm2, %ymm7
2346 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm2 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
2347 ; AVX512DQ-BW-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2348 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm0 = [0,1,2,3,4,5,6,7,8,9,10,11,16,21,26,31]
2349 ; AVX512DQ-BW-NEXT: vpermi2w %ymm4, %ymm2, %ymm0
2350 ; AVX512DQ-BW-NEXT: vmovdqa %ymm3, (%rsi)
2351 ; AVX512DQ-BW-NEXT: vmovdqa %ymm5, (%rdx)
2352 ; AVX512DQ-BW-NEXT: vmovdqa %ymm6, (%rcx)
2353 ; AVX512DQ-BW-NEXT: vmovdqa %ymm7, (%r8)
2354 ; AVX512DQ-BW-NEXT: vmovdqa %ymm0, (%r9)
2355 ; AVX512DQ-BW-NEXT: vzeroupper
2356 ; AVX512DQ-BW-NEXT: retq
2358 ; AVX512DQ-BW-FCP-LABEL: load_i16_stride5_vf16:
2359 ; AVX512DQ-BW-FCP: # %bb.0:
2360 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm0
2361 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm1
2362 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
2363 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2364 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm3 = [0,1,2,3,4,5,6,7,8,9,10,11,12,17,22,27]
2365 ; AVX512DQ-BW-FCP-NEXT: vmovdqa 128(%rdi), %ymm4
2366 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %ymm4, %ymm2, %ymm3
2367 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
2368 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2369 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm5 = [0,1,2,3,4,5,6,7,8,9,10,11,12,18,23,28]
2370 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %ymm4, %ymm2, %ymm5
2371 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
2372 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
2373 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,1,2,3,4,5,6,7,8,9,10,11,12,19,24,29]
2374 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %ymm4, %ymm2, %ymm6
2375 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
2376 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
2377 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm7 = [0,1,2,3,4,5,6,7,8,9,10,11,12,20,25,30]
2378 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %ymm4, %ymm2, %ymm7
2379 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm2 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
2380 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm1, %zmm0, %zmm2
2381 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm0 = [0,1,2,3,4,5,6,7,8,9,10,11,16,21,26,31]
2382 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %ymm4, %ymm2, %ymm0
2383 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm3, (%rsi)
2384 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm5, (%rdx)
2385 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm6, (%rcx)
2386 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm7, (%r8)
2387 ; AVX512DQ-BW-FCP-NEXT: vmovdqa %ymm0, (%r9)
2388 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
2389 ; AVX512DQ-BW-FCP-NEXT: retq
2390 %wide.vec = load <80 x i16>, ptr %in.vec, align 64
2391 %strided.vec0 = shufflevector <80 x i16> %wide.vec, <80 x i16> poison, <16 x i32> <i32 0, i32 5, i32 10, i32 15, i32 20, i32 25, i32 30, i32 35, i32 40, i32 45, i32 50, i32 55, i32 60, i32 65, i32 70, i32 75>
2392 %strided.vec1 = shufflevector <80 x i16> %wide.vec, <80 x i16> poison, <16 x i32> <i32 1, i32 6, i32 11, i32 16, i32 21, i32 26, i32 31, i32 36, i32 41, i32 46, i32 51, i32 56, i32 61, i32 66, i32 71, i32 76>
2393 %strided.vec2 = shufflevector <80 x i16> %wide.vec, <80 x i16> poison, <16 x i32> <i32 2, i32 7, i32 12, i32 17, i32 22, i32 27, i32 32, i32 37, i32 42, i32 47, i32 52, i32 57, i32 62, i32 67, i32 72, i32 77>
2394 %strided.vec3 = shufflevector <80 x i16> %wide.vec, <80 x i16> poison, <16 x i32> <i32 3, i32 8, i32 13, i32 18, i32 23, i32 28, i32 33, i32 38, i32 43, i32 48, i32 53, i32 58, i32 63, i32 68, i32 73, i32 78>
2395 %strided.vec4 = shufflevector <80 x i16> %wide.vec, <80 x i16> poison, <16 x i32> <i32 4, i32 9, i32 14, i32 19, i32 24, i32 29, i32 34, i32 39, i32 44, i32 49, i32 54, i32 59, i32 64, i32 69, i32 74, i32 79>
2396 store <16 x i16> %strided.vec0, ptr %out.vec0, align 64
2397 store <16 x i16> %strided.vec1, ptr %out.vec1, align 64
2398 store <16 x i16> %strided.vec2, ptr %out.vec2, align 64
2399 store <16 x i16> %strided.vec3, ptr %out.vec3, align 64
2400 store <16 x i16> %strided.vec4, ptr %out.vec4, align 64
2401 ret void
2402 }
2404 define void @load_i16_stride5_vf32(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4) nounwind {
2405 ; SSE-LABEL: load_i16_stride5_vf32:
2406 ; SSE: # %bb.0:
2407 ; SSE-NEXT: subq $408, %rsp # imm = 0x198
2408 ; SSE-NEXT: movdqa 64(%rdi), %xmm4
2409 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2410 ; SSE-NEXT: movdqa (%rdi), %xmm6
2411 ; SSE-NEXT: movdqa 16(%rdi), %xmm13
2412 ; SSE-NEXT: movdqa 32(%rdi), %xmm9
2413 ; SSE-NEXT: movdqa 48(%rdi), %xmm5
2414 ; SSE-NEXT: movdqa 224(%rdi), %xmm7
2415 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2416 ; SSE-NEXT: movdqa 160(%rdi), %xmm11
2417 ; SSE-NEXT: movdqa 176(%rdi), %xmm12
2418 ; SSE-NEXT: movdqa 208(%rdi), %xmm8
2419 ; SSE-NEXT: movdqa 192(%rdi), %xmm2
2420 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2421 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,65535,65535,65535,0,65535,65535,65535]
2422 ; SSE-NEXT: movdqa %xmm0, %xmm1
2423 ; SSE-NEXT: pandn %xmm2, %xmm1
2424 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm8[0,1,0,3]
2425 ; SSE-NEXT: pand %xmm0, %xmm2
2426 ; SSE-NEXT: por %xmm1, %xmm2
2427 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm12[3,1,2,3]
2428 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2429 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
2430 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm11[0,2,2,3]
2431 ; SSE-NEXT: movdqa %xmm11, (%rsp) # 16-byte Spill
2432 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[0,3,2,3,4,5,6,7]
2433 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
2434 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,3]
2435 ; SSE-NEXT: movaps {{.*#+}} xmm15 = [65535,65535,65535,65535,65535,65535,65535,0]
2436 ; SSE-NEXT: andps %xmm15, %xmm3
2437 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[0,1,0,1]
2438 ; SSE-NEXT: movaps %xmm15, %xmm2
2439 ; SSE-NEXT: pandn %xmm1, %xmm2
2440 ; SSE-NEXT: por %xmm3, %xmm2
2441 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2442 ; SSE-NEXT: movdqa %xmm0, %xmm1
2443 ; SSE-NEXT: pandn %xmm9, %xmm1
2444 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2445 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm5[0,1,0,3]
2446 ; SSE-NEXT: movdqa %xmm5, %xmm7
2447 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2448 ; SSE-NEXT: pand %xmm0, %xmm2
2449 ; SSE-NEXT: por %xmm1, %xmm2
2450 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm13[3,1,2,3]
2451 ; SSE-NEXT: movdqa %xmm13, %xmm5
2452 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2453 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
2454 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm6[0,2,2,3]
2455 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2456 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[0,3,2,3,4,5,6,7]
2457 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
2458 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,3]
2459 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm4[0,1,0,1]
2460 ; SSE-NEXT: movaps %xmm15, %xmm2
2461 ; SSE-NEXT: andnps %xmm1, %xmm2
2462 ; SSE-NEXT: movdqa 272(%rdi), %xmm4
2463 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2464 ; SSE-NEXT: andps %xmm15, %xmm3
2465 ; SSE-NEXT: orps %xmm3, %xmm2
2466 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2467 ; SSE-NEXT: movdqa %xmm0, %xmm1
2468 ; SSE-NEXT: pandn %xmm4, %xmm1
2469 ; SSE-NEXT: movdqa 288(%rdi), %xmm2
2470 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2471 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,0,3]
2472 ; SSE-NEXT: pand %xmm0, %xmm2
2473 ; SSE-NEXT: por %xmm1, %xmm2
2474 ; SSE-NEXT: movdqa 256(%rdi), %xmm14
2475 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm14[3,1,2,3]
2476 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2477 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
2478 ; SSE-NEXT: movdqa 240(%rdi), %xmm13
2479 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm13[0,2,2,3]
2480 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2481 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[0,3,2,3,4,5,6,7]
2482 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
2483 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,3]
2484 ; SSE-NEXT: movdqa 304(%rdi), %xmm1
2485 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2486 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
2487 ; SSE-NEXT: movaps %xmm15, %xmm2
2488 ; SSE-NEXT: andnps %xmm1, %xmm2
2489 ; SSE-NEXT: andps %xmm15, %xmm3
2490 ; SSE-NEXT: orps %xmm3, %xmm2
2491 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2492 ; SSE-NEXT: movdqa 128(%rdi), %xmm1
2493 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2494 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,3]
2495 ; SSE-NEXT: pand %xmm0, %xmm1
2496 ; SSE-NEXT: movdqa 112(%rdi), %xmm2
2497 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2498 ; SSE-NEXT: pandn %xmm2, %xmm0
2499 ; SSE-NEXT: por %xmm1, %xmm0
2500 ; SSE-NEXT: movdqa 96(%rdi), %xmm1
2501 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2502 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
2503 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
2504 ; SSE-NEXT: movdqa 80(%rdi), %xmm4
2505 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[0,2,2,3]
2506 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2507 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,3,2,3,4,5,6,7]
2508 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
2509 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm0[2,3]
2510 ; SSE-NEXT: movdqa 144(%rdi), %xmm0
2511 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2512 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
2513 ; SSE-NEXT: movaps %xmm15, %xmm1
2514 ; SSE-NEXT: andnps %xmm0, %xmm1
2515 ; SSE-NEXT: andps %xmm15, %xmm2
2516 ; SSE-NEXT: orps %xmm2, %xmm1
2517 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2518 ; SSE-NEXT: psrlq $48, %xmm12
2519 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm11[0,3,2,3]
2520 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,2,2,3,4,5,6,7]
2521 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm12[0],xmm1[1],xmm12[1]
2522 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,0,0,65535,65535,65535,65535,65535]
2523 ; SSE-NEXT: movdqa %xmm0, %xmm2
2524 ; SSE-NEXT: pandn %xmm1, %xmm2
2525 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2526 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm8[1,3,2,3]
2527 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
2528 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm11[0,2,2,3]
2529 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
2530 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm3[0,1,2,3,7,5,6,7]
2531 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
2532 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,0,0,0,4,5,6,7]
2533 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,4,7]
2534 ; SSE-NEXT: pand %xmm0, %xmm1
2535 ; SSE-NEXT: por %xmm2, %xmm1
2536 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2537 ; SSE-NEXT: movdqa %xmm10, %xmm2
2538 ; SSE-NEXT: psllq $48, %xmm2
2539 ; SSE-NEXT: movaps %xmm15, %xmm3
2540 ; SSE-NEXT: andnps %xmm2, %xmm3
2541 ; SSE-NEXT: pand %xmm15, %xmm1
2542 ; SSE-NEXT: orps %xmm1, %xmm3
2543 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2544 ; SSE-NEXT: psrlq $48, %xmm5
2545 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm6[0,3,2,3]
2546 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[1,2,2,3,4,5,6,7]
2547 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
2548 ; SSE-NEXT: movdqa %xmm0, %xmm1
2549 ; SSE-NEXT: pandn %xmm2, %xmm1
2550 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm7[1,3,2,3]
2551 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm9[0,2,2,3]
2552 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
2553 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,7,5,6,7]
2554 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,2,1]
2555 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,0,0,0,4,5,6,7]
2556 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,5,6,4,7]
2557 ; SSE-NEXT: pand %xmm0, %xmm2
2558 ; SSE-NEXT: por %xmm1, %xmm2
2559 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
2560 ; SSE-NEXT: movdqa %xmm6, %xmm1
2561 ; SSE-NEXT: psllq $48, %xmm1
2562 ; SSE-NEXT: movdqa %xmm15, %xmm3
2563 ; SSE-NEXT: pandn %xmm1, %xmm3
2564 ; SSE-NEXT: pand %xmm15, %xmm2
2565 ; SSE-NEXT: por %xmm2, %xmm3
2566 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2567 ; SSE-NEXT: movdqa %xmm14, %xmm1
2568 ; SSE-NEXT: psrlq $48, %xmm1
2569 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm13[0,3,2,3]
2570 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[1,2,2,3,4,5,6,7]
2571 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
2572 ; SSE-NEXT: movdqa %xmm0, %xmm1
2573 ; SSE-NEXT: pandn %xmm2, %xmm1
2574 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
2575 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm14[1,3,2,3]
2576 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
2577 ; SSE-NEXT: # xmm3 = mem[0,2,2,3]
2578 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
2579 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,7,5,6,7]
2580 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,2,1]
2581 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[0,0,0,0,4,5,6,7]
2582 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,5,6,4,7]
2583 ; SSE-NEXT: pand %xmm0, %xmm2
2584 ; SSE-NEXT: por %xmm1, %xmm2
2585 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2586 ; SSE-NEXT: movdqa %xmm7, %xmm1
2587 ; SSE-NEXT: psllq $48, %xmm1
2588 ; SSE-NEXT: movdqa %xmm15, %xmm3
2589 ; SSE-NEXT: pandn %xmm1, %xmm3
2590 ; SSE-NEXT: pand %xmm15, %xmm2
2591 ; SSE-NEXT: por %xmm2, %xmm3
2592 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2593 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
2594 ; SSE-NEXT: movdqa %xmm13, %xmm1
2595 ; SSE-NEXT: psrlq $48, %xmm1
2596 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[0,3,2,3]
2597 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[1,2,2,3,4,5,6,7]
2598 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
2599 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
2600 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm12[1,3,2,3]
2601 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
2602 ; SSE-NEXT: # xmm3 = mem[0,2,2,3]
2603 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
2604 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm3[0,1,2,3,7,5,6,7]
2605 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
2606 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,0,0,0,4,5,6,7]
2607 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,4,7]
2608 ; SSE-NEXT: pand %xmm0, %xmm1
2609 ; SSE-NEXT: pandn %xmm2, %xmm0
2610 ; SSE-NEXT: por %xmm1, %xmm0
2611 ; SSE-NEXT: pand %xmm15, %xmm0
2612 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
2613 ; SSE-NEXT: movdqa %xmm5, %xmm1
2614 ; SSE-NEXT: psllq $48, %xmm1
2615 ; SSE-NEXT: pandn %xmm1, %xmm15
2616 ; SSE-NEXT: por %xmm0, %xmm15
2617 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2618 ; SSE-NEXT: movdqa %xmm8, %xmm0
2619 ; SSE-NEXT: movdqa %xmm11, %xmm8
2620 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,0],xmm11[0,0]
2621 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm11[2,3]
2622 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,0,1,3]
2623 ; SSE-NEXT: movaps {{.*#+}} xmm11 = [65535,65535,65535,0,0,0,65535,65535]
2624 ; SSE-NEXT: movaps %xmm11, %xmm1
2625 ; SSE-NEXT: andnps %xmm0, %xmm1
2626 ; SSE-NEXT: movdqa (%rsp), %xmm4 # 16-byte Reload
2627 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[0,1,1,3]
2628 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,7]
2629 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
2630 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm3[2],xmm2[3],xmm3[3]
2631 ; SSE-NEXT: pand %xmm11, %xmm2
2632 ; SSE-NEXT: por %xmm1, %xmm2
2633 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,6,5,6,7]
2634 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm10[0,1,2,0]
2635 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,5]
2636 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1],xmm0[2,3]
2637 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[2,0]
2638 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2639 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2640 ; SSE-NEXT: movaps %xmm1, %xmm0
2641 ; SSE-NEXT: movaps %xmm1, %xmm15
2642 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
2643 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,0],xmm9[0,0]
2644 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm9[2,3]
2645 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,0,1,3]
2646 ; SSE-NEXT: movaps %xmm11, %xmm1
2647 ; SSE-NEXT: andnps %xmm0, %xmm1
2648 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2649 ; SSE-NEXT: # xmm2 = mem[0,1,1,3]
2650 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,7]
2651 ; SSE-NEXT: punpckhdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2652 ; SSE-NEXT: # xmm2 = xmm2[2],mem[2],xmm2[3],mem[3]
2653 ; SSE-NEXT: pand %xmm11, %xmm2
2654 ; SSE-NEXT: por %xmm1, %xmm2
2655 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,6,5,6,7]
2656 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm6[0,1,2,0]
2657 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,5]
2658 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1],xmm0[2,3]
2659 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[2,0]
2660 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2661 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
2662 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[1,0],xmm10[0,0]
2663 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[2,0],xmm10[2,3]
2664 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,0,1,3]
2665 ; SSE-NEXT: movaps %xmm11, %xmm1
2666 ; SSE-NEXT: andnps %xmm14, %xmm1
2667 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2668 ; SSE-NEXT: # xmm2 = mem[0,1,1,3]
2669 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,7]
2670 ; SSE-NEXT: punpckhdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2671 ; SSE-NEXT: # xmm2 = xmm2[2],mem[2],xmm2[3],mem[3]
2672 ; SSE-NEXT: pand %xmm11, %xmm2
2673 ; SSE-NEXT: por %xmm1, %xmm2
2674 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm14[0,1,2,3,6,5,6,7]
2675 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[0,1,2,0]
2676 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,5]
2677 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1],xmm0[2,3]
2678 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[2,0]
2679 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2680 ; SSE-NEXT: movdqa %xmm12, %xmm0
2681 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2682 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,0],xmm1[0,0]
2683 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm1[2,3]
2684 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,0,1,3]
2685 ; SSE-NEXT: movaps %xmm11, %xmm1
2686 ; SSE-NEXT: andnps %xmm0, %xmm1
2687 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
2688 ; SSE-NEXT: # xmm2 = mem[0,1,1,3]
2689 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,7]
2690 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm13[2],xmm2[3],xmm13[3]
2691 ; SSE-NEXT: pand %xmm11, %xmm2
2692 ; SSE-NEXT: por %xmm1, %xmm2
2693 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,6,5,6,7]
2694 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[0,1,2,0]
2695 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,5]
2696 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1],xmm0[2,3]
2697 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[2,0]
2698 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2699 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm8[2,2,2,2,4,5,6,7]
2700 ; SSE-NEXT: movdqa %xmm11, %xmm1
2701 ; SSE-NEXT: pandn %xmm0, %xmm1
2702 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm4[1,1,1,1]
2703 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[0,2,2,3]
2704 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
2705 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[0,3,2,3,4,5,6,7]
2706 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
2707 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[1,0,3,3,4,5,6,7]
2708 ; SSE-NEXT: pand %xmm11, %xmm0
2709 ; SSE-NEXT: por %xmm1, %xmm0
2710 ; SSE-NEXT: movdqa %xmm0, %xmm2
2711 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
2712 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[2,0],xmm8[3,0]
2713 ; SSE-NEXT: movaps %xmm11, %xmm0
2714 ; SSE-NEXT: andnps %xmm8, %xmm0
2715 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2716 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm3[0,2]
2717 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm8[0,1,2,3,7,4,6,7]
2718 ; SSE-NEXT: pshufd $196, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2719 ; SSE-NEXT: # xmm1 = mem[0,1,0,3]
2720 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,6]
2721 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1],xmm0[2,3]
2722 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[2,0]
2723 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2724 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm9[2,2,2,2,4,5,6,7]
2725 ; SSE-NEXT: movdqa %xmm11, %xmm1
2726 ; SSE-NEXT: pandn %xmm0, %xmm1
2727 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
2728 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm8[1,1,1,1]
2729 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
2730 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm14[0,2,2,3]
2731 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
2732 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[0,3,2,3,4,5,6,7]
2733 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
2734 ; SSE-NEXT: pshuflw {{.*#+}} xmm13 = xmm0[1,0,3,3,4,5,6,7]
2735 ; SSE-NEXT: pand %xmm11, %xmm13
2736 ; SSE-NEXT: por %xmm1, %xmm13
2737 ; SSE-NEXT: movaps %xmm15, %xmm0
2738 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm9[3,0]
2739 ; SSE-NEXT: movdqa %xmm11, %xmm12
2740 ; SSE-NEXT: pandn %xmm9, %xmm12
2741 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm0[0,2]
2742 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm9[0,1,2,3,7,4,6,7]
2743 ; SSE-NEXT: pshufd $196, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2744 ; SSE-NEXT: # xmm1 = mem[0,1,0,3]
2745 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,6]
2746 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1],xmm0[2,3]
2747 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm1[2,0]
2748 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm10[2,2,2,2,4,5,6,7]
2749 ; SSE-NEXT: movdqa %xmm11, %xmm1
2750 ; SSE-NEXT: pandn %xmm0, %xmm1
2751 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
2752 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm6[1,1,1,1]
2753 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
2754 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm3[0,2,2,3]
2755 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
2756 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[0,3,2,3,4,5,6,7]
2757 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
2758 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm0[1,0,3,3,4,5,6,7]
2759 ; SSE-NEXT: pand %xmm11, %xmm9
2760 ; SSE-NEXT: por %xmm1, %xmm9
2761 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2762 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm10[3,0]
2763 ; SSE-NEXT: movdqa %xmm11, %xmm15
2764 ; SSE-NEXT: pandn %xmm10, %xmm15
2765 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm0[0,2]
2766 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm10[0,1,2,3,7,4,6,7]
2767 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm7[0,1,0,3]
2768 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,5,6]
2769 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,1],xmm0[2,3]
2770 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm1[2,0]
2771 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
2772 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm7[2,2,2,2,4,5,6,7]
2773 ; SSE-NEXT: movdqa %xmm11, %xmm1
2774 ; SSE-NEXT: pandn %xmm0, %xmm1
2775 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
2776 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm5[1,1,1,1]
2777 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2778 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[0,2,2,3]
2779 ; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
2780 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[0,3,2,3,4,5,6,7]
2781 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
2782 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm0[1,0,3,3,4,5,6,7]
2783 ; SSE-NEXT: pand %xmm11, %xmm10
2784 ; SSE-NEXT: por %xmm1, %xmm10
2785 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2786 ; SSE-NEXT: movaps (%rsp), %xmm1 # 16-byte Reload
2787 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm0[3,0]
2788 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm1[0,2]
2789 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,0],xmm14[3,0]
2790 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm8[0,2]
2791 ; SSE-NEXT: movaps %xmm14, %xmm2
2792 ; SSE-NEXT: movdqa %xmm3, %xmm1
2793 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[2,0],xmm3[3,0]
2794 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,1],xmm6[0,2]
2795 ; SSE-NEXT: movaps %xmm1, %xmm14
2796 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,0],xmm4[3,0]
2797 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm5[0,2]
2798 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
2799 ; SSE-NEXT: movaps %xmm8, %xmm1
2800 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm7[3,0]
2801 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,6,6,7]
2802 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
2803 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm0[2,1,3,3,4,5,6,7]
2804 ; SSE-NEXT: pand %xmm11, %xmm3
2805 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm2[0,1,2,3,4,6,6,7]
2806 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
2807 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm0[2,1,3,3,4,5,6,7]
2808 ; SSE-NEXT: pand %xmm11, %xmm2
2809 ; SSE-NEXT: pshufhw {{.*#+}} xmm0 = xmm14[0,1,2,3,4,6,6,7]
2810 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
2811 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm0[2,1,3,3,4,5,6,7]
2812 ; SSE-NEXT: pand %xmm11, %xmm0
2813 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,6,6,7]
2814 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,2,2,3]
2815 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[2,1,3,3,4,5,6,7]
2816 ; SSE-NEXT: pand %xmm11, %xmm4
2817 ; SSE-NEXT: pandn %xmm7, %xmm11
2818 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,1],xmm1[0,2]
2819 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm7[0,1,2,3,7,4,6,7]
2820 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
2821 ; SSE-NEXT: pshufd {{.*#+}} xmm14 = xmm6[0,1,0,3]
2822 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,5,5,6]
2823 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[3,1],xmm1[2,3]
2824 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm14[2,0]
2825 ; SSE-NEXT: por {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
2826 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2827 ; SSE-NEXT: # xmm1 = mem[0,2,2,3]
2828 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Folded Reload
2829 ; SSE-NEXT: # xmm14 = mem[0,1,1,3]
2830 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
2831 ; SSE-NEXT: pshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,5,4,7]
2832 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[3,1],xmm1[1,3]
2833 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm14[2,0]
2834 ; SSE-NEXT: por %xmm12, %xmm2
2835 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2836 ; SSE-NEXT: # xmm1 = mem[0,2,2,3]
2837 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
2838 ; SSE-NEXT: # xmm12 = mem[0,1,1,3]
2839 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
2840 ; SSE-NEXT: pshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,5,4,7]
2841 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[3,1],xmm1[1,3]
2842 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm12[2,0]
2843 ; SSE-NEXT: por %xmm15, %xmm0
2844 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
2845 ; SSE-NEXT: # xmm1 = mem[0,2,2,3]
2846 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
2847 ; SSE-NEXT: # xmm5 = mem[0,1,1,3]
2848 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
2849 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,4,7]
2850 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[3,1],xmm1[1,3]
2851 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm5[2,0]
2852 ; SSE-NEXT: por %xmm4, %xmm11
2853 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm8[0,2,2,3]
2854 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm6[0,1,1,3]
2855 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
2856 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,4,7]
2857 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm1[1,3]
2858 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[0,1],xmm4[2,0]
2859 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2860 ; SSE-NEXT: movaps %xmm1, 16(%rsi)
2861 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2862 ; SSE-NEXT: movaps %xmm1, 48(%rsi)
2863 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2864 ; SSE-NEXT: movaps %xmm1, (%rsi)
2865 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2866 ; SSE-NEXT: movaps %xmm1, 32(%rsi)
2867 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2868 ; SSE-NEXT: movaps %xmm1, 16(%rdx)
2869 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2870 ; SSE-NEXT: movaps %xmm1, 48(%rdx)
2871 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2872 ; SSE-NEXT: movaps %xmm1, (%rdx)
2873 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2874 ; SSE-NEXT: movaps %xmm1, 32(%rdx)
2875 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2876 ; SSE-NEXT: movaps %xmm1, 16(%rcx)
2877 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2878 ; SSE-NEXT: movaps %xmm1, 48(%rcx)
2879 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2880 ; SSE-NEXT: movaps %xmm1, (%rcx)
2881 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2882 ; SSE-NEXT: movaps %xmm1, 32(%rcx)
2883 ; SSE-NEXT: movaps %xmm10, 16(%r8)
2884 ; SSE-NEXT: movaps %xmm9, 48(%r8)
2885 ; SSE-NEXT: movaps %xmm13, (%r8)
2886 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
2887 ; SSE-NEXT: movaps %xmm1, 32(%r8)
2888 ; SSE-NEXT: movaps %xmm11, 16(%r9)
2889 ; SSE-NEXT: movaps %xmm0, 48(%r9)
2890 ; SSE-NEXT: movaps %xmm2, (%r9)
2891 ; SSE-NEXT: movaps %xmm3, 32(%r9)
2892 ; SSE-NEXT: addq $408, %rsp # imm = 0x198
2895 ; AVX-LABEL: load_i16_stride5_vf32:
2897 ; AVX-NEXT: subq $424, %rsp # imm = 0x1A8
2898 ; AVX-NEXT: vmovdqa 144(%rdi), %xmm9
2899 ; AVX-NEXT: vmovdqa 128(%rdi), %xmm7
2900 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm7[0,1],xmm9[2,3],xmm7[4,5,6,7]
2901 ; AVX-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2902 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [6,7,2,3,4,5,6,7,6,7,2,3,12,13,6,7]
2903 ; AVX-NEXT: vpshufb %xmm1, %xmm0, %xmm0
2904 ; AVX-NEXT: vmovdqa 96(%rdi), %xmm11
2905 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm11[0,1,1,3]
2906 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,7]
2907 ; AVX-NEXT: vmovdqa 112(%rdi), %xmm10
2908 ; AVX-NEXT: vpunpckhqdq {{.*#+}} xmm2 = xmm2[1],xmm10[1]
2909 ; AVX-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2910 ; AVX-NEXT: vmovdqa 80(%rdi), %xmm3
2911 ; AVX-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2912 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
2913 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,3,2,3,4,5,6,7]
2914 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3,4,5,6,7]
2915 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4],xmm0[5,6,7]
2916 ; AVX-NEXT: vmovdqa (%rdi), %xmm5
2917 ; AVX-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2918 ; AVX-NEXT: vmovdqa 16(%rdi), %xmm12
2919 ; AVX-NEXT: vmovdqa 32(%rdi), %xmm3
2920 ; AVX-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2921 ; AVX-NEXT: vmovdqa 48(%rdi), %xmm15
2922 ; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm15[0,1,0,3]
2923 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm3[4],xmm0[5,6,7]
2924 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm12[3,1,2,3]
2925 ; AVX-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2926 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,2,3,4,5,6,7]
2927 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm5[0,2,2,3]
2928 ; AVX-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
2929 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
2930 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3],xmm0[4,5,6,7]
2931 ; AVX-NEXT: vmovaps {{.*#+}} ymm6 = [65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,65535,65535,65535]
2932 ; AVX-NEXT: vandps %ymm6, %ymm3, %ymm3
2933 ; AVX-NEXT: vmovaps 64(%rdi), %xmm5
2934 ; AVX-NEXT: vshufps {{.*#+}} xmm4 = xmm5[0,1,0,1]
2935 ; AVX-NEXT: vmovaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2936 ; AVX-NEXT: vandnps %ymm4, %ymm6, %ymm4
2937 ; AVX-NEXT: vorps %ymm4, %ymm3, %ymm3
2938 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm0
2939 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2940 ; AVX-NEXT: vmovdqa 304(%rdi), %xmm2
2941 ; AVX-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2942 ; AVX-NEXT: vmovdqa 288(%rdi), %xmm13
2943 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm13[0,1],xmm2[2,3],xmm13[4,5,6,7]
2944 ; AVX-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2945 ; AVX-NEXT: vpshufb %xmm1, %xmm2, %xmm1
2946 ; AVX-NEXT: vmovdqa 256(%rdi), %xmm0
2947 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2948 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,1,1,3]
2949 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,7]
2950 ; AVX-NEXT: vmovdqa 272(%rdi), %xmm0
2951 ; AVX-NEXT: vmovdqa %xmm0, (%rsp) # 16-byte Spill
2952 ; AVX-NEXT: vpunpckhqdq {{.*#+}} xmm2 = xmm2[1],xmm0[1]
2953 ; AVX-NEXT: vmovdqa 240(%rdi), %xmm0
2954 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2955 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,2,2,3]
2956 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,3,2,3,4,5,6,7]
2957 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3,4,5,6,7]
2958 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm2[0,1,2,3,4],xmm1[5,6,7]
2959 ; AVX-NEXT: vmovdqa 176(%rdi), %xmm0
2960 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2961 ; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[3,1,2,3]
2962 ; AVX-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
2963 ; AVX-NEXT: vmovdqa 160(%rdi), %xmm0
2964 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2965 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,2,2,3]
2966 ; AVX-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,3,2,3,4,5,6,7]
2967 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
2968 ; AVX-NEXT: vmovdqa 208(%rdi), %xmm0
2969 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2970 ; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,0,3]
2971 ; AVX-NEXT: vmovdqa 192(%rdi), %xmm14
2972 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm14[4],xmm0[5,6,7]
2973 ; AVX-NEXT: vmovdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2974 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3],xmm0[4,5,6,7]
2975 ; AVX-NEXT: vandps %ymm6, %ymm0, %ymm0
2976 ; AVX-NEXT: vmovaps 224(%rdi), %xmm1
2977 ; AVX-NEXT: vmovaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2978 ; AVX-NEXT: vshufps {{.*#+}} xmm8 = xmm1[0,1,0,1]
2979 ; AVX-NEXT: vandnps %ymm8, %ymm6, %ymm8
2980 ; AVX-NEXT: vorps %ymm0, %ymm8, %ymm0
2981 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2982 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2983 ; AVX-NEXT: vmovdqa %xmm11, %xmm6
2984 ; AVX-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2985 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm10[0,1],xmm11[2,3],xmm10[4,5,6,7]
2986 ; AVX-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,0,4,5,6,7]
2987 ; AVX-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,5,5,5,5]
2988 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
2989 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm11[0,3,2,3]
2990 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
2991 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0,1],xmm0[2,3,4,5,6,7]
2992 ; AVX-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
2993 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm7[0,1,2,3],xmm9[4,5],xmm7[6,7]
2994 ; AVX-NEXT: vmovdqa {{.*#+}} xmm8 = [0,1,2,3,4,5,6,7,8,9,4,5,14,15,8,9]
2995 ; AVX-NEXT: vpshufb %xmm8, %xmm3, %xmm3
2996 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm3[5,6,7]
2997 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
2998 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm4[0,3,2,3]
2999 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
3000 ; AVX-NEXT: vpsrlq $48, %xmm12, %xmm9
3001 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm3[0],xmm9[0],xmm3[1],xmm9[1]
3002 ; AVX-NEXT: vmovdqa %xmm15, %xmm12
3003 ; AVX-NEXT: vmovdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3004 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3005 ; AVX-NEXT: vpblendw {{.*#+}} xmm9 = xmm2[0,1],xmm15[2,3],xmm2[4,5],xmm15[6,7]
3006 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,0,1,0,1,0,1,10,11,4,5,14,15,6,7]
3007 ; AVX-NEXT: vpshufb %xmm1, %xmm9, %xmm9
3008 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm9[3,4,5,6,7]
3009 ; AVX-NEXT: vmovaps {{.*#+}} ymm10 = [65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,65535,65535,65535]
3010 ; AVX-NEXT: vandps %ymm3, %ymm10, %ymm3
3011 ; AVX-NEXT: vpsllq $48, %xmm5, %xmm9
3012 ; AVX-NEXT: vandnps %ymm9, %ymm10, %ymm9
3013 ; AVX-NEXT: vorps %ymm3, %ymm9, %ymm3
3014 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
3015 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3016 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3017 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm13[0,1,2,3],xmm5[4,5],xmm13[6,7]
3018 ; AVX-NEXT: vpshufb %xmm8, %xmm0, %xmm0
3019 ; AVX-NEXT: vmovdqa (%rsp), %xmm15 # 16-byte Reload
3020 ; AVX-NEXT: vpblendw $12, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm3 # 16-byte Folded Reload
3021 ; AVX-NEXT: # xmm3 = xmm15[0,1],mem[2,3],xmm15[4,5,6,7]
3022 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,1,3,0,4,5,6,7]
3023 ; AVX-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,5,5,5,5]
3024 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3025 ; AVX-NEXT: vpshufd {{.*#+}} xmm8 = xmm10[0,3,2,3]
3026 ; AVX-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[1,2,2,3,4,5,6,7]
3027 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm8[0,1],xmm3[2,3,4,5,6,7]
3028 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0,1,2,3,4],xmm0[5,6,7]
3029 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3030 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm14[0,1],xmm13[2,3],xmm14[4,5],xmm13[6,7]
3031 ; AVX-NEXT: vpshufb %xmm1, %xmm3, %xmm1
3032 ; AVX-NEXT: vpshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
3033 ; AVX-NEXT: # xmm3 = mem[0,3,2,3]
3034 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
3035 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3036 ; AVX-NEXT: vpsrlq $48, %xmm8, %xmm8
3037 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm3[0],xmm8[0],xmm3[1],xmm8[1]
3038 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3,4,5,6,7]
3039 ; AVX-NEXT: vmovaps {{.*#+}} ymm8 = [65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,65535,65535,65535]
3040 ; AVX-NEXT: vandps %ymm1, %ymm8, %ymm1
3041 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3042 ; AVX-NEXT: vpsllq $48, %xmm14, %xmm3
3043 ; AVX-NEXT: vandnps %ymm3, %ymm8, %ymm3
3044 ; AVX-NEXT: vorps %ymm3, %ymm1, %ymm1
3045 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
3046 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3047 ; AVX-NEXT: vpblendw $207, {{[-0-9]+}}(%r{{[sb]}}p), %xmm6, %xmm0 # 16-byte Folded Reload
3048 ; AVX-NEXT: # xmm0 = mem[0,1,2,3],xmm6[4,5],mem[6,7]
3049 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [u,u,u,u,8,9,2,3,12,13,12,13,12,13,12,13]
3050 ; AVX-NEXT: vpshufb %xmm1, %xmm0, %xmm0
3051 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm11[3,1,2,3]
3052 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,2,3,4,5,6,7]
3053 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0,1],xmm0[2,3,4,5,6,7]
3054 ; AVX-NEXT: vpblendw $243, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7, %xmm3 # 16-byte Folded Reload
3055 ; AVX-NEXT: # xmm3 = mem[0,1],xmm7[2,3],mem[4,5,6,7]
3056 ; AVX-NEXT: vmovdqa {{.*#+}} xmm7 = [0,1,6,7,4,5,6,7,8,9,6,7,0,1,10,11]
3057 ; AVX-NEXT: vpshufb %xmm7, %xmm3, %xmm3
3058 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm3[5,6,7]
3059 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm2[0,1],xmm12[2,3],xmm2[4,5,6,7]
3060 ; AVX-NEXT: vmovdqa {{.*#+}} xmm8 = [2,3,2,3,2,3,2,3,12,13,6,7,12,13,14,15]
3061 ; AVX-NEXT: vpshufb %xmm8, %xmm3, %xmm3
3062 ; AVX-NEXT: vpshufd {{.*#+}} xmm9 = xmm4[0,1,1,3]
3063 ; AVX-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,7,6,7]
3064 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3065 ; AVX-NEXT: vpunpckhdq {{.*#+}} xmm9 = xmm9[2],xmm11[2],xmm9[3],xmm11[3]
3066 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm9[0,1,2],xmm3[3,4,5],xmm9[6,7]
3067 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
3068 ; AVX-NEXT: vpshufd {{.*#+}} xmm9 = xmm12[0,1,2,0]
3069 ; AVX-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,6,5]
3070 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5],xmm9[6,7]
3071 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
3072 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3073 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3074 ; AVX-NEXT: vmovdqa %xmm5, %xmm9
3075 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm5[0,1],xmm6[2,3],xmm5[4,5,6,7]
3076 ; AVX-NEXT: vpshufb %xmm7, %xmm0, %xmm4
3077 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3078 ; AVX-NEXT: vmovdqa %xmm15, %xmm0
3079 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm15[0,1,2,3],xmm5[4,5],xmm15[6,7]
3080 ; AVX-NEXT: vpshufb %xmm1, %xmm3, %xmm1
3081 ; AVX-NEXT: vmovdqa %xmm10, %xmm2
3082 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm10[3,1,2,3]
3083 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,2,3,4,5,6,7]
3084 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1],xmm1[2,3,4,5,6,7]
3085 ; AVX-NEXT: vpblendw {{.*#+}} xmm10 = xmm1[0,1,2,3,4],xmm4[5,6,7]
3086 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
3087 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm15[0,1],xmm13[2,3],xmm15[4,5,6,7]
3088 ; AVX-NEXT: vpshufb %xmm8, %xmm1, %xmm1
3089 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3090 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm4[0,1,1,3]
3091 ; AVX-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,7,6,7]
3092 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3093 ; AVX-NEXT: vpunpckhdq {{.*#+}} xmm3 = xmm3[2],xmm8[2],xmm3[3],xmm8[3]
3094 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2],xmm1[3,4,5],xmm3[6,7]
3095 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm14[0,1,2,0]
3096 ; AVX-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,6,5]
3097 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm3[6,7]
3098 ; AVX-NEXT: vinsertf128 $1, %xmm10, %ymm1, %ymm1
3099 ; AVX-NEXT: vmovups %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3100 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm5[0,1],xmm0[2,3],xmm5[4,5],xmm0[6,7]
3101 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [u,u,0,1,10,11,4,5,14,15,14,15,14,15,14,15]
3102 ; AVX-NEXT: vpshufb %xmm1, %xmm3, %xmm0
3103 ; AVX-NEXT: vpsrlq $48, %xmm2, %xmm3
3104 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0],xmm0[1,2,3,4,5,6,7]
3105 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm9[0,1,2,3],xmm6[4,5],xmm9[6,7]
3106 ; AVX-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,2,3,4,5,6,7,8,9,8,9,2,3,12,13]
3107 ; AVX-NEXT: vpshufb %xmm5, %xmm3, %xmm3
3108 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm3[5,6,7]
3109 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm8[0,1],xmm4[2,3],xmm8[4,5,6,7]
3110 ; AVX-NEXT: vmovdqa {{.*#+}} xmm6 = [6,7,0,1,10,11,10,11,8,9,10,11,12,13,14,15]
3111 ; AVX-NEXT: vpshufb %xmm6, %xmm3, %xmm3
3112 ; AVX-NEXT: vpblendw {{.*#+}} xmm9 = xmm15[0,1,2,3],xmm13[4,5],xmm15[6,7]
3113 ; AVX-NEXT: vpshuflw {{.*#+}} xmm9 = xmm9[2,2,2,2,4,5,6,7]
3114 ; AVX-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,7,4,6,7]
3115 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm9[3,4,5],xmm3[6,7]
3116 ; AVX-NEXT: vpshufd {{.*#+}} xmm9 = xmm14[0,1,0,3]
3117 ; AVX-NEXT: vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,5,6]
3118 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5],xmm9[6,7]
3119 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
3120 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3121 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
3122 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3123 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm8[0,1,2,3],xmm9[4,5],xmm8[6,7]
3124 ; AVX-NEXT: vpshufb %xmm5, %xmm0, %xmm0
3125 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
3126 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3127 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm5[0,1],xmm4[2,3],xmm5[4,5],xmm4[6,7]
3128 ; AVX-NEXT: vpshufb %xmm1, %xmm3, %xmm1
3129 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
3130 ; AVX-NEXT: vpsrlq $48, %xmm2, %xmm3
3131 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0],xmm1[1,2,3,4,5,6,7]
3132 ; AVX-NEXT: vpblendw {{.*#+}} xmm7 = xmm1[0,1,2,3,4],xmm0[5,6,7]
3133 ; AVX-NEXT: vmovdqa %xmm11, %xmm0
3134 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3135 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm11[0,1],xmm10[2,3],xmm11[4,5,6,7]
3136 ; AVX-NEXT: vpshufb %xmm6, %xmm1, %xmm1
3137 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3138 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3139 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm6[0,1,2,3],xmm11[4,5],xmm6[6,7]
3140 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,2,2,2,4,5,6,7]
3141 ; AVX-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,7,4,6,7]
3142 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm3[3,4,5],xmm1[6,7]
3143 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm12[0,1,0,3]
3144 ; AVX-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,6]
3145 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm3[6,7]
3146 ; AVX-NEXT: vinsertf128 $1, %xmm7, %ymm1, %ymm7
3147 ; AVX-NEXT: vpshufd {{.*#+}} xmm1 = xmm8[3,1,2,3]
3148 ; AVX-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,2,1,4,5,6,7]
3149 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm9[0,2,2,3]
3150 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,1,0,3,4,5,6,7]
3151 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
3152 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm5[0,3,2,3]
3153 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm4[3],xmm3[4,5,6,7]
3154 ; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm2[2,3,2,3]
3155 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm5[0],xmm3[1,2,3,4,5,6,7]
3156 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm3[0,1,2,3],xmm1[4,5,6,7]
3157 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm0[0,1,2,3],xmm10[4,5],xmm0[6,7]
3158 ; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm6[1,1,1,1]
3159 ; AVX-NEXT: vpshufd {{.*#+}} xmm8 = xmm11[0,2,2,3]
3160 ; AVX-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[0,1,0,3,4,5,6,7]
3161 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm5 = xmm8[0],xmm5[0],xmm8[1],xmm5[1]
3162 ; AVX-NEXT: vmovdqa {{.*#+}} xmm8 = [8,9,2,3,12,13,12,13,8,9,12,13,12,13,14,15]
3163 ; AVX-NEXT: vpshufb %xmm8, %xmm3, %xmm3
3164 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm5[3,4,5],xmm3[6,7]
3165 ; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm12[0,1,1,3]
3166 ; AVX-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,4,7]
3167 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5],xmm5[6,7]
3168 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm3, %ymm1
3169 ; AVX-NEXT: vpshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
3170 ; AVX-NEXT: # xmm3 = mem[3,1,2,3]
3171 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,1,2,1,4,5,6,7]
3172 ; AVX-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
3173 ; AVX-NEXT: # xmm5 = mem[0,2,2,3]
3174 ; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,0,3,4,5,6,7]
3175 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm5[0],xmm3[0],xmm5[1],xmm3[1]
3176 ; AVX-NEXT: vpshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
3177 ; AVX-NEXT: # xmm5 = mem[0,3,2,3]
3178 ; AVX-NEXT: vpblendw $8, (%rsp), %xmm5, %xmm5 # 16-byte Folded Reload
3179 ; AVX-NEXT: # xmm5 = xmm5[0,1,2],mem[3],xmm5[4,5,6,7]
3180 ; AVX-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Folded Reload
3181 ; AVX-NEXT: # xmm9 = mem[2,3,2,3]
3182 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm9[0],xmm5[1,2,3,4,5,6,7]
3183 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm5[0,1,2,3],xmm3[4,5,6,7]
3184 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
3185 ; AVX-NEXT: vpblendw $48, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
3186 ; AVX-NEXT: # xmm5 = xmm0[0,1,2,3],mem[4,5],xmm0[6,7]
3187 ; AVX-NEXT: vpshufb %xmm8, %xmm5, %xmm5
3188 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm15[1,1,1,1]
3189 ; AVX-NEXT: vpshufd {{.*#+}} xmm6 = xmm13[0,2,2,3]
3190 ; AVX-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,1,0,3,4,5,6,7]
3191 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1]
3192 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm5[0,1,2],xmm2[3,4,5],xmm5[6,7]
3193 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm14[0,1,1,3]
3194 ; AVX-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,4,7]
3195 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5],xmm4[6,7]
3196 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm2, %ymm2
3197 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
3198 ; AVX-NEXT: vmovaps %ymm3, 32(%rsi)
3199 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
3200 ; AVX-NEXT: vmovaps %ymm3, (%rsi)
3201 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
3202 ; AVX-NEXT: vmovaps %ymm3, 32(%rdx)
3203 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
3204 ; AVX-NEXT: vmovaps %ymm3, (%rdx)
3205 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3206 ; AVX-NEXT: vmovaps %ymm0, 32(%rcx)
3207 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3208 ; AVX-NEXT: vmovaps %ymm0, (%rcx)
3209 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3210 ; AVX-NEXT: vmovaps %ymm0, 32(%r8)
3211 ; AVX-NEXT: vmovaps %ymm7, (%r8)
3212 ; AVX-NEXT: vmovaps %ymm2, 32(%r9)
3213 ; AVX-NEXT: vmovaps %ymm1, (%r9)
3214 ; AVX-NEXT: addq $424, %rsp # imm = 0x1A8
3215 ; AVX-NEXT: vzeroupper
3218 ; AVX2-LABEL: load_i16_stride5_vf32:
3220 ; AVX2-NEXT: subq $264, %rsp # imm = 0x108
3221 ; AVX2-NEXT: vmovdqa (%rdi), %ymm1
3222 ; AVX2-NEXT: vmovdqa 32(%rdi), %ymm2
3223 ; AVX2-NEXT: vmovdqa 64(%rdi), %ymm3
3224 ; AVX2-NEXT: vmovdqa 96(%rdi), %ymm15
3225 ; AVX2-NEXT: vmovdqa 192(%rdi), %ymm4
3226 ; AVX2-NEXT: vmovdqa 160(%rdi), %ymm5
3227 ; AVX2-NEXT: vmovdqa 224(%rdi), %ymm7
3228 ; AVX2-NEXT: vmovdqa 256(%rdi), %ymm6
3229 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm7[0],ymm6[1,2],ymm7[3],ymm6[4],ymm7[5],ymm6[6,7],ymm7[8],ymm6[9,10],ymm7[11],ymm6[12],ymm7[13],ymm6[14,15]
3230 ; AVX2-NEXT: vpermq {{.*#+}} ymm8 = ymm0[2,3,0,1]
3231 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm8[5],ymm0[6],ymm8[7]
3232 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm10 = [0,1,10,11,4,5,14,15,8,9,10,11,4,5,6,7,16,17,26,27,20,21,30,31,24,25,26,27,20,21,22,23]
3233 ; AVX2-NEXT: vpshufb %ymm10, %ymm0, %ymm0
3234 ; AVX2-NEXT: vpblendw {{.*#+}} ymm8 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10,11],ymm4[12],ymm5[13],ymm4[14],ymm5[15]
3235 ; AVX2-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3236 ; AVX2-NEXT: vextracti128 $1, %ymm8, %xmm9
3237 ; AVX2-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0],xmm9[1,2,3],xmm8[4,5],xmm9[6,7]
3238 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm11 = [0,1,10,11,4,5,14,15,8,9,2,3,12,13,6,7]
3239 ; AVX2-NEXT: vpshufb %xmm11, %xmm8, %xmm8
3240 ; AVX2-NEXT: vpmovsxbw {{.*#+}} xmm9 = [65535,65535,65535,65535,65535,65535,65535,0]
3241 ; AVX2-NEXT: vpblendvb %ymm9, %ymm8, %ymm0, %ymm8
3242 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0],ymm15[1,2],ymm3[3],ymm15[4],ymm3[5],ymm15[6,7],ymm3[8],ymm15[9,10],ymm3[11],ymm15[12],ymm3[13],ymm15[14,15]
3243 ; AVX2-NEXT: vpermq {{.*#+}} ymm12 = ymm0[2,3,0,1]
3244 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm12[5],ymm0[6],ymm12[7]
3245 ; AVX2-NEXT: vpshufb %ymm10, %ymm0, %ymm0
3246 ; AVX2-NEXT: vpblendw {{.*#+}} ymm10 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5],ymm2[6],ymm1[7,8],ymm2[9],ymm1[10,11],ymm2[12],ymm1[13],ymm2[14],ymm1[15]
3247 ; AVX2-NEXT: vmovdqu %ymm1, (%rsp) # 32-byte Spill
3248 ; AVX2-NEXT: vextracti128 $1, %ymm10, %xmm12
3249 ; AVX2-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0],xmm12[1,2,3],xmm10[4,5],xmm12[6,7]
3250 ; AVX2-NEXT: vpshufb %xmm11, %xmm10, %xmm10
3251 ; AVX2-NEXT: vpblendvb %ymm9, %ymm10, %ymm0, %ymm12
3252 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm6[0],ymm7[1],ymm6[2,3],ymm7[4],ymm6[5],ymm7[6],ymm6[7,8],ymm7[9],ymm6[10,11],ymm7[12],ymm6[13],ymm7[14],ymm6[15]
3253 ; AVX2-NEXT: vpermq {{.*#+}} ymm10 = ymm0[2,3,0,1]
3254 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm10[5],ymm0[6,7]
3255 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm10 = [2,3,12,13,6,7,0,1,10,11,6,7,8,9,8,9,18,19,28,29,22,23,16,17,26,27,22,23,24,25,24,25]
3256 ; AVX2-NEXT: vpshufb %ymm10, %ymm0, %ymm0
3257 ; AVX2-NEXT: vpblendw {{.*#+}} ymm11 = ymm4[0],ymm5[1],ymm4[2],ymm5[3],ymm4[4,5],ymm5[6],ymm4[7,8],ymm5[9],ymm4[10],ymm5[11],ymm4[12,13],ymm5[14],ymm4[15]
3258 ; AVX2-NEXT: vextracti128 $1, %ymm11, %xmm13
3259 ; AVX2-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1],xmm13[2,3],xmm11[4,5,6],xmm13[7]
3260 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm14 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,10,11]
3261 ; AVX2-NEXT: vpshufb %xmm14, %xmm11, %xmm11
3262 ; AVX2-NEXT: vpblendvb %ymm9, %ymm11, %ymm0, %ymm0
3263 ; AVX2-NEXT: vpblendw {{.*#+}} ymm11 = ymm15[0],ymm3[1],ymm15[2,3],ymm3[4],ymm15[5],ymm3[6],ymm15[7,8],ymm3[9],ymm15[10,11],ymm3[12],ymm15[13],ymm3[14],ymm15[15]
3264 ; AVX2-NEXT: vmovdqa %ymm15, %ymm5
3265 ; AVX2-NEXT: vpermq {{.*#+}} ymm13 = ymm11[2,3,0,1]
3266 ; AVX2-NEXT: vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3,4],ymm13[5],ymm11[6,7]
3267 ; AVX2-NEXT: vpshufb %ymm10, %ymm11, %ymm10
3268 ; AVX2-NEXT: vpblendw {{.*#+}} ymm11 = ymm2[0],ymm1[1],ymm2[2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7,8],ymm1[9],ymm2[10],ymm1[11],ymm2[12,13],ymm1[14],ymm2[15]
3269 ; AVX2-NEXT: vmovdqa %ymm2, %ymm15
3270 ; AVX2-NEXT: vextracti128 $1, %ymm11, %xmm13
3271 ; AVX2-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1],xmm13[2,3],xmm11[4,5,6],xmm13[7]
3272 ; AVX2-NEXT: vmovdqa 304(%rdi), %xmm13
3273 ; AVX2-NEXT: vpshufb %xmm14, %xmm11, %xmm11
3274 ; AVX2-NEXT: vmovdqa 288(%rdi), %xmm14
3275 ; AVX2-NEXT: vpblendvb %ymm9, %ymm11, %ymm10, %ymm9
3276 ; AVX2-NEXT: vpblendd {{.*#+}} xmm10 = xmm14[0],xmm13[1],xmm14[2,3]
3277 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [6,7,2,3,4,5,6,7,6,7,2,3,12,13,6,7]
3278 ; AVX2-NEXT: vpshufb %xmm1, %xmm10, %xmm10
3279 ; AVX2-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
3280 ; AVX2-NEXT: vpblendw {{.*#+}} ymm10 = ymm8[0,1,2,3,4],ymm10[5,6,7],ymm8[8,9,10,11,12],ymm10[13,14,15]
3281 ; AVX2-NEXT: vpblendd {{.*#+}} ymm2 = ymm8[0,1,2,3],ymm10[4,5,6,7]
3282 ; AVX2-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3283 ; AVX2-NEXT: vmovdqa 144(%rdi), %xmm11
3284 ; AVX2-NEXT: vmovdqa 128(%rdi), %xmm10
3285 ; AVX2-NEXT: vpblendd {{.*#+}} xmm8 = xmm10[0],xmm11[1],xmm10[2,3]
3286 ; AVX2-NEXT: vpshufb %xmm1, %xmm8, %xmm1
3287 ; AVX2-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
3288 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm12[0,1,2,3,4],ymm1[5,6,7],ymm12[8,9,10,11,12],ymm1[13,14,15]
3289 ; AVX2-NEXT: vpblendd {{.*#+}} ymm1 = ymm12[0,1,2,3],ymm1[4,5,6,7]
3290 ; AVX2-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3291 ; AVX2-NEXT: vpblendd {{.*#+}} xmm1 = xmm14[0,1],xmm13[2],xmm14[3]
3292 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm8 = [0,1,2,3,4,5,6,7,8,9,4,5,14,15,8,9]
3293 ; AVX2-NEXT: vpshufb %xmm8, %xmm1, %xmm1
3294 ; AVX2-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
3295 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5,6,7],ymm0[8,9,10,11,12],ymm1[13,14,15]
3296 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3297 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3298 ; AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm10[0,1],xmm11[2],xmm10[3]
3299 ; AVX2-NEXT: vpshufb %xmm8, %xmm0, %xmm0
3300 ; AVX2-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3301 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0,1,2,3,4],ymm0[5,6,7],ymm9[8,9,10,11,12],ymm0[13,14,15]
3302 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
3303 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3304 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm7[0],ymm6[1],ymm7[2,3],ymm6[4],ymm7[5],ymm6[6],ymm7[7,8],ymm6[9],ymm7[10,11],ymm6[12],ymm7[13],ymm6[14],ymm7[15]
3305 ; AVX2-NEXT: vmovdqa %ymm6, %ymm9
3306 ; AVX2-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3307 ; AVX2-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3308 ; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
3309 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6],ymm0[7]
3310 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3311 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm4[0,1],ymm2[2],ymm4[3],ymm2[4],ymm4[5,6],ymm2[7],ymm4[8,9],ymm2[10],ymm4[11],ymm2[12],ymm4[13,14],ymm2[15]
3312 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm8
3313 ; AVX2-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm8[3,4],xmm1[5,6,7]
3314 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm8 = [u,u,u,u,u,u,u,u,u,u,u,u,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
3315 ; AVX2-NEXT: vpshufb %ymm8, %ymm0, %ymm0
3316 ; AVX2-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
3317 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3,4,5,6,7]
3318 ; AVX2-NEXT: vpblendd {{.*#+}} xmm1 = xmm13[0],xmm14[1],xmm13[2,3]
3319 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm12 = [0,1,6,7,4,5,6,7,8,9,6,7,0,1,10,11]
3320 ; AVX2-NEXT: vpshufb %xmm12, %xmm1, %xmm1
3321 ; AVX2-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
3322 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5,6,7],ymm0[8,9,10,11,12],ymm1[13,14,15]
3323 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3324 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3325 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0],ymm5[1],ymm3[2,3],ymm5[4],ymm3[5],ymm5[6],ymm3[7,8],ymm5[9],ymm3[10,11],ymm5[12],ymm3[13],ymm5[14],ymm3[15]
3326 ; AVX2-NEXT: vmovdqa %ymm5, %ymm6
3327 ; AVX2-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3328 ; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
3329 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6],ymm0[7]
3330 ; AVX2-NEXT: vpshufb %ymm8, %ymm0, %ymm0
3331 ; AVX2-NEXT: vmovdqu (%rsp), %ymm5 # 32-byte Reload
3332 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm15[0,1],ymm5[2],ymm15[3],ymm5[4],ymm15[5,6],ymm5[7],ymm15[8,9],ymm5[10],ymm15[11],ymm5[12],ymm15[13,14],ymm5[15]
3333 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm8
3334 ; AVX2-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm8[3,4],xmm1[5,6,7]
3335 ; AVX2-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
3336 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3,4,5,6,7]
3337 ; AVX2-NEXT: vpblendd {{.*#+}} xmm1 = xmm11[0],xmm10[1],xmm11[2,3]
3338 ; AVX2-NEXT: vpshufb %xmm12, %xmm1, %xmm1
3339 ; AVX2-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
3340 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5,6,7],ymm0[8,9,10,11,12],ymm1[13,14,15]
3341 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3342 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3343 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0],ymm7[1],ymm9[2],ymm7[3],ymm9[4,5],ymm7[6],ymm9[7,8],ymm7[9],ymm9[10],ymm7[11],ymm9[12,13],ymm7[14],ymm9[15]
3344 ; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
3345 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
3346 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm4[1,2],ymm2[3],ymm4[4],ymm2[5],ymm4[6,7],ymm2[8],ymm4[9,10],ymm2[11],ymm4[12],ymm2[13],ymm4[14,15]
3347 ; AVX2-NEXT: vmovdqa %ymm4, %ymm7
3348 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm8
3349 ; AVX2-NEXT: vpblendd {{.*#+}} xmm1 = xmm8[0],xmm1[1],xmm8[2],xmm1[3]
3350 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm12 = [u,u,u,u,u,u,u,u,u,u,u,u,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
3351 ; AVX2-NEXT: vpshufb %ymm12, %ymm0, %ymm0
3352 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm8 = [6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
3353 ; AVX2-NEXT: vpshufb %xmm8, %xmm1, %xmm1
3354 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3,4,5,6,7]
3355 ; AVX2-NEXT: vpblendd {{.*#+}} xmm1 = xmm13[0,1],xmm14[2],xmm13[3]
3356 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,2,3,4,5,6,7,8,9,8,9,2,3,12,13]
3357 ; AVX2-NEXT: vpshufb %xmm4, %xmm1, %xmm1
3358 ; AVX2-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
3359 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5,6,7],ymm0[8,9,10,11,12],ymm1[13,14,15]
3360 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3361 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3362 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm6[0],ymm3[1],ymm6[2],ymm3[3],ymm6[4,5],ymm3[6],ymm6[7,8],ymm3[9],ymm6[10],ymm3[11],ymm6[12,13],ymm3[14],ymm6[15]
3363 ; AVX2-NEXT: vpermq {{.*#+}} ymm9 = ymm0[2,3,0,1]
3364 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm9[4],ymm0[5],ymm9[6],ymm0[7]
3365 ; AVX2-NEXT: vpshufb %ymm12, %ymm0, %ymm0
3366 ; AVX2-NEXT: vpblendw {{.*#+}} ymm9 = ymm5[0],ymm15[1,2],ymm5[3],ymm15[4],ymm5[5],ymm15[6,7],ymm5[8],ymm15[9,10],ymm5[11],ymm15[12],ymm5[13],ymm15[14,15]
3367 ; AVX2-NEXT: vmovdqa %ymm5, %ymm1
3368 ; AVX2-NEXT: vextracti128 $1, %ymm9, %xmm12
3369 ; AVX2-NEXT: vpblendd {{.*#+}} xmm9 = xmm12[0],xmm9[1],xmm12[2],xmm9[3]
3370 ; AVX2-NEXT: vpshufb %xmm8, %xmm9, %xmm8
3371 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm8[0,1,2],ymm0[3,4,5,6,7]
3372 ; AVX2-NEXT: vpblendd {{.*#+}} xmm8 = xmm11[0,1],xmm10[2],xmm11[3]
3373 ; AVX2-NEXT: vpshufb %xmm4, %xmm8, %xmm2
3374 ; AVX2-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3375 ; AVX2-NEXT: vpblendw {{.*#+}} ymm2 = ymm0[0,1,2,3,4],ymm2[5,6,7],ymm0[8,9,10,11,12],ymm2[13,14,15]
3376 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4,5,6,7]
3377 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3378 ; AVX2-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
3379 ; AVX2-NEXT: # ymm2 = ymm2[0,1],mem[2],ymm2[3],mem[4],ymm2[5,6],mem[7],ymm2[8,9],mem[10],ymm2[11],mem[12],ymm2[13,14],mem[15]
3380 ; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm2[2,3,0,1]
3381 ; AVX2-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm6[4],ymm2[5,6],ymm6[7]
3382 ; AVX2-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm4 # 32-byte Folded Reload
3383 ; AVX2-NEXT: # ymm4 = ymm7[0],mem[1],ymm7[2,3],mem[4],ymm7[5],mem[6],ymm7[7,8],mem[9],ymm7[10,11],mem[12],ymm7[13],mem[14],ymm7[15]
3384 ; AVX2-NEXT: vextracti128 $1, %ymm4, %xmm5
3385 ; AVX2-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3,4],xmm5[5,6,7]
3386 ; AVX2-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [24,25,18,19,28,29,22,23,0,0,0,0,4,5,14,15,24,25,18,19,28,29,22,23,0,0,0,0,4,5,14,15]
3387 ; AVX2-NEXT: # ymm5 = mem[0,1,0,1]
3388 ; AVX2-NEXT: vpshufb %ymm5, %ymm2, %ymm2
3389 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm6 = [8,9,2,3,12,13,6,7,0,1,10,11,0,1,6,7]
3390 ; AVX2-NEXT: vpshufb %xmm6, %xmm4, %xmm4
3391 ; AVX2-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3,4,5,6,7]
3392 ; AVX2-NEXT: vpshufd {{.*#+}} xmm4 = xmm13[3,1,2,3]
3393 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,1,2,1,4,5,6,7]
3394 ; AVX2-NEXT: vpshufd {{.*#+}} xmm7 = xmm14[0,2,2,3]
3395 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[0,1,0,3,4,5,6,7]
3396 ; AVX2-NEXT: vpunpckldq {{.*#+}} xmm4 = xmm7[0],xmm4[0],xmm7[1],xmm4[1]
3397 ; AVX2-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
3398 ; AVX2-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm4[6,7]
3399 ; AVX2-NEXT: vpblendw $107, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
3400 ; AVX2-NEXT: # ymm3 = mem[0,1],ymm3[2],mem[3],ymm3[4],mem[5,6],ymm3[7],mem[8,9],ymm3[10],mem[11],ymm3[12],mem[13,14],ymm3[15]
3401 ; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm3[2,3,0,1]
3402 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm4[4],ymm3[5,6],ymm4[7]
3403 ; AVX2-NEXT: vpshufb %ymm5, %ymm3, %ymm3
3404 ; AVX2-NEXT: vpblendw {{.*#+}} ymm4 = ymm15[0],ymm1[1],ymm15[2,3],ymm1[4],ymm15[5],ymm1[6],ymm15[7,8],ymm1[9],ymm15[10,11],ymm1[12],ymm15[13],ymm1[14],ymm15[15]
3405 ; AVX2-NEXT: vextracti128 $1, %ymm4, %xmm5
3406 ; AVX2-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3,4],xmm5[5,6,7]
3407 ; AVX2-NEXT: vpshufb %xmm6, %xmm4, %xmm4
3408 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2],ymm3[3,4,5,6,7]
3409 ; AVX2-NEXT: vpshufd {{.*#+}} xmm4 = xmm11[3,1,2,3]
3410 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,1,2,1,4,5,6,7]
3411 ; AVX2-NEXT: vpshufd {{.*#+}} xmm5 = xmm10[0,2,2,3]
3412 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,0,3,4,5,6,7]
3413 ; AVX2-NEXT: vpunpckldq {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
3414 ; AVX2-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
3415 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm4[6,7]
3416 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
3417 ; AVX2-NEXT: vmovaps %ymm4, 32(%rsi)
3418 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3419 ; AVX2-NEXT: vmovaps %ymm1, (%rsi)
3420 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3421 ; AVX2-NEXT: vmovaps %ymm1, 32(%rdx)
3422 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3423 ; AVX2-NEXT: vmovaps %ymm1, (%rdx)
3424 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3425 ; AVX2-NEXT: vmovaps %ymm1, 32(%rcx)
3426 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3427 ; AVX2-NEXT: vmovaps %ymm1, (%rcx)
3428 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3429 ; AVX2-NEXT: vmovaps %ymm1, 32(%r8)
3430 ; AVX2-NEXT: vmovdqa %ymm0, (%r8)
3431 ; AVX2-NEXT: vmovdqa %ymm2, 32(%r9)
3432 ; AVX2-NEXT: vmovdqa %ymm3, (%r9)
3433 ; AVX2-NEXT: addq $264, %rsp # imm = 0x108
3434 ; AVX2-NEXT: vzeroupper
3437 ; AVX2-FP-LABEL: load_i16_stride5_vf32:
3439 ; AVX2-FP-NEXT: subq $264, %rsp # imm = 0x108
3440 ; AVX2-FP-NEXT: vmovdqa (%rdi), %ymm13
3441 ; AVX2-FP-NEXT: vmovdqa 32(%rdi), %ymm6
3442 ; AVX2-FP-NEXT: vmovdqa 64(%rdi), %ymm7
3443 ; AVX2-FP-NEXT: vmovdqa 96(%rdi), %ymm10
3444 ; AVX2-FP-NEXT: vmovdqa 192(%rdi), %ymm14
3445 ; AVX2-FP-NEXT: vmovdqa 160(%rdi), %ymm3
3446 ; AVX2-FP-NEXT: vmovdqa 224(%rdi), %ymm4
3447 ; AVX2-FP-NEXT: vmovdqa 256(%rdi), %ymm5
3448 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm0 = ymm4[0],ymm5[1,2],ymm4[3],ymm5[4],ymm4[5],ymm5[6,7],ymm4[8],ymm5[9,10],ymm4[11],ymm5[12],ymm4[13],ymm5[14,15]
3449 ; AVX2-FP-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3450 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
3451 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6],ymm1[7]
3452 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,10,11,4,5,14,15,8,9,10,11,4,5,6,7,16,17,26,27,20,21,30,31,24,25,26,27,20,21,22,23]
3453 ; AVX2-FP-NEXT: vpshufb %ymm1, %ymm0, %ymm0
3454 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm3[0],ymm14[1],ymm3[2,3],ymm14[4],ymm3[5],ymm14[6],ymm3[7,8],ymm14[9],ymm3[10,11],ymm14[12],ymm3[13],ymm14[14],ymm3[15]
3455 ; AVX2-FP-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3456 ; AVX2-FP-NEXT: vextracti128 $1, %ymm8, %xmm9
3457 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0],xmm9[1,2,3],xmm8[4,5],xmm9[6,7]
3458 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm9 = [0,1,10,11,4,5,14,15,8,9,2,3,12,13,6,7]
3459 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm8, %xmm8
3460 ; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} xmm11 = [65535,65535,65535,65535,65535,65535,65535,0]
3461 ; AVX2-FP-NEXT: vpblendvb %ymm11, %ymm8, %ymm0, %ymm0
3462 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm7[0],ymm10[1,2],ymm7[3],ymm10[4],ymm7[5],ymm10[6,7],ymm7[8],ymm10[9,10],ymm7[11],ymm10[12],ymm7[13],ymm10[14,15]
3463 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm12 = ymm8[2,3,0,1]
3464 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4],ymm12[5],ymm8[6],ymm12[7]
3465 ; AVX2-FP-NEXT: vpshufb %ymm1, %ymm8, %ymm1
3466 ; AVX2-FP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3467 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm13[0],ymm6[1],ymm13[2,3],ymm6[4],ymm13[5],ymm6[6],ymm13[7,8],ymm6[9],ymm13[10,11],ymm6[12],ymm13[13],ymm6[14],ymm13[15]
3468 ; AVX2-FP-NEXT: vextracti128 $1, %ymm8, %xmm12
3469 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0],xmm12[1,2,3],xmm8[4,5],xmm12[6,7]
3470 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm8, %xmm8
3471 ; AVX2-FP-NEXT: vpblendvb %ymm11, %ymm8, %ymm1, %ymm12
3472 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm1 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10,11],ymm4[12],ymm5[13],ymm4[14],ymm5[15]
3473 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm8 = ymm1[2,3,0,1]
3474 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm8[5],ymm1[6,7]
3475 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm8 = [2,3,12,13,6,7,0,1,10,11,6,7,8,9,8,9,18,19,28,29,22,23,16,17,26,27,22,23,24,25,24,25]
3476 ; AVX2-FP-NEXT: vpshufb %ymm8, %ymm1, %ymm1
3477 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm14[0],ymm3[1],ymm14[2],ymm3[3],ymm14[4,5],ymm3[6],ymm14[7,8],ymm3[9],ymm14[10],ymm3[11],ymm14[12,13],ymm3[14],ymm14[15]
3478 ; AVX2-FP-NEXT: vmovdqa %ymm14, %ymm5
3479 ; AVX2-FP-NEXT: vextracti128 $1, %ymm9, %xmm14
3480 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1],xmm14[2,3],xmm9[4,5,6],xmm14[7]
3481 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm14 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,10,11]
3482 ; AVX2-FP-NEXT: vpshufb %xmm14, %xmm9, %xmm9
3483 ; AVX2-FP-NEXT: vpblendvb %ymm11, %ymm9, %ymm1, %ymm1
3484 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm10[0],ymm7[1],ymm10[2,3],ymm7[4],ymm10[5],ymm7[6],ymm10[7,8],ymm7[9],ymm10[10,11],ymm7[12],ymm10[13],ymm7[14],ymm10[15]
3485 ; AVX2-FP-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3486 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm15 = ymm9[2,3,0,1]
3487 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3,4],ymm15[5],ymm9[6,7]
3488 ; AVX2-FP-NEXT: vpshufb %ymm8, %ymm9, %ymm15
3489 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm6[0],ymm13[1],ymm6[2],ymm13[3],ymm6[4,5],ymm13[6],ymm6[7,8],ymm13[9],ymm6[10],ymm13[11],ymm6[12,13],ymm13[14],ymm6[15]
3490 ; AVX2-FP-NEXT: vextracti128 $1, %ymm8, %xmm9
3491 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm9 = xmm8[0,1],xmm9[2,3],xmm8[4,5,6],xmm9[7]
3492 ; AVX2-FP-NEXT: vmovdqa 304(%rdi), %xmm8
3493 ; AVX2-FP-NEXT: vpshufb %xmm14, %xmm9, %xmm14
3494 ; AVX2-FP-NEXT: vmovdqa 288(%rdi), %xmm9
3495 ; AVX2-FP-NEXT: vpblendvb %ymm11, %ymm14, %ymm15, %ymm11
3496 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm14 = xmm9[0],xmm8[1],xmm9[2,3]
3497 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm2 = [6,7,2,3,4,5,6,7,6,7,2,3,12,13,6,7]
3498 ; AVX2-FP-NEXT: vpshufb %xmm2, %xmm14, %xmm14
3499 ; AVX2-FP-NEXT: vinserti128 $1, %xmm14, %ymm0, %ymm14
3500 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm14 = ymm0[0,1,2,3,4],ymm14[5,6,7],ymm0[8,9,10,11,12],ymm14[13,14,15]
3501 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm14[4,5,6,7]
3502 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3503 ; AVX2-FP-NEXT: vmovdqa 144(%rdi), %xmm6
3504 ; AVX2-FP-NEXT: vmovdqa 128(%rdi), %xmm15
3505 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm14 = xmm15[0],xmm6[1],xmm15[2,3]
3506 ; AVX2-FP-NEXT: vpshufb %xmm2, %xmm14, %xmm2
3507 ; AVX2-FP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3508 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm12[0,1,2,3,4],ymm2[5,6,7],ymm12[8,9,10,11,12],ymm2[13,14,15]
3509 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm12[0,1,2,3],ymm2[4,5,6,7]
3510 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3511 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm2 = xmm9[0,1],xmm8[2],xmm9[3]
3512 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm12 = [0,1,2,3,4,5,6,7,8,9,4,5,14,15,8,9]
3513 ; AVX2-FP-NEXT: vpshufb %xmm12, %xmm2, %xmm2
3514 ; AVX2-FP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3515 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm1[0,1,2,3,4],ymm2[5,6,7],ymm1[8,9,10,11,12],ymm2[13,14,15]
3516 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3517 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3518 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm1 = xmm15[0,1],xmm6[2],xmm15[3]
3519 ; AVX2-FP-NEXT: vpshufb %xmm12, %xmm1, %xmm1
3520 ; AVX2-FP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
3521 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm1 = ymm11[0,1,2,3,4],ymm1[5,6,7],ymm11[8,9,10,11,12],ymm1[13,14,15]
3522 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm1[4,5,6,7]
3523 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3524 ; AVX2-FP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3525 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
3526 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm1 = ymm4[0],ymm3[1],ymm4[2,3],ymm3[4],ymm4[5],ymm3[6],ymm4[7,8],ymm3[9],ymm4[10,11],ymm3[12],ymm4[13],ymm3[14],ymm4[15]
3527 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm1[2,3,0,1]
3528 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6],ymm1[7]
3529 ; AVX2-FP-NEXT: vmovdqu %ymm5, (%rsp) # 32-byte Spill
3530 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3531 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm0[2],ymm5[3],ymm0[4],ymm5[5,6],ymm0[7],ymm5[8,9],ymm0[10],ymm5[11],ymm0[12],ymm5[13,14],ymm0[15]
3532 ; AVX2-FP-NEXT: vextracti128 $1, %ymm2, %xmm11
3533 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm11[3,4],xmm2[5,6,7]
3534 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm11 = [u,u,u,u,u,u,u,u,u,u,u,u,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
3535 ; AVX2-FP-NEXT: vpshufb %ymm11, %ymm1, %ymm1
3536 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm12 = [4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
3537 ; AVX2-FP-NEXT: vpshufb %xmm12, %xmm2, %xmm2
3538 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3,4,5,6,7]
3539 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm2 = xmm8[0],xmm9[1],xmm8[2,3]
3540 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm14 = [0,1,6,7,4,5,6,7,8,9,6,7,0,1,10,11]
3541 ; AVX2-FP-NEXT: vpshufb %xmm14, %xmm2, %xmm2
3542 ; AVX2-FP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3543 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm1[0,1,2,3,4],ymm2[5,6,7],ymm1[8,9,10,11,12],ymm2[13,14,15]
3544 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3545 ; AVX2-FP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3546 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm1 = ymm7[0],ymm10[1],ymm7[2,3],ymm10[4],ymm7[5],ymm10[6],ymm7[7,8],ymm10[9],ymm7[10,11],ymm10[12],ymm7[13],ymm10[14],ymm7[15]
3547 ; AVX2-FP-NEXT: vmovdqa %ymm7, %ymm10
3548 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm1[2,3,0,1]
3549 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4,5],ymm2[6],ymm1[7]
3550 ; AVX2-FP-NEXT: vpshufb %ymm11, %ymm1, %ymm1
3551 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
3552 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm7[0,1],ymm13[2],ymm7[3],ymm13[4],ymm7[5,6],ymm13[7],ymm7[8,9],ymm13[10],ymm7[11],ymm13[12],ymm7[13,14],ymm13[15]
3553 ; AVX2-FP-NEXT: vextracti128 $1, %ymm2, %xmm11
3554 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm11[3,4],xmm2[5,6,7]
3555 ; AVX2-FP-NEXT: vpshufb %xmm12, %xmm2, %xmm2
3556 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3,4,5,6,7]
3557 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm2 = xmm6[0],xmm15[1],xmm6[2,3]
3558 ; AVX2-FP-NEXT: vpshufb %xmm14, %xmm2, %xmm2
3559 ; AVX2-FP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3560 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm1[0,1,2,3,4],ymm2[5,6,7],ymm1[8,9,10,11,12],ymm2[13,14,15]
3561 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3562 ; AVX2-FP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3563 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0],ymm4[1],ymm3[2],ymm4[3],ymm3[4,5],ymm4[6],ymm3[7,8],ymm4[9],ymm3[10],ymm4[11],ymm3[12,13],ymm4[14],ymm3[15]
3564 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm2 = ymm1[2,3,0,1]
3565 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
3566 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm0[0],ymm5[1,2],ymm0[3],ymm5[4],ymm0[5],ymm5[6,7],ymm0[8],ymm5[9,10],ymm0[11],ymm5[12],ymm0[13],ymm5[14,15]
3567 ; AVX2-FP-NEXT: vextracti128 $1, %ymm2, %xmm11
3568 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm2 = xmm11[0],xmm2[1],xmm11[2],xmm2[3]
3569 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm11 = [u,u,u,u,u,u,u,u,u,u,u,u,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
3570 ; AVX2-FP-NEXT: vpshufb %ymm11, %ymm1, %ymm1
3571 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm12 = [6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
3572 ; AVX2-FP-NEXT: vpshufb %xmm12, %xmm2, %xmm2
3573 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3,4,5,6,7]
3574 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm2 = xmm8[0,1],xmm9[2],xmm8[3]
3575 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,2,3,4,5,6,7,8,9,8,9,2,3,12,13]
3576 ; AVX2-FP-NEXT: vpshufb %xmm3, %xmm2, %xmm2
3577 ; AVX2-FP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
3578 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm1[0,1,2,3,4],ymm2[5,6,7],ymm1[8,9,10,11,12],ymm2[13,14,15]
3579 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3580 ; AVX2-FP-NEXT: vmovdqa %ymm10, %ymm5
3581 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
3582 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm1 = ymm10[0],ymm5[1],ymm10[2],ymm5[3],ymm10[4,5],ymm5[6],ymm10[7,8],ymm5[9],ymm10[10],ymm5[11],ymm10[12,13],ymm5[14],ymm10[15]
3583 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm14 = ymm1[2,3,0,1]
3584 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm14[4],ymm1[5],ymm14[6],ymm1[7]
3585 ; AVX2-FP-NEXT: vpshufb %ymm11, %ymm1, %ymm1
3586 ; AVX2-FP-NEXT: vmovdqa %ymm7, %ymm4
3587 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm11 = ymm13[0],ymm7[1,2],ymm13[3],ymm7[4],ymm13[5],ymm7[6,7],ymm13[8],ymm7[9,10],ymm13[11],ymm7[12],ymm13[13],ymm7[14,15]
3588 ; AVX2-FP-NEXT: vextracti128 $1, %ymm11, %xmm14
3589 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm11 = xmm14[0],xmm11[1],xmm14[2],xmm11[3]
3590 ; AVX2-FP-NEXT: vpshufb %xmm12, %xmm11, %xmm11
3591 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm11[0,1,2],ymm1[3,4,5,6,7]
3592 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm11 = xmm6[0,1],xmm15[2],xmm6[3]
3593 ; AVX2-FP-NEXT: vpshufb %xmm3, %xmm11, %xmm3
3594 ; AVX2-FP-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
3595 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm3 = ymm1[0,1,2,3,4],ymm3[5,6,7],ymm1[8,9,10,11,12],ymm3[13,14,15]
3596 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm3[4,5,6,7]
3597 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm11 = [12,13,14,15,4,5,14,15,8,9,10,11,12,13,14,15]
3598 ; AVX2-FP-NEXT: vpshufb %xmm11, %xmm6, %xmm3
3599 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,0,1,10,11,8,9,10,11,12,13,14,15]
3600 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm15, %xmm12
3601 ; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm12[0],xmm3[0],xmm12[1],xmm3[1]
3602 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm6 = ymm10[0,1],ymm5[2],ymm10[3],ymm5[4],ymm10[5,6],ymm5[7],ymm10[8,9],ymm5[10],ymm10[11],ymm5[12],ymm10[13,14],ymm5[15]
3603 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm7 = ymm6[2,3,0,1]
3604 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm7[4],ymm6[5,6],ymm7[7]
3605 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm13[1],ymm4[2,3],ymm13[4],ymm4[5],ymm13[6],ymm4[7,8],ymm13[9],ymm4[10,11],ymm13[12],ymm4[13],ymm13[14],ymm4[15]
3606 ; AVX2-FP-NEXT: vextracti128 $1, %ymm4, %xmm5
3607 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3,4],xmm5[5,6,7]
3608 ; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [24,25,18,19,28,29,22,23,0,0,0,0,4,5,14,15,24,25,18,19,28,29,22,23,0,0,0,0,4,5,14,15]
3609 ; AVX2-FP-NEXT: # ymm5 = mem[0,1,0,1]
3610 ; AVX2-FP-NEXT: vpshufb %ymm5, %ymm6, %ymm6
3611 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm7 = [8,9,2,3,12,13,6,7,0,1,10,11,0,1,6,7]
3612 ; AVX2-FP-NEXT: vpshufb %xmm7, %xmm4, %xmm4
3613 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2],ymm6[3,4,5,6,7]
3614 ; AVX2-FP-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
3615 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3,4,5],ymm3[6,7]
3616 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
3617 ; AVX2-FP-NEXT: vpblendw $107, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
3618 ; AVX2-FP-NEXT: # ymm4 = mem[0,1],ymm4[2],mem[3],ymm4[4],mem[5,6],ymm4[7],mem[8,9],ymm4[10],mem[11],ymm4[12],mem[13,14],ymm4[15]
3619 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm6 = ymm4[2,3,0,1]
3620 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4],ymm4[5,6],ymm6[7]
3621 ; AVX2-FP-NEXT: vpshufb %ymm5, %ymm4, %ymm4
3622 ; AVX2-FP-NEXT: vmovdqu (%rsp), %ymm5 # 32-byte Reload
3623 ; AVX2-FP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm5 # 32-byte Folded Reload
3624 ; AVX2-FP-NEXT: # ymm5 = ymm5[0],mem[1],ymm5[2,3],mem[4],ymm5[5],mem[6],ymm5[7,8],mem[9],ymm5[10,11],mem[12],ymm5[13],mem[14],ymm5[15]
3625 ; AVX2-FP-NEXT: vextracti128 $1, %ymm5, %xmm6
3626 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1,2],xmm5[3,4],xmm6[5,6,7]
3627 ; AVX2-FP-NEXT: vpshufb %xmm7, %xmm5, %xmm5
3628 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3,4,5,6,7]
3629 ; AVX2-FP-NEXT: vpshufb %xmm11, %xmm8, %xmm5
3630 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm9, %xmm0
3631 ; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1]
3632 ; AVX2-FP-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3633 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3,4,5],ymm0[6,7]
3634 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
3635 ; AVX2-FP-NEXT: vmovaps %ymm4, 32(%rsi)
3636 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
3637 ; AVX2-FP-NEXT: vmovaps %ymm4, (%rsi)
3638 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
3639 ; AVX2-FP-NEXT: vmovaps %ymm4, 32(%rdx)
3640 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
3641 ; AVX2-FP-NEXT: vmovaps %ymm4, (%rdx)
3642 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
3643 ; AVX2-FP-NEXT: vmovaps %ymm4, 32(%rcx)
3644 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
3645 ; AVX2-FP-NEXT: vmovaps %ymm4, (%rcx)
3646 ; AVX2-FP-NEXT: vmovdqa %ymm2, 32(%r8)
3647 ; AVX2-FP-NEXT: vmovdqa %ymm1, (%r8)
3648 ; AVX2-FP-NEXT: vmovdqa %ymm0, 32(%r9)
3649 ; AVX2-FP-NEXT: vmovdqa %ymm3, (%r9)
3650 ; AVX2-FP-NEXT: addq $264, %rsp # imm = 0x108
3651 ; AVX2-FP-NEXT: vzeroupper
3652 ; AVX2-FP-NEXT: retq
3654 ; AVX2-FCP-LABEL: load_i16_stride5_vf32:
3655 ; AVX2-FCP: # %bb.0:
3656 ; AVX2-FCP-NEXT: subq $296, %rsp # imm = 0x128
3657 ; AVX2-FCP-NEXT: vmovdqa 224(%rdi), %ymm15
3658 ; AVX2-FCP-NEXT: vmovdqa 256(%rdi), %ymm1
3659 ; AVX2-FCP-NEXT: vmovdqa 192(%rdi), %ymm3
3660 ; AVX2-FCP-NEXT: vmovdqa 160(%rdi), %ymm14
3661 ; AVX2-FCP-NEXT: vmovdqa (%rdi), %ymm4
3662 ; AVX2-FCP-NEXT: vmovdqa 32(%rdi), %ymm5
3663 ; AVX2-FCP-NEXT: vmovdqa 64(%rdi), %ymm6
3664 ; AVX2-FCP-NEXT: vmovdqa 96(%rdi), %ymm7
3665 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm6[0],ymm7[1,2],ymm6[3],ymm7[4],ymm6[5],ymm7[6,7],ymm6[8],ymm7[9,10],ymm6[11],ymm7[12],ymm6[13],ymm7[14,15]
3666 ; AVX2-FCP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3667 ; AVX2-FCP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3668 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm10 = [1,3,0,2,4,6,1,3]
3669 ; AVX2-FCP-NEXT: vpermd %ymm8, %ymm10, %ymm8
3670 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm11 = [0,1,6,7,8,9,14,15,4,5,14,15,4,5,2,3,16,17,22,23,24,25,30,31,20,21,30,31,20,21,18,19]
3671 ; AVX2-FCP-NEXT: vpshufb %ymm11, %ymm8, %ymm8
3672 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm4[0],ymm5[1],ymm4[2,3],ymm5[4],ymm4[5],ymm5[6],ymm4[7,8],ymm5[9],ymm4[10,11],ymm5[12],ymm4[13],ymm5[14],ymm4[15]
3673 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm9, %xmm12
3674 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0],xmm12[1,2,3],xmm9[4,5],xmm12[6,7]
3675 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm12 = [0,1,10,11,4,5,14,15,8,9,2,3,12,13,6,7]
3676 ; AVX2-FCP-NEXT: vpshufb %xmm12, %xmm9, %xmm13
3677 ; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} xmm9 = [65535,65535,65535,65535,65535,65535,65535,0]
3678 ; AVX2-FCP-NEXT: vpblendvb %ymm9, %ymm13, %ymm8, %ymm8
3679 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm13 = ymm14[0],ymm3[1],ymm14[2,3],ymm3[4],ymm14[5],ymm3[6],ymm14[7,8],ymm3[9],ymm14[10,11],ymm3[12],ymm14[13],ymm3[14],ymm14[15]
3680 ; AVX2-FCP-NEXT: vmovdqa %ymm14, %ymm0
3681 ; AVX2-FCP-NEXT: vmovdqu %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3682 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm13, %xmm14
3683 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0],xmm14[1,2,3],xmm13[4,5],xmm14[6,7]
3684 ; AVX2-FCP-NEXT: vpshufb %xmm12, %xmm13, %xmm12
3685 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm13 = ymm15[0],ymm1[1,2],ymm15[3],ymm1[4],ymm15[5],ymm1[6,7],ymm15[8],ymm1[9,10],ymm15[11],ymm1[12],ymm15[13],ymm1[14,15]
3686 ; AVX2-FCP-NEXT: vmovdqu %ymm1, (%rsp) # 32-byte Spill
3687 ; AVX2-FCP-NEXT: vmovdqa %ymm15, %ymm2
3688 ; AVX2-FCP-NEXT: vpermd %ymm13, %ymm10, %ymm10
3689 ; AVX2-FCP-NEXT: vpshufb %ymm11, %ymm10, %ymm10
3690 ; AVX2-FCP-NEXT: vpblendvb %ymm9, %ymm12, %ymm10, %ymm11
3691 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm10 = ymm7[0],ymm6[1],ymm7[2,3],ymm6[4],ymm7[5],ymm6[6],ymm7[7,8],ymm6[9],ymm7[10,11],ymm6[12],ymm7[13],ymm6[14],ymm7[15]
3692 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm12 = [2,0,0,0,4,7,1,6]
3693 ; AVX2-FCP-NEXT: vpermd %ymm10, %ymm12, %ymm10
3694 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm14 = [2,3,4,5,10,11,0,1,14,15,2,3,12,13,0,1,18,19,20,21,26,27,16,17,30,31,18,19,28,29,16,17]
3695 ; AVX2-FCP-NEXT: vpshufb %ymm14, %ymm10, %ymm10
3696 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm13 = ymm5[0],ymm4[1],ymm5[2],ymm4[3],ymm5[4,5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10],ymm4[11],ymm5[12,13],ymm4[14],ymm5[15]
3697 ; AVX2-FCP-NEXT: vmovdqa %ymm4, %ymm7
3698 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm13, %xmm15
3699 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1],xmm15[2,3],xmm13[4,5,6],xmm15[7]
3700 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm15 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,10,11]
3701 ; AVX2-FCP-NEXT: vpshufb %xmm15, %xmm13, %xmm13
3702 ; AVX2-FCP-NEXT: vpblendvb %ymm9, %ymm13, %ymm10, %ymm13
3703 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm10 = ymm3[0],ymm0[1],ymm3[2],ymm0[3],ymm3[4,5],ymm0[6],ymm3[7,8],ymm0[9],ymm3[10],ymm0[11],ymm3[12,13],ymm0[14],ymm3[15]
3704 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm10, %xmm0
3705 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm10[0,1],xmm0[2,3],xmm10[4,5,6],xmm0[7]
3706 ; AVX2-FCP-NEXT: vpshufb %xmm15, %xmm0, %xmm0
3707 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm10 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5],ymm2[6],ymm1[7,8],ymm2[9],ymm1[10,11],ymm2[12],ymm1[13],ymm2[14],ymm1[15]
3708 ; AVX2-FCP-NEXT: vmovdqa %ymm2, %ymm15
3709 ; AVX2-FCP-NEXT: vpermd %ymm10, %ymm12, %ymm10
3710 ; AVX2-FCP-NEXT: vmovdqa 128(%rdi), %ymm12
3711 ; AVX2-FCP-NEXT: vpshufb %ymm14, %ymm10, %ymm10
3712 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm14 = [0,3,1,3,0,3,5,7]
3713 ; AVX2-FCP-NEXT: vpblendvb %ymm9, %ymm0, %ymm10, %ymm0
3714 ; AVX2-FCP-NEXT: vpermd %ymm12, %ymm14, %ymm9
3715 ; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm6 = [0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27]
3716 ; AVX2-FCP-NEXT: vpshufb %ymm6, %ymm9, %ymm9
3717 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm9 = ymm8[0,1,2,3,4],ymm9[5,6,7],ymm8[8,9,10,11,12],ymm9[13,14,15]
3718 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm8[0,1,2,3],ymm9[4,5,6,7]
3719 ; AVX2-FCP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3720 ; AVX2-FCP-NEXT: vmovdqa 288(%rdi), %ymm10
3721 ; AVX2-FCP-NEXT: vpermd %ymm10, %ymm14, %ymm8
3722 ; AVX2-FCP-NEXT: vpshufb %ymm6, %ymm8, %ymm8
3723 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm11[0,1,2,3,4],ymm8[5,6,7],ymm11[8,9,10,11,12],ymm8[13,14,15]
3724 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm11[0,1,2,3],ymm8[4,5,6,7]
3725 ; AVX2-FCP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3726 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm8 = [1,3,2,3,1,3,6,7]
3727 ; AVX2-FCP-NEXT: vpermd %ymm12, %ymm8, %ymm11
3728 ; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm2 = [0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25]
3729 ; AVX2-FCP-NEXT: vpshufb %ymm2, %ymm11, %ymm11
3730 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm13[0,1,2,3,4],ymm11[5,6,7],ymm13[8,9,10,11,12],ymm11[13,14,15]
3731 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm13[0,1,2,3],ymm11[4,5,6,7]
3732 ; AVX2-FCP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3733 ; AVX2-FCP-NEXT: vpermd %ymm10, %ymm8, %ymm8
3734 ; AVX2-FCP-NEXT: vpshufb %ymm2, %ymm8, %ymm8
3735 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm0[0,1,2,3,4],ymm8[5,6,7],ymm0[8,9,10,11,12],ymm8[13,14,15]
3736 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm8[4,5,6,7]
3737 ; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3738 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm5[0,1],ymm4[2],ymm5[3],ymm4[4],ymm5[5,6],ymm4[7],ymm5[8,9],ymm4[10],ymm5[11],ymm4[12],ymm5[13,14],ymm4[15]
3739 ; AVX2-FCP-NEXT: vmovdqa %ymm5, %ymm9
3740 ; AVX2-FCP-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3741 ; AVX2-FCP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3742 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm8
3743 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm8[3,4],xmm0[5,6,7]
3744 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
3745 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
3746 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10,11],ymm4[12],ymm5[13],ymm4[14],ymm5[15]
3747 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm14 = [0,2,0,0,5,7,2,4]
3748 ; AVX2-FCP-NEXT: vpermd %ymm8, %ymm14, %ymm8
3749 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm2 = [u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,20,21,22,23,16,17,22,23]
3750 ; AVX2-FCP-NEXT: vpshufb %ymm2, %ymm8, %ymm8
3751 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm1 = [4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
3752 ; AVX2-FCP-NEXT: vpshufb %xmm1, %xmm0, %xmm0
3753 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm8[3,4,5,6,7]
3754 ; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm8 = [1,4,6,0,1,4,6,0]
3755 ; AVX2-FCP-NEXT: # ymm8 = mem[0,1,0,1]
3756 ; AVX2-FCP-NEXT: vpermd %ymm12, %ymm8, %ymm11
3757 ; AVX2-FCP-NEXT: vpshufb %ymm6, %ymm11, %ymm11
3758 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm0[0,1,2,3,4],ymm11[5,6,7],ymm0[8,9,10,11,12],ymm11[13,14,15]
3759 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm11[4,5,6,7]
3760 ; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3761 ; AVX2-FCP-NEXT: vmovdqa %ymm3, %ymm6
3762 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
3763 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm3[0,1],ymm0[2],ymm3[3],ymm0[4],ymm3[5,6],ymm0[7],ymm3[8,9],ymm0[10],ymm3[11],ymm0[12],ymm3[13,14],ymm0[15]
3764 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm11, %xmm13
3765 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2],xmm13[3,4],xmm11[5,6,7]
3766 ; AVX2-FCP-NEXT: vpshufb %xmm1, %xmm11, %xmm1
3767 ; AVX2-FCP-NEXT: vmovdqu (%rsp), %ymm3 # 32-byte Reload
3768 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm15[0],ymm3[1],ymm15[2,3],ymm3[4],ymm15[5],ymm3[6],ymm15[7,8],ymm3[9],ymm15[10,11],ymm3[12],ymm15[13],ymm3[14],ymm15[15]
3769 ; AVX2-FCP-NEXT: vpermd %ymm11, %ymm14, %ymm11
3770 ; AVX2-FCP-NEXT: vpshufb %ymm2, %ymm11, %ymm2
3771 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3,4,5,6,7]
3772 ; AVX2-FCP-NEXT: vpermd %ymm10, %ymm8, %ymm2
3773 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,26,27]
3774 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm1[0,1,2,3,4],ymm2[5,6,7],ymm1[8,9,10,11,12],ymm2[13,14,15]
3775 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4,5,6,7]
3776 ; AVX2-FCP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3777 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm7[0],ymm9[1,2],ymm7[3],ymm9[4],ymm7[5],ymm9[6,7],ymm7[8],ymm9[9,10],ymm7[11],ymm9[12],ymm7[13],ymm9[14,15]
3778 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm1, %xmm2
3779 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2],xmm1[3]
3780 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0],ymm5[1],ymm4[2],ymm5[3],ymm4[4,5],ymm5[6],ymm4[7,8],ymm5[9],ymm4[10],ymm5[11],ymm4[12,13],ymm5[14],ymm4[15]
3781 ; AVX2-FCP-NEXT: vmovdqa %ymm4, %ymm7
3782 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm11 = [0,3,0,0,5,0,2,7]
3783 ; AVX2-FCP-NEXT: vpermd %ymm2, %ymm11, %ymm2
3784 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm13 = [u,u,u,u,u,u,u,u,u,u,u,u,2,3,4,5,18,19,20,21,26,27,16,17,30,31,30,31,18,19,20,21]
3785 ; AVX2-FCP-NEXT: vpshufb %ymm13, %ymm2, %ymm2
3786 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm14 = [6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
3787 ; AVX2-FCP-NEXT: vpshufb %xmm14, %xmm1, %xmm1
3788 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3,4,5,6,7]
3789 ; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm2 = [2,4,7,0,2,4,7,0]
3790 ; AVX2-FCP-NEXT: # ymm2 = mem[0,1,0,1]
3791 ; AVX2-FCP-NEXT: vpermd %ymm12, %ymm2, %ymm8
3792 ; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm4 = [0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25]
3793 ; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm8, %ymm8
3794 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm1[0,1,2,3,4],ymm8[5,6,7],ymm1[8,9,10,11,12],ymm8[13,14,15]
3795 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
3796 ; AVX2-FCP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
3797 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0],ymm6[1,2],ymm0[3],ymm6[4],ymm0[5],ymm6[6,7],ymm0[8],ymm6[9,10],ymm0[11],ymm6[12],ymm0[13],ymm6[14,15]
3798 ; AVX2-FCP-NEXT: vmovdqa %ymm6, %ymm8
3799 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm1, %xmm0
3800 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1],xmm0[2],xmm1[3]
3801 ; AVX2-FCP-NEXT: vpshufb %xmm14, %xmm0, %xmm0
3802 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm3[0],ymm15[1],ymm3[2],ymm15[3],ymm3[4,5],ymm15[6],ymm3[7,8],ymm15[9],ymm3[10],ymm15[11],ymm3[12,13],ymm15[14],ymm3[15]
3803 ; AVX2-FCP-NEXT: vmovdqa %ymm3, %ymm9
3804 ; AVX2-FCP-NEXT: vmovdqa %ymm15, %ymm14
3805 ; AVX2-FCP-NEXT: vpermd %ymm1, %ymm11, %ymm1
3806 ; AVX2-FCP-NEXT: vpshufb %ymm13, %ymm1, %ymm1
3807 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3,4,5,6,7]
3808 ; AVX2-FCP-NEXT: vpermd %ymm10, %ymm2, %ymm1
3809 ; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm1, %ymm1
3810 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5,6,7],ymm0[8,9,10,11,12],ymm1[13,14,15]
3811 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm15 = ymm0[0,1,2,3],ymm1[4,5,6,7]
3812 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm7[0,1],ymm5[2],ymm7[3],ymm5[4],ymm7[5,6],ymm5[7],ymm7[8,9],ymm5[10],ymm7[11],ymm5[12],ymm7[13,14],ymm5[15]
3813 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
3814 ; AVX2-FCP-NEXT: vpblendw $173, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
3815 ; AVX2-FCP-NEXT: # ymm1 = mem[0],ymm1[1],mem[2,3],ymm1[4],mem[5],ymm1[6],mem[7,8],ymm1[9],mem[10,11],ymm1[12],mem[13],ymm1[14],mem[15]
3816 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm1, %xmm2
3817 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3,4],xmm2[5,6,7]
3818 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm2 = [1,3,0,0,6,0,3,5]
3819 ; AVX2-FCP-NEXT: vpermd %ymm0, %ymm2, %ymm0
3820 ; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [16,17,22,23,24,25,30,31,0,0,0,0,0,1,6,7,16,17,22,23,24,25,30,31,0,0,0,0,0,1,6,7]
3821 ; AVX2-FCP-NEXT: # ymm4 = mem[0,1,0,1]
3822 ; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm0, %ymm0
3823 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm5 = [8,9,2,3,12,13,6,7,0,1,10,11,0,1,6,7]
3824 ; AVX2-FCP-NEXT: vpshufb %xmm5, %xmm1, %xmm1
3825 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3,4,5,6,7]
3826 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm1 = [0,2,1,3,0,2,5,7]
3827 ; AVX2-FCP-NEXT: vpermd %ymm12, %ymm1, %ymm6
3828 ; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm7 = [16,17,22,23,24,25,30,31,16,17,22,23,24,25,30,31,16,17,22,23,24,25,30,31,16,17,22,23,24,25,30,31]
3829 ; AVX2-FCP-NEXT: vpshufb %ymm7, %ymm6, %ymm6
3830 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm6[6,7]
3831 ; AVX2-FCP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm3 # 32-byte Folded Reload
3832 ; AVX2-FCP-NEXT: # ymm3 = ymm8[0],mem[1],ymm8[2,3],mem[4],ymm8[5],mem[6],ymm8[7,8],mem[9],ymm8[10,11],mem[12],ymm8[13],mem[14],ymm8[15]
3833 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm3, %xmm6
3834 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm6[0,1,2],xmm3[3,4],xmm6[5,6,7]
3835 ; AVX2-FCP-NEXT: vpshufb %xmm5, %xmm3, %xmm3
3836 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm5 = ymm9[0,1],ymm14[2],ymm9[3],ymm14[4],ymm9[5,6],ymm14[7],ymm9[8,9],ymm14[10],ymm9[11],ymm14[12],ymm9[13,14],ymm14[15]
3837 ; AVX2-FCP-NEXT: vpermd %ymm5, %ymm2, %ymm2
3838 ; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm2, %ymm2
3839 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3,4,5,6,7]
3840 ; AVX2-FCP-NEXT: vpermd %ymm10, %ymm1, %ymm1
3841 ; AVX2-FCP-NEXT: vpshufb %ymm7, %ymm1, %ymm1
3842 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4,5],ymm1[6,7]
3843 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3844 ; AVX2-FCP-NEXT: vmovaps %ymm2, 32(%rsi)
3845 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3846 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%rsi)
3847 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3848 ; AVX2-FCP-NEXT: vmovaps %ymm2, 32(%rdx)
3849 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3850 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%rdx)
3851 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3852 ; AVX2-FCP-NEXT: vmovaps %ymm2, 32(%rcx)
3853 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3854 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%rcx)
3855 ; AVX2-FCP-NEXT: vmovdqa %ymm15, 32(%r8)
3856 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
3857 ; AVX2-FCP-NEXT: vmovaps %ymm2, (%r8)
3858 ; AVX2-FCP-NEXT: vmovdqa %ymm1, 32(%r9)
3859 ; AVX2-FCP-NEXT: vmovdqa %ymm0, (%r9)
3860 ; AVX2-FCP-NEXT: addq $296, %rsp # imm = 0x128
3861 ; AVX2-FCP-NEXT: vzeroupper
3862 ; AVX2-FCP-NEXT: retq
3864 ; AVX512-LABEL: load_i16_stride5_vf32:
3865 ; AVX512: # %bb.0:
3866 ; AVX512-NEXT: vmovdqa 256(%rdi), %ymm0
3867 ; AVX512-NEXT: vmovdqa 288(%rdi), %ymm1
3868 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
3869 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm3
3870 ; AVX512-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm3[3,4],xmm2[5,6,7]
3871 ; AVX512-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[u,u,u,u,4,5,14,15,8,9,2,3,12,13,6,7]
3872 ; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm4
3873 ; AVX512-NEXT: vmovdqa 192(%rdi), %ymm3
3874 ; AVX512-NEXT: vmovdqa 224(%rdi), %ymm9
3875 ; AVX512-NEXT: vpblendw {{.*#+}} ymm5 = ymm9[0],ymm3[1],ymm9[2,3],ymm3[4],ymm9[5],ymm3[6],ymm9[7,8],ymm3[9],ymm9[10,11],ymm3[12],ymm9[13],ymm3[14],ymm9[15]
3876 ; AVX512-NEXT: vextracti128 $1, %ymm5, %xmm6
3877 ; AVX512-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1,2],ymm6[3],ymm5[4,5,6,7]
3878 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm7 = [u,u,u,u,u,u,u,u,8,9,2,3,12,13,6,7,16,17,26,27,26,27,30,31,24,25,18,19,28,29,22,23]
3879 ; AVX512-NEXT: vpshufb %ymm7, %ymm5, %ymm5
3880 ; AVX512-NEXT: vmovdqa64 176(%rdi), %xmm20
3881 ; AVX512-NEXT: vpshufd {{.*#+}} xmm8 = xmm20[3,1,2,3]
3882 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[2,1,2,3,4,5,6,7]
3883 ; AVX512-NEXT: vmovdqa 160(%rdi), %xmm6
3884 ; AVX512-NEXT: vpshufd {{.*#+}} xmm10 = xmm6[0,2,2,3]
3885 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[0,3,2,3,4,5,6,7]
3886 ; AVX512-NEXT: vpunpckldq {{.*#+}} xmm8 = xmm10[0],xmm8[0],xmm10[1],xmm8[1]
3887 ; AVX512-NEXT: vpblendd {{.*#+}} ymm5 = ymm8[0,1],ymm5[2,3,4,5,6,7]
3888 ; AVX512-NEXT: vpblendd {{.*#+}} ymm14 = ymm5[0,1,2,3,4],ymm4[5,6,7]
3889 ; AVX512-NEXT: vmovdqa (%rdi), %ymm8
3890 ; AVX512-NEXT: vmovdqa 32(%rdi), %ymm10
3891 ; AVX512-NEXT: vmovdqa 64(%rdi), %ymm4
3892 ; AVX512-NEXT: vmovdqa 96(%rdi), %ymm5
3893 ; AVX512-NEXT: vpblendw {{.*#+}} ymm11 = ymm4[0],ymm5[1,2],ymm4[3],ymm5[4],ymm4[5],ymm5[6,7],ymm4[8],ymm5[9,10],ymm4[11],ymm5[12],ymm4[13],ymm5[14,15]
3894 ; AVX512-NEXT: vpermq {{.*#+}} ymm12 = ymm11[2,3,0,1]
3895 ; AVX512-NEXT: vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3,4],ymm12[5],ymm11[6],ymm12[7]
3896 ; AVX512-NEXT: vpshufb {{.*#+}} ymm11 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm11[6,7,16,17,26,27,20,21,30,31,24,25],zero,zero,zero,zero,zero,zero
3897 ; AVX512-NEXT: vpblendw {{.*#+}} ymm12 = ymm8[0],ymm10[1],ymm8[2,3],ymm10[4],ymm8[5],ymm10[6],ymm8[7,8],ymm10[9],ymm8[10,11],ymm10[12],ymm8[13],ymm10[14],ymm8[15]
3898 ; AVX512-NEXT: vextracti128 $1, %ymm12, %xmm13
3899 ; AVX512-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0],xmm13[1,2,3],xmm12[4,5],xmm13[6,7]
3900 ; AVX512-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[0,1,10,11,4,5,14,15,8,9,2,3,12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[u,u,u,u,u,u]
3901 ; AVX512-NEXT: vpor %ymm11, %ymm12, %ymm15
3902 ; AVX512-NEXT: vmovdqa 144(%rdi), %xmm11
3903 ; AVX512-NEXT: vmovdqa 128(%rdi), %xmm12
3904 ; AVX512-NEXT: vpblendd {{.*#+}} xmm13 = xmm12[0],xmm11[1],xmm12[2,3]
3905 ; AVX512-NEXT: vpshufb %xmm7, %xmm13, %xmm7
3906 ; AVX512-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
3907 ; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm18 = [65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,0,0,0,0,0,0,0,65535,65535,65535,65535,65535,65535]
3908 ; AVX512-NEXT: vpternlogq {{.*#+}} zmm7 = zmm7 ^ (zmm18 & (zmm7 ^ zmm15))
3909 ; AVX512-NEXT: vinserti64x4 $1, %ymm14, %zmm7, %zmm16
3910 ; AVX512-NEXT: vpblendw {{.*#+}} ymm7 = ymm3[0],ymm9[1],ymm3[2,3],ymm9[4],ymm3[5],ymm9[6],ymm3[7,8],ymm9[9],ymm3[10,11],ymm9[12],ymm3[13],ymm9[14],ymm3[15]
3911 ; AVX512-NEXT: vextracti128 $1, %ymm7, %xmm14
3912 ; AVX512-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0],ymm14[1],ymm7[2],ymm14[3],ymm7[4,5,6,7]
3913 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm14 = [2,3,12,13,0,1,0,1,10,11,4,5,14,15,8,9,18,19,28,29,16,17,16,17,26,27,20,21,30,31,24,25]
3914 ; AVX512-NEXT: vpshufb %ymm14, %ymm7, %ymm7
3915 ; AVX512-NEXT: vpsrlq $48, %xmm20, %xmm15
3916 ; AVX512-NEXT: vpshufd {{.*#+}} xmm13 = xmm6[0,3,2,3]
3917 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[1,2,2,3,4,5,6,7]
3918 ; AVX512-NEXT: vpunpckldq {{.*#+}} xmm13 = xmm13[0],xmm15[0],xmm13[1],xmm15[1]
3919 ; AVX512-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2],xmm7[3,4,5,6,7]
3920 ; AVX512-NEXT: vpblendd {{.*#+}} ymm7 = ymm13[0,1,2,3],ymm7[4,5,6,7]
3921 ; AVX512-NEXT: vmovdqa %ymm0, %ymm2
3922 ; AVX512-NEXT: vpblendw {{.*#+}} ymm13 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
3923 ; AVX512-NEXT: vextracti128 $1, %ymm13, %xmm15
3924 ; AVX512-NEXT: vpblendd {{.*#+}} xmm13 = xmm15[0],xmm13[1],xmm15[2],xmm13[3]
3925 ; AVX512-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[u,u,u,u,6,7,0,1,10,11,4,5,14,15,8,9]
3926 ; AVX512-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
3927 ; AVX512-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3,4],ymm13[5,6,7]
3928 ; AVX512-NEXT: vpblendw {{.*#+}} ymm13 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10,11],ymm4[12],ymm5[13],ymm4[14],ymm5[15]
3929 ; AVX512-NEXT: vpermq {{.*#+}} ymm15 = ymm13[2,3,0,1]
3930 ; AVX512-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3,4],ymm15[5],ymm13[6,7]
3931 ; AVX512-NEXT: vpblendw {{.*#+}} ymm15 = ymm10[0],ymm8[1],ymm10[2],ymm8[3],ymm10[4,5],ymm8[6],ymm10[7,8],ymm8[9],ymm10[10],ymm8[11],ymm10[12,13],ymm8[14],ymm10[15]
3932 ; AVX512-NEXT: vextracti128 $1, %ymm15, %xmm0
3933 ; AVX512-NEXT: vpblendw {{.*#+}} xmm0 = xmm15[0,1],xmm0[2,3],xmm15[4,5,6],xmm0[7]
3934 ; AVX512-NEXT: vpshufb {{.*#+}} ymm13 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm13[8,9,18,19,28,29,22,23,16,17,26,27],zero,zero,zero,zero,zero,zero
3935 ; AVX512-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[2,3,12,13,6,7,0,1,10,11,4,5,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm0[u,u,u,u,u,u]
3936 ; AVX512-NEXT: vpor %ymm0, %ymm13, %ymm0
3937 ; AVX512-NEXT: vpblendd {{.*#+}} xmm13 = xmm12[0,1],xmm11[2],xmm12[3]
3938 ; AVX512-NEXT: vpshufb %xmm14, %xmm13, %xmm13
3939 ; AVX512-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
3940 ; AVX512-NEXT: vpternlogq {{.*#+}} zmm13 = zmm13 ^ (zmm18 & (zmm13 ^ zmm0))
3941 ; AVX512-NEXT: vinserti64x4 $1, %ymm7, %zmm13, %zmm19
3942 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0],ymm3[1],ymm9[2],ymm3[3],ymm9[4,5],ymm3[6],ymm9[7,8],ymm3[9],ymm9[10],ymm3[11],ymm9[12,13],ymm3[14],ymm9[15]
3943 ; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm7
3944 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm7[1],ymm0[2,3,4,5,6,7]
3945 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm7 = [4,5,14,15,12,13,2,3,12,13,6,7,0,1,10,11,20,21,30,31,28,29,18,19,28,29,22,23,16,17,26,27]
3946 ; AVX512-NEXT: vpshufb %ymm7, %ymm0, %ymm0
3947 ; AVX512-NEXT: vmovdqa64 %ymm7, %ymm21
3948 ; AVX512-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[0,1,1,3]
3949 ; AVX512-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,7,6,7]
3950 ; AVX512-NEXT: vmovdqa64 %xmm20, %xmm15
3951 ; AVX512-NEXT: vpunpckhdq {{.*#+}} xmm7 = xmm7[2],xmm20[2],xmm7[3],xmm20[3]
3952 ; AVX512-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm0[3,4,5,6,7]
3953 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm7[0,1,2,3],ymm0[4,5,6,7]
3954 ; AVX512-NEXT: vpblendw {{.*#+}} ymm7 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5],ymm2[6],ymm1[7,8],ymm2[9],ymm1[10,11],ymm2[12],ymm1[13],ymm2[14],ymm1[15]
3955 ; AVX512-NEXT: vextracti128 $1, %ymm7, %xmm13
3956 ; AVX512-NEXT: vpblendw {{.*#+}} xmm7 = xmm13[0,1,2],xmm7[3,4],xmm13[5,6,7]
3957 ; AVX512-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[u,u,u,u,8,9,2,3,12,13,6,7,0,1,10,11]
3958 ; AVX512-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
3959 ; AVX512-NEXT: vpblendd {{.*#+}} ymm7 = ymm0[0,1,2,3,4],ymm7[5,6,7]
3960 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0,1],ymm3[2],ymm9[3],ymm3[4],ymm9[5,6],ymm3[7],ymm9[8,9],ymm3[10],ymm9[11],ymm3[12],ymm9[13,14],ymm3[15]
3961 ; AVX512-NEXT: vmovdqa64 %ymm9, %ymm20
3962 ; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm13
3963 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm13[2],ymm0[3,4,5,6,7]
3964 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm13 = [6,7,6,7,8,9,4,5,14,15,8,9,2,3,12,13,22,23,22,23,24,25,20,21,30,31,24,25,18,19,28,29]
3965 ; AVX512-NEXT: vpshufb %ymm13, %ymm0, %ymm0
3966 ; AVX512-NEXT: vpblendd {{.*#+}} xmm14 = xmm15[0],xmm6[1],xmm15[2,3]
3967 ; AVX512-NEXT: vmovdqa64 %xmm15, %xmm22
3968 ; AVX512-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[6,7,0,1,10,11,u,u,u,u,u,u,u,u,u,u]
3969 ; AVX512-NEXT: vpblendw {{.*#+}} xmm14 = xmm14[0,1,2],xmm0[3,4,5,6,7]
3970 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
3971 ; AVX512-NEXT: vpblendd {{.*#+}} xmm14 = xmm11[0,1],xmm12[2],xmm11[3]
3972 ; AVX512-NEXT: vpshufb %xmm13, %xmm14, %xmm13
3973 ; AVX512-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
3974 ; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm13, %zmm0
3975 ; AVX512-NEXT: vpblendw {{.*#+}} ymm13 = ymm8[0],ymm10[1,2],ymm8[3],ymm10[4],ymm8[5],ymm10[6,7],ymm8[8],ymm10[9,10],ymm8[11],ymm10[12],ymm8[13],ymm10[14,15]
3976 ; AVX512-NEXT: vextracti128 $1, %ymm13, %xmm14
3977 ; AVX512-NEXT: vpblendd {{.*#+}} xmm13 = xmm14[0],xmm13[1],xmm14[2],xmm13[3]
3978 ; AVX512-NEXT: vpblendw {{.*#+}} ymm14 = ymm5[0],ymm4[1],ymm5[2],ymm4[3],ymm5[4,5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10],ymm4[11],ymm5[12,13],ymm4[14],ymm5[15]
3979 ; AVX512-NEXT: vpermq {{.*#+}} ymm15 = ymm14[2,3,0,1]
3980 ; AVX512-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0,1,2,3],ymm15[4],ymm14[5],ymm15[6],ymm14[7]
3981 ; AVX512-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
3982 ; AVX512-NEXT: vpshufb {{.*#+}} ymm14 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
3983 ; AVX512-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0,1,2],ymm14[3,4,5,6,7]
3984 ; AVX512-NEXT: vpblendw {{.*#+}} ymm14 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5],ymm1[6],ymm2[7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13],ymm1[14],ymm2[15]
3985 ; AVX512-NEXT: vmovdqa %ymm2, %ymm9
3986 ; AVX512-NEXT: vextracti128 $1, %ymm14, %xmm15
3987 ; AVX512-NEXT: vpblendw {{.*#+}} xmm14 = xmm14[0],xmm15[1,2,3],xmm14[4,5],xmm15[6,7]
3988 ; AVX512-NEXT: vpternlogq {{.*#+}} zmm13 = zmm0 ^ (mem & (zmm13 ^ zmm0))
3989 ; AVX512-NEXT: vpshufb {{.*#+}} xmm0 = xmm14[u,u,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
3990 ; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
3991 ; AVX512-NEXT: vextracti64x4 $1, %zmm13, %ymm14
3992 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm14[0],ymm0[1,2,3,4,5,6,7],ymm14[8],ymm0[9,10,11,12,13,14,15]
3993 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
3994 ; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm13, %zmm17
3995 ; AVX512-NEXT: vpshufd {{.*#+}} xmm0 = xmm11[3,1,2,3]
3996 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,2,1,4,5,6,7]
3997 ; AVX512-NEXT: vpshufd {{.*#+}} xmm13 = xmm12[0,2,2,3]
3998 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[0,1,0,3,4,5,6,7]
3999 ; AVX512-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm13[0],xmm0[0],xmm13[1],xmm0[1]
4000 ; AVX512-NEXT: vpblendw {{.*#+}} ymm13 = ymm10[0,1],ymm8[2],ymm10[3],ymm8[4],ymm10[5,6],ymm8[7],ymm10[8,9],ymm8[10],ymm10[11],ymm8[12],ymm10[13,14],ymm8[15]
4001 ; AVX512-NEXT: vextracti128 $1, %ymm13, %xmm14
4002 ; AVX512-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2],xmm14[3,4],xmm13[5,6,7]
4003 ; AVX512-NEXT: vpblendw {{.*#+}} ymm14 = ymm4[0],ymm5[1],ymm4[2,3],ymm5[4],ymm4[5],ymm5[6],ymm4[7,8],ymm5[9],ymm4[10,11],ymm5[12],ymm4[13],ymm5[14],ymm4[15]
4004 ; AVX512-NEXT: vpermq {{.*#+}} ymm15 = ymm14[2,3,0,1]
4005 ; AVX512-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6],ymm14[7]
4006 ; AVX512-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
4007 ; AVX512-NEXT: vpshufb {{.*#+}} ymm14 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
4008 ; AVX512-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0,1,2],ymm14[3,4,5,6,7]
4009 ; AVX512-NEXT: vpblendd {{.*#+}} xmm11 = xmm11[0],xmm12[1],xmm11[2,3]
4010 ; AVX512-NEXT: vmovdqa64 %ymm21, %ymm2
4011 ; AVX512-NEXT: vpshufb %xmm2, %xmm11, %xmm11
4012 ; AVX512-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
4013 ; AVX512-NEXT: vpternlogq {{.*#+}} zmm11 = zmm11 ^ (zmm18 & (zmm11 ^ zmm13))
4014 ; AVX512-NEXT: vinserti64x4 $1, %ymm7, %zmm11, %zmm7
4015 ; AVX512-NEXT: vmovdqa64 %ymm20, %ymm2
4016 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1,2],ymm3[3],ymm2[4],ymm3[5],ymm2[6,7],ymm3[8],ymm2[9,10],ymm3[11],ymm2[12],ymm3[13],ymm2[14,15]
4017 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm3
4018 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3,4,5,6,7]
4019 ; AVX512-NEXT: vmovdqa64 %xmm22, %xmm3
4020 ; AVX512-NEXT: vpblendd {{.*#+}} xmm3 = xmm3[0,1],xmm6[2],xmm3[3]
4021 ; AVX512-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[8,9,14,15,4,5,6,7,0,1,10,11,4,5,14,15,24,25,30,31,20,21,22,23,16,17,26,27,20,21,30,31]
4022 ; AVX512-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
4023 ; AVX512-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm2[3,4,5,6,7]
4024 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
4025 ; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4026 ; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
4027 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm10[0],ymm8[1],ymm10[2,3],ymm8[4],ymm10[5],ymm8[6],ymm10[7,8],ymm8[9],ymm10[10,11],ymm8[12],ymm10[13],ymm8[14],ymm10[15]
4028 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm3
4029 ; AVX512-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3,4],xmm3[5,6,7]
4030 ; AVX512-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0,1],ymm4[2],ymm5[3],ymm4[4],ymm5[5,6],ymm4[7],ymm5[8,9],ymm4[10],ymm5[11],ymm4[12],ymm5[13,14],ymm4[15]
4031 ; AVX512-NEXT: vpermq {{.*#+}} ymm4 = ymm3[2,3,0,1]
4032 ; AVX512-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm4[4],ymm3[5,6],ymm4[7]
4033 ; AVX512-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
4034 ; AVX512-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,4,5,14,15,24,25,18,19,28,29,22,23,20,21,22,23,20,21,30,31]
4035 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3,4,5,6,7]
4036 ; AVX512-NEXT: movb $7, %al
4037 ; AVX512-NEXT: kmovw %eax, %k1
4038 ; AVX512-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm0 {%k1}
4039 ; AVX512-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm9[1],ymm1[2],ymm9[3],ymm1[4,5],ymm9[6],ymm1[7,8],ymm9[9],ymm1[10],ymm9[11],ymm1[12,13],ymm9[14],ymm1[15]
4040 ; AVX512-NEXT: vextracti128 $1, %ymm1, %xmm2
4041 ; AVX512-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm2[2,3],xmm1[4,5,6],xmm2[7]
4042 ; AVX512-NEXT: vextracti64x4 $1, %zmm0, %ymm2
4043 ; AVX512-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[u,u,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
4044 ; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
4045 ; AVX512-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1,2,3,4,5,6,7],ymm2[8],ymm1[9,10,11,12,13,14,15]
4046 ; AVX512-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
4047 ; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
4048 ; AVX512-NEXT: vmovdqa64 %zmm16, (%rsi)
4049 ; AVX512-NEXT: vmovdqa64 %zmm19, (%rdx)
4050 ; AVX512-NEXT: vmovdqa64 %zmm7, (%rcx)
4051 ; AVX512-NEXT: vmovdqa64 %zmm17, (%r8)
4052 ; AVX512-NEXT: vmovdqa64 %zmm0, (%r9)
4053 ; AVX512-NEXT: vzeroupper
4054 ; AVX512-NEXT: retq
4056 ; AVX512-FCP-LABEL: load_i16_stride5_vf32:
4057 ; AVX512-FCP: # %bb.0:
4058 ; AVX512-FCP-NEXT: vmovdqa 176(%rdi), %xmm2
4059 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm2[4,5,14,15,4,5,6,7,u,u,u,u,u,u,u,u]
4060 ; AVX512-FCP-NEXT: vmovdqa 160(%rdi), %xmm3
4061 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm3[0,1,10,11,8,9,10,11,u,u,u,u,u,u,u,u]
4062 ; AVX512-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4063 ; AVX512-FCP-NEXT: vmovdqa 192(%rdi), %ymm4
4064 ; AVX512-FCP-NEXT: vmovdqa 224(%rdi), %ymm5
4065 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10,11],ymm4[12],ymm5[13],ymm4[14],ymm5[15]
4066 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm6 = [2,4,7,1,4,6,0,0]
4067 ; AVX512-FCP-NEXT: vpermd %ymm1, %ymm6, %ymm1
4068 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,8,9,14,15,0,1,6,7,16,17,22,23,u,u,u,u,u,u,u,u,u,u,u,u]
4069 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm6 = [8,9,3,2,4,0,0,0]
4070 ; AVX512-FCP-NEXT: vpermi2d %ymm0, %ymm1, %ymm6
4071 ; AVX512-FCP-NEXT: vmovdqa 256(%rdi), %ymm0
4072 ; AVX512-FCP-NEXT: vmovdqa 288(%rdi), %ymm1
4073 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
4074 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm7, %xmm8
4075 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm8[3,4],xmm7[5,6,7]
4076 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[u,u,u,u,4,5,14,15,8,9,2,3,12,13,6,7]
4077 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
4078 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4],ymm7[5,6,7]
4079 ; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm10
4080 ; AVX512-FCP-NEXT: vmovdqa 32(%rdi), %ymm11
4081 ; AVX512-FCP-NEXT: vmovdqa 64(%rdi), %ymm8
4082 ; AVX512-FCP-NEXT: vmovdqa 96(%rdi), %ymm9
4083 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm8[0],ymm9[1,2],ymm8[3],ymm9[4],ymm8[5],ymm9[6,7],ymm8[8],ymm9[9,10],ymm8[11],ymm9[12],ymm8[13],ymm9[14,15]
4084 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm12 = [1,0,0,0,4,6,1,3]
4085 ; AVX512-FCP-NEXT: vpermd %ymm7, %ymm12, %ymm7
4086 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm7 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm7[2,3,16,17,22,23,24,25,30,31,20,21],zero,zero,zero,zero,zero,zero
4087 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm10[0],ymm11[1],ymm10[2,3],ymm11[4],ymm10[5],ymm11[6],ymm10[7,8],ymm11[9],ymm10[10,11],ymm11[12],ymm10[13],ymm11[14],ymm10[15]
4088 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm12, %xmm13
4089 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0],xmm13[1,2,3],xmm12[4,5],xmm13[6,7]
4090 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[0,1,10,11,4,5,14,15,8,9,2,3,12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[u,u,u,u,u,u]
4091 ; AVX512-FCP-NEXT: vpor %ymm7, %ymm12, %ymm12
4092 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm13 = [0,3,1,0,0,3,5,0]
4093 ; AVX512-FCP-NEXT: vmovdqa 128(%rdi), %ymm7
4094 ; AVX512-FCP-NEXT: vpermd %ymm7, %ymm13, %ymm13
4095 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm14 = [4,5,2,3,4,5,6,7,8,9,2,3,4,5,10,11,20,21,18,19,20,21,22,23,24,25,18,19,20,21,26,27]
4096 ; AVX512-FCP-NEXT: vpshufb %ymm14, %ymm13, %ymm13
4097 ; AVX512-FCP-NEXT: vmovdqa64 {{.*#+}} zmm18 = [65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,0,0,0,0,0,0,0,65535,65535,65535,65535,65535,65535]
4098 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm13 = zmm13 ^ (zmm18 & (zmm13 ^ zmm12))
4099 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm13, %zmm16
4100 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm11[0],ymm10[1],ymm11[2],ymm10[3],ymm11[4,5],ymm10[6],ymm11[7,8],ymm10[9],ymm11[10],ymm10[11],ymm11[12,13],ymm10[14],ymm11[15]
4101 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm12, %xmm13
4102 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1],xmm13[2,3],xmm12[4,5,6],xmm13[7]
4103 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm6 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,128,128,128,128,128,128,128,128,128,128,128,128,u,u,u,u,u,u]
4104 ; AVX512-FCP-NEXT: vpshufb %ymm6, %ymm12, %ymm12
4105 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm13 = ymm9[0],ymm8[1],ymm9[2,3],ymm8[4],ymm9[5],ymm8[6],ymm9[7,8],ymm8[9],ymm9[10,11],ymm8[12],ymm9[13],ymm8[14],ymm9[15]
4106 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm17 = [2,0,0,0,4,7,1,6]
4107 ; AVX512-FCP-NEXT: vpermd %ymm13, %ymm17, %ymm13
4108 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm13 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm13[0,1,18,19,20,21,26,27,16,17,30,31],zero,zero,zero,zero,zero,zero
4109 ; AVX512-FCP-NEXT: vpor %ymm13, %ymm12, %ymm12
4110 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm13 = [1,3,2,0,1,3,6,0]
4111 ; AVX512-FCP-NEXT: vpermd %ymm7, %ymm13, %ymm15
4112 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm13 = [0,1,6,7,4,5,6,7,8,9,0,1,6,7,8,9,16,17,22,23,20,21,22,23,24,25,16,17,22,23,24,25]
4113 ; AVX512-FCP-NEXT: vpshufb %ymm13, %ymm15, %ymm15
4114 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm15 = zmm15 ^ (zmm18 & (zmm15 ^ zmm12))
4115 ; AVX512-FCP-NEXT: vpshufb %xmm6, %xmm3, %xmm6
4116 ; AVX512-FCP-NEXT: vpsrlq $48, %xmm2, %xmm12
4117 ; AVX512-FCP-NEXT: vpunpckldq {{.*#+}} xmm6 = xmm6[0],xmm12[0],xmm6[1],xmm12[1]
4118 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm4[0],ymm5[1],ymm4[2,3],ymm5[4],ymm4[5],ymm5[6],ymm4[7,8],ymm5[9],ymm4[10,11],ymm5[12],ymm4[13],ymm5[14],ymm4[15]
4119 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm17 = [0,2,5,7,4,7,0,0]
4120 ; AVX512-FCP-NEXT: vpermd %ymm12, %ymm17, %ymm12
4121 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[2,3,4,5,4,5,0,1,6,7,8,9,14,15,4,5,18,19,20,21,20,21,16,17,22,23,24,25,30,31,20,21]
4122 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2],xmm12[3,4,5,6,7]
4123 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm12[4,5,6,7]
4124 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
4125 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} ymm17 = [1,4,6,3,1,4,6,3]
4126 ; AVX512-FCP-NEXT: # ymm17 = mem[0,1,2,3,0,1,2,3]
4127 ; AVX512-FCP-NEXT: vpermd %ymm12, %ymm17, %ymm12
4128 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,26,27,16,17,30,31,24,25]
4129 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4],ymm12[5,6,7]
4130 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm15, %zmm17
4131 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
4132 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm6, %xmm15
4133 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm15[0,1,2],xmm6[3,4],xmm15[5,6,7]
4134 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[u,u,u,u,8,9,2,3,12,13,6,7,0,1,10,11]
4135 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
4136 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm15 = xmm3[u,u,u,u,u,u,u,u,4,5,14,15,u,u,u,u]
4137 ; AVX512-FCP-NEXT: vpunpckhdq {{.*#+}} xmm15 = xmm15[2],xmm2[2],xmm15[3],xmm2[3]
4138 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm5[0],ymm4[1],ymm5[2],ymm4[3],ymm5[4,5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10],ymm4[11],ymm5[12,13],ymm4[14],ymm5[15]
4139 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm19 = [0,3,5,2,5,7,0,0]
4140 ; AVX512-FCP-NEXT: vpermd %ymm12, %ymm19, %ymm12
4141 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[0,1,6,7,2,3,2,3,4,5,10,11,0,1,14,15,16,17,22,23,18,19,18,19,20,21,26,27,16,17,30,31]
4142 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm15 = xmm15[0,1,2],xmm12[3,4,5,6,7]
4143 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm12 = ymm15[0,1,2,3],ymm12[4,5,6,7]
4144 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm12[0,1,2,3,4],ymm6[5,6,7]
4145 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm11[0,1],ymm10[2],ymm11[3],ymm10[4],ymm11[5,6],ymm10[7],ymm11[8,9],ymm10[10],ymm11[11],ymm10[12],ymm11[13,14],ymm10[15]
4146 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm12, %xmm15
4147 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2],xmm15[3,4],xmm12[5,6,7]
4148 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm12 = xmm12[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
4149 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm15 = ymm8[0],ymm9[1],ymm8[2,3],ymm9[4],ymm8[5],ymm9[6],ymm8[7,8],ymm9[9],ymm8[10,11],ymm9[12],ymm8[13],ymm9[14],ymm8[15]
4150 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm19 = [0,2,0,0,5,7,2,4]
4151 ; AVX512-FCP-NEXT: vpermd %ymm15, %ymm19, %ymm15
4152 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm15 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,20,21,22,23,16,17,22,23]
4153 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2],ymm15[3,4,5,6,7]
4154 ; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [1,4,6,0,1,4,6,0]
4155 ; AVX512-FCP-NEXT: # ymm15 = mem[0,1,0,1]
4156 ; AVX512-FCP-NEXT: vpermd %ymm7, %ymm15, %ymm15
4157 ; AVX512-FCP-NEXT: vpshufb %ymm14, %ymm15, %ymm14
4158 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm14 = zmm14 ^ (zmm18 & (zmm14 ^ zmm12))
4159 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm14, %zmm14
4160 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm10[0],ymm11[1,2],ymm10[3],ymm11[4],ymm10[5],ymm11[6,7],ymm10[8],ymm11[9,10],ymm10[11],ymm11[12],ymm10[13],ymm11[14,15]
4161 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm6, %xmm12
4162 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm6 = xmm12[0],xmm6[1],xmm12[2],xmm6[3]
4163 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
4164 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm9[0],ymm8[1],ymm9[2],ymm8[3],ymm9[4,5],ymm8[6],ymm9[7,8],ymm8[9],ymm9[10],ymm8[11],ymm9[12,13],ymm8[14],ymm9[15]
4165 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm15 = [0,3,0,0,5,0,2,7]
4166 ; AVX512-FCP-NEXT: vpermd %ymm12, %ymm15, %ymm12
4167 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,2,3,4,5,18,19,20,21,26,27,16,17,30,31,30,31,18,19,20,21]
4168 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2],ymm12[3,4,5,6,7]
4169 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm12 = xmm2[0],xmm3[1],xmm2[2,3]
4170 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm12 = xmm12[6,7,0,1,10,11,u,u,u,u,u,u,u,u,u,u]
4171 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm15 = ymm5[0,1],ymm4[2],ymm5[3],ymm4[4],ymm5[5,6],ymm4[7],ymm5[8,9],ymm4[10],ymm5[11],ymm4[12],ymm5[13,14],ymm4[15]
4172 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm18 = [1,3,6,0,5,0,0,0]
4173 ; AVX512-FCP-NEXT: vpermd %ymm15, %ymm18, %ymm15
4174 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm15 = ymm15[2,3,2,3,4,5,0,1,6,7,8,9,14,15,4,5,18,19,18,19,20,21,16,17,22,23,24,25,30,31,20,21]
4175 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2],xmm15[3,4,5,6,7]
4176 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
4177 ; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [2,4,7,0,2,4,7,0]
4178 ; AVX512-FCP-NEXT: # ymm15 = mem[0,1,0,1]
4179 ; AVX512-FCP-NEXT: vpermd %ymm7, %ymm15, %ymm15
4180 ; AVX512-FCP-NEXT: vpshufb %ymm13, %ymm15, %ymm13
4181 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm12, %zmm13, %zmm12
4182 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm12 = zmm12 ^ (mem & (zmm12 ^ zmm6))
4183 ; AVX512-FCP-NEXT: vextracti64x4 $1, %zmm12, %ymm6
4184 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm13 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
4185 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm13, %xmm15
4186 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0],xmm15[1,2,3],xmm13[4,5],xmm15[6,7]
4187 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[u,u,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
4188 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
4189 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm13 = ymm6[0],ymm13[1,2,3,4,5,6,7],ymm6[8],ymm13[9,10,11,12,13,14,15]
4190 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm13[4,5,6,7]
4191 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm12, %zmm6
4192 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm10 = ymm11[0],ymm10[1],ymm11[2,3],ymm10[4],ymm11[5],ymm10[6],ymm11[7,8],ymm10[9],ymm11[10,11],ymm10[12],ymm11[13],ymm10[14],ymm11[15]
4193 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm10, %xmm11
4194 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm10 = xmm11[0,1,2],xmm10[3,4],xmm11[5,6,7]
4195 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
4196 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1],ymm8[2],ymm9[3],ymm8[4],ymm9[5,6],ymm8[7],ymm9[8,9],ymm8[10],ymm9[11],ymm8[12],ymm9[13,14],ymm8[15]
4197 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm9 = [1,3,0,0,6,0,3,5]
4198 ; AVX512-FCP-NEXT: vpermd %ymm8, %ymm9, %ymm8
4199 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,16,17,22,23,16,17,22,23]
4200 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm8 = ymm10[0,1,2],ymm8[3,4,5,6,7]
4201 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm2 = xmm2[0,1],xmm3[2],xmm2[3]
4202 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
4203 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm5[1,2],ymm4[3],ymm5[4],ymm4[5],ymm5[6,7],ymm4[8],ymm5[9,10],ymm4[11],ymm5[12],ymm4[13],ymm5[14,15]
4204 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm4 = [1,4,6,3,6,0,0,0]
4205 ; AVX512-FCP-NEXT: vpermd %ymm3, %ymm4, %ymm3
4206 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5,2,3,4,5,10,11,0,1,14,15,16,17,18,19,20,21,18,19,20,21,26,27,16,17,30,31]
4207 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm3[3,4,5,6,7]
4208 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
4209 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm3 = [0,2,1,3,0,2,5,7]
4210 ; AVX512-FCP-NEXT: vpermd %ymm7, %ymm3, %ymm3
4211 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[0,1,6,7,4,5,6,7,0,1,6,7,8,9,14,15,16,17,22,23,20,21,22,23,16,17,22,23,24,25,30,31]
4212 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
4213 ; AVX512-FCP-NEXT: movb $7, %al
4214 ; AVX512-FCP-NEXT: kmovw %eax, %k1
4215 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm2 {%k1}
4216 ; AVX512-FCP-NEXT: vextracti64x4 $1, %zmm2, %ymm3
4217 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
4218 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
4219 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6],xmm1[7]
4220 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[u,u,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
4221 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4222 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0],ymm0[1,2,3,4,5,6,7],ymm3[8],ymm0[9,10,11,12,13,14,15]
4223 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
4224 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
4225 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, (%rsi)
4226 ; AVX512-FCP-NEXT: vmovdqa64 %zmm17, (%rdx)
4227 ; AVX512-FCP-NEXT: vmovdqa64 %zmm14, (%rcx)
4228 ; AVX512-FCP-NEXT: vmovdqa64 %zmm6, (%r8)
4229 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, (%r9)
4230 ; AVX512-FCP-NEXT: vzeroupper
4231 ; AVX512-FCP-NEXT: retq
4233 ; AVX512DQ-LABEL: load_i16_stride5_vf32:
4234 ; AVX512DQ: # %bb.0:
4235 ; AVX512DQ-NEXT: vmovdqa 256(%rdi), %ymm0
4236 ; AVX512DQ-NEXT: vmovdqa 288(%rdi), %ymm1
4237 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
4238 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm3
4239 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm3[3,4],xmm2[5,6,7]
4240 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[u,u,u,u,4,5,14,15,8,9,2,3,12,13,6,7]
4241 ; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm4
4242 ; AVX512DQ-NEXT: vmovdqa 192(%rdi), %ymm3
4243 ; AVX512DQ-NEXT: vmovdqa 224(%rdi), %ymm9
4244 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm5 = ymm9[0],ymm3[1],ymm9[2,3],ymm3[4],ymm9[5],ymm3[6],ymm9[7,8],ymm3[9],ymm9[10,11],ymm3[12],ymm9[13],ymm3[14],ymm9[15]
4245 ; AVX512DQ-NEXT: vextracti128 $1, %ymm5, %xmm6
4246 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0],ymm5[1,2],ymm6[3],ymm5[4,5,6,7]
4247 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm7 = [u,u,u,u,u,u,u,u,8,9,2,3,12,13,6,7,16,17,26,27,26,27,30,31,24,25,18,19,28,29,22,23]
4248 ; AVX512DQ-NEXT: vpshufb %ymm7, %ymm5, %ymm5
4249 ; AVX512DQ-NEXT: vmovdqa64 176(%rdi), %xmm20
4250 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm8 = xmm20[3,1,2,3]
4251 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm8 = xmm8[2,1,2,3,4,5,6,7]
4252 ; AVX512DQ-NEXT: vmovdqa 160(%rdi), %xmm6
4253 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm10 = xmm6[0,2,2,3]
4254 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[0,3,2,3,4,5,6,7]
4255 ; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm8 = xmm10[0],xmm8[0],xmm10[1],xmm8[1]
4256 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm5 = ymm8[0,1],ymm5[2,3,4,5,6,7]
4257 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm14 = ymm5[0,1,2,3,4],ymm4[5,6,7]
4258 ; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm8
4259 ; AVX512DQ-NEXT: vmovdqa 32(%rdi), %ymm10
4260 ; AVX512DQ-NEXT: vmovdqa 64(%rdi), %ymm4
4261 ; AVX512DQ-NEXT: vmovdqa 96(%rdi), %ymm5
4262 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm11 = ymm4[0],ymm5[1,2],ymm4[3],ymm5[4],ymm4[5],ymm5[6,7],ymm4[8],ymm5[9,10],ymm4[11],ymm5[12],ymm4[13],ymm5[14,15]
4263 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm12 = ymm11[2,3,0,1]
4264 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3,4],ymm12[5],ymm11[6],ymm12[7]
4265 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm11 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm11[6,7,16,17,26,27,20,21,30,31,24,25],zero,zero,zero,zero,zero,zero
4266 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm12 = ymm8[0],ymm10[1],ymm8[2,3],ymm10[4],ymm8[5],ymm10[6],ymm8[7,8],ymm10[9],ymm8[10,11],ymm10[12],ymm8[13],ymm10[14],ymm8[15]
4267 ; AVX512DQ-NEXT: vextracti128 $1, %ymm12, %xmm13
4268 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0],xmm13[1,2,3],xmm12[4,5],xmm13[6,7]
4269 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[0,1,10,11,4,5,14,15,8,9,2,3,12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[u,u,u,u,u,u]
4270 ; AVX512DQ-NEXT: vpor %ymm11, %ymm12, %ymm15
4271 ; AVX512DQ-NEXT: vmovdqa 144(%rdi), %xmm11
4272 ; AVX512DQ-NEXT: vmovdqa 128(%rdi), %xmm12
4273 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm13 = xmm12[0],xmm11[1],xmm12[2,3]
4274 ; AVX512DQ-NEXT: vpshufb %xmm7, %xmm13, %xmm7
4275 ; AVX512DQ-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
4276 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm18 = [65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,0,0,0,0,0,0,0,65535,65535,65535,65535,65535,65535]
4277 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm7 = zmm7 ^ (zmm18 & (zmm7 ^ zmm15))
4278 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm14, %zmm7, %zmm16
4279 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm7 = ymm3[0],ymm9[1],ymm3[2,3],ymm9[4],ymm3[5],ymm9[6],ymm3[7,8],ymm9[9],ymm3[10,11],ymm9[12],ymm3[13],ymm9[14],ymm3[15]
4280 ; AVX512DQ-NEXT: vextracti128 $1, %ymm7, %xmm14
4281 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0],ymm14[1],ymm7[2],ymm14[3],ymm7[4,5,6,7]
4282 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm14 = [2,3,12,13,0,1,0,1,10,11,4,5,14,15,8,9,18,19,28,29,16,17,16,17,26,27,20,21,30,31,24,25]
4283 ; AVX512DQ-NEXT: vpshufb %ymm14, %ymm7, %ymm7
4284 ; AVX512DQ-NEXT: vpsrlq $48, %xmm20, %xmm15
4285 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm13 = xmm6[0,3,2,3]
4286 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[1,2,2,3,4,5,6,7]
4287 ; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm13 = xmm13[0],xmm15[0],xmm13[1],xmm15[1]
4288 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2],xmm7[3,4,5,6,7]
4289 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm7 = ymm13[0,1,2,3],ymm7[4,5,6,7]
4290 ; AVX512DQ-NEXT: vmovdqa %ymm0, %ymm2
4291 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm13 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
4292 ; AVX512DQ-NEXT: vextracti128 $1, %ymm13, %xmm15
4293 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm13 = xmm15[0],xmm13[1],xmm15[2],xmm13[3]
4294 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[u,u,u,u,6,7,0,1,10,11,4,5,14,15,8,9]
4295 ; AVX512DQ-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
4296 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3,4],ymm13[5,6,7]
4297 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm13 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10,11],ymm4[12],ymm5[13],ymm4[14],ymm5[15]
4298 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm15 = ymm13[2,3,0,1]
4299 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3,4],ymm15[5],ymm13[6,7]
4300 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm15 = ymm10[0],ymm8[1],ymm10[2],ymm8[3],ymm10[4,5],ymm8[6],ymm10[7,8],ymm8[9],ymm10[10],ymm8[11],ymm10[12,13],ymm8[14],ymm10[15]
4301 ; AVX512DQ-NEXT: vextracti128 $1, %ymm15, %xmm0
4302 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm0 = xmm15[0,1],xmm0[2,3],xmm15[4,5,6],xmm0[7]
4303 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm13 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm13[8,9,18,19,28,29,22,23,16,17,26,27],zero,zero,zero,zero,zero,zero
4304 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[2,3,12,13,6,7,0,1,10,11,4,5,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm0[u,u,u,u,u,u]
4305 ; AVX512DQ-NEXT: vpor %ymm0, %ymm13, %ymm0
4306 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm13 = xmm12[0,1],xmm11[2],xmm12[3]
4307 ; AVX512DQ-NEXT: vpshufb %xmm14, %xmm13, %xmm13
4308 ; AVX512DQ-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
4309 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm13 = zmm13 ^ (zmm18 & (zmm13 ^ zmm0))
4310 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm7, %zmm13, %zmm19
4311 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0],ymm3[1],ymm9[2],ymm3[3],ymm9[4,5],ymm3[6],ymm9[7,8],ymm3[9],ymm9[10],ymm3[11],ymm9[12,13],ymm3[14],ymm9[15]
4312 ; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm7
4313 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0],ymm7[1],ymm0[2,3,4,5,6,7]
4314 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm7 = [4,5,14,15,12,13,2,3,12,13,6,7,0,1,10,11,20,21,30,31,28,29,18,19,28,29,22,23,16,17,26,27]
4315 ; AVX512DQ-NEXT: vpshufb %ymm7, %ymm0, %ymm0
4316 ; AVX512DQ-NEXT: vmovdqa64 %ymm7, %ymm21
4317 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm7 = xmm6[0,1,1,3]
4318 ; AVX512DQ-NEXT: vpshufhw {{.*#+}} xmm7 = xmm7[0,1,2,3,4,7,6,7]
4319 ; AVX512DQ-NEXT: vmovdqa64 %xmm20, %xmm15
4320 ; AVX512DQ-NEXT: vpunpckhdq {{.*#+}} xmm7 = xmm7[2],xmm20[2],xmm7[3],xmm20[3]
4321 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm0[3,4,5,6,7]
4322 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm7[0,1,2,3],ymm0[4,5,6,7]
4323 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm7 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5],ymm2[6],ymm1[7,8],ymm2[9],ymm1[10,11],ymm2[12],ymm1[13],ymm2[14],ymm1[15]
4324 ; AVX512DQ-NEXT: vextracti128 $1, %ymm7, %xmm13
4325 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm7 = xmm13[0,1,2],xmm7[3,4],xmm13[5,6,7]
4326 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[u,u,u,u,8,9,2,3,12,13,6,7,0,1,10,11]
4327 ; AVX512DQ-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
4328 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm7 = ymm0[0,1,2,3,4],ymm7[5,6,7]
4329 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0,1],ymm3[2],ymm9[3],ymm3[4],ymm9[5,6],ymm3[7],ymm9[8,9],ymm3[10],ymm9[11],ymm3[12],ymm9[13,14],ymm3[15]
4330 ; AVX512DQ-NEXT: vmovdqa64 %ymm9, %ymm20
4331 ; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm13
4332 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm13[2],ymm0[3,4,5,6,7]
4333 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm13 = [6,7,6,7,8,9,4,5,14,15,8,9,2,3,12,13,22,23,22,23,24,25,20,21,30,31,24,25,18,19,28,29]
4334 ; AVX512DQ-NEXT: vpshufb %ymm13, %ymm0, %ymm0
4335 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm14 = xmm15[0],xmm6[1],xmm15[2,3]
4336 ; AVX512DQ-NEXT: vmovdqa64 %xmm15, %xmm22
4337 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm14 = xmm14[6,7,0,1,10,11,u,u,u,u,u,u,u,u,u,u]
4338 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm14 = xmm14[0,1,2],xmm0[3,4,5,6,7]
4339 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
4340 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm14 = xmm11[0,1],xmm12[2],xmm11[3]
4341 ; AVX512DQ-NEXT: vpshufb %xmm13, %xmm14, %xmm13
4342 ; AVX512DQ-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
4343 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm13, %zmm0
4344 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm13 = ymm8[0],ymm10[1,2],ymm8[3],ymm10[4],ymm8[5],ymm10[6,7],ymm8[8],ymm10[9,10],ymm8[11],ymm10[12],ymm8[13],ymm10[14,15]
4345 ; AVX512DQ-NEXT: vextracti128 $1, %ymm13, %xmm14
4346 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm13 = xmm14[0],xmm13[1],xmm14[2],xmm13[3]
4347 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm14 = ymm5[0],ymm4[1],ymm5[2],ymm4[3],ymm5[4,5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10],ymm4[11],ymm5[12,13],ymm4[14],ymm5[15]
4348 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm15 = ymm14[2,3,0,1]
4349 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0,1,2,3],ymm15[4],ymm14[5],ymm15[6],ymm14[7]
4350 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
4351 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm14 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
4352 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0,1,2],ymm14[3,4,5,6,7]
4353 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm14 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5],ymm1[6],ymm2[7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13],ymm1[14],ymm2[15]
4354 ; AVX512DQ-NEXT: vmovdqa %ymm2, %ymm9
4355 ; AVX512DQ-NEXT: vextracti128 $1, %ymm14, %xmm15
4356 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm14 = xmm14[0],xmm15[1,2,3],xmm14[4,5],xmm15[6,7]
4357 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm13 = zmm0 ^ (mem & (zmm13 ^ zmm0))
4358 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm0 = xmm14[u,u,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
4359 ; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4360 ; AVX512DQ-NEXT: vextracti64x4 $1, %zmm13, %ymm14
4361 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm14[0],ymm0[1,2,3,4,5,6,7],ymm14[8],ymm0[9,10,11,12,13,14,15]
4362 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm14[0,1,2,3],ymm0[4,5,6,7]
4363 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm13, %zmm17
4364 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm0 = xmm11[3,1,2,3]
4365 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,2,1,4,5,6,7]
4366 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm13 = xmm12[0,2,2,3]
4367 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[0,1,0,3,4,5,6,7]
4368 ; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm13[0],xmm0[0],xmm13[1],xmm0[1]
4369 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm13 = ymm10[0,1],ymm8[2],ymm10[3],ymm8[4],ymm10[5,6],ymm8[7],ymm10[8,9],ymm8[10],ymm10[11],ymm8[12],ymm10[13,14],ymm8[15]
4370 ; AVX512DQ-NEXT: vextracti128 $1, %ymm13, %xmm14
4371 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0,1,2],xmm14[3,4],xmm13[5,6,7]
4372 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm14 = ymm4[0],ymm5[1],ymm4[2,3],ymm5[4],ymm4[5],ymm5[6],ymm4[7,8],ymm5[9],ymm4[10,11],ymm5[12],ymm4[13],ymm5[14],ymm4[15]
4373 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm15 = ymm14[2,3,0,1]
4374 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5],ymm15[6],ymm14[7]
4375 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
4376 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm14 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
4377 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm13 = ymm13[0,1,2],ymm14[3,4,5,6,7]
4378 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm11 = xmm11[0],xmm12[1],xmm11[2,3]
4379 ; AVX512DQ-NEXT: vmovdqa64 %ymm21, %ymm2
4380 ; AVX512DQ-NEXT: vpshufb %xmm2, %xmm11, %xmm11
4381 ; AVX512DQ-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
4382 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm11 = zmm11 ^ (zmm18 & (zmm11 ^ zmm13))
4383 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm7, %zmm11, %zmm7
4384 ; AVX512DQ-NEXT: vmovdqa64 %ymm20, %ymm2
4385 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1,2],ymm3[3],ymm2[4],ymm3[5],ymm2[6,7],ymm3[8],ymm2[9,10],ymm3[11],ymm2[12],ymm3[13],ymm2[14,15]
4386 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm3
4387 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0],ymm2[1],ymm3[2],ymm2[3,4,5,6,7]
4388 ; AVX512DQ-NEXT: vmovdqa64 %xmm22, %xmm3
4389 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm3 = xmm3[0,1],xmm6[2],xmm3[3]
4390 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm2 = ymm2[8,9,14,15,4,5,6,7,0,1,10,11,4,5,14,15,24,25,30,31,20,21,22,23,16,17,26,27,20,21,30,31]
4391 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm3 = xmm3[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
4392 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm2[3,4,5,6,7]
4393 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
4394 ; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4395 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
4396 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm10[0],ymm8[1],ymm10[2,3],ymm8[4],ymm10[5],ymm8[6],ymm10[7,8],ymm8[9],ymm10[10,11],ymm8[12],ymm10[13],ymm8[14],ymm10[15]
4397 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm3
4398 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3,4],xmm3[5,6,7]
4399 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0,1],ymm4[2],ymm5[3],ymm4[4],ymm5[5,6],ymm4[7],ymm5[8,9],ymm4[10],ymm5[11],ymm4[12],ymm5[13,14],ymm4[15]
4400 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm4 = ymm3[2,3,0,1]
4401 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm4[4],ymm3[5,6],ymm4[7]
4402 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
4403 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,4,5,14,15,24,25,18,19,28,29,22,23,20,21,22,23,20,21,30,31]
4404 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3,4,5,6,7]
4405 ; AVX512DQ-NEXT: movb $7, %al
4406 ; AVX512DQ-NEXT: kmovw %eax, %k1
4407 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm2, %zmm0, %zmm0 {%k1}
4408 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0],ymm9[1],ymm1[2],ymm9[3],ymm1[4,5],ymm9[6],ymm1[7,8],ymm9[9],ymm1[10],ymm9[11],ymm1[12,13],ymm9[14],ymm1[15]
4409 ; AVX512DQ-NEXT: vextracti128 $1, %ymm1, %xmm2
4410 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm2[2,3],xmm1[4,5,6],xmm2[7]
4411 ; AVX512DQ-NEXT: vextracti64x4 $1, %zmm0, %ymm2
4412 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[u,u,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
4413 ; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
4414 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1,2,3,4,5,6,7],ymm2[8],ymm1[9,10,11,12,13,14,15]
4415 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm1[4,5,6,7]
4416 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
4417 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, (%rsi)
4418 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, (%rdx)
4419 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, (%rcx)
4420 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, (%r8)
4421 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%r9)
4422 ; AVX512DQ-NEXT: vzeroupper
4423 ; AVX512DQ-NEXT: retq
4424 ;
4425 ; AVX512DQ-FCP-LABEL: load_i16_stride5_vf32:
4426 ; AVX512DQ-FCP: # %bb.0:
4427 ; AVX512DQ-FCP-NEXT: vmovdqa 176(%rdi), %xmm2
4428 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm2[4,5,14,15,4,5,6,7,u,u,u,u,u,u,u,u]
4429 ; AVX512DQ-FCP-NEXT: vmovdqa 160(%rdi), %xmm3
4430 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm1 = xmm3[0,1,10,11,8,9,10,11,u,u,u,u,u,u,u,u]
4431 ; AVX512DQ-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
4432 ; AVX512DQ-FCP-NEXT: vmovdqa 192(%rdi), %ymm4
4433 ; AVX512DQ-FCP-NEXT: vmovdqa 224(%rdi), %ymm5
4434 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10,11],ymm4[12],ymm5[13],ymm4[14],ymm5[15]
4435 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm6 = [2,4,7,1,4,6,0,0]
4436 ; AVX512DQ-FCP-NEXT: vpermd %ymm1, %ymm6, %ymm1
4437 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,8,9,14,15,0,1,6,7,16,17,22,23,u,u,u,u,u,u,u,u,u,u,u,u]
4438 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm6 = [8,9,3,2,4,0,0,0]
4439 ; AVX512DQ-FCP-NEXT: vpermi2d %ymm0, %ymm1, %ymm6
4440 ; AVX512DQ-FCP-NEXT: vmovdqa 256(%rdi), %ymm0
4441 ; AVX512DQ-FCP-NEXT: vmovdqa 288(%rdi), %ymm1
4442 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
4443 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm7, %xmm8
4444 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm8[3,4],xmm7[5,6,7]
4445 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm7 = xmm7[u,u,u,u,4,5,14,15,8,9,2,3,12,13,6,7]
4446 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
4447 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4],ymm7[5,6,7]
4448 ; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm10
4449 ; AVX512DQ-FCP-NEXT: vmovdqa 32(%rdi), %ymm11
4450 ; AVX512DQ-FCP-NEXT: vmovdqa 64(%rdi), %ymm8
4451 ; AVX512DQ-FCP-NEXT: vmovdqa 96(%rdi), %ymm9
4452 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm8[0],ymm9[1,2],ymm8[3],ymm9[4],ymm8[5],ymm9[6,7],ymm8[8],ymm9[9,10],ymm8[11],ymm9[12],ymm8[13],ymm9[14,15]
4453 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm12 = [1,0,0,0,4,6,1,3]
4454 ; AVX512DQ-FCP-NEXT: vpermd %ymm7, %ymm12, %ymm7
4455 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm7 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm7[2,3,16,17,22,23,24,25,30,31,20,21],zero,zero,zero,zero,zero,zero
4456 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm10[0],ymm11[1],ymm10[2,3],ymm11[4],ymm10[5],ymm11[6],ymm10[7,8],ymm11[9],ymm10[10,11],ymm11[12],ymm10[13],ymm11[14],ymm10[15]
4457 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm12, %xmm13
4458 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0],xmm13[1,2,3],xmm12[4,5],xmm13[6,7]
4459 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[0,1,10,11,4,5,14,15,8,9,2,3,12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[u,u,u,u,u,u]
4460 ; AVX512DQ-FCP-NEXT: vpor %ymm7, %ymm12, %ymm12
4461 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm13 = [0,3,1,0,0,3,5,0]
4462 ; AVX512DQ-FCP-NEXT: vmovdqa 128(%rdi), %ymm7
4463 ; AVX512DQ-FCP-NEXT: vpermd %ymm7, %ymm13, %ymm13
4464 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm14 = [4,5,2,3,4,5,6,7,8,9,2,3,4,5,10,11,20,21,18,19,20,21,22,23,24,25,18,19,20,21,26,27]
4465 ; AVX512DQ-FCP-NEXT: vpshufb %ymm14, %ymm13, %ymm13
4466 ; AVX512DQ-FCP-NEXT: vmovdqa64 {{.*#+}} zmm18 = [65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,0,0,0,0,0,0,0,65535,65535,65535,65535,65535,65535]
4467 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm13 = zmm13 ^ (zmm18 & (zmm13 ^ zmm12))
4468 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm13, %zmm16
4469 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm11[0],ymm10[1],ymm11[2],ymm10[3],ymm11[4,5],ymm10[6],ymm11[7,8],ymm10[9],ymm11[10],ymm10[11],ymm11[12,13],ymm10[14],ymm11[15]
4470 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm12, %xmm13
4471 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1],xmm13[2,3],xmm12[4,5,6],xmm13[7]
4472 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm6 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,128,128,128,128,128,128,128,128,128,128,128,128,u,u,u,u,u,u]
4473 ; AVX512DQ-FCP-NEXT: vpshufb %ymm6, %ymm12, %ymm12
4474 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm13 = ymm9[0],ymm8[1],ymm9[2,3],ymm8[4],ymm9[5],ymm8[6],ymm9[7,8],ymm8[9],ymm9[10,11],ymm8[12],ymm9[13],ymm8[14],ymm9[15]
4475 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm17 = [2,0,0,0,4,7,1,6]
4476 ; AVX512DQ-FCP-NEXT: vpermd %ymm13, %ymm17, %ymm13
4477 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm13 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm13[0,1,18,19,20,21,26,27,16,17,30,31],zero,zero,zero,zero,zero,zero
4478 ; AVX512DQ-FCP-NEXT: vpor %ymm13, %ymm12, %ymm12
4479 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm13 = [1,3,2,0,1,3,6,0]
4480 ; AVX512DQ-FCP-NEXT: vpermd %ymm7, %ymm13, %ymm15
4481 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm13 = [0,1,6,7,4,5,6,7,8,9,0,1,6,7,8,9,16,17,22,23,20,21,22,23,24,25,16,17,22,23,24,25]
4482 ; AVX512DQ-FCP-NEXT: vpshufb %ymm13, %ymm15, %ymm15
4483 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm15 = zmm15 ^ (zmm18 & (zmm15 ^ zmm12))
4484 ; AVX512DQ-FCP-NEXT: vpshufb %xmm6, %xmm3, %xmm6
4485 ; AVX512DQ-FCP-NEXT: vpsrlq $48, %xmm2, %xmm12
4486 ; AVX512DQ-FCP-NEXT: vpunpckldq {{.*#+}} xmm6 = xmm6[0],xmm12[0],xmm6[1],xmm12[1]
4487 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm4[0],ymm5[1],ymm4[2,3],ymm5[4],ymm4[5],ymm5[6],ymm4[7,8],ymm5[9],ymm4[10,11],ymm5[12],ymm4[13],ymm5[14],ymm4[15]
4488 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm17 = [0,2,5,7,4,7,0,0]
4489 ; AVX512DQ-FCP-NEXT: vpermd %ymm12, %ymm17, %ymm12
4490 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[2,3,4,5,4,5,0,1,6,7,8,9,14,15,4,5,18,19,20,21,20,21,16,17,22,23,24,25,30,31,20,21]
4491 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2],xmm12[3,4,5,6,7]
4492 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm12[4,5,6,7]
4493 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
4494 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} ymm17 = [1,4,6,3,1,4,6,3]
4495 ; AVX512DQ-FCP-NEXT: # ymm17 = mem[0,1,2,3,0,1,2,3]
4496 ; AVX512DQ-FCP-NEXT: vpermd %ymm12, %ymm17, %ymm12
4497 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,26,27,16,17,30,31,24,25]
4498 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3,4],ymm12[5,6,7]
4499 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm15, %zmm17
4500 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
4501 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm6, %xmm15
4502 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm15[0,1,2],xmm6[3,4],xmm15[5,6,7]
4503 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[u,u,u,u,8,9,2,3,12,13,6,7,0,1,10,11]
4504 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
4505 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm15 = xmm3[u,u,u,u,u,u,u,u,4,5,14,15,u,u,u,u]
4506 ; AVX512DQ-FCP-NEXT: vpunpckhdq {{.*#+}} xmm15 = xmm15[2],xmm2[2],xmm15[3],xmm2[3]
4507 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm5[0],ymm4[1],ymm5[2],ymm4[3],ymm5[4,5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10],ymm4[11],ymm5[12,13],ymm4[14],ymm5[15]
4508 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm19 = [0,3,5,2,5,7,0,0]
4509 ; AVX512DQ-FCP-NEXT: vpermd %ymm12, %ymm19, %ymm12
4510 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[0,1,6,7,2,3,2,3,4,5,10,11,0,1,14,15,16,17,22,23,18,19,18,19,20,21,26,27,16,17,30,31]
4511 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm15 = xmm15[0,1,2],xmm12[3,4,5,6,7]
4512 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm12 = ymm15[0,1,2,3],ymm12[4,5,6,7]
4513 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm12[0,1,2,3,4],ymm6[5,6,7]
4514 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm11[0,1],ymm10[2],ymm11[3],ymm10[4],ymm11[5,6],ymm10[7],ymm11[8,9],ymm10[10],ymm11[11],ymm10[12],ymm11[13,14],ymm10[15]
4515 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm12, %xmm15
4516 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2],xmm15[3,4],xmm12[5,6,7]
4517 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm12 = xmm12[4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
4518 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm15 = ymm8[0],ymm9[1],ymm8[2,3],ymm9[4],ymm8[5],ymm9[6],ymm8[7,8],ymm9[9],ymm8[10,11],ymm9[12],ymm8[13],ymm9[14],ymm8[15]
4519 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm19 = [0,2,0,0,5,7,2,4]
4520 ; AVX512DQ-FCP-NEXT: vpermd %ymm15, %ymm19, %ymm15
4521 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm15 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,20,21,22,23,16,17,22,23]
4522 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2],ymm15[3,4,5,6,7]
4523 ; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [1,4,6,0,1,4,6,0]
4524 ; AVX512DQ-FCP-NEXT: # ymm15 = mem[0,1,0,1]
4525 ; AVX512DQ-FCP-NEXT: vpermd %ymm7, %ymm15, %ymm15
4526 ; AVX512DQ-FCP-NEXT: vpshufb %ymm14, %ymm15, %ymm14
4527 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm14 = zmm14 ^ (zmm18 & (zmm14 ^ zmm12))
4528 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm14, %zmm14
4529 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm10[0],ymm11[1,2],ymm10[3],ymm11[4],ymm10[5],ymm11[6,7],ymm10[8],ymm11[9,10],ymm10[11],ymm11[12],ymm10[13],ymm11[14,15]
4530 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm6, %xmm12
4531 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm6 = xmm12[0],xmm6[1],xmm12[2],xmm6[3]
4532 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm6 = xmm6[6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
4533 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm12 = ymm9[0],ymm8[1],ymm9[2],ymm8[3],ymm9[4,5],ymm8[6],ymm9[7,8],ymm8[9],ymm9[10],ymm8[11],ymm9[12,13],ymm8[14],ymm9[15]
4534 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm15 = [0,3,0,0,5,0,2,7]
4535 ; AVX512DQ-FCP-NEXT: vpermd %ymm12, %ymm15, %ymm12
4536 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm12 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,2,3,4,5,18,19,20,21,26,27,16,17,30,31,30,31,18,19,20,21]
4537 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2],ymm12[3,4,5,6,7]
4538 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm12 = xmm2[0],xmm3[1],xmm2[2,3]
4539 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm12 = xmm12[6,7,0,1,10,11,u,u,u,u,u,u,u,u,u,u]
4540 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm15 = ymm5[0,1],ymm4[2],ymm5[3],ymm4[4],ymm5[5,6],ymm4[7],ymm5[8,9],ymm4[10],ymm5[11],ymm4[12],ymm5[13,14],ymm4[15]
4541 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm18 = [1,3,6,0,5,0,0,0]
4542 ; AVX512DQ-FCP-NEXT: vpermd %ymm15, %ymm18, %ymm15
4543 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm15 = ymm15[2,3,2,3,4,5,0,1,6,7,8,9,14,15,4,5,18,19,18,19,20,21,16,17,22,23,24,25,30,31,20,21]
4544 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2],xmm15[3,4,5,6,7]
4545 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
4546 ; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm15 = [2,4,7,0,2,4,7,0]
4547 ; AVX512DQ-FCP-NEXT: # ymm15 = mem[0,1,0,1]
4548 ; AVX512DQ-FCP-NEXT: vpermd %ymm7, %ymm15, %ymm15
4549 ; AVX512DQ-FCP-NEXT: vpshufb %ymm13, %ymm15, %ymm13
4550 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm12, %zmm13, %zmm12
4551 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm12 = zmm12 ^ (mem & (zmm12 ^ zmm6))
4552 ; AVX512DQ-FCP-NEXT: vextracti64x4 $1, %zmm12, %ymm6
4553 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm13 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
4554 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm13, %xmm15
4555 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm13 = xmm13[0],xmm15[1,2,3],xmm13[4,5],xmm15[6,7]
4556 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[u,u,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
4557 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
4558 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm13 = ymm6[0],ymm13[1,2,3,4,5,6,7],ymm6[8],ymm13[9,10,11,12,13,14,15]
4559 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm13[4,5,6,7]
4560 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm6, %zmm12, %zmm6
4561 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm10 = ymm11[0],ymm10[1],ymm11[2,3],ymm10[4],ymm11[5],ymm10[6],ymm11[7,8],ymm10[9],ymm11[10,11],ymm10[12],ymm11[13],ymm10[14],ymm11[15]
4562 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm10, %xmm11
4563 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm10 = xmm11[0,1,2],xmm10[3,4],xmm11[5,6,7]
4564 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[8,9,2,3,12,13,6,7,0,1,10,11,u,u,u,u]
4565 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0,1],ymm8[2],ymm9[3],ymm8[4],ymm9[5,6],ymm8[7],ymm9[8,9],ymm8[10],ymm9[11],ymm8[12],ymm9[13,14],ymm8[15]
4566 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm9 = [1,3,0,0,6,0,3,5]
4567 ; AVX512DQ-FCP-NEXT: vpermd %ymm8, %ymm9, %ymm8
4568 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm8 = ymm8[u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,16,17,22,23,16,17,22,23]
4569 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm8 = ymm10[0,1,2],ymm8[3,4,5,6,7]
4570 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm2 = xmm2[0,1],xmm3[2],xmm2[3]
4571 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[8,9,2,3,12,13,u,u,u,u,u,u,u,u,u,u]
4572 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm5[1,2],ymm4[3],ymm5[4],ymm4[5],ymm5[6,7],ymm4[8],ymm5[9,10],ymm4[11],ymm5[12],ymm4[13],ymm5[14,15]
4573 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm4 = [1,4,6,3,6,0,0,0]
4574 ; AVX512DQ-FCP-NEXT: vpermd %ymm3, %ymm4, %ymm3
4575 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5,2,3,4,5,10,11,0,1,14,15,16,17,18,19,20,21,18,19,20,21,26,27,16,17,30,31]
4576 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm3[3,4,5,6,7]
4577 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
4578 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm3 = [0,2,1,3,0,2,5,7]
4579 ; AVX512DQ-FCP-NEXT: vpermd %ymm7, %ymm3, %ymm3
4580 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} ymm3 = ymm3[0,1,6,7,4,5,6,7,0,1,6,7,8,9,14,15,16,17,22,23,20,21,22,23,16,17,22,23,24,25,30,31]
4581 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm3, %zmm2
4582 ; AVX512DQ-FCP-NEXT: movb $7, %al
4583 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
4584 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm8, %zmm0, %zmm2 {%k1}
4585 ; AVX512DQ-FCP-NEXT: vextracti64x4 $1, %zmm2, %ymm3
4586 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
4587 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
4588 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6],xmm1[7]
4589 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[u,u,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
4590 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
4591 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0],ymm0[1,2,3,4,5,6,7],ymm3[8],ymm0[9,10,11,12,13,14,15]
4592 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
4593 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
4594 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, (%rsi)
4595 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm17, (%rdx)
4596 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm14, (%rcx)
4597 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm6, (%r8)
4598 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, (%r9)
4599 ; AVX512DQ-FCP-NEXT: vzeroupper
4600 ; AVX512DQ-FCP-NEXT: retq
4601 ;
4602 ; AVX512BW-LABEL: load_i16_stride5_vf32:
4603 ; AVX512BW: # %bb.0:
4604 ; AVX512BW-NEXT: vmovdqa64 256(%rdi), %zmm0
4605 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm1
4606 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm2
4607 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm3
4608 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm4
4609 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11]
4610 ; AVX512BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
4611 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm5
4612 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
4613 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm6
4614 ; AVX512BW-NEXT: movl $67100672, %eax # imm = 0x3FFE000
4615 ; AVX512BW-NEXT: kmovd %eax, %k1
4616 ; AVX512BW-NEXT: vmovdqu16 %zmm5, %zmm6 {%k1}
4617 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm5 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,34,39,44,49,54,59]
4618 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm6, %zmm5
4619 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44]
4620 ; AVX512BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
4621 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm4, %zmm6
4622 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm7 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
4623 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm7
4624 ; AVX512BW-NEXT: vmovdqu16 %zmm6, %zmm7 {%k1}
4625 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm6 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,35,40,45,50,55,60]
4626 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm7, %zmm6
4627 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45]
4628 ; AVX512BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
4629 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm4, %zmm7
4630 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm8 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
4631 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm2, %zmm8
4632 ; AVX512BW-NEXT: vmovdqu16 %zmm7, %zmm8 {%k1}
4633 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm7 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,36,41,46,51,56,61]
4634 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm8, %zmm7
4635 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14]
4636 ; AVX512BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
4637 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm8
4638 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm9 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
4639 ; AVX512BW-NEXT: vpermi2w %zmm1, %zmm2, %zmm9
4640 ; AVX512BW-NEXT: movl $33546240, %eax # imm = 0x1FFE000
4641 ; AVX512BW-NEXT: kmovd %eax, %k1
4642 ; AVX512BW-NEXT: vmovdqu16 %zmm8, %zmm9 {%k1}
4643 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm8 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,32,37,42,47,52,57,62]
4644 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm9, %zmm8
4645 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm9 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
4646 ; AVX512BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm9
4647 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15]
4648 ; AVX512BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
4649 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm1
4650 ; AVX512BW-NEXT: movb $7, %al
4651 ; AVX512BW-NEXT: kmovd %eax, %k1
4652 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm1 {%k1}
4653 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm2 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,33,38,43,48,53,58,63]
4654 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
4655 ; AVX512BW-NEXT: vmovdqa64 %zmm5, (%rsi)
4656 ; AVX512BW-NEXT: vmovdqa64 %zmm6, (%rdx)
4657 ; AVX512BW-NEXT: vmovdqa64 %zmm7, (%rcx)
4658 ; AVX512BW-NEXT: vmovdqa64 %zmm8, (%r8)
4659 ; AVX512BW-NEXT: vmovdqa64 %zmm2, (%r9)
4660 ; AVX512BW-NEXT: vzeroupper
4661 ; AVX512BW-NEXT: retq
4662 ;
4663 ; AVX512BW-FCP-LABEL: load_i16_stride5_vf32:
4664 ; AVX512BW-FCP: # %bb.0:
4665 ; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm0
4666 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm1
4667 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm2
4668 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm3
4669 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm4
4670 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11]
4671 ; AVX512BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
4672 ; AVX512BW-FCP-NEXT: vpermi2w %zmm4, %zmm3, %zmm5
4673 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
4674 ; AVX512BW-FCP-NEXT: vpermi2w %zmm2, %zmm1, %zmm6
4675 ; AVX512BW-FCP-NEXT: movl $67100672, %eax # imm = 0x3FFE000
4676 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
4677 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm5, %zmm6 {%k1}
4678 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm5 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,34,39,44,49,54,59]
4679 ; AVX512BW-FCP-NEXT: vpermi2w %zmm0, %zmm6, %zmm5
4680 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44]
4681 ; AVX512BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
4682 ; AVX512BW-FCP-NEXT: vpermi2w %zmm3, %zmm4, %zmm6
4683 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm7 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
4684 ; AVX512BW-FCP-NEXT: vpermi2w %zmm2, %zmm1, %zmm7
4685 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm6, %zmm7 {%k1}
4686 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm6 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,35,40,45,50,55,60]
4687 ; AVX512BW-FCP-NEXT: vpermi2w %zmm0, %zmm7, %zmm6
4688 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45]
4689 ; AVX512BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
4690 ; AVX512BW-FCP-NEXT: vpermi2w %zmm3, %zmm4, %zmm7
4691 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm8 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
4692 ; AVX512BW-FCP-NEXT: vpermi2w %zmm1, %zmm2, %zmm8
4693 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm7, %zmm8 {%k1}
4694 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm7 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,36,41,46,51,56,61]
4695 ; AVX512BW-FCP-NEXT: vpermi2w %zmm0, %zmm8, %zmm7
4696 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14]
4697 ; AVX512BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
4698 ; AVX512BW-FCP-NEXT: vpermi2w %zmm4, %zmm3, %zmm8
4699 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm9 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
4700 ; AVX512BW-FCP-NEXT: vpermi2w %zmm1, %zmm2, %zmm9
4701 ; AVX512BW-FCP-NEXT: movl $33546240, %eax # imm = 0x1FFE000
4702 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
4703 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm8, %zmm9 {%k1}
4704 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm8 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,32,37,42,47,52,57,62]
4705 ; AVX512BW-FCP-NEXT: vpermi2w %zmm0, %zmm9, %zmm8
4706 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm9 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
4707 ; AVX512BW-FCP-NEXT: vpermi2w %zmm2, %zmm1, %zmm9
4708 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15]
4709 ; AVX512BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
4710 ; AVX512BW-FCP-NEXT: vpermi2w %zmm4, %zmm3, %zmm1
4711 ; AVX512BW-FCP-NEXT: movb $7, %al
4712 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
4713 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm1 {%k1}
4714 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm2 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,33,38,43,48,53,58,63]
4715 ; AVX512BW-FCP-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
4716 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm5, (%rsi)
4717 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, (%rdx)
4718 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm7, (%rcx)
4719 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, (%r8)
4720 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm2, (%r9)
4721 ; AVX512BW-FCP-NEXT: vzeroupper
4722 ; AVX512BW-FCP-NEXT: retq
4723 ;
4724 ; AVX512DQ-BW-LABEL: load_i16_stride5_vf32:
4725 ; AVX512DQ-BW: # %bb.0:
4726 ; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdi), %zmm0
4727 ; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm1
4728 ; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm2
4729 ; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm3
4730 ; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm4
4731 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11]
4732 ; AVX512DQ-BW-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
4733 ; AVX512DQ-BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm5
4734 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
4735 ; AVX512DQ-BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm6
4736 ; AVX512DQ-BW-NEXT: movl $67100672, %eax # imm = 0x3FFE000
4737 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
4738 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm5, %zmm6 {%k1}
4739 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm5 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,34,39,44,49,54,59]
4740 ; AVX512DQ-BW-NEXT: vpermi2w %zmm0, %zmm6, %zmm5
4741 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44]
4742 ; AVX512DQ-BW-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
4743 ; AVX512DQ-BW-NEXT: vpermi2w %zmm3, %zmm4, %zmm6
4744 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm7 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
4745 ; AVX512DQ-BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm7
4746 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm6, %zmm7 {%k1}
4747 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm6 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,35,40,45,50,55,60]
4748 ; AVX512DQ-BW-NEXT: vpermi2w %zmm0, %zmm7, %zmm6
4749 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45]
4750 ; AVX512DQ-BW-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
4751 ; AVX512DQ-BW-NEXT: vpermi2w %zmm3, %zmm4, %zmm7
4752 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm8 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
4753 ; AVX512DQ-BW-NEXT: vpermi2w %zmm1, %zmm2, %zmm8
4754 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm7, %zmm8 {%k1}
4755 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm7 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,36,41,46,51,56,61]
4756 ; AVX512DQ-BW-NEXT: vpermi2w %zmm0, %zmm8, %zmm7
4757 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14]
4758 ; AVX512DQ-BW-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
4759 ; AVX512DQ-BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm8
4760 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm9 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
4761 ; AVX512DQ-BW-NEXT: vpermi2w %zmm1, %zmm2, %zmm9
4762 ; AVX512DQ-BW-NEXT: movl $33546240, %eax # imm = 0x1FFE000
4763 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
4764 ; AVX512DQ-BW-NEXT: vmovdqu16 %zmm8, %zmm9 {%k1}
4765 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm8 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,32,37,42,47,52,57,62]
4766 ; AVX512DQ-BW-NEXT: vpermi2w %zmm0, %zmm9, %zmm8
4767 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm9 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
4768 ; AVX512DQ-BW-NEXT: vpermi2w %zmm2, %zmm1, %zmm9
4769 ; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15]
4770 ; AVX512DQ-BW-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
4771 ; AVX512DQ-BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm1
4772 ; AVX512DQ-BW-NEXT: movb $7, %al
4773 ; AVX512DQ-BW-NEXT: kmovd %eax, %k1
4774 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm1 {%k1}
4775 ; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm2 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,33,38,43,48,53,58,63]
4776 ; AVX512DQ-BW-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
4777 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm5, (%rsi)
4778 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, (%rdx)
4779 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm7, (%rcx)
4780 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, (%r8)
4781 ; AVX512DQ-BW-NEXT: vmovdqa64 %zmm2, (%r9)
4782 ; AVX512DQ-BW-NEXT: vzeroupper
4783 ; AVX512DQ-BW-NEXT: retq
4784 ;
4785 ; AVX512DQ-BW-FCP-LABEL: load_i16_stride5_vf32:
4786 ; AVX512DQ-BW-FCP: # %bb.0:
4787 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm0
4788 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm1
4789 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm2
4790 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm3
4791 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm4
4792 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm5 = [16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11]
4793 ; AVX512DQ-BW-FCP-NEXT: # zmm5 = mem[0,1,2,3,0,1,2,3]
4794 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm4, %zmm3, %zmm5
4795 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
4796 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm2, %zmm1, %zmm6
4797 ; AVX512DQ-BW-FCP-NEXT: movl $67100672, %eax # imm = 0x3FFE000
4798 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
4799 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm5, %zmm6 {%k1}
4800 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm5 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,34,39,44,49,54,59]
4801 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm0, %zmm6, %zmm5
4802 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm6 = [49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44]
4803 ; AVX512DQ-BW-FCP-NEXT: # zmm6 = mem[0,1,2,3,0,1,2,3]
4804 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm3, %zmm4, %zmm6
4805 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm7 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
4806 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm2, %zmm1, %zmm7
4807 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm6, %zmm7 {%k1}
4808 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm6 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,35,40,45,50,55,60]
4809 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm0, %zmm7, %zmm6
4810 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm7 = [50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45]
4811 ; AVX512DQ-BW-FCP-NEXT: # zmm7 = mem[0,1,2,3,0,1,2,3]
4812 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm3, %zmm4, %zmm7
4813 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm8 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
4814 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm1, %zmm2, %zmm8
4815 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm7, %zmm8 {%k1}
4816 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm7 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,36,41,46,51,56,61]
4817 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm0, %zmm8, %zmm7
4818 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm8 = [19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14]
4819 ; AVX512DQ-BW-FCP-NEXT: # zmm8 = mem[0,1,2,3,0,1,2,3]
4820 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm4, %zmm3, %zmm8
4821 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm9 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
4822 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm1, %zmm2, %zmm9
4823 ; AVX512DQ-BW-FCP-NEXT: movl $33546240, %eax # imm = 0x1FFE000
4824 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
4825 ; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm8, %zmm9 {%k1}
4826 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm8 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,32,37,42,47,52,57,62]
4827 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm0, %zmm9, %zmm8
4828 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm9 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
4829 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm2, %zmm1, %zmm9
4830 ; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm1 = [20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15]
4831 ; AVX512DQ-BW-FCP-NEXT: # zmm1 = mem[0,1,2,3,0,1,2,3]
4832 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm4, %zmm3, %zmm1
4833 ; AVX512DQ-BW-FCP-NEXT: movb $7, %al
4834 ; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
4835 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm1 {%k1}
4836 ; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm2 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,33,38,43,48,53,58,63]
4837 ; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm0, %zmm1, %zmm2
4838 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm5, (%rsi)
4839 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, (%rdx)
4840 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm7, (%rcx)
4841 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, (%r8)
4842 ; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm2, (%r9)
4843 ; AVX512DQ-BW-FCP-NEXT: vzeroupper
4844 ; AVX512DQ-BW-FCP-NEXT: retq
4845 %wide.vec = load <160 x i16>, ptr %in.vec, align 64
4846 %strided.vec0 = shufflevector <160 x i16> %wide.vec, <160 x i16> poison, <32 x i32> <i32 0, i32 5, i32 10, i32 15, i32 20, i32 25, i32 30, i32 35, i32 40, i32 45, i32 50, i32 55, i32 60, i32 65, i32 70, i32 75, i32 80, i32 85, i32 90, i32 95, i32 100, i32 105, i32 110, i32 115, i32 120, i32 125, i32 130, i32 135, i32 140, i32 145, i32 150, i32 155>
4847 %strided.vec1 = shufflevector <160 x i16> %wide.vec, <160 x i16> poison, <32 x i32> <i32 1, i32 6, i32 11, i32 16, i32 21, i32 26, i32 31, i32 36, i32 41, i32 46, i32 51, i32 56, i32 61, i32 66, i32 71, i32 76, i32 81, i32 86, i32 91, i32 96, i32 101, i32 106, i32 111, i32 116, i32 121, i32 126, i32 131, i32 136, i32 141, i32 146, i32 151, i32 156>
4848 %strided.vec2 = shufflevector <160 x i16> %wide.vec, <160 x i16> poison, <32 x i32> <i32 2, i32 7, i32 12, i32 17, i32 22, i32 27, i32 32, i32 37, i32 42, i32 47, i32 52, i32 57, i32 62, i32 67, i32 72, i32 77, i32 82, i32 87, i32 92, i32 97, i32 102, i32 107, i32 112, i32 117, i32 122, i32 127, i32 132, i32 137, i32 142, i32 147, i32 152, i32 157>
4849 %strided.vec3 = shufflevector <160 x i16> %wide.vec, <160 x i16> poison, <32 x i32> <i32 3, i32 8, i32 13, i32 18, i32 23, i32 28, i32 33, i32 38, i32 43, i32 48, i32 53, i32 58, i32 63, i32 68, i32 73, i32 78, i32 83, i32 88, i32 93, i32 98, i32 103, i32 108, i32 113, i32 118, i32 123, i32 128, i32 133, i32 138, i32 143, i32 148, i32 153, i32 158>
4850 %strided.vec4 = shufflevector <160 x i16> %wide.vec, <160 x i16> poison, <32 x i32> <i32 4, i32 9, i32 14, i32 19, i32 24, i32 29, i32 34, i32 39, i32 44, i32 49, i32 54, i32 59, i32 64, i32 69, i32 74, i32 79, i32 84, i32 89, i32 94, i32 99, i32 104, i32 109, i32 114, i32 119, i32 124, i32 129, i32 134, i32 139, i32 144, i32 149, i32 154, i32 159>
4851 store <32 x i16> %strided.vec0, ptr %out.vec0, align 64
4852 store <32 x i16> %strided.vec1, ptr %out.vec1, align 64
4853 store <32 x i16> %strided.vec2, ptr %out.vec2, align 64
4854 store <32 x i16> %strided.vec3, ptr %out.vec3, align 64
4855 store <32 x i16> %strided.vec4, ptr %out.vec4, align 64
4856 ret void
4857 }
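; Note on the vf32 case above: the reference IR is a single <160 x i16> load split by
; five stride-5 shufflevectors (offsets 0 through 4), with each <32 x i16> result stored
; to its own output pointer. In the checked lowerings, the AVX512BW-family prefixes build
; each result from three vpermi2w word permutes with a masked merge in between
; (kmovd + vmovdqu16/vmovdqa64 {%k1}), while the SSE lowering of the vf64 case below has
; to work in 128-bit pieces and spills heavily (subq $1016, %rsp with many 16-byte
; spill/reload slots).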
4859 define void @load_i16_stride5_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr %out.vec2, ptr %out.vec3, ptr %out.vec4) nounwind {
4860 ; SSE-LABEL: load_i16_stride5_vf64:
4861 ; SSE: # %bb.0:
4862 ; SSE-NEXT: subq $1016, %rsp # imm = 0x3F8
4863 ; SSE-NEXT: movdqa 464(%rdi), %xmm5
4864 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4865 ; SSE-NEXT: movdqa 400(%rdi), %xmm8
4866 ; SSE-NEXT: movdqa 416(%rdi), %xmm11
4867 ; SSE-NEXT: movdqa 448(%rdi), %xmm4
4868 ; SSE-NEXT: movdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4869 ; SSE-NEXT: movdqa 432(%rdi), %xmm7
4870 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4871 ; SSE-NEXT: movdqa 144(%rdi), %xmm6
4872 ; SSE-NEXT: movdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4873 ; SSE-NEXT: movdqa 80(%rdi), %xmm15
4874 ; SSE-NEXT: movdqa 96(%rdi), %xmm10
4875 ; SSE-NEXT: movdqa 128(%rdi), %xmm14
4876 ; SSE-NEXT: movdqa 112(%rdi), %xmm2
4877 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4878 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [65535,65535,65535,65535,0,65535,65535,65535]
4879 ; SSE-NEXT: movdqa %xmm0, %xmm1
4880 ; SSE-NEXT: pandn %xmm2, %xmm1
4881 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm14[0,1,0,3]
4882 ; SSE-NEXT: movdqa %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4883 ; SSE-NEXT: pand %xmm0, %xmm2
4884 ; SSE-NEXT: por %xmm1, %xmm2
4885 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm10[3,1,2,3]
4886 ; SSE-NEXT: movdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4887 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
4888 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm15[0,2,2,3]
4889 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm3[0,3,2,3,4,5,6,7]
4890 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
4891 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,3]
4892 ; SSE-NEXT: movaps {{.*#+}} xmm13 = [65535,65535,65535,65535,65535,65535,65535,0]
4893 ; SSE-NEXT: andps %xmm13, %xmm3
4894 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm6[0,1,0,1]
4895 ; SSE-NEXT: movaps %xmm13, %xmm2
4896 ; SSE-NEXT: pandn %xmm1, %xmm2
4897 ; SSE-NEXT: por %xmm3, %xmm2
4898 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4899 ; SSE-NEXT: movdqa %xmm0, %xmm1
4900 ; SSE-NEXT: pandn %xmm7, %xmm1
4901 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm4[0,1,0,3]
4902 ; SSE-NEXT: pand %xmm0, %xmm2
4903 ; SSE-NEXT: por %xmm1, %xmm2
4904 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm11[3,1,2,3]
4905 ; SSE-NEXT: movdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4906 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
4907 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm8[0,2,2,3]
4908 ; SSE-NEXT: movdqa %xmm8, %xmm6
4909 ; SSE-NEXT: movdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4910 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm3[0,3,2,3,4,5,6,7]
4911 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
4912 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm2[2,3]
4913 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm5[0,1,0,1]
4914 ; SSE-NEXT: movaps %xmm13, %xmm2
4915 ; SSE-NEXT: andnps %xmm1, %xmm2
4916 ; SSE-NEXT: movdqa 32(%rdi), %xmm3
4917 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4918 ; SSE-NEXT: andps %xmm13, %xmm4
4919 ; SSE-NEXT: orps %xmm4, %xmm2
4920 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4921 ; SSE-NEXT: movdqa %xmm0, %xmm1
4922 ; SSE-NEXT: pandn %xmm3, %xmm1
4923 ; SSE-NEXT: movdqa 48(%rdi), %xmm2
4924 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4925 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,0,3]
4926 ; SSE-NEXT: pand %xmm0, %xmm2
4927 ; SSE-NEXT: por %xmm1, %xmm2
4928 ; SSE-NEXT: movdqa 16(%rdi), %xmm1
4929 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4930 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
4931 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
4932 ; SSE-NEXT: movdqa (%rdi), %xmm9
4933 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm9[0,2,2,3]
4934 ; SSE-NEXT: movdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4935 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
4936 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
4937 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm2[2,3]
4938 ; SSE-NEXT: movdqa 64(%rdi), %xmm1
4939 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4940 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
4941 ; SSE-NEXT: movaps %xmm13, %xmm2
4942 ; SSE-NEXT: andnps %xmm1, %xmm2
4943 ; SSE-NEXT: andps %xmm13, %xmm4
4944 ; SSE-NEXT: orps %xmm4, %xmm2
4945 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4946 ; SSE-NEXT: movdqa 352(%rdi), %xmm2
4947 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4948 ; SSE-NEXT: movdqa %xmm0, %xmm1
4949 ; SSE-NEXT: pandn %xmm2, %xmm1
4950 ; SSE-NEXT: movdqa 368(%rdi), %xmm2
4951 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4952 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,0,3]
4953 ; SSE-NEXT: pand %xmm0, %xmm2
4954 ; SSE-NEXT: por %xmm1, %xmm2
4955 ; SSE-NEXT: movdqa 336(%rdi), %xmm1
4956 ; SSE-NEXT: movdqa %xmm1, (%rsp) # 16-byte Spill
4957 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
4958 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
4959 ; SSE-NEXT: movdqa 320(%rdi), %xmm7
4960 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm7[0,2,2,3]
4961 ; SSE-NEXT: movdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4962 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
4963 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
4964 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm2[2,3]
4965 ; SSE-NEXT: movdqa 384(%rdi), %xmm1
4966 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4967 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
4968 ; SSE-NEXT: movaps %xmm13, %xmm2
4969 ; SSE-NEXT: andnps %xmm1, %xmm2
4970 ; SSE-NEXT: andps %xmm13, %xmm4
4971 ; SSE-NEXT: orps %xmm4, %xmm2
4972 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4973 ; SSE-NEXT: movdqa 272(%rdi), %xmm2
4974 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4975 ; SSE-NEXT: movdqa %xmm0, %xmm1
4976 ; SSE-NEXT: pandn %xmm2, %xmm1
4977 ; SSE-NEXT: movdqa 288(%rdi), %xmm2
4978 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4979 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,0,3]
4980 ; SSE-NEXT: pand %xmm0, %xmm2
4981 ; SSE-NEXT: por %xmm1, %xmm2
4982 ; SSE-NEXT: movdqa 256(%rdi), %xmm12
4983 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm12[3,1,2,3]
4984 ; SSE-NEXT: movdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4985 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
4986 ; SSE-NEXT: movdqa 240(%rdi), %xmm3
4987 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4988 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,2,2,3]
4989 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
4990 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
4991 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm2[2,3]
4992 ; SSE-NEXT: movdqa 304(%rdi), %xmm1
4993 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
4994 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
4995 ; SSE-NEXT: movaps %xmm13, %xmm2
4996 ; SSE-NEXT: andnps %xmm1, %xmm2
4997 ; SSE-NEXT: andps %xmm13, %xmm4
4998 ; SSE-NEXT: orps %xmm4, %xmm2
4999 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5000 ; SSE-NEXT: movdqa 592(%rdi), %xmm2
5001 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5002 ; SSE-NEXT: movdqa %xmm0, %xmm1
5003 ; SSE-NEXT: pandn %xmm2, %xmm1
5004 ; SSE-NEXT: movdqa 608(%rdi), %xmm2
5005 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5006 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,1,0,3]
5007 ; SSE-NEXT: pand %xmm0, %xmm2
5008 ; SSE-NEXT: por %xmm1, %xmm2
5009 ; SSE-NEXT: movdqa 576(%rdi), %xmm1
5010 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5011 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
5012 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
5013 ; SSE-NEXT: movdqa 560(%rdi), %xmm3
5014 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,2,2,3]
5015 ; SSE-NEXT: movdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5016 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
5017 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
5018 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm2[2,3]
5019 ; SSE-NEXT: movdqa 624(%rdi), %xmm1
5020 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5021 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
5022 ; SSE-NEXT: movaps %xmm13, %xmm2
5023 ; SSE-NEXT: andnps %xmm1, %xmm2
5024 ; SSE-NEXT: andps %xmm13, %xmm4
5025 ; SSE-NEXT: orps %xmm4, %xmm2
5026 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5027 ; SSE-NEXT: movdqa 192(%rdi), %xmm2
5028 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5029 ; SSE-NEXT: movdqa %xmm0, %xmm1
5030 ; SSE-NEXT: pandn %xmm2, %xmm1
5031 ; SSE-NEXT: movdqa 208(%rdi), %xmm2
5032 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5033 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[0,1,0,3]
5034 ; SSE-NEXT: pand %xmm0, %xmm4
5035 ; SSE-NEXT: por %xmm1, %xmm4
5036 ; SSE-NEXT: movdqa 176(%rdi), %xmm1
5037 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5038 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
5039 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
5040 ; SSE-NEXT: movdqa 160(%rdi), %xmm2
5041 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5042 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm2[0,2,2,3]
5043 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm5[0,3,2,3,4,5,6,7]
5044 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1]
5045 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[2,3]
5046 ; SSE-NEXT: movdqa 224(%rdi), %xmm1
5047 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5048 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,1]
5049 ; SSE-NEXT: movaps %xmm13, %xmm4
5050 ; SSE-NEXT: andnps %xmm1, %xmm4
5051 ; SSE-NEXT: andps %xmm13, %xmm5
5052 ; SSE-NEXT: orps %xmm5, %xmm4
5053 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5054 ; SSE-NEXT: movdqa 528(%rdi), %xmm1
5055 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5056 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,3]
5057 ; SSE-NEXT: pand %xmm0, %xmm1
5058 ; SSE-NEXT: movdqa 512(%rdi), %xmm2
5059 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5060 ; SSE-NEXT: pandn %xmm2, %xmm0
5061 ; SSE-NEXT: por %xmm1, %xmm0
5062 ; SSE-NEXT: movdqa 496(%rdi), %xmm1
5063 ; SSE-NEXT: movdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5064 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[3,1,2,3]
5065 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[2,1,2,3,4,5,6,7]
5066 ; SSE-NEXT: movdqa 480(%rdi), %xmm2
5067 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5068 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[0,2,2,3]
5069 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
5070 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
5071 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm0[2,3]
5072 ; SSE-NEXT: movdqa 544(%rdi), %xmm0
5073 ; SSE-NEXT: movdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5074 ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,0,1]
5075 ; SSE-NEXT: movaps %xmm13, %xmm1
5076 ; SSE-NEXT: andnps %xmm0, %xmm1
5077 ; SSE-NEXT: andps %xmm13, %xmm4
5078 ; SSE-NEXT: orps %xmm4, %xmm1
5079 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5080 ; SSE-NEXT: psrlq $48, %xmm10
5081 ; SSE-NEXT: movdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5082 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm15[0,3,2,3]
5083 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,2,2,3,4,5,6,7]
5084 ; SSE-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm10[0],xmm1[1],xmm10[1]
5085 ; SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,0,0,65535,65535,65535,65535,65535]
5086 ; SSE-NEXT: movdqa %xmm0, %xmm4
5087 ; SSE-NEXT: pandn %xmm1, %xmm4
5088 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm14[1,3,2,3]
5089 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5090 ; SSE-NEXT: # xmm5 = mem[0,2,2,3]
5091 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1]
5092 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm5[0,1,2,3,7,5,6,7]
5093 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
5094 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,0,0,0,4,5,6,7]
5095 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,4,7]
5096 ; SSE-NEXT: pand %xmm0, %xmm1
5097 ; SSE-NEXT: por %xmm4, %xmm1
5098 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
5099 ; SSE-NEXT: movdqa %xmm8, %xmm4
5100 ; SSE-NEXT: psllq $48, %xmm4
5101 ; SSE-NEXT: movaps %xmm13, %xmm2
5102 ; SSE-NEXT: andnps %xmm4, %xmm2
5103 ; SSE-NEXT: pand %xmm13, %xmm1
5104 ; SSE-NEXT: orps %xmm1, %xmm2
5105 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5106 ; SSE-NEXT: psrlq $48, %xmm11
5107 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm6[0,3,2,3]
5108 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[1,2,2,3,4,5,6,7]
5109 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm11[0],xmm4[1],xmm11[1]
5110 ; SSE-NEXT: movdqa %xmm0, %xmm1
5111 ; SSE-NEXT: pandn %xmm4, %xmm1
5112 ; SSE-NEXT: pshufd $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5113 ; SSE-NEXT: # xmm4 = mem[1,3,2,3]
5114 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5115 ; SSE-NEXT: # xmm5 = mem[0,2,2,3]
5116 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5117 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm5[0,1,2,3,7,5,6,7]
5118 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,1,2,1]
5119 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,0,0,0,4,5,6,7]
5120 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,4,7]
5121 ; SSE-NEXT: pand %xmm0, %xmm4
5122 ; SSE-NEXT: por %xmm1, %xmm4
5123 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5124 ; SSE-NEXT: psllq $48, %xmm1
5125 ; SSE-NEXT: movdqa %xmm13, %xmm2
5126 ; SSE-NEXT: pandn %xmm1, %xmm2
5127 ; SSE-NEXT: pand %xmm13, %xmm4
5128 ; SSE-NEXT: por %xmm4, %xmm2
5129 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5130 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5131 ; SSE-NEXT: psrlq $48, %xmm1
5132 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm9[0,3,2,3]
5133 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[1,2,2,3,4,5,6,7]
5134 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
5135 ; SSE-NEXT: movdqa %xmm0, %xmm1
5136 ; SSE-NEXT: pandn %xmm4, %xmm1
5137 ; SSE-NEXT: pshufd $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5138 ; SSE-NEXT: # xmm4 = mem[1,3,2,3]
5139 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5140 ; SSE-NEXT: # xmm5 = mem[0,2,2,3]
5141 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5142 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm5[0,1,2,3,7,5,6,7]
5143 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,1,2,1]
5144 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,0,0,0,4,5,6,7]
5145 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,4,7]
5146 ; SSE-NEXT: pand %xmm0, %xmm4
5147 ; SSE-NEXT: por %xmm1, %xmm4
5148 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5149 ; SSE-NEXT: psllq $48, %xmm1
5150 ; SSE-NEXT: movdqa %xmm13, %xmm2
5151 ; SSE-NEXT: pandn %xmm1, %xmm2
5152 ; SSE-NEXT: pand %xmm13, %xmm4
5153 ; SSE-NEXT: por %xmm4, %xmm2
5154 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5155 ; SSE-NEXT: movdqa (%rsp), %xmm1 # 16-byte Reload
5156 ; SSE-NEXT: psrlq $48, %xmm1
5157 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm7[0,3,2,3]
5158 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[1,2,2,3,4,5,6,7]
5159 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
5160 ; SSE-NEXT: movdqa %xmm0, %xmm1
5161 ; SSE-NEXT: pandn %xmm4, %xmm1
5162 ; SSE-NEXT: pshufd $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5163 ; SSE-NEXT: # xmm4 = mem[1,3,2,3]
5164 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5165 ; SSE-NEXT: # xmm5 = mem[0,2,2,3]
5166 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5167 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm5[0,1,2,3,7,5,6,7]
5168 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,1,2,1]
5169 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,0,0,0,4,5,6,7]
5170 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,4,7]
5171 ; SSE-NEXT: pand %xmm0, %xmm4
5172 ; SSE-NEXT: por %xmm1, %xmm4
5173 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5174 ; SSE-NEXT: psllq $48, %xmm1
5175 ; SSE-NEXT: movdqa %xmm13, %xmm2
5176 ; SSE-NEXT: pandn %xmm1, %xmm2
5177 ; SSE-NEXT: pand %xmm13, %xmm4
5178 ; SSE-NEXT: por %xmm4, %xmm2
5179 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5180 ; SSE-NEXT: psrlq $48, %xmm12
5181 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
5182 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm14[0,3,2,3]
5183 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[1,2,2,3,4,5,6,7]
5184 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm12[0],xmm4[1],xmm12[1]
5185 ; SSE-NEXT: movdqa %xmm0, %xmm1
5186 ; SSE-NEXT: pandn %xmm4, %xmm1
5187 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
5188 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm10[1,3,2,3]
5189 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5190 ; SSE-NEXT: # xmm5 = mem[0,2,2,3]
5191 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5192 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm5[0,1,2,3,7,5,6,7]
5193 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,1,2,1]
5194 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,0,0,0,4,5,6,7]
5195 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,4,7]
5196 ; SSE-NEXT: pand %xmm0, %xmm4
5197 ; SSE-NEXT: por %xmm1, %xmm4
5198 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
5199 ; SSE-NEXT: movdqa %xmm12, %xmm1
5200 ; SSE-NEXT: psllq $48, %xmm1
5201 ; SSE-NEXT: movdqa %xmm13, %xmm2
5202 ; SSE-NEXT: pandn %xmm1, %xmm2
5203 ; SSE-NEXT: pand %xmm13, %xmm4
5204 ; SSE-NEXT: por %xmm4, %xmm2
5205 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5206 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5207 ; SSE-NEXT: psrlq $48, %xmm1
5208 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,3,2,3]
5209 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[1,2,2,3,4,5,6,7]
5210 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
5211 ; SSE-NEXT: movdqa %xmm0, %xmm1
5212 ; SSE-NEXT: pandn %xmm4, %xmm1
5213 ; SSE-NEXT: pshufd $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5214 ; SSE-NEXT: # xmm4 = mem[1,3,2,3]
5215 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5216 ; SSE-NEXT: # xmm5 = mem[0,2,2,3]
5217 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5218 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm5[0,1,2,3,7,5,6,7]
5219 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,1,2,1]
5220 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,0,0,0,4,5,6,7]
5221 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,4,7]
5222 ; SSE-NEXT: pand %xmm0, %xmm4
5223 ; SSE-NEXT: por %xmm1, %xmm4
5224 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5225 ; SSE-NEXT: psllq $48, %xmm1
5226 ; SSE-NEXT: movdqa %xmm13, %xmm2
5227 ; SSE-NEXT: pandn %xmm1, %xmm2
5228 ; SSE-NEXT: pand %xmm13, %xmm4
5229 ; SSE-NEXT: por %xmm4, %xmm2
5230 ; SSE-NEXT: movdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5231 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
5232 ; SSE-NEXT: movdqa %xmm7, %xmm1
5233 ; SSE-NEXT: psrlq $48, %xmm1
5234 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
5235 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm11[0,3,2,3]
5236 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[1,2,2,3,4,5,6,7]
5237 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
5238 ; SSE-NEXT: movdqa %xmm0, %xmm1
5239 ; SSE-NEXT: pandn %xmm4, %xmm1
5240 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5241 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm2[1,3,2,3]
5242 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
5243 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm9[0,2,2,3]
5244 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
5245 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm5[0,1,2,3,7,5,6,7]
5246 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm4[0,1,2,1]
5247 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[0,0,0,0,4,5,6,7]
5248 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,5,6,4,7]
5249 ; SSE-NEXT: pand %xmm0, %xmm4
5250 ; SSE-NEXT: por %xmm1, %xmm4
5251 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5252 ; SSE-NEXT: movdqa %xmm3, %xmm1
5253 ; SSE-NEXT: psllq $48, %xmm1
5254 ; SSE-NEXT: movdqa %xmm13, %xmm5
5255 ; SSE-NEXT: pandn %xmm1, %xmm5
5256 ; SSE-NEXT: pand %xmm13, %xmm4
5257 ; SSE-NEXT: por %xmm4, %xmm5
5258 ; SSE-NEXT: movdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5259 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5260 ; SSE-NEXT: psrlq $48, %xmm1
5261 ; SSE-NEXT: pshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5262 ; SSE-NEXT: # xmm4 = mem[0,3,2,3]
5263 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm4[1,2,2,3,4,5,6,7]
5264 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
5265 ; SSE-NEXT: pshufd $237, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5266 ; SSE-NEXT: # xmm1 = mem[1,3,2,3]
5267 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5268 ; SSE-NEXT: # xmm5 = mem[0,2,2,3]
5269 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1]
5270 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm5[0,1,2,3,7,5,6,7]
5271 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,2,1]
5272 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,0,0,0,4,5,6,7]
5273 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,6,4,7]
5274 ; SSE-NEXT: pand %xmm0, %xmm1
5275 ; SSE-NEXT: pandn %xmm4, %xmm0
5276 ; SSE-NEXT: por %xmm1, %xmm0
5277 ; SSE-NEXT: pand %xmm13, %xmm0
5278 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5279 ; SSE-NEXT: psllq $48, %xmm1
5280 ; SSE-NEXT: pandn %xmm1, %xmm13
5281 ; SSE-NEXT: por %xmm0, %xmm13
5282 ; SSE-NEXT: movdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5283 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5284 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5285 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,0],xmm0[0,0]
5286 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm0[2,3]
5287 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0,1,3]
5288 ; SSE-NEXT: movaps {{.*#+}} xmm6 = [65535,65535,65535,0,0,0,65535,65535]
5289 ; SSE-NEXT: movaps %xmm6, %xmm4
5290 ; SSE-NEXT: andnps %xmm1, %xmm4
5291 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm15[0,1,1,3]
5292 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,7,6,7]
5293 ; SSE-NEXT: punpckhdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5294 ; SSE-NEXT: # xmm5 = xmm5[2],mem[2],xmm5[3],mem[3]
5295 ; SSE-NEXT: pand %xmm6, %xmm5
5296 ; SSE-NEXT: por %xmm4, %xmm5
5297 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,6,7]
5298 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm8[0,1,2,0]
5299 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6,5]
5300 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm1[2,3]
5301 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[2,0]
5302 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5303 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5304 ; SSE-NEXT: movaps %xmm0, %xmm1
5305 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
5306 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,0],xmm13[0,0]
5307 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm13[2,3]
5308 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0,1,3]
5309 ; SSE-NEXT: movaps %xmm6, %xmm4
5310 ; SSE-NEXT: andnps %xmm1, %xmm4
5311 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
5312 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm8[0,1,1,3]
5313 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,7,6,7]
5314 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5315 ; SSE-NEXT: punpckhdq {{.*#+}} xmm5 = xmm5[2],xmm15[2],xmm5[3],xmm15[3]
5316 ; SSE-NEXT: pand %xmm6, %xmm5
5317 ; SSE-NEXT: por %xmm4, %xmm5
5318 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,6,7]
5319 ; SSE-NEXT: pshufd $36, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5320 ; SSE-NEXT: # xmm4 = mem[0,1,2,0]
5321 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6,5]
5322 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm1[2,3]
5323 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[2,0]
5324 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5325 ; SSE-NEXT: movdqa %xmm10, %xmm1
5326 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
5327 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,0],xmm4[0,0]
5328 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm4[2,3]
5329 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0,1,3]
5330 ; SSE-NEXT: movaps %xmm6, %xmm4
5331 ; SSE-NEXT: andnps %xmm1, %xmm4
5332 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm14[0,1,1,3]
5333 ; SSE-NEXT: pshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,7,6,7]
5334 ; SSE-NEXT: punpckhdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5335 ; SSE-NEXT: # xmm5 = xmm5[2],mem[2],xmm5[3],mem[3]
5336 ; SSE-NEXT: pand %xmm6, %xmm5
5337 ; SSE-NEXT: por %xmm4, %xmm5
5338 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,6,7]
5339 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm12[0,1,2,0]
5340 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6,5]
5341 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm1[2,3]
5342 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm4[2,0]
5343 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5344 ; SSE-NEXT: movdqa %xmm2, %xmm1
5345 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,0],xmm9[0,0]
5346 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm9[2,3]
5347 ; SSE-NEXT: movdqa %xmm9, %xmm12
5348 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0,1,3]
5349 ; SSE-NEXT: movaps %xmm6, %xmm4
5350 ; SSE-NEXT: andnps %xmm1, %xmm4
5351 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm11[0,1,1,3]
5352 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm5[0,1,2,3,4,7,6,7]
5353 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm7[2],xmm2[3],xmm7[3]
5354 ; SSE-NEXT: pand %xmm6, %xmm2
5355 ; SSE-NEXT: por %xmm4, %xmm2
5356 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,6,7]
5357 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,1,2,0]
5358 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6,5]
5359 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm1[2,3]
5360 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm4[2,0]
5361 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5362 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5363 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5364 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,0],xmm2[0,0]
5365 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm2[2,3]
5366 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0,1,3]
5367 ; SSE-NEXT: movaps %xmm6, %xmm4
5368 ; SSE-NEXT: andnps %xmm1, %xmm4
5369 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5370 ; SSE-NEXT: # xmm5 = mem[0,1,1,3]
5371 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm5[0,1,2,3,4,7,6,7]
5372 ; SSE-NEXT: punpckhdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5373 ; SSE-NEXT: # xmm2 = xmm2[2],mem[2],xmm2[3],mem[3]
5374 ; SSE-NEXT: pand %xmm6, %xmm2
5375 ; SSE-NEXT: por %xmm4, %xmm2
5376 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,6,7]
5377 ; SSE-NEXT: pshufd $36, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5378 ; SSE-NEXT: # xmm4 = mem[0,1,2,0]
5379 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6,5]
5380 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm1[2,3]
5381 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm4[2,0]
5382 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5383 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5384 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
5385 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,0],xmm7[0,0]
5386 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm7[2,3]
5387 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0,1,3]
5388 ; SSE-NEXT: movaps %xmm6, %xmm4
5389 ; SSE-NEXT: andnps %xmm1, %xmm4
5390 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5391 ; SSE-NEXT: # xmm5 = mem[0,1,1,3]
5392 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm5[0,1,2,3,4,7,6,7]
5393 ; SSE-NEXT: movdqa (%rsp), %xmm9 # 16-byte Reload
5394 ; SSE-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm9[2],xmm2[3],xmm9[3]
5395 ; SSE-NEXT: pand %xmm6, %xmm2
5396 ; SSE-NEXT: por %xmm4, %xmm2
5397 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,6,7]
5398 ; SSE-NEXT: pshufd $36, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5399 ; SSE-NEXT: # xmm4 = mem[0,1,2,0]
5400 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6,5]
5401 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm1[2,3]
5402 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm4[2,0]
5403 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5404 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5405 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5406 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,0],xmm2[0,0]
5407 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm2[2,3]
5408 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0,1,3]
5409 ; SSE-NEXT: movaps %xmm6, %xmm4
5410 ; SSE-NEXT: andnps %xmm1, %xmm4
5411 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5412 ; SSE-NEXT: # xmm5 = mem[0,1,1,3]
5413 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm5[0,1,2,3,4,7,6,7]
5414 ; SSE-NEXT: punpckhdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5415 ; SSE-NEXT: # xmm2 = xmm2[2],mem[2],xmm2[3],mem[3]
5416 ; SSE-NEXT: pand %xmm6, %xmm2
5417 ; SSE-NEXT: por %xmm4, %xmm2
5418 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,6,7]
5419 ; SSE-NEXT: pshufd $36, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5420 ; SSE-NEXT: # xmm4 = mem[0,1,2,0]
5421 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6,5]
5422 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm1[2,3]
5423 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm4[2,0]
5424 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5425 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5426 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
5427 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[1,0],xmm10[0,0]
5428 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm10[2,3]
5429 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0,1,3]
5430 ; SSE-NEXT: movaps %xmm6, %xmm4
5431 ; SSE-NEXT: andnps %xmm1, %xmm4
5432 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5433 ; SSE-NEXT: # xmm5 = mem[0,1,1,3]
5434 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm5[0,1,2,3,4,7,6,7]
5435 ; SSE-NEXT: punpckhdq {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5436 ; SSE-NEXT: # xmm2 = xmm2[2],mem[2],xmm2[3],mem[3]
5437 ; SSE-NEXT: pand %xmm6, %xmm2
5438 ; SSE-NEXT: por %xmm4, %xmm2
5439 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,6,5,6,7]
5440 ; SSE-NEXT: pshufd $36, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5441 ; SSE-NEXT: # xmm4 = mem[0,1,2,0]
5442 ; SSE-NEXT: pshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,6,5]
5443 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,1],xmm1[2,3]
5444 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm4[2,0]
5445 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5446 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm13[2,2,2,2,4,5,6,7]
5447 ; SSE-NEXT: movdqa %xmm6, %xmm4
5448 ; SSE-NEXT: pandn %xmm1, %xmm4
5449 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm8[1,1,1,1]
5450 ; SSE-NEXT: pshufd {{.*#+}} xmm5 = xmm15[0,2,2,3]
5451 ; SSE-NEXT: punpckldq {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1]
5452 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm5[0,3,2,3,4,5,6,7]
5453 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
5454 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,0,3,3,4,5,6,7]
5455 ; SSE-NEXT: pand %xmm6, %xmm1
5456 ; SSE-NEXT: por %xmm4, %xmm1
5457 ; SSE-NEXT: movdqa %xmm1, %xmm2
5458 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,0],xmm13[3,0]
5459 ; SSE-NEXT: movaps %xmm6, %xmm3
5460 ; SSE-NEXT: andnps %xmm13, %xmm3
5461 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5462 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm0[0,2]
5463 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm13[0,1,2,3,7,4,6,7]
5464 ; SSE-NEXT: pshufd $196, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
5465 ; SSE-NEXT: # xmm3 = mem[0,1,0,3]
5466 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,6]
5467 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[3,1],xmm1[2,3]
5468 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm3[2,0]
5469 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5470 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
5471 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm14[2,2,2,2,4,5,6,7]
5472 ; SSE-NEXT: movdqa %xmm6, %xmm3
5473 ; SSE-NEXT: pandn %xmm1, %xmm3
5474 ; SSE-NEXT: pshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5475 ; SSE-NEXT: # xmm1 = mem[1,1,1,1]
5476 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5477 ; SSE-NEXT: # xmm4 = mem[0,2,2,3]
5478 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
5479 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm4[0,3,2,3,4,5,6,7]
5480 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
5481 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,0,3,3,4,5,6,7]
5482 ; SSE-NEXT: pand %xmm6, %xmm1
5483 ; SSE-NEXT: por %xmm3, %xmm1
5484 ; SSE-NEXT: movdqa %xmm1, %xmm4
5485 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5486 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm14[3,0]
5487 ; SSE-NEXT: movaps %xmm6, %xmm2
5488 ; SSE-NEXT: andnps %xmm14, %xmm2
5489 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5490 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm1[0,2]
5491 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm14[0,1,2,3,7,4,6,7]
5492 ; SSE-NEXT: pshufd $196, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
5493 ; SSE-NEXT: # xmm3 = mem[0,1,0,3]
5494 ; SSE-NEXT: pshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,5,6]
5495 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[3,1],xmm1[2,3]
5496 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm3[2,0]
5497 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5498 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm12[2,2,2,2,4,5,6,7]
5499 ; SSE-NEXT: movdqa %xmm6, %xmm3
5500 ; SSE-NEXT: pandn %xmm1, %xmm3
5501 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm11[1,1,1,1]
5502 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
5503 ; SSE-NEXT: pshufd {{.*#+}} xmm4 = xmm13[0,2,2,3]
5504 ; SSE-NEXT: punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
5505 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm4[0,3,2,3,4,5,6,7]
5506 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
5507 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,0,3,3,4,5,6,7]
5508 ; SSE-NEXT: pand %xmm6, %xmm1
5509 ; SSE-NEXT: por %xmm3, %xmm1
5510 ; SSE-NEXT: movdqa %xmm1, %xmm3
5511 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5512 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm12[3,0]
5513 ; SSE-NEXT: movaps %xmm6, %xmm2
5514 ; SSE-NEXT: andnps %xmm12, %xmm2
5515 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5516 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[0,1],xmm1[0,2]
5517 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm12[0,1,2,3,7,4,6,7]
5518 ; SSE-NEXT: pshufd $196, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5519 ; SSE-NEXT: # xmm2 = mem[0,1,0,3]
5520 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,6]
5521 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,1],xmm1[2,3]
5522 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
5523 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5524 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
5525 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm4[2,2,2,2,4,5,6,7]
5526 ; SSE-NEXT: movdqa %xmm6, %xmm2
5527 ; SSE-NEXT: pandn %xmm1, %xmm2
5528 ; SSE-NEXT: pshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5529 ; SSE-NEXT: # xmm1 = mem[1,1,1,1]
5530 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
5531 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm5[0,2,2,3]
5532 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
5533 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm3[0,3,2,3,4,5,6,7]
5534 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
5535 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[1,0,3,3,4,5,6,7]
5536 ; SSE-NEXT: pand %xmm6, %xmm1
5537 ; SSE-NEXT: por %xmm2, %xmm1
5538 ; SSE-NEXT: movdqa %xmm1, %xmm3
5539 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5540 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm4[3,0]
5541 ; SSE-NEXT: movaps %xmm4, %xmm2
5542 ; SSE-NEXT: movaps %xmm6, %xmm4
5543 ; SSE-NEXT: andnps %xmm2, %xmm4
5544 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5545 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[0,2]
5546 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm2[0,1,2,3,7,4,6,7]
5547 ; SSE-NEXT: pshufd $196, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5548 ; SSE-NEXT: # xmm2 = mem[0,1,0,3]
5549 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,6]
5550 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,1],xmm1[2,3]
5551 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm2[2,0]
5552 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5553 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm7[2,2,2,2,4,5,6,7]
5554 ; SSE-NEXT: movdqa %xmm6, %xmm2
5555 ; SSE-NEXT: pandn %xmm1, %xmm2
5556 ; SSE-NEXT: pshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5557 ; SSE-NEXT: # xmm1 = mem[1,1,1,1]
5558 ; SSE-NEXT: movdqa %xmm9, %xmm11
5559 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm9[0,2,2,3]
5560 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
5561 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm3[0,3,2,3,4,5,6,7]
5562 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
5563 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm1[1,0,3,3,4,5,6,7]
5564 ; SSE-NEXT: pand %xmm6, %xmm0
5565 ; SSE-NEXT: por %xmm2, %xmm0
5566 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5567 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm7[3,0]
5568 ; SSE-NEXT: movaps %xmm6, %xmm2
5569 ; SSE-NEXT: andnps %xmm7, %xmm2
5570 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5571 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,1],xmm1[0,2]
5572 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm7[0,1,2,3,7,4,6,7]
5573 ; SSE-NEXT: pshufd $196, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5574 ; SSE-NEXT: # xmm2 = mem[0,1,0,3]
5575 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,6]
5576 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,1],xmm1[2,3]
5577 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm2[2,0]
5578 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5579 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
5580 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm4[2,2,2,2,4,5,6,7]
5581 ; SSE-NEXT: movdqa %xmm6, %xmm2
5582 ; SSE-NEXT: pandn %xmm1, %xmm2
5583 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
5584 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm15[1,1,1,1]
5585 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
5586 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm9[0,2,2,3]
5587 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
5588 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm3[0,3,2,3,4,5,6,7]
5589 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
5590 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm1[1,0,3,3,4,5,6,7]
5591 ; SSE-NEXT: pand %xmm6, %xmm0
5592 ; SSE-NEXT: por %xmm2, %xmm0
5593 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5594 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm4[3,0]
5595 ; SSE-NEXT: movaps %xmm4, %xmm2
5596 ; SSE-NEXT: movaps %xmm6, %xmm3
5597 ; SSE-NEXT: andnps %xmm4, %xmm3
5598 ; SSE-NEXT: movaps %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5599 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[0,2]
5600 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm2[0,1,2,3,7,4,6,7]
5601 ; SSE-NEXT: pshufd $196, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5602 ; SSE-NEXT: # xmm2 = mem[0,1,0,3]
5603 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,6]
5604 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,1],xmm1[2,3]
5605 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm2[2,0]
5606 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5607 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm10[2,2,2,2,4,5,6,7]
5608 ; SSE-NEXT: movdqa %xmm6, %xmm2
5609 ; SSE-NEXT: pandn %xmm1, %xmm2
5610 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
5611 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm12[1,1,1,1]
5612 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
5613 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm7[0,2,2,3]
5614 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
5615 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm3[0,3,2,3,4,5,6,7]
5616 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
5617 ; SSE-NEXT: pshuflw {{.*#+}} xmm14 = xmm1[1,0,3,3,4,5,6,7]
5618 ; SSE-NEXT: pand %xmm6, %xmm14
5619 ; SSE-NEXT: por %xmm2, %xmm14
5620 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5621 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm10[3,0]
5622 ; SSE-NEXT: movaps %xmm6, %xmm0
5623 ; SSE-NEXT: andnps %xmm10, %xmm0
5624 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5625 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm1[0,2]
5626 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm10[0,1,2,3,7,4,6,7]
5627 ; SSE-NEXT: pshufd $196, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5628 ; SSE-NEXT: # xmm2 = mem[0,1,0,3]
5629 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,6]
5630 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[3,1],xmm1[2,3]
5631 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[0,1],xmm2[2,0]
5632 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
5633 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm0[2,2,2,2,4,5,6,7]
5634 ; SSE-NEXT: movdqa %xmm6, %xmm2
5635 ; SSE-NEXT: pandn %xmm1, %xmm2
5636 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
5637 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm8[1,1,1,1]
5638 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
5639 ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm4[0,2,2,3]
5640 ; SSE-NEXT: punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
5641 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm3[0,3,2,3,4,5,6,7]
5642 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
5643 ; SSE-NEXT: pshuflw {{.*#+}} xmm10 = xmm1[1,0,3,3,4,5,6,7]
5644 ; SSE-NEXT: pand %xmm6, %xmm10
5645 ; SSE-NEXT: por %xmm2, %xmm10
5646 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
5647 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5648 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm2[3,0]
5649 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm1[0,2]
5650 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
5651 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5652 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm3[3,0]
5653 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm1[0,2]
5654 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5655 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm13[3,0]
5656 ; SSE-NEXT: shufps {{.*#+}} xmm13 = xmm13[0,1],xmm1[0,2]
5657 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5658 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm5[3,0]
5659 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm1[0,2]
5660 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5661 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm11[3,0]
5662 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[0,1],xmm1[0,2]
5663 ; SSE-NEXT: movaps %xmm11, (%rsp) # 16-byte Spill
5664 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[2,0],xmm9[3,0]
5665 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[0,1],xmm15[0,2]
5666 ; SSE-NEXT: movdqa %xmm7, %xmm1
5667 ; SSE-NEXT: shufps {{.*#+}} xmm12 = xmm12[2,0],xmm7[3,0]
5668 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,1],xmm12[0,2]
5669 ; SSE-NEXT: movaps %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5670 ; SSE-NEXT: movdqa %xmm4, %xmm1
5671 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,0],xmm4[3,0]
5672 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,1],xmm8[0,2]
5673 ; SSE-NEXT: movaps %xmm1, %xmm15
5674 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
5675 ; SSE-NEXT: movaps %xmm4, %xmm1
5676 ; SSE-NEXT: movaps %xmm4, %xmm12
5677 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,0],xmm0[3,0]
5678 ; SSE-NEXT: movaps %xmm0, %xmm11
5679 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,6,6,7]
5680 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
5681 ; SSE-NEXT: pshuflw {{.*#+}} xmm8 = xmm2[2,1,3,3,4,5,6,7]
5682 ; SSE-NEXT: pand %xmm6, %xmm8
5683 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,4,6,6,7]
5684 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
5685 ; SSE-NEXT: pshuflw {{.*#+}} xmm7 = xmm2[2,1,3,3,4,5,6,7]
5686 ; SSE-NEXT: pand %xmm6, %xmm7
5687 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm13[0,1,2,3,4,6,6,7]
5688 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
5689 ; SSE-NEXT: pshuflw {{.*#+}} xmm0 = xmm2[2,1,3,3,4,5,6,7]
5690 ; SSE-NEXT: pand %xmm6, %xmm0
5691 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm5[0,1,2,3,4,6,6,7]
5692 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
5693 ; SSE-NEXT: pshuflw {{.*#+}} xmm5 = xmm2[2,1,3,3,4,5,6,7]
5694 ; SSE-NEXT: pand %xmm6, %xmm5
5695 ; SSE-NEXT: pshufhw $232, (%rsp), %xmm2 # 16-byte Folded Reload
5696 ; SSE-NEXT: # xmm2 = mem[0,1,2,3,4,6,6,7]
5697 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
5698 ; SSE-NEXT: pshuflw {{.*#+}} xmm4 = xmm2[2,1,3,3,4,5,6,7]
5699 ; SSE-NEXT: pand %xmm6, %xmm4
5700 ; SSE-NEXT: pshufhw {{.*#+}} xmm2 = xmm9[0,1,2,3,4,6,6,7]
5701 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
5702 ; SSE-NEXT: pshuflw {{.*#+}} xmm3 = xmm2[2,1,3,3,4,5,6,7]
5703 ; SSE-NEXT: pand %xmm6, %xmm3
5704 ; SSE-NEXT: pshufhw $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5705 ; SSE-NEXT: # xmm2 = mem[0,1,2,3,4,6,6,7]
5706 ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[0,2,2,3]
5707 ; SSE-NEXT: pshuflw {{.*#+}} xmm2 = xmm2[2,1,3,3,4,5,6,7]
5708 ; SSE-NEXT: pand %xmm6, %xmm2
5709 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm15[0,1,2,3,4,6,6,7]
5710 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm9[0,2,2,3]
5711 ; SSE-NEXT: pshuflw {{.*#+}} xmm9 = xmm9[2,1,3,3,4,5,6,7]
5712 ; SSE-NEXT: pand %xmm6, %xmm9
5713 ; SSE-NEXT: andnps %xmm11, %xmm6
5714 ; SSE-NEXT: shufps {{.*#+}} xmm11 = xmm11[0,1],xmm1[0,2]
5715 ; SSE-NEXT: pshufhw {{.*#+}} xmm1 = xmm11[0,1,2,3,7,4,6,7]
5716 ; SSE-NEXT: movdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
5717 ; SSE-NEXT: pshufd {{.*#+}} xmm15 = xmm11[0,1,0,3]
5718 ; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,6]
5719 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[3,1],xmm1[2,3]
5720 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[0,1],xmm15[2,0]
5721 ; SSE-NEXT: por {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
5722 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5723 ; SSE-NEXT: # xmm1 = mem[0,2,2,3]
5724 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
5725 ; SSE-NEXT: # xmm15 = mem[0,1,1,3]
5726 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
5727 ; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,4,7]
5728 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[3,1],xmm1[1,3]
5729 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[0,1],xmm15[2,0]
5730 ; SSE-NEXT: por {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
5731 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5732 ; SSE-NEXT: # xmm1 = mem[0,2,2,3]
5733 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
5734 ; SSE-NEXT: # xmm15 = mem[0,1,1,3]
5735 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
5736 ; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,4,7]
5737 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[3,1],xmm1[1,3]
5738 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[0,1],xmm15[2,0]
5739 ; SSE-NEXT: por {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
5740 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5741 ; SSE-NEXT: # xmm1 = mem[0,2,2,3]
5742 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
5743 ; SSE-NEXT: # xmm15 = mem[0,1,1,3]
5744 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
5745 ; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,4,7]
5746 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[3,1],xmm1[1,3]
5747 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,1],xmm15[2,0]
5748 ; SSE-NEXT: por {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
5749 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5750 ; SSE-NEXT: # xmm1 = mem[0,2,2,3]
5751 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
5752 ; SSE-NEXT: # xmm15 = mem[0,1,1,3]
5753 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
5754 ; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,4,7]
5755 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[3,1],xmm1[1,3]
5756 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[0,1],xmm15[2,0]
5757 ; SSE-NEXT: por {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
5758 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5759 ; SSE-NEXT: # xmm1 = mem[0,2,2,3]
5760 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
5761 ; SSE-NEXT: # xmm15 = mem[0,1,1,3]
5762 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
5763 ; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,4,7]
5764 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[3,1],xmm1[1,3]
5765 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm15[2,0]
5766 ; SSE-NEXT: por {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
5767 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5768 ; SSE-NEXT: # xmm1 = mem[0,2,2,3]
5769 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
5770 ; SSE-NEXT: # xmm15 = mem[0,1,1,3]
5771 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
5772 ; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,4,7]
5773 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[3,1],xmm1[1,3]
5774 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[0,1],xmm15[2,0]
5775 ; SSE-NEXT: por {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
5776 ; SSE-NEXT: pshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
5777 ; SSE-NEXT: # xmm1 = mem[0,2,2,3]
5778 ; SSE-NEXT: pshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
5779 ; SSE-NEXT: # xmm15 = mem[0,1,1,3]
5780 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
5781 ; SSE-NEXT: pshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,4,7]
5782 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[3,1],xmm1[1,3]
5783 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,1],xmm15[2,0]
5784 ; SSE-NEXT: orps %xmm9, %xmm6
5785 ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm12[0,2,2,3]
5786 ; SSE-NEXT: pshufd {{.*#+}} xmm9 = xmm11[0,1,1,3]
5787 ; SSE-NEXT: pshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
5788 ; SSE-NEXT: pshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,4,7]
5789 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[3,1],xmm1[1,3]
5790 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[0,1],xmm9[2,0]
5791 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5792 ; SSE-NEXT: movaps %xmm1, 96(%rsi)
5793 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5794 ; SSE-NEXT: movaps %xmm1, 32(%rsi)
5795 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5796 ; SSE-NEXT: movaps %xmm1, 112(%rsi)
5797 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5798 ; SSE-NEXT: movaps %xmm1, 48(%rsi)
5799 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5800 ; SSE-NEXT: movaps %xmm1, 64(%rsi)
5801 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5802 ; SSE-NEXT: movaps %xmm1, (%rsi)
5803 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5804 ; SSE-NEXT: movaps %xmm1, 80(%rsi)
5805 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5806 ; SSE-NEXT: movaps %xmm1, 16(%rsi)
5807 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5808 ; SSE-NEXT: movaps %xmm1, 96(%rdx)
5809 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5810 ; SSE-NEXT: movaps %xmm1, 32(%rdx)
5811 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5812 ; SSE-NEXT: movaps %xmm1, 112(%rdx)
5813 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5814 ; SSE-NEXT: movaps %xmm1, 48(%rdx)
5815 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5816 ; SSE-NEXT: movaps %xmm1, 64(%rdx)
5817 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5818 ; SSE-NEXT: movaps %xmm1, (%rdx)
5819 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5820 ; SSE-NEXT: movaps %xmm1, 80(%rdx)
5821 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5822 ; SSE-NEXT: movaps %xmm1, 16(%rdx)
5823 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5824 ; SSE-NEXT: movaps %xmm1, 96(%rcx)
5825 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5826 ; SSE-NEXT: movaps %xmm1, 112(%rcx)
5827 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5828 ; SSE-NEXT: movaps %xmm1, 64(%rcx)
5829 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5830 ; SSE-NEXT: movaps %xmm1, 80(%rcx)
5831 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5832 ; SSE-NEXT: movaps %xmm1, 32(%rcx)
5833 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5834 ; SSE-NEXT: movaps %xmm1, 48(%rcx)
5835 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5836 ; SSE-NEXT: movaps %xmm1, (%rcx)
5837 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5838 ; SSE-NEXT: movaps %xmm1, 16(%rcx)
5839 ; SSE-NEXT: movaps %xmm10, 112(%r8)
5840 ; SSE-NEXT: movaps %xmm14, 96(%r8)
5841 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5842 ; SSE-NEXT: movaps %xmm1, 80(%r8)
5843 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5844 ; SSE-NEXT: movaps %xmm1, 64(%r8)
5845 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5846 ; SSE-NEXT: movaps %xmm1, 48(%r8)
5847 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5848 ; SSE-NEXT: movaps %xmm1, 32(%r8)
5849 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5850 ; SSE-NEXT: movaps %xmm1, 16(%r8)
5851 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
5852 ; SSE-NEXT: movaps %xmm1, (%r8)
5853 ; SSE-NEXT: movaps %xmm6, 112(%r9)
5854 ; SSE-NEXT: movaps %xmm2, 96(%r9)
5855 ; SSE-NEXT: movaps %xmm3, 80(%r9)
5856 ; SSE-NEXT: movaps %xmm4, 64(%r9)
5857 ; SSE-NEXT: movaps %xmm5, 48(%r9)
5858 ; SSE-NEXT: movaps %xmm0, 32(%r9)
5859 ; SSE-NEXT: movaps %xmm7, 16(%r9)
5860 ; SSE-NEXT: movaps %xmm8, (%r9)
5861 ; SSE-NEXT: addq $1016, %rsp # imm = 0x3F8
5862 ; SSE-NEXT: retq
5863 ;
5864 ; AVX-LABEL: load_i16_stride5_vf64:
5865 ; AVX: # %bb.0:
5866 ; AVX-NEXT: subq $1032, %rsp # imm = 0x408
5867 ; AVX-NEXT: vmovdqa 304(%rdi), %xmm0
5868 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5869 ; AVX-NEXT: vmovdqa 288(%rdi), %xmm1
5870 ; AVX-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5871 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1],xmm0[2,3],xmm1[4,5,6,7]
5872 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [6,7,2,3,4,5,6,7,6,7,2,3,12,13,6,7]
5873 ; AVX-NEXT: vpshufb %xmm1, %xmm0, %xmm0
5874 ; AVX-NEXT: vmovdqa 256(%rdi), %xmm2
5875 ; AVX-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5876 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[0,1,1,3]
5877 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,7]
5878 ; AVX-NEXT: vmovdqa 272(%rdi), %xmm15
5879 ; AVX-NEXT: vpunpckhqdq {{.*#+}} xmm2 = xmm2[1],xmm15[1]
5880 ; AVX-NEXT: vmovdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5881 ; AVX-NEXT: vmovdqa 240(%rdi), %xmm3
5882 ; AVX-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5883 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
5884 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,3,2,3,4,5,6,7]
5885 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3,4,5,6,7]
5886 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4],xmm0[5,6,7]
5887 ; AVX-NEXT: vmovdqa 208(%rdi), %xmm0
5888 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5889 ; AVX-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,1,0,3]
5890 ; AVX-NEXT: vmovdqa 192(%rdi), %xmm3
5891 ; AVX-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5892 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm3[4],xmm0[5,6,7]
5893 ; AVX-NEXT: vmovdqa 176(%rdi), %xmm3
5894 ; AVX-NEXT: vmovdqa %xmm3, (%rsp) # 16-byte Spill
5895 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[3,1,2,3]
5896 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,2,3,4,5,6,7]
5897 ; AVX-NEXT: vmovdqa 160(%rdi), %xmm4
5898 ; AVX-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5899 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[0,2,2,3]
5900 ; AVX-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
5901 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
5902 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3],xmm0[4,5,6,7]
5903 ; AVX-NEXT: vmovaps {{.*#+}} ymm5 = [65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,65535,65535,65535]
5904 ; AVX-NEXT: vandps %ymm5, %ymm3, %ymm3
5905 ; AVX-NEXT: vmovaps 224(%rdi), %xmm0
5906 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5907 ; AVX-NEXT: vshufps {{.*#+}} xmm4 = xmm0[0,1,0,1]
5908 ; AVX-NEXT: vandnps %ymm4, %ymm5, %ymm4
5909 ; AVX-NEXT: vorps %ymm4, %ymm3, %ymm3
5910 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm0
5911 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5912 ; AVX-NEXT: vmovdqa 576(%rdi), %xmm0
5913 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5914 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,1,1,3]
5915 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,7]
5916 ; AVX-NEXT: vmovdqa 592(%rdi), %xmm12
5917 ; AVX-NEXT: vpunpckhqdq {{.*#+}} xmm2 = xmm2[1],xmm12[1]
5918 ; AVX-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5919 ; AVX-NEXT: vmovdqa 560(%rdi), %xmm0
5920 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5921 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,2,2,3]
5922 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,3,2,3,4,5,6,7]
5923 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3,4,5,6,7]
5924 ; AVX-NEXT: vmovdqa 624(%rdi), %xmm3
5925 ; AVX-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5926 ; AVX-NEXT: vmovdqa 608(%rdi), %xmm0
5927 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5928 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm0[0,1],xmm3[2,3],xmm0[4,5,6,7]
5929 ; AVX-NEXT: vpshufb %xmm1, %xmm3, %xmm3
5930 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4],xmm3[5,6,7]
5931 ; AVX-NEXT: vmovdqa 496(%rdi), %xmm0
5932 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5933 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[3,1,2,3]
5934 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,2,3,4,5,6,7]
5935 ; AVX-NEXT: vmovdqa 480(%rdi), %xmm9
5936 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm9[0,2,2,3]
5937 ; AVX-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5938 ; AVX-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
5939 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
5940 ; AVX-NEXT: vmovdqa 528(%rdi), %xmm0
5941 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5942 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[0,1,0,3]
5943 ; AVX-NEXT: vmovdqa 512(%rdi), %xmm13
5944 ; AVX-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2,3],xmm13[4],xmm4[5,6,7]
5945 ; AVX-NEXT: vmovdqa %xmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5946 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3],xmm4[4,5,6,7]
5947 ; AVX-NEXT: vandps %ymm5, %ymm3, %ymm3
5948 ; AVX-NEXT: vmovaps 544(%rdi), %xmm11
5949 ; AVX-NEXT: vshufps {{.*#+}} xmm4 = xmm11[0,1,0,1]
5950 ; AVX-NEXT: vmovaps %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5951 ; AVX-NEXT: vandnps %ymm4, %ymm5, %ymm4
5952 ; AVX-NEXT: vorps %ymm4, %ymm3, %ymm3
5953 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm0
5954 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5955 ; AVX-NEXT: vmovdqa 96(%rdi), %xmm10
5956 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm10[0,1,1,3]
5957 ; AVX-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5958 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,7]
5959 ; AVX-NEXT: vmovdqa 112(%rdi), %xmm0
5960 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5961 ; AVX-NEXT: vpunpckhqdq {{.*#+}} xmm2 = xmm2[1],xmm0[1]
5962 ; AVX-NEXT: vmovdqa 80(%rdi), %xmm0
5963 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5964 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,2,2,3]
5965 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,3,2,3,4,5,6,7]
5966 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3,4,5,6,7]
5967 ; AVX-NEXT: vmovdqa 144(%rdi), %xmm7
5968 ; AVX-NEXT: vmovdqa 128(%rdi), %xmm6
5969 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm6[0,1],xmm7[2,3],xmm6[4,5,6,7]
5970 ; AVX-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5971 ; AVX-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5972 ; AVX-NEXT: vpshufb %xmm1, %xmm3, %xmm3
5973 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4],xmm3[5,6,7]
5974 ; AVX-NEXT: vmovdqa 16(%rdi), %xmm0
5975 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5976 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[3,1,2,3]
5977 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,2,3,4,5,6,7]
5978 ; AVX-NEXT: vmovdqa (%rdi), %xmm0
5979 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5980 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm0[0,2,2,3]
5981 ; AVX-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,3,2,3,4,5,6,7]
5982 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
5983 ; AVX-NEXT: vmovdqa 32(%rdi), %xmm0
5984 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5985 ; AVX-NEXT: vmovdqa 48(%rdi), %xmm4
5986 ; AVX-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5987 ; AVX-NEXT: vpshufd {{.*#+}} xmm4 = xmm4[0,1,0,3]
5988 ; AVX-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2,3],xmm0[4],xmm4[5,6,7]
5989 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3],xmm4[4,5,6,7]
5990 ; AVX-NEXT: vandps %ymm5, %ymm3, %ymm3
5991 ; AVX-NEXT: vmovaps 64(%rdi), %xmm0
5992 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
5993 ; AVX-NEXT: vshufps {{.*#+}} xmm4 = xmm0[0,1,0,1]
5994 ; AVX-NEXT: vandnps %ymm4, %ymm5, %ymm4
5995 ; AVX-NEXT: vorps %ymm4, %ymm3, %ymm3
5996 ; AVX-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm0
5997 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
5998 ; AVX-NEXT: vmovdqa 464(%rdi), %xmm8
5999 ; AVX-NEXT: vmovdqa 448(%rdi), %xmm0
6000 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6001 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm0[0,1],xmm8[2,3],xmm0[4,5,6,7]
6002 ; AVX-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6003 ; AVX-NEXT: vpshufb %xmm1, %xmm2, %xmm1
6004 ; AVX-NEXT: vmovdqa 416(%rdi), %xmm0
6005 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6006 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[0,1,1,3]
6007 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,4,7]
6008 ; AVX-NEXT: vmovdqa 432(%rdi), %xmm0
6009 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6010 ; AVX-NEXT: vpunpckhqdq {{.*#+}} xmm2 = xmm2[1],xmm0[1]
6011 ; AVX-NEXT: vmovdqa 400(%rdi), %xmm0
6012 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6013 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,2,2,3]
6014 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,3,2,3,4,5,6,7]
6015 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3,4,5,6,7]
6016 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2,3,4],xmm1[5,6,7]
6017 ; AVX-NEXT: vmovdqa 336(%rdi), %xmm0
6018 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6019 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm0[3,1,2,3]
6020 ; AVX-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,1,2,3,4,5,6,7]
6021 ; AVX-NEXT: vmovdqa 320(%rdi), %xmm0
6022 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6023 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,2,2,3]
6024 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,3,2,3,4,5,6,7]
6025 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
6026 ; AVX-NEXT: vmovdqa 368(%rdi), %xmm0
6027 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6028 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm0[0,1,0,3]
6029 ; AVX-NEXT: vmovdqa 352(%rdi), %xmm0
6030 ; AVX-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6031 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3],xmm0[4],xmm3[5,6,7]
6032 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3],xmm3[4,5,6,7]
6033 ; AVX-NEXT: vandps %ymm5, %ymm2, %ymm2
6034 ; AVX-NEXT: vmovaps 384(%rdi), %xmm0
6035 ; AVX-NEXT: vmovaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6036 ; AVX-NEXT: vshufps {{.*#+}} xmm3 = xmm0[0,1,0,1]
6037 ; AVX-NEXT: vandnps %ymm3, %ymm5, %ymm3
6038 ; AVX-NEXT: vorps %ymm3, %ymm2, %ymm2
6039 ; AVX-NEXT: vinsertf128 $1, %xmm1, %ymm2, %ymm0
6040 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6041 ; AVX-NEXT: vpblendw $12, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm1 # 16-byte Folded Reload
6042 ; AVX-NEXT: # xmm1 = xmm15[0,1],mem[2,3],xmm15[4,5,6,7]
6043 ; AVX-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,3,0,4,5,6,7]
6044 ; AVX-NEXT: vpshufhw {{.*#+}} xmm1 = xmm1[0,1,2,3,5,5,5,5]
6045 ; AVX-NEXT: vpshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6046 ; AVX-NEXT: # xmm2 = mem[0,3,2,3]
6047 ; AVX-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[1,2,2,3,4,5,6,7]
6048 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3,4,5,6,7]
6049 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6050 ; AVX-NEXT: vpblendw $207, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm3 # 16-byte Folded Reload
6051 ; AVX-NEXT: # xmm3 = mem[0,1,2,3],xmm0[4,5],mem[6,7]
6052 ; AVX-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,2,3,4,5,6,7,8,9,4,5,14,15,8,9]
6053 ; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm3
6054 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm1[0,1,2,3,4],xmm3[5,6,7]
6055 ; AVX-NEXT: vpshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
6056 ; AVX-NEXT: # xmm1 = mem[0,3,2,3]
6057 ; AVX-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[1,2,2,3,4,5,6,7]
6058 ; AVX-NEXT: vmovdqa (%rsp), %xmm0 # 16-byte Reload
6059 ; AVX-NEXT: vpsrlq $48, %xmm0, %xmm15
6060 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm15 = xmm1[0],xmm15[0],xmm1[1],xmm15[1]
6061 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6062 ; AVX-NEXT: vpblendw $204, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm0 # 16-byte Folded Reload
6063 ; AVX-NEXT: # xmm0 = xmm0[0,1],mem[2,3],xmm0[4,5],mem[6,7]
6064 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,0,1,0,1,0,1,10,11,4,5,14,15,6,7]
6065 ; AVX-NEXT: vpshufb %xmm1, %xmm0, %xmm0
6066 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm15[0,1,2],xmm0[3,4,5,6,7]
6067 ; AVX-NEXT: vandps %ymm5, %ymm0, %ymm0
6068 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
6069 ; AVX-NEXT: vpsllq $48, %xmm4, %xmm15
6070 ; AVX-NEXT: vandnps %ymm15, %ymm5, %ymm15
6071 ; AVX-NEXT: vorps %ymm0, %ymm15, %ymm0
6072 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
6073 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6074 ; AVX-NEXT: vpblendw $12, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12, %xmm0 # 16-byte Folded Reload
6075 ; AVX-NEXT: # xmm0 = xmm12[0,1],mem[2,3],xmm12[4,5,6,7]
6076 ; AVX-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,0,4,5,6,7]
6077 ; AVX-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,5,5,5,5]
6078 ; AVX-NEXT: vpshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
6079 ; AVX-NEXT: # xmm3 = mem[0,3,2,3]
6080 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
6081 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0,1],xmm0[2,3,4,5,6,7]
6082 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
6083 ; AVX-NEXT: vpblendw $207, {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm3 # 16-byte Folded Reload
6084 ; AVX-NEXT: # xmm3 = mem[0,1,2,3],xmm14[4,5],mem[6,7]
6085 ; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm3
6086 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm3[5,6,7]
6087 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm9[0,3,2,3]
6088 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
6089 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
6090 ; AVX-NEXT: vpsrlq $48, %xmm4, %xmm15
6091 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm3[0],xmm15[0],xmm3[1],xmm15[1]
6092 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6093 ; AVX-NEXT: vpblendw {{.*#+}} xmm15 = xmm13[0,1],xmm9[2,3],xmm13[4,5],xmm9[6,7]
6094 ; AVX-NEXT: vpshufb %xmm1, %xmm15, %xmm15
6095 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm15[3,4,5,6,7]
6096 ; AVX-NEXT: vandps %ymm5, %ymm3, %ymm3
6097 ; AVX-NEXT: vpsllq $48, %xmm11, %xmm15
6098 ; AVX-NEXT: vandnps %ymm15, %ymm5, %ymm15
6099 ; AVX-NEXT: vorps %ymm3, %ymm15, %ymm3
6100 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
6101 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6102 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
6103 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm13[0,1],xmm10[2,3],xmm13[4,5,6,7]
6104 ; AVX-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,0,4,5,6,7]
6105 ; AVX-NEXT: vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,5,5,5,5]
6106 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
6107 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm12[0,3,2,3]
6108 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
6109 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0,1],xmm0[2,3,4,5,6,7]
6110 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm6[0,1,2,3],xmm7[4,5],xmm6[6,7]
6111 ; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm3
6112 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm3[5,6,7]
6113 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
6114 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm11[0,3,2,3]
6115 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
6116 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
6117 ; AVX-NEXT: vpsrlq $48, %xmm10, %xmm15
6118 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm3 = xmm3[0],xmm15[0],xmm3[1],xmm15[1]
6119 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
6120 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
6121 ; AVX-NEXT: vpblendw {{.*#+}} xmm15 = xmm4[0,1],xmm6[2,3],xmm4[4,5],xmm6[6,7]
6122 ; AVX-NEXT: vpshufb %xmm1, %xmm15, %xmm15
6123 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm15[3,4,5,6,7]
6124 ; AVX-NEXT: vandps %ymm5, %ymm3, %ymm3
6125 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
6126 ; AVX-NEXT: vpsllq $48, %xmm7, %xmm15
6127 ; AVX-NEXT: vandnps %ymm15, %ymm5, %ymm15
6128 ; AVX-NEXT: vorps %ymm3, %ymm15, %ymm3
6129 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
6130 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6131 ; AVX-NEXT: vpblendw $207, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8, %xmm0 # 16-byte Folded Reload
6132 ; AVX-NEXT: # xmm0 = mem[0,1,2,3],xmm8[4,5],mem[6,7]
6133 ; AVX-NEXT: vpshufb %xmm2, %xmm0, %xmm0
6134 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
6135 ; AVX-NEXT: vpblendw $243, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8, %xmm2 # 16-byte Folded Reload
6136 ; AVX-NEXT: # xmm2 = mem[0,1],xmm8[2,3],mem[4,5,6,7]
6137 ; AVX-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,0,4,5,6,7]
6138 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,5,5,5,5]
6139 ; AVX-NEXT: vpshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
6140 ; AVX-NEXT: # xmm3 = mem[0,3,2,3]
6141 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[1,2,2,3,4,5,6,7]
6142 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1],xmm2[2,3,4,5,6,7]
6143 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2,3,4],xmm0[5,6,7]
6144 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6145 ; AVX-NEXT: vpblendw $204, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
6146 ; AVX-NEXT: # xmm2 = xmm2[0,1],mem[2,3],xmm2[4,5],mem[6,7]
6147 ; AVX-NEXT: vpshufb %xmm1, %xmm2, %xmm1
6148 ; AVX-NEXT: vpshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6149 ; AVX-NEXT: # xmm2 = mem[0,3,2,3]
6150 ; AVX-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[1,2,2,3,4,5,6,7]
6151 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
6152 ; AVX-NEXT: vpsrlq $48, %xmm3, %xmm3
6153 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
6154 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3,4,5,6,7]
6155 ; AVX-NEXT: vandps %ymm5, %ymm1, %ymm1
6156 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6157 ; AVX-NEXT: vpsllq $48, %xmm2, %xmm2
6158 ; AVX-NEXT: vandnps %ymm2, %ymm5, %ymm2
6159 ; AVX-NEXT: vorps %ymm2, %ymm1, %ymm1
6160 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
6161 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6162 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6163 ; AVX-NEXT: vpblendw $207, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm1 # 16-byte Folded Reload
6164 ; AVX-NEXT: # xmm1 = mem[0,1,2,3],xmm0[4,5],mem[6,7]
6165 ; AVX-NEXT: vmovdqa {{.*#+}} xmm0 = [u,u,u,u,8,9,2,3,12,13,12,13,12,13,12,13]
6166 ; AVX-NEXT: vpshufb %xmm0, %xmm1, %xmm1
6167 ; AVX-NEXT: vpshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6168 ; AVX-NEXT: # xmm2 = mem[3,1,2,3]
6169 ; AVX-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,1,2,3,4,5,6,7]
6170 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1],xmm1[2,3,4,5,6,7]
6171 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6172 ; AVX-NEXT: vpblendw $12, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm3 # 16-byte Folded Reload
6173 ; AVX-NEXT: # xmm3 = xmm2[0,1],mem[2,3],xmm2[4,5,6,7]
6174 ; AVX-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,6,7,4,5,6,7,8,9,6,7,0,1,10,11]
6175 ; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm3
6176 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm1[0,1,2,3,4],xmm3[5,6,7]
6177 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
6178 ; AVX-NEXT: vpblendw $243, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm5 # 16-byte Folded Reload
6179 ; AVX-NEXT: # xmm5 = mem[0,1],xmm1[2,3],mem[4,5,6,7]
6180 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [2,3,2,3,2,3,2,3,12,13,6,7,12,13,14,15]
6181 ; AVX-NEXT: vpshufb %xmm1, %xmm5, %xmm5
6182 ; AVX-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
6183 ; AVX-NEXT: # xmm15 = mem[0,1,1,3]
6184 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,7,6,7]
6185 ; AVX-NEXT: vpunpckhdq (%rsp), %xmm15, %xmm15 # 16-byte Folded Reload
6186 ; AVX-NEXT: # xmm15 = xmm15[2],mem[2],xmm15[3],mem[3]
6187 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm15[0,1,2],xmm5[3,4,5],xmm15[6,7]
6188 ; AVX-NEXT: vpshufd $36, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
6189 ; AVX-NEXT: # xmm15 = mem[0,1,2,0]
6190 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,6,5]
6191 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5],xmm15[6,7]
6192 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
6193 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6194 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
6195 ; AVX-NEXT: vpblendw $207, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm3 # 16-byte Folded Reload
6196 ; AVX-NEXT: # xmm3 = mem[0,1,2,3],xmm3[4,5],mem[6,7]
6197 ; AVX-NEXT: vpshufb %xmm0, %xmm3, %xmm3
6198 ; AVX-NEXT: vpshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
6199 ; AVX-NEXT: # xmm5 = mem[3,1,2,3]
6200 ; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[2,1,2,3,4,5,6,7]
6201 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm5[0,1],xmm3[2,3,4,5,6,7]
6202 ; AVX-NEXT: vpblendw $12, {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm5 # 16-byte Folded Reload
6203 ; AVX-NEXT: # xmm5 = xmm14[0,1],mem[2,3],xmm14[4,5,6,7]
6204 ; AVX-NEXT: vpshufb %xmm2, %xmm5, %xmm5
6205 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4],xmm5[5,6,7]
6206 ; AVX-NEXT: vpblendw $243, {{[-0-9]+}}(%r{{[sb]}}p), %xmm9, %xmm5 # 16-byte Folded Reload
6207 ; AVX-NEXT: # xmm5 = mem[0,1],xmm9[2,3],mem[4,5,6,7]
6208 ; AVX-NEXT: vpshufb %xmm1, %xmm5, %xmm5
6209 ; AVX-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
6210 ; AVX-NEXT: # xmm15 = mem[0,1,1,3]
6211 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,7,6,7]
6212 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
6213 ; AVX-NEXT: vpunpckhdq {{.*#+}} xmm15 = xmm15[2],xmm14[2],xmm15[3],xmm14[3]
6214 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm15[0,1,2],xmm5[3,4,5],xmm15[6,7]
6215 ; AVX-NEXT: vpshufd $36, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
6216 ; AVX-NEXT: # xmm15 = mem[0,1,2,0]
6217 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,6,5]
6218 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5],xmm15[6,7]
6219 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
6220 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6221 ; AVX-NEXT: vpblendw $48, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm3 # 16-byte Folded Reload
6222 ; AVX-NEXT: # xmm3 = xmm13[0,1,2,3],mem[4,5],xmm13[6,7]
6223 ; AVX-NEXT: vpshufb %xmm0, %xmm3, %xmm3
6224 ; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm12[3,1,2,3]
6225 ; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[2,1,2,3,4,5,6,7]
6226 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm5[0,1],xmm3[2,3,4,5,6,7]
6227 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
6228 ; AVX-NEXT: vpblendw $243, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
6229 ; AVX-NEXT: # xmm5 = mem[0,1],xmm5[2,3],mem[4,5,6,7]
6230 ; AVX-NEXT: vpshufb %xmm2, %xmm5, %xmm5
6231 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4],xmm5[5,6,7]
6232 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm4[0,1],xmm6[2,3],xmm4[4,5,6,7]
6233 ; AVX-NEXT: vpshufb %xmm1, %xmm5, %xmm5
6234 ; AVX-NEXT: vpshufd {{.*#+}} xmm15 = xmm11[0,1,1,3]
6235 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,7,6,7]
6236 ; AVX-NEXT: vpunpckhdq {{.*#+}} xmm15 = xmm15[2],xmm10[2],xmm15[3],xmm10[3]
6237 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm15[0,1,2],xmm5[3,4,5],xmm15[6,7]
6238 ; AVX-NEXT: vpshufd {{.*#+}} xmm15 = xmm7[0,1,2,0]
6239 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,6,5]
6240 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5],xmm15[6,7]
6241 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
6242 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6243 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
6244 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
6245 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm12[0,1],xmm13[2,3],xmm12[4,5,6,7]
6246 ; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm2
6247 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
6248 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm7[0,1,2,3],xmm8[4,5],xmm7[6,7]
6249 ; AVX-NEXT: vpshufb %xmm0, %xmm3, %xmm0
6250 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
6251 ; AVX-NEXT: vpshufd {{.*#+}} xmm3 = xmm8[3,1,2,3]
6252 ; AVX-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[2,1,2,3,4,5,6,7]
6253 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0,1],xmm0[2,3,4,5,6,7]
6254 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm2[5,6,7]
6255 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
6256 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
6257 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm10[0,1],xmm11[2,3],xmm10[4,5,6,7]
6258 ; AVX-NEXT: vpshufb %xmm1, %xmm2, %xmm1
6259 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
6260 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm6[0,1,1,3]
6261 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,7,6,7]
6262 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
6263 ; AVX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm2[2],xmm4[2],xmm2[3],xmm4[3]
6264 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0,1,2],xmm1[3,4,5],xmm2[6,7]
6265 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6266 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm9[0,1,2,0]
6267 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,6,5]
6268 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm2[6,7]
6269 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
6270 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6271 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6272 ; AVX-NEXT: vpblendw $204, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm1 # 16-byte Folded Reload
6273 ; AVX-NEXT: # xmm1 = xmm0[0,1],mem[2,3],xmm0[4,5],mem[6,7]
6274 ; AVX-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[u,u,0,1,10,11,4,5,14,15,14,15,14,15,14,15]
6275 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6276 ; AVX-NEXT: vpsrlq $48, %xmm2, %xmm2
6277 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0],xmm1[1,2,3,4,5,6,7]
6278 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6279 ; AVX-NEXT: vpblendw $48, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm3 # 16-byte Folded Reload
6280 ; AVX-NEXT: # xmm3 = xmm2[0,1,2,3],mem[4,5],xmm2[6,7]
6281 ; AVX-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,2,3,4,5,6,7,8,9,8,9,2,3,12,13]
6282 ; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm3
6283 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm1[0,1,2,3,4],xmm3[5,6,7]
6284 ; AVX-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
6285 ; AVX-NEXT: vpblendw $12, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm5 # 16-byte Folded Reload
6286 ; AVX-NEXT: # xmm5 = xmm1[0,1],mem[2,3],xmm1[4,5,6,7]
6287 ; AVX-NEXT: vpshufb {{.*#+}} xmm5 = xmm5[6,7,0,1,10,11,10,11,8,9,10,11,12,13,14,15]
6288 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6289 ; AVX-NEXT: vpblendw $48, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm15 # 16-byte Folded Reload
6290 ; AVX-NEXT: # xmm15 = xmm0[0,1,2,3],mem[4,5],xmm0[6,7]
6291 ; AVX-NEXT: vpshuflw {{.*#+}} xmm15 = xmm15[2,2,2,2,4,5,6,7]
6292 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,7,4,6,7]
6293 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm15[3,4,5],xmm5[6,7]
6294 ; AVX-NEXT: vpshufd $196, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
6295 ; AVX-NEXT: # xmm15 = mem[0,1,0,3]
6296 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,6]
6297 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5],xmm15[6,7]
6298 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
6299 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6300 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6301 ; AVX-NEXT: vpblendw $204, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm3 # 16-byte Folded Reload
6302 ; AVX-NEXT: # xmm3 = xmm0[0,1],mem[2,3],xmm0[4,5],mem[6,7]
6303 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [u,u,0,1,10,11,4,5,14,15,14,15,14,15,14,15]
6304 ; AVX-NEXT: vpshufb %xmm1, %xmm3, %xmm3
6305 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6306 ; AVX-NEXT: vpsrlq $48, %xmm0, %xmm5
6307 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm5[0],xmm3[1,2,3,4,5,6,7]
6308 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
6309 ; AVX-NEXT: vpblendw $48, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0, %xmm5 # 16-byte Folded Reload
6310 ; AVX-NEXT: # xmm5 = xmm0[0,1,2,3],mem[4,5],xmm0[6,7]
6311 ; AVX-NEXT: vpshufb %xmm2, %xmm5, %xmm5
6312 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4],xmm5[5,6,7]
6313 ; AVX-NEXT: vpblendw $12, {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm5 # 16-byte Folded Reload
6314 ; AVX-NEXT: # xmm5 = xmm14[0,1],mem[2,3],xmm14[4,5,6,7]
6315 ; AVX-NEXT: vmovdqa {{.*#+}} xmm0 = [6,7,0,1,10,11,10,11,8,9,10,11,12,13,14,15]
6316 ; AVX-NEXT: vpshufb %xmm0, %xmm5, %xmm5
6317 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
6318 ; AVX-NEXT: vpblendw $207, {{[-0-9]+}}(%r{{[sb]}}p), %xmm14, %xmm15 # 16-byte Folded Reload
6319 ; AVX-NEXT: # xmm15 = mem[0,1,2,3],xmm14[4,5],mem[6,7]
6320 ; AVX-NEXT: vpshuflw {{.*#+}} xmm15 = xmm15[2,2,2,2,4,5,6,7]
6321 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,7,4,6,7]
6322 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm15[3,4,5],xmm5[6,7]
6323 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
6324 ; AVX-NEXT: vpshufd {{.*#+}} xmm15 = xmm14[0,1,0,3]
6325 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,6]
6326 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5],xmm15[6,7]
6327 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
6328 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6329 ; AVX-NEXT: vpblendw $51, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7, %xmm3 # 16-byte Folded Reload
6330 ; AVX-NEXT: # xmm3 = mem[0,1],xmm7[2,3],mem[4,5],xmm7[6,7]
6331 ; AVX-NEXT: vpshufb %xmm1, %xmm3, %xmm3
6332 ; AVX-NEXT: vpsrlq $48, %xmm8, %xmm5
6333 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm5[0],xmm3[1,2,3,4,5,6,7]
6334 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm12[0,1,2,3],xmm13[4,5],xmm12[6,7]
6335 ; AVX-NEXT: vpshufb %xmm2, %xmm5, %xmm5
6336 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3,4],xmm5[5,6,7]
6337 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm4[0,1],xmm6[2,3],xmm4[4,5,6,7]
6338 ; AVX-NEXT: vpshufb %xmm0, %xmm5, %xmm5
6339 ; AVX-NEXT: vmovdqa %xmm0, %xmm4
6340 ; AVX-NEXT: vpblendw {{.*#+}} xmm15 = xmm10[0,1,2,3],xmm11[4,5],xmm10[6,7]
6341 ; AVX-NEXT: vpshuflw {{.*#+}} xmm15 = xmm15[2,2,2,2,4,5,6,7]
6342 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,7,4,6,7]
6343 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm15[3,4,5],xmm5[6,7]
6344 ; AVX-NEXT: vpshufd {{.*#+}} xmm15 = xmm9[0,1,0,3]
6345 ; AVX-NEXT: vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,6]
6346 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5],xmm15[6,7]
6347 ; AVX-NEXT: vinsertf128 $1, %xmm3, %ymm5, %ymm3
6348 ; AVX-NEXT: vmovups %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6349 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
6350 ; AVX-NEXT: vpblendw $48, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm3 # 16-byte Folded Reload
6351 ; AVX-NEXT: # xmm3 = xmm13[0,1,2,3],mem[4,5],xmm13[6,7]
6352 ; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm2
6353 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
6354 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
6355 ; AVX-NEXT: vpblendw {{.*#+}} xmm3 = xmm10[0,1],xmm11[2,3],xmm10[4,5],xmm11[6,7]
6356 ; AVX-NEXT: vpshufb %xmm1, %xmm3, %xmm0
6357 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
6358 ; AVX-NEXT: vpsrlq $48, %xmm7, %xmm3
6359 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0],xmm0[1,2,3,4,5,6,7]
6360 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4],xmm2[5,6,7]
6361 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
6362 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
6363 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm6[0,1],xmm3[2,3],xmm6[4,5,6,7]
6364 ; AVX-NEXT: vpshufb %xmm4, %xmm2, %xmm1
6365 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
6366 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
6367 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm4[0,1,2,3],xmm8[4,5],xmm4[6,7]
6368 ; AVX-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[2,2,2,2,4,5,6,7]
6369 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,7,4,6,7]
6370 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm2[3,4,5],xmm1[6,7]
6371 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
6372 ; AVX-NEXT: vpshufd {{.*#+}} xmm2 = xmm9[0,1,0,3]
6373 ; AVX-NEXT: vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,5,6]
6374 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm2[6,7]
6375 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0
6376 ; AVX-NEXT: vmovups %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6377 ; AVX-NEXT: vpshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
6378 ; AVX-NEXT: # xmm0 = mem[3,1,2,3]
6379 ; AVX-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,2,1,4,5,6,7]
6380 ; AVX-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
6381 ; AVX-NEXT: # xmm1 = mem[0,2,2,3]
6382 ; AVX-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
6383 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
6384 ; AVX-NEXT: vpshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
6385 ; AVX-NEXT: # xmm1 = mem[0,3,2,3]
6386 ; AVX-NEXT: vpblendw $8, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
6387 ; AVX-NEXT: # xmm1 = xmm1[0,1,2],mem[3],xmm1[4,5,6,7]
6388 ; AVX-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6389 ; AVX-NEXT: # xmm2 = mem[2,3,2,3]
6390 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm2[0],xmm1[1,2,3,4,5,6,7]
6391 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0,1,2,3],xmm0[4,5,6,7]
6392 ; AVX-NEXT: vmovdqa (%rsp), %xmm1 # 16-byte Reload
6393 ; AVX-NEXT: vpblendw $48, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
6394 ; AVX-NEXT: # xmm1 = xmm1[0,1,2,3],mem[4,5],xmm1[6,7]
6395 ; AVX-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6396 ; AVX-NEXT: # xmm2 = mem[1,1,1,1]
6397 ; AVX-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
6398 ; AVX-NEXT: # xmm5 = mem[0,2,2,3]
6399 ; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,0,3,4,5,6,7]
6400 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm5 = xmm5[0],xmm2[0],xmm5[1],xmm2[1]
6401 ; AVX-NEXT: vmovdqa {{.*#+}} xmm2 = [8,9,2,3,12,13,12,13,8,9,12,13,12,13,14,15]
6402 ; AVX-NEXT: vpshufb %xmm2, %xmm1, %xmm1
6403 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm5[3,4,5],xmm1[6,7]
6404 ; AVX-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
6405 ; AVX-NEXT: # xmm5 = mem[0,1,1,3]
6406 ; AVX-NEXT: vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,4,7]
6407 ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5],xmm5[6,7]
6408 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm1
6409 ; AVX-NEXT: vpshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Folded Reload
6410 ; AVX-NEXT: # xmm0 = mem[3,1,2,3]
6411 ; AVX-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,2,1,4,5,6,7]
6412 ; AVX-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
6413 ; AVX-NEXT: # xmm5 = mem[0,2,2,3]
6414 ; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,0,3,4,5,6,7]
6415 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm5[0],xmm0[0],xmm5[1],xmm0[1]
6416 ; AVX-NEXT: vpshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
6417 ; AVX-NEXT: # xmm5 = mem[0,3,2,3]
6418 ; AVX-NEXT: vpblendw $8, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
6419 ; AVX-NEXT: # xmm5 = xmm5[0,1,2],mem[3],xmm5[4,5,6,7]
6420 ; AVX-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
6421 ; AVX-NEXT: # xmm15 = mem[2,3,2,3]
6422 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm15[0],xmm5[1,2,3,4,5,6,7]
6423 ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm5[0,1,2,3],xmm0[4,5,6,7]
6424 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
6425 ; AVX-NEXT: vpblendw $48, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
6426 ; AVX-NEXT: # xmm5 = xmm5[0,1,2,3],mem[4,5],xmm5[6,7]
6427 ; AVX-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
6428 ; AVX-NEXT: # xmm15 = mem[1,1,1,1]
6429 ; AVX-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
6430 ; AVX-NEXT: # xmm12 = mem[0,2,2,3]
6431 ; AVX-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[0,1,0,3,4,5,6,7]
6432 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm12 = xmm12[0],xmm15[0],xmm12[1],xmm15[1]
6433 ; AVX-NEXT: vpshufb %xmm2, %xmm5, %xmm5
6434 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm12[3,4,5],xmm5[6,7]
6435 ; AVX-NEXT: vpshufd {{.*#+}} xmm12 = xmm14[0,1,1,3]
6436 ; AVX-NEXT: vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,5,4,7]
6437 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5],xmm12[6,7]
6438 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm5, %ymm0
6439 ; AVX-NEXT: vpshufd {{.*#+}} xmm5 = xmm13[3,1,2,3]
6440 ; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,2,1,4,5,6,7]
6441 ; AVX-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Folded Reload
6442 ; AVX-NEXT: # xmm12 = mem[0,2,2,3]
6443 ; AVX-NEXT: vpshuflw {{.*#+}} xmm12 = xmm12[0,1,0,3,4,5,6,7]
6444 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm5 = xmm12[0],xmm5[0],xmm12[1],xmm5[1]
6445 ; AVX-NEXT: vpshufd {{.*#+}} xmm12 = xmm10[0,3,2,3]
6446 ; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2],xmm11[3],xmm12[4,5,6,7]
6447 ; AVX-NEXT: vpshufd {{.*#+}} xmm15 = xmm7[2,3,2,3]
6448 ; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm15[0],xmm12[1,2,3,4,5,6,7]
6449 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm12[0,1,2,3],xmm5[4,5,6,7]
6450 ; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm6[0,1,2,3],xmm3[4,5],xmm6[6,7]
6451 ; AVX-NEXT: vpshufd {{.*#+}} xmm15 = xmm4[1,1,1,1]
6452 ; AVX-NEXT: vpshufd {{.*#+}} xmm14 = xmm8[0,2,2,3]
6453 ; AVX-NEXT: vpshuflw {{.*#+}} xmm14 = xmm14[0,1,0,3,4,5,6,7]
6454 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm14 = xmm14[0],xmm15[0],xmm14[1],xmm15[1]
6455 ; AVX-NEXT: vpshufb %xmm2, %xmm12, %xmm12
6456 ; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2],xmm14[3,4,5],xmm12[6,7]
6457 ; AVX-NEXT: vpshufd {{.*#+}} xmm14 = xmm9[0,1,1,3]
6458 ; AVX-NEXT: vpshufhw {{.*#+}} xmm14 = xmm14[0,1,2,3,4,5,4,7]
6459 ; AVX-NEXT: vpblendw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,5],xmm14[6,7]
6460 ; AVX-NEXT: vinsertf128 $1, %xmm5, %ymm12, %ymm12
6461 ; AVX-NEXT: vpshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
6462 ; AVX-NEXT: # xmm5 = mem[3,1,2,3]
6463 ; AVX-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,2,1,4,5,6,7]
6464 ; AVX-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
6465 ; AVX-NEXT: # xmm13 = mem[0,2,2,3]
6466 ; AVX-NEXT: vpshuflw {{.*#+}} xmm13 = xmm13[0,1,0,3,4,5,6,7]
6467 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm5 = xmm13[0],xmm5[0],xmm13[1],xmm5[1]
6468 ; AVX-NEXT: vpshufd $236, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Folded Reload
6469 ; AVX-NEXT: # xmm13 = mem[0,3,2,3]
6470 ; AVX-NEXT: vpblendw $8, {{[-0-9]+}}(%r{{[sb]}}p), %xmm13, %xmm11 # 16-byte Folded Reload
6471 ; AVX-NEXT: # xmm11 = xmm13[0,1,2],mem[3],xmm13[4,5,6,7]
6472 ; AVX-NEXT: vpshufd $238, {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Folded Reload
6473 ; AVX-NEXT: # xmm10 = mem[2,3,2,3]
6474 ; AVX-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0],xmm11[1,2,3,4,5,6,7]
6475 ; AVX-NEXT: vpblendw {{.*#+}} xmm5 = xmm10[0,1,2,3],xmm5[4,5,6,7]
6476 ; AVX-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
6477 ; AVX-NEXT: vpblendw $48, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3, %xmm8 # 16-byte Folded Reload
6478 ; AVX-NEXT: # xmm8 = xmm3[0,1,2,3],mem[4,5],xmm3[6,7]
6479 ; AVX-NEXT: vpshufb %xmm2, %xmm8, %xmm2
6480 ; AVX-NEXT: vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
6481 ; AVX-NEXT: # xmm4 = mem[1,1,1,1]
6482 ; AVX-NEXT: vpshufd $232, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
6483 ; AVX-NEXT: # xmm7 = mem[0,2,2,3]
6484 ; AVX-NEXT: vpshuflw {{.*#+}} xmm7 = xmm7[0,1,0,3,4,5,6,7]
6485 ; AVX-NEXT: vpunpckldq {{.*#+}} xmm4 = xmm7[0],xmm4[0],xmm7[1],xmm4[1]
6486 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm4[3,4,5],xmm2[6,7]
6487 ; AVX-NEXT: vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
6488 ; AVX-NEXT: # xmm4 = mem[0,1,1,3]
6489 ; AVX-NEXT: vpshufhw {{.*#+}} xmm4 = xmm4[0,1,2,3,4,5,4,7]
6490 ; AVX-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5],xmm4[6,7]
6491 ; AVX-NEXT: vinsertf128 $1, %xmm5, %ymm2, %ymm2
6492 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6493 ; AVX-NEXT: vmovaps %ymm3, 64(%rsi)
6494 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6495 ; AVX-NEXT: vmovaps %ymm4, (%rsi)
6496 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6497 ; AVX-NEXT: vmovaps %ymm4, 96(%rsi)
6498 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6499 ; AVX-NEXT: vmovaps %ymm4, 32(%rsi)
6500 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6501 ; AVX-NEXT: vmovaps %ymm3, 64(%rdx)
6502 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6503 ; AVX-NEXT: vmovaps %ymm3, (%rdx)
6504 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6505 ; AVX-NEXT: vmovaps %ymm3, 96(%rdx)
6506 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6507 ; AVX-NEXT: vmovaps %ymm3, 32(%rdx)
6508 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6509 ; AVX-NEXT: vmovaps %ymm3, 64(%rcx)
6510 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6511 ; AVX-NEXT: vmovaps %ymm3, (%rcx)
6512 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6513 ; AVX-NEXT: vmovaps %ymm3, 96(%rcx)
6514 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6515 ; AVX-NEXT: vmovaps %ymm3, 32(%rcx)
6516 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6517 ; AVX-NEXT: vmovaps %ymm3, 64(%r8)
6518 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6519 ; AVX-NEXT: vmovaps %ymm3, (%r8)
6520 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6521 ; AVX-NEXT: vmovaps %ymm3, 96(%r8)
6522 ; AVX-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6523 ; AVX-NEXT: vmovaps %ymm3, 32(%r8)
6524 ; AVX-NEXT: vmovaps %ymm2, 64(%r9)
6525 ; AVX-NEXT: vmovaps %ymm12, (%r9)
6526 ; AVX-NEXT: vmovaps %ymm0, 96(%r9)
6527 ; AVX-NEXT: vmovaps %ymm1, 32(%r9)
6528 ; AVX-NEXT: addq $1032, %rsp # imm = 0x408
6529 ; AVX-NEXT: vzeroupper
6530 ; AVX-NEXT: retq
6531 ;
6532 ; AVX2-LABEL: load_i16_stride5_vf64:
6533 ; AVX2: # %bb.0:
6534 ; AVX2-NEXT: subq $1048, %rsp # imm = 0x418
6535 ; AVX2-NEXT: vmovdqa 384(%rdi), %ymm10
6536 ; AVX2-NEXT: vmovdqa 512(%rdi), %ymm4
6537 ; AVX2-NEXT: vmovdqa 480(%rdi), %ymm14
6538 ; AVX2-NEXT: vmovdqa 544(%rdi), %ymm11
6539 ; AVX2-NEXT: vmovdqa 576(%rdi), %ymm8
6540 ; AVX2-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6541 ; AVX2-NEXT: vmovdqa 192(%rdi), %ymm3
6542 ; AVX2-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6543 ; AVX2-NEXT: vmovdqa 160(%rdi), %ymm5
6544 ; AVX2-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6545 ; AVX2-NEXT: vmovdqa 224(%rdi), %ymm0
6546 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6547 ; AVX2-NEXT: vmovdqa 256(%rdi), %ymm1
6548 ; AVX2-NEXT: vmovdqu %ymm1, (%rsp) # 32-byte Spill
6549 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm1[1,2],ymm0[3],ymm1[4],ymm0[5],ymm1[6,7],ymm0[8],ymm1[9,10],ymm0[11],ymm1[12],ymm0[13],ymm1[14,15]
6550 ; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
6551 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6],ymm1[7]
6552 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,10,11,4,5,14,15,8,9,10,11,4,5,6,7,16,17,26,27,20,21,30,31,24,25,26,27,20,21,22,23]
6553 ; AVX2-NEXT: vpshufb %ymm1, %ymm0, %ymm2
6554 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5],ymm3[6],ymm5[7,8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13],ymm3[14],ymm5[15]
6555 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm3
6556 ; AVX2-NEXT: vpblendw {{.*#+}} xmm3 = xmm0[0],xmm3[1,2,3],xmm0[4,5],xmm3[6,7]
6557 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,10,11,4,5,14,15,8,9,2,3,12,13,6,7]
6558 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm3
6559 ; AVX2-NEXT: vpmovsxbw {{.*#+}} xmm5 = [65535,65535,65535,65535,65535,65535,65535,0]
6560 ; AVX2-NEXT: vpblendvb %ymm5, %ymm3, %ymm2, %ymm2
6561 ; AVX2-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6562 ; AVX2-NEXT: vpblendw {{.*#+}} ymm2 = ymm11[0],ymm8[1,2],ymm11[3],ymm8[4],ymm11[5],ymm8[6,7],ymm11[8],ymm8[9,10],ymm11[11],ymm8[12],ymm11[13],ymm8[14,15]
6563 ; AVX2-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6564 ; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm2[2,3,0,1]
6565 ; AVX2-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm3[5],ymm2[6],ymm3[7]
6566 ; AVX2-NEXT: vpblendw {{.*#+}} ymm3 = ymm14[0],ymm4[1],ymm14[2,3],ymm4[4],ymm14[5],ymm4[6],ymm14[7,8],ymm4[9],ymm14[10,11],ymm4[12],ymm14[13],ymm4[14],ymm14[15]
6567 ; AVX2-NEXT: vmovdqu %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6568 ; AVX2-NEXT: vmovdqa %ymm4, %ymm8
6569 ; AVX2-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6570 ; AVX2-NEXT: vextracti128 $1, %ymm3, %xmm4
6571 ; AVX2-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0],xmm4[1,2,3],xmm3[4,5],xmm4[6,7]
6572 ; AVX2-NEXT: vmovdqa 416(%rdi), %ymm13
6573 ; AVX2-NEXT: vpshufb %ymm1, %ymm2, %ymm2
6574 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm3
6575 ; AVX2-NEXT: vpblendvb %ymm5, %ymm3, %ymm2, %ymm2
6576 ; AVX2-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6577 ; AVX2-NEXT: vpblendw {{.*#+}} ymm2 = ymm10[0],ymm13[1,2],ymm10[3],ymm13[4],ymm10[5],ymm13[6,7],ymm10[8],ymm13[9,10],ymm10[11],ymm13[12],ymm10[13],ymm13[14,15]
6578 ; AVX2-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6579 ; AVX2-NEXT: vmovdqu %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6580 ; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm2[2,3,0,1]
6581 ; AVX2-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm3[5],ymm2[6],ymm3[7]
6582 ; AVX2-NEXT: vmovdqa 352(%rdi), %ymm4
6583 ; AVX2-NEXT: vmovdqa 320(%rdi), %ymm15
6584 ; AVX2-NEXT: vpblendw {{.*#+}} ymm3 = ymm15[0],ymm4[1],ymm15[2,3],ymm4[4],ymm15[5],ymm4[6],ymm15[7,8],ymm4[9],ymm15[10,11],ymm4[12],ymm15[13],ymm4[14],ymm15[15]
6585 ; AVX2-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6586 ; AVX2-NEXT: vmovdqa %ymm4, %ymm6
6587 ; AVX2-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6588 ; AVX2-NEXT: vextracti128 $1, %ymm3, %xmm4
6589 ; AVX2-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0],xmm4[1,2,3],xmm3[4,5],xmm4[6,7]
6590 ; AVX2-NEXT: vpshufb %ymm1, %ymm2, %ymm2
6591 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm3
6592 ; AVX2-NEXT: vpblendvb %ymm5, %ymm3, %ymm2, %ymm2
6593 ; AVX2-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6594 ; AVX2-NEXT: vmovdqa 64(%rdi), %ymm9
6595 ; AVX2-NEXT: vmovdqa 96(%rdi), %ymm12
6596 ; AVX2-NEXT: vpblendw {{.*#+}} ymm2 = ymm9[0],ymm12[1,2],ymm9[3],ymm12[4],ymm9[5],ymm12[6,7],ymm9[8],ymm12[9,10],ymm9[11],ymm12[12],ymm9[13],ymm12[14,15]
6597 ; AVX2-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6598 ; AVX2-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6599 ; AVX2-NEXT: vpermq {{.*#+}} ymm3 = ymm2[2,3,0,1]
6600 ; AVX2-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm3[5],ymm2[6],ymm3[7]
6601 ; AVX2-NEXT: vpshufb %ymm1, %ymm2, %ymm1
6602 ; AVX2-NEXT: vmovdqa (%rdi), %ymm5
6603 ; AVX2-NEXT: vmovdqa 32(%rdi), %ymm7
6604 ; AVX2-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0],ymm7[1],ymm5[2,3],ymm7[4],ymm5[5],ymm7[6],ymm5[7,8],ymm7[9],ymm5[10,11],ymm7[12],ymm5[13],ymm7[14],ymm5[15]
6605 ; AVX2-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6606 ; AVX2-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6607 ; AVX2-NEXT: vextracti128 $1, %ymm2, %xmm3
6608 ; AVX2-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3],xmm2[4,5],xmm3[6,7]
6609 ; AVX2-NEXT: vpshufb %xmm0, %xmm2, %xmm0
6610 ; AVX2-NEXT: vpmovsxbw {{.*#+}} xmm3 = [65535,65535,65535,65535,65535,65535,65535,0]
6611 ; AVX2-NEXT: vpblendvb %ymm3, %ymm0, %ymm1, %ymm0
6612 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6613 ; AVX2-NEXT: vmovdqu (%rsp), %ymm0 # 32-byte Reload
6614 ; AVX2-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6615 ; AVX2-NEXT: # ymm0 = ymm0[0],mem[1],ymm0[2,3],mem[4],ymm0[5],mem[6],ymm0[7,8],mem[9],ymm0[10,11],mem[12],ymm0[13],mem[14],ymm0[15]
6616 ; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
6617 ; AVX2-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7]
6618 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6619 ; AVX2-NEXT: vpblendw $74, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
6620 ; AVX2-NEXT: # ymm0 = ymm0[0],mem[1],ymm0[2],mem[3],ymm0[4,5],mem[6],ymm0[7,8],mem[9],ymm0[10],mem[11],ymm0[12,13],mem[14],ymm0[15]
6621 ; AVX2-NEXT: vextracti128 $1, %ymm0, %xmm2
6622 ; AVX2-NEXT: vpblendw {{.*#+}} xmm2 = xmm0[0,1],xmm2[2,3],xmm0[4,5,6],xmm2[7]
6623 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm0 = [2,3,12,13,6,7,0,1,10,11,6,7,8,9,8,9,18,19,28,29,22,23,16,17,26,27,22,23,24,25,24,25]
6624 ; AVX2-NEXT: vpshufb %ymm0, %ymm1, %ymm1
6625 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm4 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,10,11]
6626 ; AVX2-NEXT: vpshufb %xmm4, %xmm2, %xmm2
6627 ; AVX2-NEXT: vpblendvb %ymm3, %ymm2, %ymm1, %ymm1
6628 ; AVX2-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6629 ; AVX2-NEXT: vpblendw $173, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm1 # 32-byte Folded Reload
6630 ; AVX2-NEXT: # ymm1 = mem[0],ymm11[1],mem[2,3],ymm11[4],mem[5],ymm11[6],mem[7,8],ymm11[9],mem[10,11],ymm11[12],mem[13],ymm11[14],mem[15]
6631 ; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm1[2,3,0,1]
6632 ; AVX2-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm2[5],ymm1[6,7]
6633 ; AVX2-NEXT: vpblendw {{.*#+}} ymm2 = ymm8[0],ymm14[1],ymm8[2],ymm14[3],ymm8[4,5],ymm14[6],ymm8[7,8],ymm14[9],ymm8[10],ymm14[11],ymm8[12,13],ymm14[14],ymm8[15]
6634 ; AVX2-NEXT: vextracti128 $1, %ymm2, %xmm8
6635 ; AVX2-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm8[2,3],xmm2[4,5,6],xmm8[7]
6636 ; AVX2-NEXT: vpshufb %ymm0, %ymm1, %ymm1
6637 ; AVX2-NEXT: vpshufb %xmm4, %xmm2, %xmm2
6638 ; AVX2-NEXT: vpblendvb %ymm3, %ymm2, %ymm1, %ymm11
6639 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm13[0],ymm10[1],ymm13[2,3],ymm10[4],ymm13[5],ymm10[6],ymm13[7,8],ymm10[9],ymm13[10,11],ymm10[12],ymm13[13],ymm10[14],ymm13[15]
6640 ; AVX2-NEXT: vpermq {{.*#+}} ymm8 = ymm1[2,3,0,1]
6641 ; AVX2-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm8[5],ymm1[6,7]
6642 ; AVX2-NEXT: vpblendw {{.*#+}} ymm8 = ymm6[0],ymm15[1],ymm6[2],ymm15[3],ymm6[4,5],ymm15[6],ymm6[7,8],ymm15[9],ymm6[10],ymm15[11],ymm6[12,13],ymm15[14],ymm6[15]
6643 ; AVX2-NEXT: vextracti128 $1, %ymm8, %xmm10
6644 ; AVX2-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3],xmm8[4,5,6],xmm10[7]
6645 ; AVX2-NEXT: vpshufb %ymm0, %ymm1, %ymm1
6646 ; AVX2-NEXT: vpshufb %xmm4, %xmm8, %xmm8
6647 ; AVX2-NEXT: vpblendvb %ymm3, %ymm8, %ymm1, %ymm6
6648 ; AVX2-NEXT: vpblendw {{.*#+}} ymm8 = ymm12[0],ymm9[1],ymm12[2,3],ymm9[4],ymm12[5],ymm9[6],ymm12[7,8],ymm9[9],ymm12[10,11],ymm9[12],ymm12[13],ymm9[14],ymm12[15]
6649 ; AVX2-NEXT: vpermq {{.*#+}} ymm10 = ymm8[2,3,0,1]
6650 ; AVX2-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4],ymm10[5],ymm8[6,7]
6651 ; AVX2-NEXT: vpshufb %ymm0, %ymm8, %ymm0
6652 ; AVX2-NEXT: vpblendw {{.*#+}} ymm8 = ymm7[0],ymm5[1],ymm7[2],ymm5[3],ymm7[4,5],ymm5[6],ymm7[7,8],ymm5[9],ymm7[10],ymm5[11],ymm7[12,13],ymm5[14],ymm7[15]
6653 ; AVX2-NEXT: vextracti128 $1, %ymm8, %xmm10
6654 ; AVX2-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm10[2,3],xmm8[4,5,6],xmm10[7]
6655 ; AVX2-NEXT: vpshufb %xmm4, %xmm8, %xmm4
6656 ; AVX2-NEXT: vpblendvb %ymm3, %ymm4, %ymm0, %ymm2
6657 ; AVX2-NEXT: vmovdqa 304(%rdi), %xmm9
6658 ; AVX2-NEXT: vmovdqa 288(%rdi), %xmm3
6659 ; AVX2-NEXT: vpblendd {{.*#+}} xmm8 = xmm3[0],xmm9[1],xmm3[2,3]
6660 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm0 = [6,7,2,3,4,5,6,7,6,7,2,3,12,13,6,7]
6661 ; AVX2-NEXT: vpshufb %xmm0, %xmm8, %xmm8
6662 ; AVX2-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
6663 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6664 ; AVX2-NEXT: vpblendw {{.*#+}} ymm8 = ymm4[0,1,2,3,4],ymm8[5,6,7],ymm4[8,9,10,11,12],ymm8[13,14,15]
6665 ; AVX2-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
6666 ; AVX2-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6667 ; AVX2-NEXT: vmovdqa 624(%rdi), %xmm15
6668 ; AVX2-NEXT: vmovdqa 608(%rdi), %xmm12
6669 ; AVX2-NEXT: vpblendd {{.*#+}} xmm10 = xmm12[0],xmm15[1],xmm12[2,3]
6670 ; AVX2-NEXT: vmovdqa %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6671 ; AVX2-NEXT: vpshufb %xmm0, %xmm10, %xmm10
6672 ; AVX2-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
6673 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6674 ; AVX2-NEXT: vpblendw {{.*#+}} ymm10 = ymm4[0,1,2,3,4],ymm10[5,6,7],ymm4[8,9,10,11,12],ymm10[13,14,15]
6675 ; AVX2-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm10[4,5,6,7]
6676 ; AVX2-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6677 ; AVX2-NEXT: vmovdqa 464(%rdi), %xmm10
6678 ; AVX2-NEXT: vmovdqa 448(%rdi), %xmm8
6679 ; AVX2-NEXT: vpblendd {{.*#+}} xmm13 = xmm8[0],xmm10[1],xmm8[2,3]
6680 ; AVX2-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6681 ; AVX2-NEXT: vpshufb %xmm0, %xmm13, %xmm13
6682 ; AVX2-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
6683 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6684 ; AVX2-NEXT: vpblendw {{.*#+}} ymm13 = ymm4[0,1,2,3,4],ymm13[5,6,7],ymm4[8,9,10,11,12],ymm13[13,14,15]
6685 ; AVX2-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm13[4,5,6,7]
6686 ; AVX2-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6687 ; AVX2-NEXT: vmovdqa 144(%rdi), %xmm5
6688 ; AVX2-NEXT: vmovdqa 128(%rdi), %xmm4
6689 ; AVX2-NEXT: vpblendd {{.*#+}} xmm14 = xmm4[0],xmm5[1],xmm4[2,3]
6690 ; AVX2-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6691 ; AVX2-NEXT: vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6692 ; AVX2-NEXT: vpshufb %xmm0, %xmm14, %xmm0
6693 ; AVX2-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6694 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
6695 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm7[0,1,2,3,4],ymm0[5,6,7],ymm7[8,9,10,11,12],ymm0[13,14,15]
6696 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm7[0,1,2,3],ymm0[4,5,6,7]
6697 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6698 ; AVX2-NEXT: vmovdqa %xmm3, %xmm14
6699 ; AVX2-NEXT: vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6700 ; AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm3[0,1],xmm9[2],xmm3[3]
6701 ; AVX2-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6702 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm7 = [0,1,2,3,4,5,6,7,8,9,4,5,14,15,8,9]
6703 ; AVX2-NEXT: vpshufb %xmm7, %xmm0, %xmm0
6704 ; AVX2-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6705 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
6706 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5,6,7],ymm1[8,9,10,11,12],ymm0[13,14,15]
6707 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
6708 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6709 ; AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm12[0,1],xmm15[2],xmm12[3]
6710 ; AVX2-NEXT: vmovdqa %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6711 ; AVX2-NEXT: vpshufb %xmm7, %xmm0, %xmm0
6712 ; AVX2-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6713 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm11[0,1,2,3,4],ymm0[5,6,7],ymm11[8,9,10,11,12],ymm0[13,14,15]
6714 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
6715 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6716 ; AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm8[0,1],xmm10[2],xmm8[3]
6717 ; AVX2-NEXT: vmovdqa %xmm8, %xmm10
6718 ; AVX2-NEXT: vmovdqa %xmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
6719 ; AVX2-NEXT: vpshufb %xmm7, %xmm0, %xmm0
6720 ; AVX2-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6721 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm6[0,1,2,3,4],ymm0[5,6,7],ymm6[8,9,10,11,12],ymm0[13,14,15]
6722 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm6[0,1,2,3],ymm0[4,5,6,7]
6723 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6724 ; AVX2-NEXT: vpblendd {{.*#+}} xmm0 = xmm4[0,1],xmm5[2],xmm4[3]
6725 ; AVX2-NEXT: vpshufb %xmm7, %xmm0, %xmm0
6726 ; AVX2-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
6727 ; AVX2-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0,1,2,3,4],ymm0[5,6,7],ymm2[8,9,10,11,12],ymm0[13,14,15]
6728 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
6729 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6730 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
6731 ; AVX2-NEXT: vpblendw $82, (%rsp), %ymm13, %ymm0 # 32-byte Folded Reload
6732 ; AVX2-NEXT: # ymm0 = ymm13[0],mem[1],ymm13[2,3],mem[4],ymm13[5],mem[6],ymm13[7,8],mem[9],ymm13[10,11],mem[12],ymm13[13],mem[14],ymm13[15]
6733 ; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
6734 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6],ymm0[7]
6735 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
6736 ; AVX2-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm1 # 32-byte Folded Reload
6737 ; AVX2-NEXT: # ymm1 = ymm15[0,1],mem[2],ymm15[3],mem[4],ymm15[5,6],mem[7],ymm15[8,9],mem[10],ymm15[11],mem[12],ymm15[13,14],mem[15]
6738 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm2
6739 ; AVX2-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm2[3,4],xmm1[5,6,7]
6740 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm2 = [u,u,u,u,u,u,u,u,u,u,u,u,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
6741 ; AVX2-NEXT: vpshufb %ymm2, %ymm0, %ymm3
6742 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm0 = [4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
6743 ; AVX2-NEXT: vpshufb %xmm0, %xmm1, %xmm1
6744 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm1[0,1,2],ymm3[3,4,5,6,7]
6745 ; AVX2-NEXT: vpblendd {{.*#+}} xmm6 = xmm9[0],xmm14[1],xmm9[2,3]
6746 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,6,7,4,5,6,7,8,9,6,7,0,1,10,11]
6747 ; AVX2-NEXT: vpshufb %xmm1, %xmm6, %xmm6
6748 ; AVX2-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
6749 ; AVX2-NEXT: vpblendw {{.*#+}} ymm6 = ymm3[0,1,2,3,4],ymm6[5,6,7],ymm3[8,9,10,11,12],ymm6[13,14,15]
6750 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5,6,7]
6751 ; AVX2-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6752 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
6753 ; AVX2-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
6754 ; AVX2-NEXT: # ymm3 = ymm3[0],mem[1],ymm3[2,3],mem[4],ymm3[5],mem[6],ymm3[7,8],mem[9],ymm3[10,11],mem[12],ymm3[13],mem[14],ymm3[15]
6755 ; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm3[2,3,0,1]
6756 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm6[6],ymm3[7]
6757 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
6758 ; AVX2-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm6 # 32-byte Folded Reload
6759 ; AVX2-NEXT: # ymm6 = ymm11[0,1],mem[2],ymm11[3],mem[4],ymm11[5,6],mem[7],ymm11[8,9],mem[10],ymm11[11],mem[12],ymm11[13,14],mem[15]
6760 ; AVX2-NEXT: vextracti128 $1, %ymm6, %xmm7
6761 ; AVX2-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2],xmm7[3,4],xmm6[5,6,7]
6762 ; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm3
6763 ; AVX2-NEXT: vpshufb %xmm0, %xmm6, %xmm6
6764 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0,1,2],ymm3[3,4,5,6,7]
6765 ; AVX2-NEXT: vpblendd $13, {{[-0-9]+}}(%r{{[sb]}}p), %xmm12, %xmm6 # 16-byte Folded Reload
6766 ; AVX2-NEXT: # xmm6 = mem[0],xmm12[1],mem[2,3]
6767 ; AVX2-NEXT: vpshufb %xmm1, %xmm6, %xmm6
6768 ; AVX2-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
6769 ; AVX2-NEXT: vpblendw {{.*#+}} ymm6 = ymm3[0,1,2,3,4],ymm6[5,6,7],ymm3[8,9,10,11,12],ymm6[13,14,15]
6770 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5,6,7]
6771 ; AVX2-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6772 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
6773 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6774 ; AVX2-NEXT: vpblendw {{.*#+}} ymm3 = ymm14[0],ymm4[1],ymm14[2,3],ymm4[4],ymm14[5],ymm4[6],ymm14[7,8],ymm4[9],ymm14[10,11],ymm4[12],ymm14[13],ymm4[14],ymm14[15]
6775 ; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm3[2,3,0,1]
6776 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm6[6],ymm3[7]
6777 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
6778 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
6779 ; AVX2-NEXT: vpblendw {{.*#+}} ymm6 = ymm9[0,1],ymm8[2],ymm9[3],ymm8[4],ymm9[5,6],ymm8[7],ymm9[8,9],ymm8[10],ymm9[11],ymm8[12],ymm9[13,14],ymm8[15]
6780 ; AVX2-NEXT: vextracti128 $1, %ymm6, %xmm7
6781 ; AVX2-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1,2],xmm7[3,4],xmm6[5,6,7]
6782 ; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm3
6783 ; AVX2-NEXT: vpshufb %xmm0, %xmm6, %xmm6
6784 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0,1,2],ymm3[3,4,5,6,7]
6785 ; AVX2-NEXT: vpblendd $13, {{[-0-9]+}}(%r{{[sb]}}p), %xmm10, %xmm6 # 16-byte Folded Reload
6786 ; AVX2-NEXT: # xmm6 = mem[0],xmm10[1],mem[2,3]
6787 ; AVX2-NEXT: vpshufb %xmm1, %xmm6, %xmm6
6788 ; AVX2-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
6789 ; AVX2-NEXT: vpblendw {{.*#+}} ymm6 = ymm3[0,1,2,3,4],ymm6[5,6,7],ymm3[8,9,10,11,12],ymm6[13,14,15]
6790 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5,6,7]
6791 ; AVX2-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6792 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
6793 ; AVX2-NEXT: vpblendw $173, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm3 # 32-byte Folded Reload
6794 ; AVX2-NEXT: # ymm3 = mem[0],ymm12[1],mem[2,3],ymm12[4],mem[5],ymm12[6],mem[7,8],ymm12[9],mem[10,11],ymm12[12],mem[13],ymm12[14],mem[15]
6795 ; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm3[2,3,0,1]
6796 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm6[6],ymm3[7]
6797 ; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm2
6798 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
6799 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6800 ; AVX2-NEXT: vpblendw {{.*#+}} ymm3 = ymm10[0,1],ymm5[2],ymm10[3],ymm5[4],ymm10[5,6],ymm5[7],ymm10[8,9],ymm5[10],ymm10[11],ymm5[12],ymm10[13,14],ymm5[15]
6801 ; AVX2-NEXT: vextracti128 $1, %ymm3, %xmm6
6802 ; AVX2-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm6[3,4],xmm3[5,6,7]
6803 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
6804 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm2[3,4,5,6,7]
6805 ; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
6806 ; AVX2-NEXT: vpblendd $13, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
6807 ; AVX2-NEXT: # xmm2 = mem[0],xmm2[1],mem[2,3]
6808 ; AVX2-NEXT: vpshufb %xmm1, %xmm2, %xmm1
6809 ; AVX2-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
6810 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5,6,7],ymm0[8,9,10,11,12],ymm1[13,14,15]
6811 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6812 ; AVX2-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6813 ; AVX2-NEXT: vpblendw $181, (%rsp), %ymm13, %ymm0 # 32-byte Folded Reload
6814 ; AVX2-NEXT: # ymm0 = mem[0],ymm13[1],mem[2],ymm13[3],mem[4,5],ymm13[6],mem[7,8],ymm13[9],mem[10],ymm13[11],mem[12,13],ymm13[14],mem[15]
6815 ; AVX2-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
6816 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
6817 ; AVX2-NEXT: vpblendw $41, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm1 # 32-byte Folded Reload
6818 ; AVX2-NEXT: # ymm1 = mem[0],ymm15[1,2],mem[3],ymm15[4],mem[5],ymm15[6,7],mem[8],ymm15[9,10],mem[11],ymm15[12],mem[13],ymm15[14,15]
6819 ; AVX2-NEXT: vextracti128 $1, %ymm1, %xmm2
6820 ; AVX2-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2],xmm1[3]
6821 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm2 = [u,u,u,u,u,u,u,u,u,u,u,u,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
6822 ; AVX2-NEXT: vpshufb %ymm2, %ymm0, %ymm3
6823 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm0 = [6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
6824 ; AVX2-NEXT: vpshufb %xmm0, %xmm1, %xmm1
6825 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm1[0,1,2],ymm3[3,4,5,6,7]
6826 ; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
6827 ; AVX2-NEXT: vpblendd $11, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15, %xmm6 # 16-byte Folded Reload
6828 ; AVX2-NEXT: # xmm6 = mem[0,1],xmm15[2],mem[3]
6829 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,2,3,4,5,6,7,8,9,8,9,2,3,12,13]
6830 ; AVX2-NEXT: vpshufb %xmm1, %xmm6, %xmm6
6831 ; AVX2-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
6832 ; AVX2-NEXT: vpblendw {{.*#+}} ymm6 = ymm3[0,1,2,3,4],ymm6[5,6,7],ymm3[8,9,10,11,12],ymm6[13,14,15]
6833 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5,6,7]
6834 ; AVX2-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6835 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
6836 ; AVX2-NEXT: vpblendw $74, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm3 # 32-byte Folded Reload
6837 ; AVX2-NEXT: # ymm3 = ymm13[0],mem[1],ymm13[2],mem[3],ymm13[4,5],mem[6],ymm13[7,8],mem[9],ymm13[10],mem[11],ymm13[12,13],mem[14],ymm13[15]
6838 ; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm3[2,3,0,1]
6839 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4],ymm3[5],ymm6[6],ymm3[7]
6840 ; AVX2-NEXT: vpblendw $41, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm6 # 32-byte Folded Reload
6841 ; AVX2-NEXT: # ymm6 = mem[0],ymm11[1,2],mem[3],ymm11[4],mem[5],ymm11[6,7],mem[8],ymm11[9,10],mem[11],ymm11[12],mem[13],ymm11[14,15]
6842 ; AVX2-NEXT: vextracti128 $1, %ymm6, %xmm7
6843 ; AVX2-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0],xmm6[1],xmm7[2],xmm6[3]
6844 ; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm3
6845 ; AVX2-NEXT: vpshufb %xmm0, %xmm6, %xmm6
6846 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0,1,2],ymm3[3,4,5,6,7]
6847 ; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
6848 ; AVX2-NEXT: vpblendd $11, {{[-0-9]+}}(%r{{[sb]}}p), %xmm11, %xmm6 # 16-byte Folded Reload
6849 ; AVX2-NEXT: # xmm6 = mem[0,1],xmm11[2],mem[3]
6850 ; AVX2-NEXT: vpshufb %xmm1, %xmm6, %xmm6
6851 ; AVX2-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
6852 ; AVX2-NEXT: vpblendw {{.*#+}} ymm6 = ymm3[0,1,2,3,4],ymm6[5,6,7],ymm3[8,9,10,11,12],ymm6[13,14,15]
6853 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5,6,7]
6854 ; AVX2-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6855 ; AVX2-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm14[1],ymm4[2],ymm14[3],ymm4[4,5],ymm14[6],ymm4[7,8],ymm14[9],ymm4[10],ymm14[11],ymm4[12,13],ymm14[14],ymm4[15]
6856 ; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm3[2,3,0,1]
6857 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4],ymm3[5],ymm6[6],ymm3[7]
6858 ; AVX2-NEXT: vpblendw {{.*#+}} ymm6 = ymm8[0],ymm9[1,2],ymm8[3],ymm9[4],ymm8[5],ymm9[6,7],ymm8[8],ymm9[9,10],ymm8[11],ymm9[12],ymm8[13],ymm9[14,15]
6859 ; AVX2-NEXT: vextracti128 $1, %ymm6, %xmm7
6860 ; AVX2-NEXT: vpblendd {{.*#+}} xmm6 = xmm7[0],xmm6[1],xmm7[2],xmm6[3]
6861 ; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm3
6862 ; AVX2-NEXT: vpshufb %xmm0, %xmm6, %xmm6
6863 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0,1,2],ymm3[3,4,5,6,7]
6864 ; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
6865 ; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
6866 ; AVX2-NEXT: vpblendd {{.*#+}} xmm6 = xmm8[0,1],xmm14[2],xmm8[3]
6867 ; AVX2-NEXT: vpshufb %xmm1, %xmm6, %xmm6
6868 ; AVX2-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
6869 ; AVX2-NEXT: vpblendw {{.*#+}} ymm6 = ymm3[0,1,2,3,4],ymm6[5,6,7],ymm3[8,9,10,11,12],ymm6[13,14,15]
6870 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5,6,7]
6871 ; AVX2-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
6872 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
6873 ; AVX2-NEXT: vpblendw {{.*#+}} ymm3 = ymm12[0],ymm9[1],ymm12[2],ymm9[3],ymm12[4,5],ymm9[6],ymm12[7,8],ymm9[9],ymm12[10],ymm9[11],ymm12[12,13],ymm9[14],ymm12[15]
6874 ; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm3[2,3,0,1]
6875 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4],ymm3[5],ymm6[6],ymm3[7]
6876 ; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm2
6877 ; AVX2-NEXT: vpblendw {{.*#+}} ymm3 = ymm5[0],ymm10[1,2],ymm5[3],ymm10[4],ymm5[5],ymm10[6,7],ymm5[8],ymm10[9,10],ymm5[11],ymm10[12],ymm5[13],ymm10[14,15]
6878 ; AVX2-NEXT: vextracti128 $1, %ymm3, %xmm6
6879 ; AVX2-NEXT: vpblendd {{.*#+}} xmm3 = xmm6[0],xmm3[1],xmm6[2],xmm3[3]
6880 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0
6881 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm2[3,4,5,6,7]
6882 ; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
6883 ; AVX2-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
6884 ; AVX2-NEXT: vpblendd {{.*#+}} xmm2 = xmm10[0,1],xmm12[2],xmm10[3]
6885 ; AVX2-NEXT: vpshufb %xmm1, %xmm2, %xmm1
6886 ; AVX2-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
6887 ; AVX2-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5,6,7],ymm0[8,9,10,11,12],ymm1[13,14,15]
6888 ; AVX2-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
6889 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
6890 ; AVX2-NEXT: vpblendw $107, (%rsp), %ymm0, %ymm0 # 32-byte Folded Reload
6891 ; AVX2-NEXT: # ymm0 = mem[0,1],ymm0[2],mem[3],ymm0[4],mem[5,6],ymm0[7],mem[8,9],ymm0[10],mem[11],ymm0[12],mem[13,14],ymm0[15]
6892 ; AVX2-NEXT: vpermq {{.*#+}} ymm2 = ymm0[2,3,0,1]
6893 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm2[4],ymm0[5,6],ymm2[7]
6894 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
6895 ; AVX2-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
6896 ; AVX2-NEXT: # ymm2 = ymm2[0],mem[1],ymm2[2,3],mem[4],ymm2[5],mem[6],ymm2[7,8],mem[9],ymm2[10,11],mem[12],ymm2[13],mem[14],ymm2[15]
6897 ; AVX2-NEXT: vextracti128 $1, %ymm2, %xmm3
6898 ; AVX2-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3,4],xmm3[5,6,7]
6899 ; AVX2-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [24,25,18,19,28,29,22,23,0,0,0,0,4,5,14,15,24,25,18,19,28,29,22,23,0,0,0,0,4,5,14,15]
6900 ; AVX2-NEXT: # ymm7 = mem[0,1,0,1]
6901 ; AVX2-NEXT: vpshufb %ymm7, %ymm0, %ymm0
6902 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [8,9,2,3,12,13,6,7,0,1,10,11,0,1,6,7]
6903 ; AVX2-NEXT: vpshufb %xmm3, %xmm2, %xmm2
6904 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3,4,5,6,7]
6905 ; AVX2-NEXT: vpshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Folded Reload
6906 ; AVX2-NEXT: # xmm2 = mem[3,1,2,3]
6907 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm2 = xmm2[0,1,2,1,4,5,6,7]
6908 ; AVX2-NEXT: vpshufd {{.*#+}} xmm4 = xmm15[0,2,2,3]
6909 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,1,0,3,4,5,6,7]
6910 ; AVX2-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
6911 ; AVX2-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
6912 ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm2[6,7]
6913 ; AVX2-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm2 # 32-byte Folded Reload
6914 ; AVX2-NEXT: # ymm2 = ymm13[0,1],mem[2],ymm13[3],mem[4],ymm13[5,6],mem[7],ymm13[8,9],mem[10],ymm13[11],mem[12],ymm13[13,14],mem[15]
6915 ; AVX2-NEXT: vpermq {{.*#+}} ymm4 = ymm2[2,3,0,1]
6916 ; AVX2-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4],ymm2[5,6],ymm4[7]
6917 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6918 ; AVX2-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
6919 ; AVX2-NEXT: # ymm4 = ymm4[0],mem[1],ymm4[2,3],mem[4],ymm4[5],mem[6],ymm4[7,8],mem[9],ymm4[10,11],mem[12],ymm4[13],mem[14],ymm4[15]
6920 ; AVX2-NEXT: vextracti128 $1, %ymm4, %xmm5
6921 ; AVX2-NEXT: vpblendw {{.*#+}} xmm4 = xmm5[0,1,2],xmm4[3,4],xmm5[5,6,7]
6922 ; AVX2-NEXT: vpshufb %ymm7, %ymm2, %ymm2
6923 ; AVX2-NEXT: vpshufb %xmm3, %xmm4, %xmm4
6924 ; AVX2-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3,4,5,6,7]
6925 ; AVX2-NEXT: vpshufd $231, {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Folded Reload
6926 ; AVX2-NEXT: # xmm4 = mem[3,1,2,3]
6927 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm4 = xmm4[0,1,2,1,4,5,6,7]
6928 ; AVX2-NEXT: vpshufd {{.*#+}} xmm5 = xmm11[0,2,2,3]
6929 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,0,3,4,5,6,7]
6930 ; AVX2-NEXT: vpunpckldq {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
6931 ; AVX2-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
6932 ; AVX2-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm4[6,7]
6933 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
6934 ; AVX2-NEXT: vpblendw $107, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
6935 ; AVX2-NEXT: # ymm4 = mem[0,1],ymm4[2],mem[3],ymm4[4],mem[5,6],ymm4[7],mem[8,9],ymm4[10],mem[11],ymm4[12],mem[13,14],ymm4[15]
6936 ; AVX2-NEXT: vpermq {{.*#+}} ymm5 = ymm4[2,3,0,1]
6937 ; AVX2-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4],ymm4[5,6],ymm5[7]
6938 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6939 ; AVX2-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm5 # 32-byte Folded Reload
6940 ; AVX2-NEXT: # ymm5 = ymm5[0],mem[1],ymm5[2,3],mem[4],ymm5[5],mem[6],ymm5[7,8],mem[9],ymm5[10,11],mem[12],ymm5[13],mem[14],ymm5[15]
6941 ; AVX2-NEXT: vextracti128 $1, %ymm5, %xmm6
6942 ; AVX2-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1,2],xmm5[3,4],xmm6[5,6,7]
6943 ; AVX2-NEXT: vpshufb %ymm7, %ymm4, %ymm4
6944 ; AVX2-NEXT: vpshufb %xmm3, %xmm5, %xmm5
6945 ; AVX2-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3,4,5,6,7]
6946 ; AVX2-NEXT: vpshufd {{.*#+}} xmm5 = xmm8[3,1,2,3]
6947 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,2,1,4,5,6,7]
6948 ; AVX2-NEXT: vpshufd {{.*#+}} xmm6 = xmm14[0,2,2,3]
6949 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,1,0,3,4,5,6,7]
6950 ; AVX2-NEXT: vpunpckldq {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
6951 ; AVX2-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
6952 ; AVX2-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3,4,5],ymm5[6,7]
6953 ; AVX2-NEXT: vpblendw $107, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm5 # 32-byte Folded Reload
6954 ; AVX2-NEXT: # ymm5 = mem[0,1],ymm9[2],mem[3],ymm9[4],mem[5,6],ymm9[7],mem[8,9],ymm9[10],mem[11],ymm9[12],mem[13,14],ymm9[15]
6955 ; AVX2-NEXT: vpermq {{.*#+}} ymm6 = ymm5[2,3,0,1]
6956 ; AVX2-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4],ymm5[5,6],ymm6[7]
6957 ; AVX2-NEXT: vpshufb %ymm7, %ymm5, %ymm5
6958 ; AVX2-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
6959 ; AVX2-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
6960 ; AVX2-NEXT: # ymm6 = ymm6[0],mem[1],ymm6[2,3],mem[4],ymm6[5],mem[6],ymm6[7,8],mem[9],ymm6[10,11],mem[12],ymm6[13],mem[14],ymm6[15]
6961 ; AVX2-NEXT: vextracti128 $1, %ymm6, %xmm7
6962 ; AVX2-NEXT: vpblendw {{.*#+}} xmm6 = xmm7[0,1,2],xmm6[3,4],xmm7[5,6,7]
6963 ; AVX2-NEXT: vpshufb %xmm3, %xmm6, %xmm3
6964 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2],ymm5[3,4,5,6,7]
6965 ; AVX2-NEXT: vpshufd {{.*#+}} xmm5 = xmm10[3,1,2,3]
6966 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm5 = xmm5[0,1,2,1,4,5,6,7]
6967 ; AVX2-NEXT: vpshufd {{.*#+}} xmm6 = xmm12[0,2,2,3]
6968 ; AVX2-NEXT: vpshuflw {{.*#+}} xmm6 = xmm6[0,1,0,3,4,5,6,7]
6969 ; AVX2-NEXT: vpunpckldq {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
6970 ; AVX2-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
6971 ; AVX2-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm5[6,7]
6972 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6973 ; AVX2-NEXT: vmovaps %ymm5, 64(%rsi)
6974 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6975 ; AVX2-NEXT: vmovaps %ymm5, (%rsi)
6976 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6977 ; AVX2-NEXT: vmovaps %ymm5, 96(%rsi)
6978 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6979 ; AVX2-NEXT: vmovaps %ymm5, 32(%rsi)
6980 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6981 ; AVX2-NEXT: vmovaps %ymm5, 64(%rdx)
6982 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6983 ; AVX2-NEXT: vmovaps %ymm5, (%rdx)
6984 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6985 ; AVX2-NEXT: vmovaps %ymm5, 96(%rdx)
6986 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6987 ; AVX2-NEXT: vmovaps %ymm5, 32(%rdx)
6988 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6989 ; AVX2-NEXT: vmovaps %ymm5, 64(%rcx)
6990 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6991 ; AVX2-NEXT: vmovaps %ymm5, (%rcx)
6992 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6993 ; AVX2-NEXT: vmovaps %ymm5, 96(%rcx)
6994 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6995 ; AVX2-NEXT: vmovaps %ymm5, 32(%rcx)
6996 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
6997 ; AVX2-NEXT: vmovaps %ymm5, 64(%r8)
6998 ; AVX2-NEXT: vmovdqa %ymm1, (%r8)
6999 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7000 ; AVX2-NEXT: vmovaps %ymm1, 96(%r8)
7001 ; AVX2-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7002 ; AVX2-NEXT: vmovaps %ymm1, 32(%r8)
7003 ; AVX2-NEXT: vmovdqa %ymm4, 64(%r9)
7004 ; AVX2-NEXT: vmovdqa %ymm3, (%r9)
7005 ; AVX2-NEXT: vmovdqa %ymm2, 96(%r9)
7006 ; AVX2-NEXT: vmovdqa %ymm0, 32(%r9)
7007 ; AVX2-NEXT: addq $1048, %rsp # imm = 0x418
7008 ; AVX2-NEXT: vzeroupper
7009 ; AVX2-NEXT: retq
7010 ;
7011 ; AVX2-FP-LABEL: load_i16_stride5_vf64:
7012 ; AVX2-FP: # %bb.0:
7013 ; AVX2-FP-NEXT: subq $1080, %rsp # imm = 0x438
7014 ; AVX2-FP-NEXT: vmovdqa 384(%rdi), %ymm13
7015 ; AVX2-FP-NEXT: vmovdqa 512(%rdi), %ymm5
7016 ; AVX2-FP-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7017 ; AVX2-FP-NEXT: vmovdqa 480(%rdi), %ymm6
7018 ; AVX2-FP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7019 ; AVX2-FP-NEXT: vmovdqa 544(%rdi), %ymm7
7020 ; AVX2-FP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7021 ; AVX2-FP-NEXT: vmovdqa 576(%rdi), %ymm8
7022 ; AVX2-FP-NEXT: vmovdqa 192(%rdi), %ymm2
7023 ; AVX2-FP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7024 ; AVX2-FP-NEXT: vmovdqa 160(%rdi), %ymm15
7025 ; AVX2-FP-NEXT: vmovdqa 224(%rdi), %ymm1
7026 ; AVX2-FP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7027 ; AVX2-FP-NEXT: vmovdqa 256(%rdi), %ymm0
7028 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7029 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1,2],ymm1[3],ymm0[4],ymm1[5],ymm0[6,7],ymm1[8],ymm0[9,10],ymm1[11],ymm0[12],ymm1[13],ymm0[14,15]
7030 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
7031 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6],ymm1[7]
7032 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,10,11,4,5,14,15,8,9,10,11,4,5,6,7,16,17,26,27,20,21,30,31,24,25,26,27,20,21,22,23]
7033 ; AVX2-FP-NEXT: vpshufb %ymm1, %ymm0, %ymm3
7034 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm0 = ymm15[0],ymm2[1],ymm15[2,3],ymm2[4],ymm15[5],ymm2[6],ymm15[7,8],ymm2[9],ymm15[10,11],ymm2[12],ymm15[13],ymm2[14],ymm15[15]
7035 ; AVX2-FP-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7036 ; AVX2-FP-NEXT: vextracti128 $1, %ymm0, %xmm2
7037 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm2 = xmm0[0],xmm2[1,2,3],xmm0[4,5],xmm2[6,7]
7038 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,10,11,4,5,14,15,8,9,2,3,12,13,6,7]
7039 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm2, %xmm4
7040 ; AVX2-FP-NEXT: vpmovsxbw {{.*#+}} xmm10 = [65535,65535,65535,65535,65535,65535,65535,0]
7041 ; AVX2-FP-NEXT: vpblendvb %ymm10, %ymm4, %ymm3, %ymm2
7042 ; AVX2-FP-NEXT: vmovdqu %ymm2, (%rsp) # 32-byte Spill
7043 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm3 = ymm7[0],ymm8[1,2],ymm7[3],ymm8[4],ymm7[5],ymm8[6,7],ymm7[8],ymm8[9,10],ymm7[11],ymm8[12],ymm7[13],ymm8[14,15]
7044 ; AVX2-FP-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7045 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm3[2,3,0,1]
7046 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4],ymm4[5],ymm3[6],ymm4[7]
7047 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm4 = ymm6[0],ymm5[1],ymm6[2,3],ymm5[4],ymm6[5],ymm5[6],ymm6[7,8],ymm5[9],ymm6[10,11],ymm5[12],ymm6[13],ymm5[14],ymm6[15]
7048 ; AVX2-FP-NEXT: vextracti128 $1, %ymm4, %xmm6
7049 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm6[1,2,3],xmm4[4,5],xmm6[6,7]
7050 ; AVX2-FP-NEXT: vmovdqa 416(%rdi), %ymm14
7051 ; AVX2-FP-NEXT: vpshufb %ymm1, %ymm3, %ymm3
7052 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm4, %xmm4
7053 ; AVX2-FP-NEXT: vpblendvb %ymm10, %ymm4, %ymm3, %ymm3
7054 ; AVX2-FP-NEXT: vmovdqa %ymm10, %ymm7
7055 ; AVX2-FP-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7056 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm3 = ymm13[0],ymm14[1,2],ymm13[3],ymm14[4],ymm13[5],ymm14[6,7],ymm13[8],ymm14[9,10],ymm13[11],ymm14[12],ymm13[13],ymm14[14,15]
7057 ; AVX2-FP-NEXT: vmovdqu %ymm13, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7058 ; AVX2-FP-NEXT: vmovdqu %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7059 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm3[2,3,0,1]
7060 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4],ymm4[5],ymm3[6],ymm4[7]
7061 ; AVX2-FP-NEXT: vmovdqa 352(%rdi), %ymm12
7062 ; AVX2-FP-NEXT: vmovdqa 320(%rdi), %ymm11
7063 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm4 = ymm11[0],ymm12[1],ymm11[2,3],ymm12[4],ymm11[5],ymm12[6],ymm11[7,8],ymm12[9],ymm11[10,11],ymm12[12],ymm11[13],ymm12[14],ymm11[15]
7064 ; AVX2-FP-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7065 ; AVX2-FP-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7066 ; AVX2-FP-NEXT: vextracti128 $1, %ymm4, %xmm6
7067 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm6[1,2,3],xmm4[4,5],xmm6[6,7]
7068 ; AVX2-FP-NEXT: vpshufb %ymm1, %ymm3, %ymm3
7069 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm4, %xmm4
7070 ; AVX2-FP-NEXT: vpblendvb %ymm10, %ymm4, %ymm3, %ymm2
7071 ; AVX2-FP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7072 ; AVX2-FP-NEXT: vmovdqa 64(%rdi), %ymm9
7073 ; AVX2-FP-NEXT: vmovdqa 96(%rdi), %ymm10
7074 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm3 = ymm9[0],ymm10[1,2],ymm9[3],ymm10[4],ymm9[5],ymm10[6,7],ymm9[8],ymm10[9,10],ymm9[11],ymm10[12],ymm9[13],ymm10[14,15]
7075 ; AVX2-FP-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7076 ; AVX2-FP-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7077 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm6 = ymm3[2,3,0,1]
7078 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4],ymm6[5],ymm3[6],ymm6[7]
7079 ; AVX2-FP-NEXT: vpshufb %ymm1, %ymm3, %ymm1
7080 ; AVX2-FP-NEXT: vmovdqa (%rdi), %ymm4
7081 ; AVX2-FP-NEXT: vmovdqa 32(%rdi), %ymm5
7082 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm5[1],ymm4[2,3],ymm5[4],ymm4[5],ymm5[6],ymm4[7,8],ymm5[9],ymm4[10,11],ymm5[12],ymm4[13],ymm5[14],ymm4[15]
7083 ; AVX2-FP-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7084 ; AVX2-FP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7085 ; AVX2-FP-NEXT: vextracti128 $1, %ymm3, %xmm6
7086 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0],xmm6[1,2,3],xmm3[4,5],xmm6[6,7]
7087 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm3, %xmm0
7088 ; AVX2-FP-NEXT: vmovdqa %ymm7, %ymm2
7089 ; AVX2-FP-NEXT: vpblendvb %ymm7, %ymm0, %ymm1, %ymm0
7090 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7091 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7092 ; AVX2-FP-NEXT: vpblendw $173, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
7093 ; AVX2-FP-NEXT: # ymm0 = mem[0],ymm0[1],mem[2,3],ymm0[4],mem[5],ymm0[6],mem[7,8],ymm0[9],mem[10,11],ymm0[12],mem[13],ymm0[14],mem[15]
7094 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
7095 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7]
7096 ; AVX2-FP-NEXT: vpblendw $181, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm1 # 32-byte Folded Reload
7097 ; AVX2-FP-NEXT: # ymm1 = mem[0],ymm15[1],mem[2],ymm15[3],mem[4,5],ymm15[6],mem[7,8],ymm15[9],mem[10],ymm15[11],mem[12,13],ymm15[14],mem[15]
7098 ; AVX2-FP-NEXT: vextracti128 $1, %ymm1, %xmm3
7099 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm3[2,3],xmm1[4,5,6],xmm3[7]
7100 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm7 = [2,3,12,13,6,7,0,1,10,11,6,7,8,9,8,9,18,19,28,29,22,23,16,17,26,27,22,23,24,25,24,25]
7101 ; AVX2-FP-NEXT: vpshufb %ymm7, %ymm0, %ymm0
7102 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm6 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,10,11]
7103 ; AVX2-FP-NEXT: vpshufb %xmm6, %xmm1, %xmm1
7104 ; AVX2-FP-NEXT: vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
7105 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7106 ; AVX2-FP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm0 # 32-byte Folded Reload
7107 ; AVX2-FP-NEXT: # ymm0 = ymm8[0],mem[1],ymm8[2,3],mem[4],ymm8[5],mem[6],ymm8[7,8],mem[9],ymm8[10,11],mem[12],ymm8[13],mem[14],ymm8[15]
7108 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
7109 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7]
7110 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
7111 ; AVX2-FP-NEXT: vpblendw $181, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm1 # 32-byte Folded Reload
7112 ; AVX2-FP-NEXT: # ymm1 = mem[0],ymm15[1],mem[2],ymm15[3],mem[4,5],ymm15[6],mem[7,8],ymm15[9],mem[10],ymm15[11],mem[12,13],ymm15[14],mem[15]
7113 ; AVX2-FP-NEXT: vextracti128 $1, %ymm1, %xmm8
7114 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm8[2,3],xmm1[4,5,6],xmm8[7]
7115 ; AVX2-FP-NEXT: vpshufb %ymm7, %ymm0, %ymm0
7116 ; AVX2-FP-NEXT: vpshufb %xmm6, %xmm1, %xmm1
7117 ; AVX2-FP-NEXT: vpblendvb %ymm2, %ymm1, %ymm0, %ymm3
7118 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm0 = ymm14[0],ymm13[1],ymm14[2,3],ymm13[4],ymm14[5],ymm13[6],ymm14[7,8],ymm13[9],ymm14[10,11],ymm13[12],ymm14[13],ymm13[14],ymm14[15]
7119 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm8 = ymm0[2,3,0,1]
7120 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm8[5],ymm0[6,7]
7121 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm12[0],ymm11[1],ymm12[2],ymm11[3],ymm12[4,5],ymm11[6],ymm12[7,8],ymm11[9],ymm12[10],ymm11[11],ymm12[12,13],ymm11[14],ymm12[15]
7122 ; AVX2-FP-NEXT: vextracti128 $1, %ymm8, %xmm11
7123 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm11[2,3],xmm8[4,5,6],xmm11[7]
7124 ; AVX2-FP-NEXT: vpshufb %ymm7, %ymm0, %ymm0
7125 ; AVX2-FP-NEXT: vpshufb %xmm6, %xmm8, %xmm8
7126 ; AVX2-FP-NEXT: vmovdqa %ymm2, %ymm1
7127 ; AVX2-FP-NEXT: vpblendvb %ymm2, %ymm8, %ymm0, %ymm2
7128 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm10[0],ymm9[1],ymm10[2,3],ymm9[4],ymm10[5],ymm9[6],ymm10[7,8],ymm9[9],ymm10[10,11],ymm9[12],ymm10[13],ymm9[14],ymm10[15]
7129 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm11 = ymm8[2,3,0,1]
7130 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3,4],ymm11[5],ymm8[6,7]
7131 ; AVX2-FP-NEXT: vpshufb %ymm7, %ymm8, %ymm7
7132 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm8 = ymm5[0],ymm4[1],ymm5[2],ymm4[3],ymm5[4,5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10],ymm4[11],ymm5[12,13],ymm4[14],ymm5[15]
7133 ; AVX2-FP-NEXT: vextracti128 $1, %ymm8, %xmm11
7134 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm8 = xmm8[0,1],xmm11[2,3],xmm8[4,5,6],xmm11[7]
7135 ; AVX2-FP-NEXT: vpshufb %xmm6, %xmm8, %xmm6
7136 ; AVX2-FP-NEXT: vpblendvb %ymm1, %ymm6, %ymm7, %ymm12
7137 ; AVX2-FP-NEXT: vmovdqa 304(%rdi), %xmm1
7138 ; AVX2-FP-NEXT: vmovdqa 288(%rdi), %xmm9
7139 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm7 = xmm9[0],xmm1[1],xmm9[2,3]
7140 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm0 = [6,7,2,3,4,5,6,7,6,7,2,3,12,13,6,7]
7141 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm7, %xmm7
7142 ; AVX2-FP-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
7143 ; AVX2-FP-NEXT: vmovdqu (%rsp), %ymm4 # 32-byte Reload
7144 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm7 = ymm4[0,1,2,3,4],ymm7[5,6,7],ymm4[8,9,10,11,12],ymm7[13,14,15]
7145 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm7[4,5,6,7]
7146 ; AVX2-FP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7147 ; AVX2-FP-NEXT: vmovdqa 624(%rdi), %xmm10
7148 ; AVX2-FP-NEXT: vmovdqa 608(%rdi), %xmm8
7149 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm7 = xmm8[0],xmm10[1],xmm8[2,3]
7150 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm7, %xmm7
7151 ; AVX2-FP-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
7152 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7153 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm7 = ymm4[0,1,2,3,4],ymm7[5,6,7],ymm4[8,9,10,11,12],ymm7[13,14,15]
7154 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm7[4,5,6,7]
7155 ; AVX2-FP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7156 ; AVX2-FP-NEXT: vmovdqa 464(%rdi), %xmm6
7157 ; AVX2-FP-NEXT: vmovdqa 448(%rdi), %xmm5
7158 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm7 = xmm5[0],xmm6[1],xmm5[2,3]
7159 ; AVX2-FP-NEXT: vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7160 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm7, %xmm7
7161 ; AVX2-FP-NEXT: vinserti128 $1, %xmm7, %ymm0, %ymm7
7162 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7163 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm7 = ymm4[0,1,2,3,4],ymm7[5,6,7],ymm4[8,9,10,11,12],ymm7[13,14,15]
7164 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm7[4,5,6,7]
7165 ; AVX2-FP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7166 ; AVX2-FP-NEXT: vmovdqa 144(%rdi), %xmm11
7167 ; AVX2-FP-NEXT: vmovdqa 128(%rdi), %xmm7
7168 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm14 = xmm7[0],xmm11[1],xmm7[2,3]
7169 ; AVX2-FP-NEXT: vmovdqa %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7170 ; AVX2-FP-NEXT: vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7171 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm14, %xmm0
7172 ; AVX2-FP-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
7173 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7174 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm0 = ymm4[0,1,2,3,4],ymm0[5,6,7],ymm4[8,9,10,11,12],ymm0[13,14,15]
7175 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
7176 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7177 ; AVX2-FP-NEXT: vmovdqa %xmm9, %xmm4
7178 ; AVX2-FP-NEXT: vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7179 ; AVX2-FP-NEXT: vmovdqa %xmm1, %xmm14
7180 ; AVX2-FP-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7181 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm0 = xmm9[0,1],xmm1[2],xmm9[3]
7182 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,6,7,8,9,4,5,14,15,8,9]
7183 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm0, %xmm0
7184 ; AVX2-FP-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
7185 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7186 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5,6,7],ymm1[8,9,10,11,12],ymm0[13,14,15]
7187 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
7188 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7189 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm0 = xmm8[0,1],xmm10[2],xmm8[3]
7190 ; AVX2-FP-NEXT: vmovdqa %xmm10, %xmm13
7191 ; AVX2-FP-NEXT: vmovdqa %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7192 ; AVX2-FP-NEXT: vmovdqa %xmm8, %xmm10
7193 ; AVX2-FP-NEXT: vmovdqa %xmm8, (%rsp) # 16-byte Spill
7194 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm0, %xmm0
7195 ; AVX2-FP-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
7196 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm0 = ymm3[0,1,2,3,4],ymm0[5,6,7],ymm3[8,9,10,11,12],ymm0[13,14,15]
7197 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
7198 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7199 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm0 = xmm5[0,1],xmm6[2],xmm5[3]
7200 ; AVX2-FP-NEXT: vmovdqa %xmm6, %xmm3
7201 ; AVX2-FP-NEXT: vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
7202 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm0, %xmm0
7203 ; AVX2-FP-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
7204 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0,1,2,3,4],ymm0[5,6,7],ymm2[8,9,10,11,12],ymm0[13,14,15]
7205 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
7206 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7207 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm0 = xmm7[0,1],xmm11[2],xmm7[3]
7208 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm0, %xmm0
7209 ; AVX2-FP-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
7210 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm0 = ymm12[0,1,2,3,4],ymm0[5,6,7],ymm12[8,9,10,11,12],ymm0[13,14,15]
7211 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm12[0,1,2,3],ymm0[4,5,6,7]
7212 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7213 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
7214 ; AVX2-FP-NEXT: vpblendw $173, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm0 # 32-byte Folded Reload
7215 ; AVX2-FP-NEXT: # ymm0 = mem[0],ymm8[1],mem[2,3],ymm8[4],mem[5],ymm8[6],mem[7,8],ymm8[9],mem[10,11],ymm8[12],mem[13],ymm8[14],mem[15]
7216 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
7217 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6],ymm0[7]
7218 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
7219 ; AVX2-FP-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm1 # 32-byte Folded Reload
7220 ; AVX2-FP-NEXT: # ymm1 = ymm5[0,1],mem[2],ymm5[3],mem[4],ymm5[5,6],mem[7],ymm5[8,9],mem[10],ymm5[11],mem[12],ymm5[13,14],mem[15]
7221 ; AVX2-FP-NEXT: vextracti128 $1, %ymm1, %xmm2
7222 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm2[3,4],xmm1[5,6,7]
7223 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm11 = [u,u,u,u,u,u,u,u,u,u,u,u,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
7224 ; AVX2-FP-NEXT: vpshufb %ymm11, %ymm0, %ymm2
7225 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm0 = [4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
7226 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm1, %xmm1
7227 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm1[0,1,2],ymm2[3,4,5,6,7]
7228 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm9 = xmm14[0],xmm4[1],xmm14[2,3]
7229 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,6,7,4,5,6,7,8,9,6,7,0,1,10,11]
7230 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm9, %xmm9
7231 ; AVX2-FP-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
7232 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0,1,2,3,4],ymm9[5,6,7],ymm2[8,9,10,11,12],ymm9[13,14,15]
7233 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4,5,6,7]
7234 ; AVX2-FP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7235 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
7236 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7237 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0],ymm6[1],ymm4[2,3],ymm6[4],ymm4[5],ymm6[6],ymm4[7,8],ymm6[9],ymm4[10,11],ymm6[12],ymm4[13],ymm6[14],ymm4[15]
7238 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm9 = ymm2[2,3,0,1]
7239 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm9[6],ymm2[7]
7240 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
7241 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm7[0,1],ymm15[2],ymm7[3],ymm15[4],ymm7[5,6],ymm15[7],ymm7[8,9],ymm15[10],ymm7[11],ymm15[12],ymm7[13,14],ymm15[15]
7242 ; AVX2-FP-NEXT: vextracti128 $1, %ymm9, %xmm12
7243 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1,2],xmm12[3,4],xmm9[5,6,7]
7244 ; AVX2-FP-NEXT: vpshufb %ymm11, %ymm2, %ymm2
7245 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm9, %xmm9
7246 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm9[0,1,2],ymm2[3,4,5,6,7]
7247 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm9 = xmm13[0],xmm10[1],xmm13[2,3]
7248 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm9, %xmm9
7249 ; AVX2-FP-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
7250 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0,1,2,3,4],ymm9[5,6,7],ymm2[8,9,10,11,12],ymm9[13,14,15]
7251 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4,5,6,7]
7252 ; AVX2-FP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7253 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
7254 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
7255 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm15[0],ymm14[1],ymm15[2,3],ymm14[4],ymm15[5],ymm14[6],ymm15[7,8],ymm14[9],ymm15[10,11],ymm14[12],ymm15[13],ymm14[14],ymm15[15]
7256 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm9 = ymm2[2,3,0,1]
7257 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm9[6],ymm2[7]
7258 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
7259 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
7260 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm10[0,1],ymm11[2],ymm10[3],ymm11[4],ymm10[5,6],ymm11[7],ymm10[8,9],ymm11[10],ymm10[11],ymm11[12],ymm10[13,14],ymm11[15]
7261 ; AVX2-FP-NEXT: vextracti128 $1, %ymm9, %xmm12
7262 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm9 = xmm9[0,1,2],xmm12[3,4],xmm9[5,6,7]
7263 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm12 = [u,u,u,u,u,u,u,u,u,u,u,u,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
7264 ; AVX2-FP-NEXT: vpshufb %ymm12, %ymm2, %ymm2
7265 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm9, %xmm9
7266 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm9[0,1,2],ymm2[3,4,5,6,7]
7267 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
7268 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm9 = xmm3[0],xmm13[1],xmm3[2,3]
7269 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm9, %xmm9
7270 ; AVX2-FP-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
7271 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0,1,2,3,4],ymm9[5,6,7],ymm2[8,9,10,11,12],ymm9[13,14,15]
7272 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4,5,6,7]
7273 ; AVX2-FP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7274 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
7275 ; AVX2-FP-NEXT: vpblendw $173, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
7276 ; AVX2-FP-NEXT: # ymm2 = mem[0],ymm2[1],mem[2,3],ymm2[4],mem[5],ymm2[6],mem[7,8],ymm2[9],mem[10,11],ymm2[12],mem[13],ymm2[14],mem[15]
7277 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm9 = ymm2[2,3,0,1]
7278 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm9[6],ymm2[7]
7279 ; AVX2-FP-NEXT: vpshufb %ymm12, %ymm2, %ymm2
7280 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
7281 ; AVX2-FP-NEXT: vpblendw $107, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
7282 ; AVX2-FP-NEXT: # ymm3 = mem[0,1],ymm3[2],mem[3],ymm3[4],mem[5,6],ymm3[7],mem[8,9],ymm3[10],mem[11],ymm3[12],mem[13,14],ymm3[15]
7283 ; AVX2-FP-NEXT: vextracti128 $1, %ymm3, %xmm9
7284 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm9[3,4],xmm3[5,6,7]
7285 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm3, %xmm0
7286 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm2[3,4,5,6,7]
7287 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm2 # 16-byte Reload
7288 ; AVX2-FP-NEXT: vpblendd $13, {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
7289 ; AVX2-FP-NEXT: # xmm2 = mem[0],xmm2[1],mem[2,3]
7290 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm2, %xmm1
7291 ; AVX2-FP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
7292 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5,6,7],ymm0[8,9,10,11,12],ymm1[13,14,15]
7293 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
7294 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7295 ; AVX2-FP-NEXT: vpblendw $74, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm0 # 32-byte Folded Reload
7296 ; AVX2-FP-NEXT: # ymm0 = ymm8[0],mem[1],ymm8[2],mem[3],ymm8[4,5],mem[6],ymm8[7,8],mem[9],ymm8[10],mem[11],ymm8[12,13],mem[14],ymm8[15]
7297 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
7298 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7]
7299 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
7300 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm1 = ymm8[0],ymm5[1,2],ymm8[3],ymm5[4],ymm8[5],ymm5[6,7],ymm8[8],ymm5[9,10],ymm8[11],ymm5[12],ymm8[13],ymm5[14,15]
7301 ; AVX2-FP-NEXT: vextracti128 $1, %ymm1, %xmm2
7302 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2],xmm1[3]
7303 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} ymm3 = [u,u,u,u,u,u,u,u,u,u,u,u,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
7304 ; AVX2-FP-NEXT: vpshufb %ymm3, %ymm0, %ymm2
7305 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm0 = [6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
7306 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm1, %xmm1
7307 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm1[0,1,2],ymm2[3,4,5,6,7]
7308 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7309 ; AVX2-FP-NEXT: vpblendd $4, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm9 # 16-byte Folded Reload
7310 ; AVX2-FP-NEXT: # xmm9 = xmm1[0,1],mem[2],xmm1[3]
7311 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,2,3,4,5,6,7,8,9,8,9,2,3,12,13]
7312 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm9, %xmm9
7313 ; AVX2-FP-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
7314 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0,1,2,3,4],ymm9[5,6,7],ymm2[8,9,10,11,12],ymm9[13,14,15]
7315 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4,5,6,7]
7316 ; AVX2-FP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7317 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm6[0],ymm4[1],ymm6[2],ymm4[3],ymm6[4,5],ymm4[6],ymm6[7,8],ymm4[9],ymm6[10],ymm4[11],ymm6[12,13],ymm4[14],ymm6[15]
7318 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm9 = ymm2[2,3,0,1]
7319 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4],ymm2[5],ymm9[6],ymm2[7]
7320 ; AVX2-FP-NEXT: vpblendw $41, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm9 # 32-byte Folded Reload
7321 ; AVX2-FP-NEXT: # ymm9 = mem[0],ymm7[1,2],mem[3],ymm7[4],mem[5],ymm7[6,7],mem[8],ymm7[9,10],mem[11],ymm7[12],mem[13],ymm7[14,15]
7322 ; AVX2-FP-NEXT: vextracti128 $1, %ymm9, %xmm12
7323 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm9 = xmm12[0],xmm9[1],xmm12[2],xmm9[3]
7324 ; AVX2-FP-NEXT: vpshufb %ymm3, %ymm2, %ymm2
7325 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm9, %xmm9
7326 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm9[0,1,2],ymm2[3,4,5,6,7]
7327 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7328 ; AVX2-FP-NEXT: vpblendd $4, (%rsp), %xmm4, %xmm9 # 16-byte Folded Reload
7329 ; AVX2-FP-NEXT: # xmm9 = xmm4[0,1],mem[2],xmm4[3]
7330 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm9, %xmm9
7331 ; AVX2-FP-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
7332 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0,1,2,3,4],ymm9[5,6,7],ymm2[8,9,10,11,12],ymm9[13,14,15]
7333 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4,5,6,7]
7334 ; AVX2-FP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7335 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm14[0],ymm15[1],ymm14[2],ymm15[3],ymm14[4,5],ymm15[6],ymm14[7,8],ymm15[9],ymm14[10],ymm15[11],ymm14[12,13],ymm15[14],ymm14[15]
7336 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm9 = ymm2[2,3,0,1]
7337 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4],ymm2[5],ymm9[6],ymm2[7]
7338 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm11[0],ymm10[1,2],ymm11[3],ymm10[4],ymm11[5],ymm10[6,7],ymm11[8],ymm10[9,10],ymm11[11],ymm10[12],ymm11[13],ymm10[14,15]
7339 ; AVX2-FP-NEXT: vextracti128 $1, %ymm9, %xmm12
7340 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm9 = xmm12[0],xmm9[1],xmm12[2],xmm9[3]
7341 ; AVX2-FP-NEXT: vpshufb %ymm3, %ymm2, %ymm2
7342 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm9, %xmm9
7343 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm9[0,1,2],ymm2[3,4,5,6,7]
7344 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
7345 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm9 = xmm15[0,1],xmm13[2],xmm15[3]
7346 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm9, %xmm9
7347 ; AVX2-FP-NEXT: vinserti128 $1, %xmm9, %ymm0, %ymm9
7348 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm9 = ymm2[0,1,2,3,4],ymm9[5,6,7],ymm2[8,9,10,11,12],ymm9[13,14,15]
7349 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4,5,6,7]
7350 ; AVX2-FP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7351 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
7352 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
7353 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm2 = ymm12[0],ymm7[1],ymm12[2],ymm7[3],ymm12[4,5],ymm7[6],ymm12[7,8],ymm7[9],ymm12[10],ymm7[11],ymm12[12,13],ymm7[14],ymm12[15]
7354 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm9 = ymm2[2,3,0,1]
7355 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm9[4],ymm2[5],ymm9[6],ymm2[7]
7356 ; AVX2-FP-NEXT: vpshufb %ymm3, %ymm2, %ymm2
7357 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
7358 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
7359 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm3 = ymm10[0],ymm11[1,2],ymm10[3],ymm11[4],ymm10[5],ymm11[6,7],ymm10[8],ymm11[9,10],ymm10[11],ymm11[12],ymm10[13],ymm11[14,15]
7360 ; AVX2-FP-NEXT: vextracti128 $1, %ymm3, %xmm9
7361 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm3 = xmm9[0],xmm3[1],xmm9[2],xmm3[3]
7362 ; AVX2-FP-NEXT: vpshufb %xmm0, %xmm3, %xmm0
7363 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm2[3,4,5,6,7]
7364 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7365 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
7366 ; AVX2-FP-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0,1],xmm5[2],xmm3[3]
7367 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm2, %xmm1
7368 ; AVX2-FP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
7369 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5,6,7],ymm0[8,9,10,11,12],ymm1[13,14,15]
7370 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
7371 ; AVX2-FP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7372 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7373 ; AVX2-FP-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
7374 ; AVX2-FP-NEXT: # ymm0 = ymm0[0,1],mem[2],ymm0[3],mem[4],ymm0[5,6],mem[7],ymm0[8,9],mem[10],ymm0[11],mem[12],ymm0[13,14],mem[15]
7375 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
7376 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4],ymm0[5,6],ymm1[7]
7377 ; AVX2-FP-NEXT: vpblendw $173, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm0 # 32-byte Folded Reload
7378 ; AVX2-FP-NEXT: # ymm0 = mem[0],ymm8[1],mem[2,3],ymm8[4],mem[5],ymm8[6],mem[7,8],ymm8[9],mem[10,11],ymm8[12],mem[13],ymm8[14],mem[15]
7379 ; AVX2-FP-NEXT: vextracti128 $1, %ymm0, %xmm2
7380 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm0[3,4],xmm2[5,6,7]
7381 ; AVX2-FP-NEXT: vbroadcasti128 {{.*#+}} ymm0 = [24,25,18,19,28,29,22,23,0,0,0,0,4,5,14,15,24,25,18,19,28,29,22,23,0,0,0,0,4,5,14,15]
7382 ; AVX2-FP-NEXT: # ymm0 = mem[0,1,0,1]
7383 ; AVX2-FP-NEXT: vpshufb %ymm0, %ymm1, %ymm1
7384 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm13 = [8,9,2,3,12,13,6,7,0,1,10,11,0,1,6,7]
7385 ; AVX2-FP-NEXT: vpshufb %xmm13, %xmm2, %xmm2
7386 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2],ymm1[3,4,5,6,7]
7387 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm9 = [12,13,14,15,4,5,14,15,8,9,10,11,12,13,14,15]
7388 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
7389 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm1, %xmm4
7390 ; AVX2-FP-NEXT: vmovdqa {{.*#+}} xmm1 = [0,1,2,3,0,1,10,11,8,9,10,11,12,13,14,15]
7391 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
7392 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm6, %xmm6
7393 ; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm4 = xmm6[0],xmm4[0],xmm6[1],xmm4[1]
7394 ; AVX2-FP-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
7395 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm6 = ymm2[0,1,2,3,4,5],ymm4[6,7]
7396 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
7397 ; AVX2-FP-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm2 # 32-byte Folded Reload
7398 ; AVX2-FP-NEXT: # ymm2 = ymm2[0,1],mem[2],ymm2[3],mem[4],ymm2[5,6],mem[7],ymm2[8,9],mem[10],ymm2[11],mem[12],ymm2[13,14],mem[15]
7399 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm4 = ymm2[2,3,0,1]
7400 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4],ymm2[5,6],ymm4[7]
7401 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7402 ; AVX2-FP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4, %ymm4 # 32-byte Folded Reload
7403 ; AVX2-FP-NEXT: # ymm4 = ymm4[0],mem[1],ymm4[2,3],mem[4],ymm4[5],mem[6],ymm4[7,8],mem[9],ymm4[10,11],mem[12],ymm4[13],mem[14],ymm4[15]
7404 ; AVX2-FP-NEXT: vextracti128 $1, %ymm4, %xmm14
7405 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm4 = xmm14[0,1,2],xmm4[3,4],xmm14[5,6,7]
7406 ; AVX2-FP-NEXT: vpshufb %ymm0, %ymm2, %ymm2
7407 ; AVX2-FP-NEXT: vpshufb %xmm13, %xmm4, %xmm4
7408 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2],ymm2[3,4,5,6,7]
7409 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
7410 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm4, %xmm4
7411 ; AVX2-FP-NEXT: vmovdqa (%rsp), %xmm8 # 16-byte Reload
7412 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm8, %xmm8
7413 ; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm4 = xmm8[0],xmm4[0],xmm8[1],xmm4[1]
7414 ; AVX2-FP-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
7415 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm4[6,7]
7416 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm3, %xmm4
7417 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm5, %xmm5
7418 ; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
7419 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm5 = ymm12[0,1],ymm7[2],ymm12[3],ymm7[4],ymm12[5,6],ymm7[7],ymm12[8,9],ymm7[10],ymm12[11],ymm7[12],ymm12[13,14],ymm7[15]
7420 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm7 = ymm5[2,3,0,1]
7421 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4],ymm5[5,6],ymm7[7]
7422 ; AVX2-FP-NEXT: vpblendw {{.*#+}} ymm7 = ymm11[0],ymm10[1],ymm11[2,3],ymm10[4],ymm11[5],ymm10[6],ymm11[7,8],ymm10[9],ymm11[10,11],ymm10[12],ymm11[13],ymm10[14],ymm11[15]
7423 ; AVX2-FP-NEXT: vextracti128 $1, %ymm7, %xmm8
7424 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm7 = xmm8[0,1,2],xmm7[3,4],xmm8[5,6,7]
7425 ; AVX2-FP-NEXT: vpshufb %ymm0, %ymm5, %ymm5
7426 ; AVX2-FP-NEXT: vpshufb %xmm13, %xmm7, %xmm7
7427 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0,1,2],ymm5[3,4,5,6,7]
7428 ; AVX2-FP-NEXT: vinserti128 $1, %xmm4, %ymm0, %ymm4
7429 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2,3,4,5],ymm4[6,7]
7430 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
7431 ; AVX2-FP-NEXT: vpblendw $107, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm5 # 32-byte Folded Reload
7432 ; AVX2-FP-NEXT: # ymm5 = mem[0,1],ymm3[2],mem[3],ymm3[4],mem[5,6],ymm3[7],mem[8,9],ymm3[10],mem[11],ymm3[12],mem[13,14],ymm3[15]
7433 ; AVX2-FP-NEXT: vpermq {{.*#+}} ymm7 = ymm5[2,3,0,1]
7434 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4],ymm5[5,6],ymm7[7]
7435 ; AVX2-FP-NEXT: vpshufb %ymm0, %ymm5, %ymm0
7436 ; AVX2-FP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
7437 ; AVX2-FP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm5 # 32-byte Folded Reload
7438 ; AVX2-FP-NEXT: # ymm5 = ymm3[0],mem[1],ymm3[2,3],mem[4],ymm3[5],mem[6],ymm3[7,8],mem[9],ymm3[10,11],mem[12],ymm3[13],mem[14],ymm3[15]
7439 ; AVX2-FP-NEXT: vextracti128 $1, %ymm5, %xmm7
7440 ; AVX2-FP-NEXT: vpblendw {{.*#+}} xmm5 = xmm7[0,1,2],xmm5[3,4],xmm7[5,6,7]
7441 ; AVX2-FP-NEXT: vpshufb %xmm13, %xmm5, %xmm3
7442 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2],ymm0[3,4,5,6,7]
7443 ; AVX2-FP-NEXT: vpshufb %xmm9, %xmm15, %xmm3
7444 ; AVX2-FP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
7445 ; AVX2-FP-NEXT: vpshufb %xmm1, %xmm5, %xmm1
7446 ; AVX2-FP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
7447 ; AVX2-FP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
7448 ; AVX2-FP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6,7]
7449 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7450 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rsi)
7451 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7452 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rsi)
7453 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7454 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rsi)
7455 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7456 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rsi)
7457 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7458 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rdx)
7459 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7460 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rdx)
7461 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7462 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rdx)
7463 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7464 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rdx)
7465 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7466 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%rcx)
7467 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7468 ; AVX2-FP-NEXT: vmovaps %ymm1, (%rcx)
7469 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7470 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%rcx)
7471 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7472 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%rcx)
7473 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7474 ; AVX2-FP-NEXT: vmovaps %ymm1, 64(%r8)
7475 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7476 ; AVX2-FP-NEXT: vmovaps %ymm1, (%r8)
7477 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7478 ; AVX2-FP-NEXT: vmovaps %ymm1, 96(%r8)
7479 ; AVX2-FP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7480 ; AVX2-FP-NEXT: vmovaps %ymm1, 32(%r8)
7481 ; AVX2-FP-NEXT: vmovdqa %ymm0, 64(%r9)
7482 ; AVX2-FP-NEXT: vmovdqa %ymm4, (%r9)
7483 ; AVX2-FP-NEXT: vmovdqa %ymm2, 96(%r9)
7484 ; AVX2-FP-NEXT: vmovdqa %ymm6, 32(%r9)
7485 ; AVX2-FP-NEXT: addq $1080, %rsp # imm = 0x438
7486 ; AVX2-FP-NEXT: vzeroupper
7487 ; AVX2-FP-NEXT: retq
7488 ;
7489 ; AVX2-FCP-LABEL: load_i16_stride5_vf64:
7490 ; AVX2-FCP: # %bb.0:
7491 ; AVX2-FCP-NEXT: subq $1000, %rsp # imm = 0x3E8
7492 ; AVX2-FCP-NEXT: vmovdqa (%rdi), %ymm9
7493 ; AVX2-FCP-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7494 ; AVX2-FCP-NEXT: vmovdqa 544(%rdi), %ymm14
7495 ; AVX2-FCP-NEXT: vmovdqa 576(%rdi), %ymm11
7496 ; AVX2-FCP-NEXT: vmovdqa 512(%rdi), %ymm8
7497 ; AVX2-FCP-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7498 ; AVX2-FCP-NEXT: vmovdqa 480(%rdi), %ymm10
7499 ; AVX2-FCP-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7500 ; AVX2-FCP-NEXT: vmovdqa 224(%rdi), %ymm4
7501 ; AVX2-FCP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7502 ; AVX2-FCP-NEXT: vmovdqa 256(%rdi), %ymm3
7503 ; AVX2-FCP-NEXT: vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7504 ; AVX2-FCP-NEXT: vmovdqa 192(%rdi), %ymm0
7505 ; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7506 ; AVX2-FCP-NEXT: vmovdqa 160(%rdi), %ymm1
7507 ; AVX2-FCP-NEXT: vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7508 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5],ymm0[6],ymm1[7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13],ymm0[14],ymm1[15]
7509 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
7510 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm0[0],xmm1[1,2,3],xmm0[4,5],xmm1[6,7]
7511 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,10,11,4,5,14,15,8,9,2,3,12,13,6,7]
7512 ; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm1, %xmm2
7513 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm3[1,2],ymm4[3],ymm3[4],ymm4[5],ymm3[6,7],ymm4[8],ymm3[9,10],ymm4[11],ymm3[12],ymm4[13],ymm3[14,15]
7514 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm1 = [1,3,0,2,4,6,1,3]
7515 ; AVX2-FCP-NEXT: vpermd %ymm3, %ymm1, %ymm4
7516 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,6,7,8,9,14,15,4,5,14,15,4,5,2,3,16,17,22,23,24,25,30,31,20,21,30,31,20,21,18,19]
7517 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm4, %ymm4
7518 ; AVX2-FCP-NEXT: vpmovsxbw {{.*#+}} xmm13 = [65535,65535,65535,65535,65535,65535,65535,0]
7519 ; AVX2-FCP-NEXT: vpblendvb %ymm13, %ymm2, %ymm4, %ymm2
7520 ; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7521 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm10[0],ymm8[1],ymm10[2,3],ymm8[4],ymm10[5],ymm8[6],ymm10[7,8],ymm8[9],ymm10[10,11],ymm8[12],ymm10[13],ymm8[14],ymm10[15]
7522 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm2, %xmm4
7523 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm4[1,2,3],xmm2[4,5],xmm4[6,7]
7524 ; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm2, %xmm2
7525 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm14[0],ymm11[1,2],ymm14[3],ymm11[4],ymm14[5],ymm11[6,7],ymm14[8],ymm11[9,10],ymm14[11],ymm11[12],ymm14[13],ymm11[14,15]
7526 ; AVX2-FCP-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7527 ; AVX2-FCP-NEXT: vmovdqu %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7528 ; AVX2-FCP-NEXT: vpermd %ymm4, %ymm1, %ymm4
7529 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm4, %ymm4
7530 ; AVX2-FCP-NEXT: vpblendvb %ymm13, %ymm2, %ymm4, %ymm2
7531 ; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7532 ; AVX2-FCP-NEXT: vmovdqa 32(%rdi), %ymm15
7533 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm9[0],ymm15[1],ymm9[2,3],ymm15[4],ymm9[5],ymm15[6],ymm9[7,8],ymm15[9],ymm9[10,11],ymm15[12],ymm9[13],ymm15[14],ymm9[15]
7534 ; AVX2-FCP-NEXT: vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7535 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm2, %xmm4
7536 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm4[1,2,3],xmm2[4,5],xmm4[6,7]
7537 ; AVX2-FCP-NEXT: vmovdqa 64(%rdi), %ymm8
7538 ; AVX2-FCP-NEXT: vmovdqa 96(%rdi), %ymm12
7539 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm8[0],ymm12[1,2],ymm8[3],ymm12[4],ymm8[5],ymm12[6,7],ymm8[8],ymm12[9,10],ymm8[11],ymm12[12],ymm8[13],ymm12[14,15]
7540 ; AVX2-FCP-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7541 ; AVX2-FCP-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7542 ; AVX2-FCP-NEXT: vpermd %ymm4, %ymm1, %ymm4
7543 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm4, %ymm4
7544 ; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm2, %xmm2
7545 ; AVX2-FCP-NEXT: vpblendvb %ymm13, %ymm2, %ymm4, %ymm2
7546 ; AVX2-FCP-NEXT: vmovdqu %ymm2, (%rsp) # 32-byte Spill
7547 ; AVX2-FCP-NEXT: vmovdqa 352(%rdi), %ymm9
7548 ; AVX2-FCP-NEXT: vmovdqa 320(%rdi), %ymm7
7549 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm7[0],ymm9[1],ymm7[2,3],ymm9[4],ymm7[5],ymm9[6],ymm7[7,8],ymm9[9],ymm7[10,11],ymm9[12],ymm7[13],ymm9[14],ymm7[15]
7550 ; AVX2-FCP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7551 ; AVX2-FCP-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7552 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm2, %xmm4
7553 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm4[1,2,3],xmm2[4,5],xmm4[6,7]
7554 ; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm2, %xmm0
7555 ; AVX2-FCP-NEXT: vmovdqa 384(%rdi), %ymm5
7556 ; AVX2-FCP-NEXT: vmovdqa 416(%rdi), %ymm10
7557 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0],ymm10[1,2],ymm5[3],ymm10[4],ymm5[5],ymm10[6,7],ymm5[8],ymm10[9,10],ymm5[11],ymm10[12],ymm5[13],ymm10[14,15]
7558 ; AVX2-FCP-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7559 ; AVX2-FCP-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7560 ; AVX2-FCP-NEXT: vpermd %ymm2, %ymm1, %ymm1
7561 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm1, %ymm1
7562 ; AVX2-FCP-NEXT: vpblendvb %ymm13, %ymm0, %ymm1, %ymm0
7563 ; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7564 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7565 ; AVX2-FCP-NEXT: vpblendw $181, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
7566 ; AVX2-FCP-NEXT: # ymm0 = mem[0],ymm0[1],mem[2],ymm0[3],mem[4,5],ymm0[6],mem[7,8],ymm0[9],mem[10],ymm0[11],mem[12,13],ymm0[14],mem[15]
7567 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
7568 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6],xmm1[7]
7569 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm4 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,10,11]
7570 ; AVX2-FCP-NEXT: vpshufb %xmm4, %xmm0, %xmm0
7571 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7572 ; AVX2-FCP-NEXT: vpblendw $173, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
7573 ; AVX2-FCP-NEXT: # ymm1 = mem[0],ymm1[1],mem[2,3],ymm1[4],mem[5],ymm1[6],mem[7,8],ymm1[9],mem[10,11],ymm1[12],mem[13],ymm1[14],mem[15]
7574 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm2 = [2,0,0,0,4,7,1,6]
7575 ; AVX2-FCP-NEXT: vpermd %ymm1, %ymm2, %ymm3
7576 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm1 = [2,3,4,5,10,11,0,1,14,15,2,3,12,13,0,1,18,19,20,21,26,27,16,17,30,31,18,19,28,29,16,17]
7577 ; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm3, %ymm3
7578 ; AVX2-FCP-NEXT: vpblendvb %ymm13, %ymm0, %ymm3, %ymm6
7579 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7580 ; AVX2-FCP-NEXT: vpblendw $74, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
7581 ; AVX2-FCP-NEXT: # ymm0 = ymm0[0],mem[1],ymm0[2],mem[3],ymm0[4,5],mem[6],ymm0[7,8],mem[9],ymm0[10],mem[11],ymm0[12,13],mem[14],ymm0[15]
7582 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm3
7583 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm3[2,3],xmm0[4,5,6],xmm3[7]
7584 ; AVX2-FCP-NEXT: vpshufb %xmm4, %xmm0, %xmm0
7585 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm11[0],ymm14[1],ymm11[2,3],ymm14[4],ymm11[5],ymm14[6],ymm11[7,8],ymm14[9],ymm11[10,11],ymm14[12],ymm11[13],ymm14[14],ymm11[15]
7586 ; AVX2-FCP-NEXT: vpermd %ymm3, %ymm2, %ymm3
7587 ; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm3, %ymm3
7588 ; AVX2-FCP-NEXT: vpblendvb %ymm13, %ymm0, %ymm3, %ymm3
7589 ; AVX2-FCP-NEXT: vpblendw $74, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm0 # 32-byte Folded Reload
7590 ; AVX2-FCP-NEXT: # ymm0 = ymm15[0],mem[1],ymm15[2],mem[3],ymm15[4,5],mem[6],ymm15[7,8],mem[9],ymm15[10],mem[11],ymm15[12,13],mem[14],ymm15[15]
7591 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm11
7592 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm11[2,3],xmm0[4,5,6],xmm11[7]
7593 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm12[0],ymm8[1],ymm12[2,3],ymm8[4],ymm12[5],ymm8[6],ymm12[7,8],ymm8[9],ymm12[10,11],ymm8[12],ymm12[13],ymm8[14],ymm12[15]
7594 ; AVX2-FCP-NEXT: vpermd %ymm11, %ymm2, %ymm11
7595 ; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm11, %ymm11
7596 ; AVX2-FCP-NEXT: vpshufb %xmm4, %xmm0, %xmm0
7597 ; AVX2-FCP-NEXT: vpblendvb %ymm13, %ymm0, %ymm11, %ymm0
7598 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm9[0],ymm7[1],ymm9[2],ymm7[3],ymm9[4,5],ymm7[6],ymm9[7,8],ymm7[9],ymm9[10],ymm7[11],ymm9[12,13],ymm7[14],ymm9[15]
7599 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm11, %xmm12
7600 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1],xmm12[2,3],xmm11[4,5,6],xmm12[7]
7601 ; AVX2-FCP-NEXT: vpshufb %xmm4, %xmm11, %xmm4
7602 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm10[0],ymm5[1],ymm10[2,3],ymm5[4],ymm10[5],ymm5[6],ymm10[7,8],ymm5[9],ymm10[10,11],ymm5[12],ymm10[13],ymm5[14],ymm10[15]
7603 ; AVX2-FCP-NEXT: vpermd %ymm11, %ymm2, %ymm2
7604 ; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm2, %ymm1
7605 ; AVX2-FCP-NEXT: vpblendvb %ymm13, %ymm4, %ymm1, %ymm1
7606 ; AVX2-FCP-NEXT: vmovdqa 288(%rdi), %ymm12
7607 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm5 = [0,3,1,3,0,3,5,7]
7608 ; AVX2-FCP-NEXT: vpermd %ymm12, %ymm5, %ymm11
7609 ; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm14 = [0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27,0,0,18,19,20,21,26,27]
7610 ; AVX2-FCP-NEXT: vpshufb %ymm14, %ymm11, %ymm11
7611 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
7612 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm2[0,1,2,3,4],ymm11[5,6,7],ymm2[8,9,10,11,12],ymm11[13,14,15]
7613 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm11[4,5,6,7]
7614 ; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7615 ; AVX2-FCP-NEXT: vmovdqa 608(%rdi), %ymm10
7616 ; AVX2-FCP-NEXT: vpermd %ymm10, %ymm5, %ymm11
7617 ; AVX2-FCP-NEXT: vpshufb %ymm14, %ymm11, %ymm11
7618 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
7619 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm2[0,1,2,3,4],ymm11[5,6,7],ymm2[8,9,10,11,12],ymm11[13,14,15]
7620 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm11[4,5,6,7]
7621 ; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7622 ; AVX2-FCP-NEXT: vmovdqa 128(%rdi), %ymm9
7623 ; AVX2-FCP-NEXT: vpermd %ymm9, %ymm5, %ymm11
7624 ; AVX2-FCP-NEXT: vpshufb %ymm14, %ymm11, %ymm11
7625 ; AVX2-FCP-NEXT: vmovdqu (%rsp), %ymm2 # 32-byte Reload
7626 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm2[0,1,2,3,4],ymm11[5,6,7],ymm2[8,9,10,11,12],ymm11[13,14,15]
7627 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm11[4,5,6,7]
7628 ; AVX2-FCP-NEXT: vmovdqu %ymm2, (%rsp) # 32-byte Spill
7629 ; AVX2-FCP-NEXT: vmovdqa 448(%rdi), %ymm4
7630 ; AVX2-FCP-NEXT: vpermd %ymm4, %ymm5, %ymm5
7631 ; AVX2-FCP-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7632 ; AVX2-FCP-NEXT: vpshufb %ymm14, %ymm5, %ymm5
7633 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
7634 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm5 = ymm2[0,1,2,3,4],ymm5[5,6,7],ymm2[8,9,10,11,12],ymm5[13,14,15]
7635 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm5[4,5,6,7]
7636 ; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7637 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm7 = [1,3,2,3,1,3,6,7]
7638 ; AVX2-FCP-NEXT: vpermd %ymm12, %ymm7, %ymm11
7639 ; AVX2-FCP-NEXT: vmovdqa %ymm12, %ymm8
7640 ; AVX2-FCP-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7641 ; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm13 = [0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25]
7642 ; AVX2-FCP-NEXT: vpshufb %ymm13, %ymm11, %ymm11
7643 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm6[0,1,2,3,4],ymm11[5,6,7],ymm6[8,9,10,11,12],ymm11[13,14,15]
7644 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm6[0,1,2,3],ymm11[4,5,6,7]
7645 ; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7646 ; AVX2-FCP-NEXT: vpermd %ymm10, %ymm7, %ymm6
7647 ; AVX2-FCP-NEXT: vmovdqa %ymm10, %ymm12
7648 ; AVX2-FCP-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7649 ; AVX2-FCP-NEXT: vpshufb %ymm13, %ymm6, %ymm6
7650 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm3[0,1,2,3,4],ymm6[5,6,7],ymm3[8,9,10,11,12],ymm6[13,14,15]
7651 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm6[4,5,6,7]
7652 ; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7653 ; AVX2-FCP-NEXT: vpermd %ymm9, %ymm7, %ymm3
7654 ; AVX2-FCP-NEXT: vmovdqa %ymm9, %ymm5
7655 ; AVX2-FCP-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7656 ; AVX2-FCP-NEXT: vpshufb %ymm13, %ymm3, %ymm3
7657 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm0[0,1,2,3,4],ymm3[5,6,7],ymm0[8,9,10,11,12],ymm3[13,14,15]
7658 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm3[4,5,6,7]
7659 ; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7660 ; AVX2-FCP-NEXT: vpermd %ymm4, %ymm7, %ymm0
7661 ; AVX2-FCP-NEXT: vpshufb %ymm13, %ymm0, %ymm0
7662 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm1[0,1,2,3,4],ymm0[5,6,7],ymm1[8,9,10,11,12],ymm0[13,14,15]
7663 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
7664 ; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7665 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
7666 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7667 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0,1],ymm4[2],ymm9[3],ymm4[4],ymm9[5,6],ymm4[7],ymm9[8,9],ymm4[10],ymm9[11],ymm4[12],ymm9[13,14],ymm4[15]
7668 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
7669 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm0[0,1,2],xmm1[3,4],xmm0[5,6,7]
7670 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm0 = [4,5,14,15,8,9,2,3,12,13,6,7,u,u,u,u]
7671 ; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm1, %xmm6
7672 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7673 ; AVX2-FCP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm3 # 32-byte Folded Reload
7674 ; AVX2-FCP-NEXT: # ymm3 = ymm1[0],mem[1],ymm1[2,3],mem[4],ymm1[5],mem[6],ymm1[7,8],mem[9],ymm1[10,11],mem[12],ymm1[13],mem[14],ymm1[15]
7675 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm1 = [0,2,0,0,5,7,2,4]
7676 ; AVX2-FCP-NEXT: vpermd %ymm3, %ymm1, %ymm7
7677 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm3 = [u,u,u,u,u,u,u,u,u,u,u,u,0,1,6,7,16,17,22,23,24,25,30,31,20,21,22,23,16,17,22,23]
7678 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm7, %ymm7
7679 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm7 = ymm6[0,1,2],ymm7[3,4,5,6,7]
7680 ; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [1,4,6,0,1,4,6,0]
7681 ; AVX2-FCP-NEXT: # ymm6 = mem[0,1,0,1]
7682 ; AVX2-FCP-NEXT: vpermd %ymm8, %ymm6, %ymm11
7683 ; AVX2-FCP-NEXT: vmovdqa %ymm14, %ymm8
7684 ; AVX2-FCP-NEXT: vpshufb %ymm14, %ymm11, %ymm11
7685 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm7[0,1,2,3,4],ymm11[5,6,7],ymm7[8,9,10,11,12],ymm11[13,14,15]
7686 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm7[0,1,2,3],ymm11[4,5,6,7]
7687 ; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7688 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
7689 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
7690 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm15[0,1],ymm14[2],ymm15[3],ymm14[4],ymm15[5,6],ymm14[7],ymm15[8,9],ymm14[10],ymm15[11],ymm14[12],ymm15[13,14],ymm14[15]
7691 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm7, %xmm11
7692 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm11[3,4],xmm7[5,6,7]
7693 ; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm7, %xmm7
7694 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
7695 ; AVX2-FCP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm11 # 32-byte Folded Reload
7696 ; AVX2-FCP-NEXT: # ymm11 = ymm10[0],mem[1],ymm10[2,3],mem[4],ymm10[5],mem[6],ymm10[7,8],mem[9],ymm10[10,11],mem[12],ymm10[13],mem[14],ymm10[15]
7697 ; AVX2-FCP-NEXT: vpermd %ymm11, %ymm1, %ymm11
7698 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm11, %ymm11
7699 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2],ymm11[3,4,5,6,7]
7700 ; AVX2-FCP-NEXT: vpermd %ymm12, %ymm6, %ymm11
7701 ; AVX2-FCP-NEXT: vpshufb %ymm8, %ymm11, %ymm11
7702 ; AVX2-FCP-NEXT: vmovdqa %ymm8, %ymm13
7703 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm7[0,1,2,3,4],ymm11[5,6,7],ymm7[8,9,10,11,12],ymm11[13,14,15]
7704 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm7[0,1,2,3],ymm11[4,5,6,7]
7705 ; AVX2-FCP-NEXT: vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7706 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
7707 ; AVX2-FCP-NEXT: vpblendw $107, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm7 # 32-byte Folded Reload
7708 ; AVX2-FCP-NEXT: # ymm7 = mem[0,1],ymm12[2],mem[3],ymm12[4],mem[5,6],ymm12[7],mem[8,9],ymm12[10],mem[11],ymm12[12],mem[13,14],ymm12[15]
7709 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm7, %xmm11
7710 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm11[3,4],xmm7[5,6,7]
7711 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
7712 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
7713 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm2[0],ymm8[1],ymm2[2,3],ymm8[4],ymm2[5],ymm8[6],ymm2[7,8],ymm8[9],ymm2[10,11],ymm8[12],ymm2[13],ymm8[14],ymm2[15]
7714 ; AVX2-FCP-NEXT: vpermd %ymm11, %ymm1, %ymm11
7715 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm11, %ymm11
7716 ; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm7, %xmm7
7717 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm7 = ymm7[0,1,2],ymm11[3,4,5,6,7]
7718 ; AVX2-FCP-NEXT: vpermd %ymm5, %ymm6, %ymm11
7719 ; AVX2-FCP-NEXT: vpshufb %ymm13, %ymm11, %ymm11
7720 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm11 = ymm7[0,1,2,3,4],ymm11[5,6,7],ymm7[8,9,10,11,12],ymm11[13,14,15]
7721 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0,1,2,3],ymm11[4,5,6,7]
7722 ; AVX2-FCP-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7723 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
7724 ; AVX2-FCP-NEXT: vpblendw $107, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm7 # 32-byte Folded Reload
7725 ; AVX2-FCP-NEXT: # ymm7 = mem[0,1],ymm13[2],mem[3],ymm13[4],mem[5,6],ymm13[7],mem[8,9],ymm13[10],mem[11],ymm13[12],mem[13,14],ymm13[15]
7726 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm7, %xmm11
7727 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm7 = xmm7[0,1,2],xmm11[3,4],xmm7[5,6,7]
7728 ; AVX2-FCP-NEXT: vpshufb %xmm0, %xmm7, %xmm0
7729 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
7730 ; AVX2-FCP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm7 # 32-byte Folded Reload
7731 ; AVX2-FCP-NEXT: # ymm7 = ymm5[0],mem[1],ymm5[2,3],mem[4],ymm5[5],mem[6],ymm5[7,8],mem[9],ymm5[10,11],mem[12],ymm5[13],mem[14],ymm5[15]
7732 ; AVX2-FCP-NEXT: vpermd %ymm7, %ymm1, %ymm1
7733 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm1, %ymm1
7734 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3,4,5,6,7]
7735 ; AVX2-FCP-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm1 # 32-byte Folded Reload
7736 ; AVX2-FCP-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,26,27]
7737 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5,6,7],ymm0[8,9,10,11,12],ymm1[13,14,15]
7738 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
7739 ; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7740 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm4[0],ymm9[1,2],ymm4[3],ymm9[4],ymm4[5],ymm9[6,7],ymm4[8],ymm9[9,10],ymm4[11],ymm9[12],ymm4[13],ymm9[14,15]
7741 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
7742 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm1[0],xmm0[1],xmm1[2],xmm0[3]
7743 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm4 = [6,7,0,1,10,11,4,5,14,15,8,9,u,u,u,u]
7744 ; AVX2-FCP-NEXT: vpshufb %xmm4, %xmm0, %xmm0
7745 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
7746 ; AVX2-FCP-NEXT: vpblendw $74, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm3 # 32-byte Folded Reload
7747 ; AVX2-FCP-NEXT: # ymm3 = ymm11[0],mem[1],ymm11[2],mem[3],ymm11[4,5],mem[6],ymm11[7,8],mem[9],ymm11[10],mem[11],ymm11[12,13],mem[14],ymm11[15]
7748 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm9 = [0,3,0,0,5,0,2,7]
7749 ; AVX2-FCP-NEXT: vpermd %ymm3, %ymm9, %ymm6
7750 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} ymm3 = [u,u,u,u,u,u,u,u,u,u,u,u,2,3,4,5,18,19,20,21,26,27,16,17,30,31,30,31,18,19,20,21]
7751 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm6, %ymm6
7752 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm6[3,4,5,6,7]
7753 ; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm6 = [2,4,7,0,2,4,7,0]
7754 ; AVX2-FCP-NEXT: # ymm6 = mem[0,1,0,1]
7755 ; AVX2-FCP-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm7 # 32-byte Folded Reload
7756 ; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm1 = [0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25,0,0,16,17,22,23,24,25]
7757 ; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm7, %ymm7
7758 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm0[0,1,2,3,4],ymm7[5,6,7],ymm0[8,9,10,11,12],ymm7[13,14,15]
7759 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm7[4,5,6,7]
7760 ; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7761 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm14[0],ymm15[1,2],ymm14[3],ymm15[4],ymm14[5],ymm15[6,7],ymm14[8],ymm15[9,10],ymm14[11],ymm15[12],ymm14[13],ymm15[14,15]
7762 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm7
7763 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm7[0],xmm0[1],xmm7[2],xmm0[3]
7764 ; AVX2-FCP-NEXT: vpshufb %xmm4, %xmm0, %xmm0
7765 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
7766 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm15[0],ymm10[1],ymm15[2],ymm10[3],ymm15[4,5],ymm10[6],ymm15[7,8],ymm10[9],ymm15[10],ymm10[11],ymm15[12,13],ymm10[14],ymm15[15]
7767 ; AVX2-FCP-NEXT: vpermd %ymm7, %ymm9, %ymm7
7768 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm7, %ymm7
7769 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm7[3,4,5,6,7]
7770 ; AVX2-FCP-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm7 # 32-byte Folded Reload
7771 ; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm7, %ymm7
7772 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm0[0,1,2,3,4],ymm7[5,6,7],ymm0[8,9,10,11,12],ymm7[13,14,15]
7773 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm7[4,5,6,7]
7774 ; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7775 ; AVX2-FCP-NEXT: vpblendw $214, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm0 # 32-byte Folded Reload
7776 ; AVX2-FCP-NEXT: # ymm0 = ymm12[0],mem[1,2],ymm12[3],mem[4],ymm12[5],mem[6,7],ymm12[8],mem[9,10],ymm12[11],mem[12],ymm12[13],mem[14,15]
7777 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm7
7778 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm7[0],xmm0[1],xmm7[2],xmm0[3]
7779 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm8[0],ymm2[1],ymm8[2],ymm2[3],ymm8[4,5],ymm2[6],ymm8[7,8],ymm2[9],ymm8[10],ymm2[11],ymm8[12,13],ymm2[14],ymm8[15]
7780 ; AVX2-FCP-NEXT: vmovdqa %ymm8, %ymm12
7781 ; AVX2-FCP-NEXT: vpermd %ymm7, %ymm9, %ymm7
7782 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm7, %ymm7
7783 ; AVX2-FCP-NEXT: vpshufb %xmm4, %xmm0, %xmm0
7784 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm7[3,4,5,6,7]
7785 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
7786 ; AVX2-FCP-NEXT: vpermd %ymm8, %ymm6, %ymm7
7787 ; AVX2-FCP-NEXT: vpshufb %ymm1, %ymm7, %ymm7
7788 ; AVX2-FCP-NEXT: vmovdqa %ymm1, %ymm2
7789 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm0[0,1,2,3,4],ymm7[5,6,7],ymm0[8,9,10,11,12],ymm7[13,14,15]
7790 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm7[4,5,6,7]
7791 ; AVX2-FCP-NEXT: vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7792 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
7793 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm13[0],ymm10[1,2],ymm13[3],ymm10[4],ymm13[5],ymm10[6,7],ymm13[8],ymm10[9,10],ymm13[11],ymm10[12],ymm13[13],ymm10[14,15]
7794 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm7
7795 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm7[0],xmm0[1],xmm7[2],xmm0[3]
7796 ; AVX2-FCP-NEXT: vpshufb %xmm4, %xmm0, %xmm0
7797 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
7798 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm13[0],ymm5[1],ymm13[2],ymm5[3],ymm13[4,5],ymm5[6],ymm13[7,8],ymm5[9],ymm13[10],ymm5[11],ymm13[12,13],ymm5[14],ymm13[15]
7799 ; AVX2-FCP-NEXT: vpermd %ymm1, %ymm9, %ymm1
7800 ; AVX2-FCP-NEXT: vpshufb %ymm3, %ymm1, %ymm1
7801 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3,4,5,6,7]
7802 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
7803 ; AVX2-FCP-NEXT: vpermd %ymm14, %ymm6, %ymm1
7804 ; AVX2-FCP-NEXT: vpshufb %ymm2, %ymm1, %ymm1
7805 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5,6,7],ymm0[8,9,10,11,12],ymm1[13,14,15]
7806 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
7807 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
7808 ; AVX2-FCP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
7809 ; AVX2-FCP-NEXT: # ymm0 = ymm0[0],mem[1],ymm0[2,3],mem[4],ymm0[5],mem[6],ymm0[7,8],mem[9],ymm0[10,11],mem[12],ymm0[13],mem[14],ymm0[15]
7810 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm0, %xmm2
7811 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm2[0,1,2],xmm0[3,4],xmm2[5,6,7]
7812 ; AVX2-FCP-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm2 # 32-byte Folded Reload
7813 ; AVX2-FCP-NEXT: # ymm2 = ymm11[0,1],mem[2],ymm11[3],mem[4],ymm11[5,6],mem[7],ymm11[8,9],mem[10],ymm11[11],mem[12],ymm11[13,14],mem[15]
7814 ; AVX2-FCP-NEXT: vmovdqa {{.*#+}} xmm6 = [8,9,2,3,12,13,6,7,0,1,10,11,0,1,6,7]
7815 ; AVX2-FCP-NEXT: vpshufb %xmm6, %xmm0, %xmm0
7816 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm5 = [1,3,0,0,6,0,3,5]
7817 ; AVX2-FCP-NEXT: vpermd %ymm2, %ymm5, %ymm2
7818 ; AVX2-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm7 = [16,17,22,23,24,25,30,31,0,0,0,0,0,1,6,7,16,17,22,23,24,25,30,31,0,0,0,0,0,1,6,7]
7819 ; AVX2-FCP-NEXT: # ymm7 = mem[0,1,0,1]
7820 ; AVX2-FCP-NEXT: vpshufb %ymm7, %ymm2, %ymm2
7821 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm0[0,1,2],ymm2[3,4,5,6,7]
7822 ; AVX2-FCP-NEXT: vpmovsxbd {{.*#+}} ymm0 = [0,2,1,3,0,2,5,7]
7823 ; AVX2-FCP-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm3 # 32-byte Folded Reload
7824 ; AVX2-FCP-NEXT: vpbroadcastq {{.*#+}} ymm4 = [16,17,22,23,24,25,30,31,16,17,22,23,24,25,30,31,16,17,22,23,24,25,30,31,16,17,22,23,24,25,30,31]
7825 ; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm3
7826 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4,5],ymm3[6,7]
7827 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
7828 ; AVX2-FCP-NEXT: vpblendw $173, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
7829 ; AVX2-FCP-NEXT: # ymm3 = mem[0],ymm3[1],mem[2,3],ymm3[4],mem[5],ymm3[6],mem[7,8],ymm3[9],mem[10,11],ymm3[12],mem[13],ymm3[14],mem[15]
7830 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm3, %xmm11
7831 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm11[0,1,2],xmm3[3,4],xmm11[5,6,7]
7832 ; AVX2-FCP-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm11 # 32-byte Folded Reload
7833 ; AVX2-FCP-NEXT: # ymm11 = ymm15[0,1],mem[2],ymm15[3],mem[4],ymm15[5,6],mem[7],ymm15[8,9],mem[10],ymm15[11],mem[12],ymm15[13,14],mem[15]
7834 ; AVX2-FCP-NEXT: vpshufb %xmm6, %xmm3, %xmm3
7835 ; AVX2-FCP-NEXT: vpermd %ymm11, %ymm5, %ymm11
7836 ; AVX2-FCP-NEXT: vpshufb %ymm7, %ymm11, %ymm11
7837 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2],ymm11[3,4,5,6,7]
7838 ; AVX2-FCP-NEXT: vpermd {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm9 # 32-byte Folded Reload
7839 ; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm9, %ymm9
7840 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4,5],ymm9[6,7]
7841 ; AVX2-FCP-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12, %ymm9 # 32-byte Folded Reload
7842 ; AVX2-FCP-NEXT: # ymm9 = ymm12[0,1],mem[2],ymm12[3],mem[4],ymm12[5,6],mem[7],ymm12[8,9],mem[10],ymm12[11],mem[12],ymm12[13,14],mem[15]
7843 ; AVX2-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
7844 ; AVX2-FCP-NEXT: vpblendw $173, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm11 # 32-byte Folded Reload
7845 ; AVX2-FCP-NEXT: # ymm11 = mem[0],ymm11[1],mem[2,3],ymm11[4],mem[5],ymm11[6],mem[7,8],ymm11[9],mem[10,11],ymm11[12],mem[13],ymm11[14],mem[15]
7846 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm11, %xmm12
7847 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm11 = xmm12[0,1,2],xmm11[3,4],xmm12[5,6,7]
7848 ; AVX2-FCP-NEXT: vpermd %ymm9, %ymm5, %ymm9
7849 ; AVX2-FCP-NEXT: vpshufb %ymm7, %ymm9, %ymm9
7850 ; AVX2-FCP-NEXT: vpshufb %xmm6, %xmm11, %xmm11
7851 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm9 = ymm11[0,1,2],ymm9[3,4,5,6,7]
7852 ; AVX2-FCP-NEXT: vpermd %ymm8, %ymm0, %ymm8
7853 ; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm8, %ymm8
7854 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3,4,5],ymm8[6,7]
7855 ; AVX2-FCP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm9 # 32-byte Folded Reload
7856 ; AVX2-FCP-NEXT: # ymm9 = ymm10[0],mem[1],ymm10[2,3],mem[4],ymm10[5],mem[6],ymm10[7,8],mem[9],ymm10[10,11],mem[12],ymm10[13],mem[14],ymm10[15]
7857 ; AVX2-FCP-NEXT: vextracti128 $1, %ymm9, %xmm11
7858 ; AVX2-FCP-NEXT: vpblendw {{.*#+}} xmm9 = xmm11[0,1,2],xmm9[3,4],xmm11[5,6,7]
7859 ; AVX2-FCP-NEXT: vpshufb %xmm6, %xmm9, %xmm6
7860 ; AVX2-FCP-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm9 # 32-byte Folded Reload
7861 ; AVX2-FCP-NEXT: # ymm9 = ymm13[0,1],mem[2],ymm13[3],mem[4],ymm13[5,6],mem[7],ymm13[8,9],mem[10],ymm13[11],mem[12],ymm13[13,14],mem[15]
7862 ; AVX2-FCP-NEXT: vpermd %ymm9, %ymm5, %ymm5
7863 ; AVX2-FCP-NEXT: vpshufb %ymm7, %ymm5, %ymm5
7864 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm6[0,1,2],ymm5[3,4,5,6,7]
7865 ; AVX2-FCP-NEXT: vpermd %ymm14, %ymm0, %ymm0
7866 ; AVX2-FCP-NEXT: vpshufb %ymm4, %ymm0, %ymm0
7867 ; AVX2-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0,1,2,3,4,5],ymm0[6,7]
7868 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7869 ; AVX2-FCP-NEXT: vmovaps %ymm4, 64(%rsi)
7870 ; AVX2-FCP-NEXT: vmovups (%rsp), %ymm4 # 32-byte Reload
7871 ; AVX2-FCP-NEXT: vmovaps %ymm4, (%rsi)
7872 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7873 ; AVX2-FCP-NEXT: vmovaps %ymm4, 96(%rsi)
7874 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7875 ; AVX2-FCP-NEXT: vmovaps %ymm4, 32(%rsi)
7876 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7877 ; AVX2-FCP-NEXT: vmovaps %ymm4, 64(%rdx)
7878 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7879 ; AVX2-FCP-NEXT: vmovaps %ymm4, (%rdx)
7880 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7881 ; AVX2-FCP-NEXT: vmovaps %ymm4, 96(%rdx)
7882 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7883 ; AVX2-FCP-NEXT: vmovaps %ymm4, 32(%rdx)
7884 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7885 ; AVX2-FCP-NEXT: vmovaps %ymm4, 64(%rcx)
7886 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7887 ; AVX2-FCP-NEXT: vmovaps %ymm4, (%rcx)
7888 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7889 ; AVX2-FCP-NEXT: vmovaps %ymm4, 96(%rcx)
7890 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
7891 ; AVX2-FCP-NEXT: vmovaps %ymm4, 32(%rcx)
7892 ; AVX2-FCP-NEXT: vmovdqa %ymm1, 64(%r8)
7893 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7894 ; AVX2-FCP-NEXT: vmovaps %ymm1, (%r8)
7895 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7896 ; AVX2-FCP-NEXT: vmovaps %ymm1, 96(%r8)
7897 ; AVX2-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
7898 ; AVX2-FCP-NEXT: vmovaps %ymm1, 32(%r8)
7899 ; AVX2-FCP-NEXT: vmovdqa %ymm0, 64(%r9)
7900 ; AVX2-FCP-NEXT: vmovdqa %ymm8, (%r9)
7901 ; AVX2-FCP-NEXT: vmovdqa %ymm3, 96(%r9)
7902 ; AVX2-FCP-NEXT: vmovdqa %ymm2, 32(%r9)
7903 ; AVX2-FCP-NEXT: addq $1000, %rsp # imm = 0x3E8
7904 ; AVX2-FCP-NEXT: vzeroupper
7905 ; AVX2-FCP-NEXT: retq
7906 ;
7907 ; AVX512-LABEL: load_i16_stride5_vf64:
7908 ; AVX512: # %bb.0:
7909 ; AVX512-NEXT: subq $552, %rsp # imm = 0x228
7910 ; AVX512-NEXT: vmovdqa 384(%rdi), %ymm6
7911 ; AVX512-NEXT: vmovdqa 416(%rdi), %ymm11
7912 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm6[0],ymm11[1,2],ymm6[3],ymm11[4],ymm6[5],ymm11[6,7],ymm6[8],ymm11[9,10],ymm6[11],ymm11[12],ymm6[13],ymm11[14,15]
7913 ; AVX512-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7914 ; AVX512-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7915 ; AVX512-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
7916 ; AVX512-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6],ymm1[7]
7917 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,6,7,16,17,26,27,20,21,30,31,24,25,128,128,128,128,128,128]
7918 ; AVX512-NEXT: vpshufb %ymm0, %ymm1, %ymm2
7919 ; AVX512-NEXT: vmovdqa 352(%rdi), %ymm8
7920 ; AVX512-NEXT: vmovdqa 320(%rdi), %ymm7
7921 ; AVX512-NEXT: vpblendw {{.*#+}} ymm1 = ymm7[0],ymm8[1],ymm7[2,3],ymm8[4],ymm7[5],ymm8[6],ymm7[7,8],ymm8[9],ymm7[10,11],ymm8[12],ymm7[13],ymm8[14],ymm7[15]
7922 ; AVX512-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7923 ; AVX512-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7924 ; AVX512-NEXT: vextracti128 $1, %ymm1, %xmm3
7925 ; AVX512-NEXT: vpblendw {{.*#+}} xmm3 = xmm1[0],xmm3[1,2,3],xmm1[4,5],xmm3[6,7]
7926 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,10,11,4,5,14,15,8,9,2,3,12,13,128,128,128,128,128,128,128,128,128,128,128,128,u,u,u,u,u,u]
7927 ; AVX512-NEXT: vpshufb %ymm1, %ymm3, %ymm3
7928 ; AVX512-NEXT: vporq %ymm2, %ymm3, %ymm19
7929 ; AVX512-NEXT: vmovdqa 192(%rdi), %ymm15
7930 ; AVX512-NEXT: vmovdqa 224(%rdi), %ymm13
7931 ; AVX512-NEXT: vmovdqa 176(%rdi), %xmm12
7932 ; AVX512-NEXT: vmovdqa 160(%rdi), %xmm14
7933 ; AVX512-NEXT: vmovdqa (%rdi), %ymm4
7934 ; AVX512-NEXT: vmovdqa 32(%rdi), %ymm5
7935 ; AVX512-NEXT: vmovdqa 64(%rdi), %ymm10
7936 ; AVX512-NEXT: vmovdqa 96(%rdi), %ymm9
7937 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm10[0],ymm9[1,2],ymm10[3],ymm9[4],ymm10[5],ymm9[6,7],ymm10[8],ymm9[9,10],ymm10[11],ymm9[12],ymm10[13],ymm9[14,15]
7938 ; AVX512-NEXT: vpermq {{.*#+}} ymm3 = ymm2[2,3,0,1]
7939 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm3[5],ymm2[6],ymm3[7]
7940 ; AVX512-NEXT: vpshufb %ymm0, %ymm2, %ymm0
7941 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0],ymm5[1],ymm4[2,3],ymm5[4],ymm4[5],ymm5[6],ymm4[7,8],ymm5[9],ymm4[10,11],ymm5[12],ymm4[13],ymm5[14],ymm4[15]
7942 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm3
7943 ; AVX512-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3],xmm2[4,5],xmm3[6,7]
7944 ; AVX512-NEXT: vpshufb %ymm1, %ymm2, %ymm1
7945 ; AVX512-NEXT: vpor %ymm0, %ymm1, %ymm0
7946 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7947 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm11[0],ymm6[1],ymm11[2,3],ymm6[4],ymm11[5],ymm6[6],ymm11[7,8],ymm6[9],ymm11[10,11],ymm6[12],ymm11[13],ymm6[14],ymm11[15]
7948 ; AVX512-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
7949 ; AVX512-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7]
7950 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,8,9,18,19,28,29,22,23,16,17,26,27,128,128,128,128,128,128]
7951 ; AVX512-NEXT: vpshufb %ymm0, %ymm1, %ymm2
7952 ; AVX512-NEXT: vpblendw {{.*#+}} ymm1 = ymm8[0],ymm7[1],ymm8[2],ymm7[3],ymm8[4,5],ymm7[6],ymm8[7,8],ymm7[9],ymm8[10],ymm7[11],ymm8[12,13],ymm7[14],ymm8[15]
7953 ; AVX512-NEXT: vextracti128 $1, %ymm1, %xmm3
7954 ; AVX512-NEXT: vpblendw {{.*#+}} xmm3 = xmm1[0,1],xmm3[2,3],xmm1[4,5,6],xmm3[7]
7955 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm1 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,128,128,128,128,128,128,128,128,128,128,128,128,u,u,u,u,u,u]
7956 ; AVX512-NEXT: vpshufb %ymm1, %ymm3, %ymm3
7957 ; AVX512-NEXT: vpor %ymm2, %ymm3, %ymm2
7958 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
7959 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm9[0],ymm10[1],ymm9[2,3],ymm10[4],ymm9[5],ymm10[6],ymm9[7,8],ymm10[9],ymm9[10,11],ymm10[12],ymm9[13],ymm10[14],ymm9[15]
7960 ; AVX512-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7961 ; AVX512-NEXT: vmovdqu %ymm9, (%rsp) # 32-byte Spill
7962 ; AVX512-NEXT: vpermq {{.*#+}} ymm3 = ymm2[2,3,0,1]
7963 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm3[5],ymm2[6,7]
7964 ; AVX512-NEXT: vpshufb %ymm0, %ymm2, %ymm0
7965 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0],ymm4[1],ymm5[2],ymm4[3],ymm5[4,5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10],ymm4[11],ymm5[12,13],ymm4[14],ymm5[15]
7966 ; AVX512-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7967 ; AVX512-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
7968 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm3
7969 ; AVX512-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm3[2,3],xmm2[4,5,6],xmm3[7]
7970 ; AVX512-NEXT: vpshufb %ymm1, %ymm2, %ymm1
7971 ; AVX512-NEXT: vporq %ymm0, %ymm1, %ymm28
7972 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm6 = [6,7,0,1,10,11,4,5,14,15,14,15,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
7973 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm13[0,1],ymm15[2],ymm13[3],ymm15[4],ymm13[5,6],ymm15[7],ymm13[8,9],ymm15[10],ymm13[11],ymm15[12],ymm13[13,14],ymm15[15]
7974 ; AVX512-NEXT: vmovdqa64 %ymm15, %ymm18
7975 ; AVX512-NEXT: vmovdqa64 %ymm13, %ymm24
7976 ; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
7977 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4,5,6,7]
7978 ; AVX512-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[6,7,6,7,8,9,4,5,14,15,8,9,2,3,12,13,22,23,22,23,24,25,20,21,30,31,24,25,18,19,28,29]
7979 ; AVX512-NEXT: vpblendd {{.*#+}} xmm1 = xmm12[0],xmm14[1],xmm12[2,3]
7980 ; AVX512-NEXT: vmovdqa64 %xmm12, %xmm16
7981 ; AVX512-NEXT: vmovdqa64 %xmm14, %xmm30
7982 ; AVX512-NEXT: vpshufb %xmm6, %xmm1, %xmm1
7983 ; AVX512-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm0[3,4,5,6,7]
7984 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
7985 ; AVX512-NEXT: vmovdqa 144(%rdi), %xmm11
7986 ; AVX512-NEXT: vmovdqa 128(%rdi), %xmm7
7987 ; AVX512-NEXT: vpblendd {{.*#+}} xmm1 = xmm11[0,1],xmm7[2],xmm11[3]
7988 ; AVX512-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,7,8,9,8,9,2,3,12,13]
7989 ; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
7990 ; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
7991 ; AVX512-NEXT: vpblendw {{.*#+}} ymm1 = ymm9[0],ymm10[1],ymm9[2],ymm10[3],ymm9[4,5],ymm10[6],ymm9[7,8],ymm10[9],ymm9[10],ymm10[11],ymm9[12,13],ymm10[14],ymm9[15]
7992 ; AVX512-NEXT: vpermq {{.*#+}} ymm2 = ymm1[2,3,0,1]
7993 ; AVX512-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
7994 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0],ymm5[1,2],ymm4[3],ymm5[4],ymm4[5],ymm5[6,7],ymm4[8],ymm5[9,10],ymm4[11],ymm5[12],ymm4[13],ymm5[14,15]
7995 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm3
7996 ; AVX512-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2],xmm2[3]
7997 ; AVX512-NEXT: vpshufb %ymm6, %ymm1, %ymm1
7998 ; AVX512-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[6,7,0,1,10,11,4,5,14,15,8,9,12,13,14,15]
7999 ; AVX512-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3,4,5,6,7]
8000 ; AVX512-NEXT: vmovdqa 256(%rdi), %ymm12
8001 ; AVX512-NEXT: vmovdqa 288(%rdi), %ymm15
8002 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm12[0],ymm15[1],ymm12[2,3],ymm15[4],ymm12[5],ymm15[6],ymm12[7,8],ymm15[9],ymm12[10,11],ymm15[12],ymm12[13],ymm15[14],ymm12[15]
8003 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm3
8004 ; AVX512-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3],xmm2[4,5],xmm3[6,7]
8005 ; AVX512-NEXT: vpternlogq {{.*#+}} zmm1 = zmm0 ^ (mem & (zmm1 ^ zmm0))
8006 ; AVX512-NEXT: vpshufb {{.*#+}} xmm0 = xmm2[0,1,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
8007 ; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8008 ; AVX512-NEXT: vextracti64x4 $1, %zmm1, %ymm2
8009 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0],ymm0[1,2,3,4,5,6,7],ymm2[8],ymm0[9,10,11,12,13,14,15]
8010 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
8011 ; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
8012 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8013 ; AVX512-NEXT: vmovdqa 464(%rdi), %xmm8
8014 ; AVX512-NEXT: vpshufd {{.*#+}} xmm0 = xmm8[3,1,2,3]
8015 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,2,1,4,5,6,7]
8016 ; AVX512-NEXT: vmovdqa 448(%rdi), %xmm3
8017 ; AVX512-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[0,2,2,3]
8018 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
8019 ; AVX512-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
8020 ; AVX512-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8021 ; AVX512-NEXT: vmovdqa %xmm11, %xmm6
8022 ; AVX512-NEXT: vpshufd {{.*#+}} xmm0 = xmm11[3,1,2,3]
8023 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,2,1,4,5,6,7]
8024 ; AVX512-NEXT: vmovdqa %xmm7, %xmm9
8025 ; AVX512-NEXT: vpshufd {{.*#+}} xmm1 = xmm7[0,2,2,3]
8026 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
8027 ; AVX512-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
8028 ; AVX512-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8029 ; AVX512-NEXT: vpblendd {{.*#+}} xmm0 = xmm3[0],xmm8[1],xmm3[2,3]
8030 ; AVX512-NEXT: vmovdqa64 %xmm3, %xmm22
8031 ; AVX512-NEXT: vmovdqa {{.*#+}} xmm1 = [6,7,2,3,4,5,6,7,6,7,2,3,12,13,6,7]
8032 ; AVX512-NEXT: vpshufb %xmm1, %xmm0, %xmm0
8033 ; AVX512-NEXT: vmovdqa64 %xmm1, %xmm23
8034 ; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm10
8035 ; AVX512-NEXT: vmovdqa 576(%rdi), %ymm1
8036 ; AVX512-NEXT: vmovdqa 608(%rdi), %ymm2
8037 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0,1],ymm1[2],ymm2[3],ymm1[4],ymm2[5,6],ymm1[7],ymm2[8,9],ymm1[10],ymm2[11],ymm1[12],ymm2[13,14],ymm1[15]
8038 ; AVX512-NEXT: vmovdqa64 %ymm2, %ymm20
8039 ; AVX512-NEXT: vmovdqa64 %ymm1, %ymm17
8040 ; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
8041 ; AVX512-NEXT: vpblendw {{.*#+}} xmm11 = xmm0[0,1,2],xmm1[3,4],xmm0[5,6,7]
8042 ; AVX512-NEXT: vmovdqa 512(%rdi), %ymm5
8043 ; AVX512-NEXT: vmovdqa 544(%rdi), %ymm13
8044 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm13[0],ymm5[1],ymm13[2,3],ymm5[4],ymm13[5],ymm5[6],ymm13[7,8],ymm5[9],ymm13[10,11],ymm5[12],ymm13[13],ymm5[14],ymm13[15]
8045 ; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
8046 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm1[0],ymm0[1,2],ymm1[3],ymm0[4,5,6,7]
8047 ; AVX512-NEXT: vmovdqa64 496(%rdi), %xmm21
8048 ; AVX512-NEXT: vpshufd {{.*#+}} xmm1 = xmm21[3,1,2,3]
8049 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[2,1,2,3,4,5,6,7]
8050 ; AVX512-NEXT: vmovdqa 480(%rdi), %xmm7
8051 ; AVX512-NEXT: vpshufd {{.*#+}} xmm14 = xmm7[0,2,2,3]
8052 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm14 = xmm14[0,3,2,3,4,5,6,7]
8053 ; AVX512-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm14[0],xmm0[0],xmm14[1],xmm0[1]
8054 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm14 = [u,u,u,u,u,u,u,u,8,9,2,3,12,13,6,7,16,17,26,27,26,27,30,31,24,25,18,19,28,29,22,23]
8055 ; AVX512-NEXT: vpshufb %ymm14, %ymm2, %ymm2
8056 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3,4,5,6,7]
8057 ; AVX512-NEXT: vmovdqa {{.*#+}} xmm2 = [12,13,14,15,4,5,14,15,8,9,2,3,12,13,6,7]
8058 ; AVX512-NEXT: vpshufb %xmm2, %xmm11, %xmm11
8059 ; AVX512-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
8060 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm11[5,6,7]
8061 ; AVX512-NEXT: vmovdqa64 {{.*#+}} zmm27 = [65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,0,0,0,0,0,0,0,65535,65535,65535,65535,65535,65535]
8062 ; AVX512-NEXT: vpternlogq {{.*#+}} zmm10 = zmm10 ^ (zmm27 & (zmm10 ^ zmm19))
8063 ; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm10, %zmm0
8064 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8065 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm15[0,1],ymm12[2],ymm15[3],ymm12[4],ymm15[5,6],ymm12[7],ymm15[8,9],ymm12[10],ymm15[11],ymm12[12],ymm15[13,14],ymm12[15]
8066 ; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm10
8067 ; AVX512-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm10[3,4],xmm0[5,6,7]
8068 ; AVX512-NEXT: vpshufb %xmm2, %xmm0, %xmm0
8069 ; AVX512-NEXT: vmovdqa64 %ymm18, %ymm3
8070 ; AVX512-NEXT: vmovdqa64 %ymm24, %ymm1
8071 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm1[0],ymm3[1],ymm1[2,3],ymm3[4],ymm1[5],ymm3[6],ymm1[7,8],ymm3[9],ymm1[10,11],ymm3[12],ymm1[13],ymm3[14],ymm1[15]
8072 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm10
8073 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm10[0],ymm2[1,2],ymm10[3],ymm2[4,5,6,7]
8074 ; AVX512-NEXT: vpshufb %ymm14, %ymm2, %ymm2
8075 ; AVX512-NEXT: vpshufd {{.*#+}} xmm10 = xmm16[3,1,2,3]
8076 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[2,1,2,3,4,5,6,7]
8077 ; AVX512-NEXT: vmovdqa64 %xmm30, %xmm24
8078 ; AVX512-NEXT: vpshufd {{.*#+}} xmm11 = xmm30[0,2,2,3]
8079 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm11 = xmm11[0,3,2,3,4,5,6,7]
8080 ; AVX512-NEXT: vpunpckldq {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1]
8081 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm10[0,1],ymm2[2,3,4,5,6,7]
8082 ; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8083 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4],ymm0[5,6,7]
8084 ; AVX512-NEXT: vpblendd {{.*#+}} xmm2 = xmm9[0],xmm6[1],xmm9[2,3]
8085 ; AVX512-NEXT: vmovdqa64 %xmm9, %xmm25
8086 ; AVX512-NEXT: vmovdqa64 %xmm23, %xmm4
8087 ; AVX512-NEXT: vpshufb %xmm4, %xmm2, %xmm2
8088 ; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
8089 ; AVX512-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm27, %zmm2 # 64-byte Folded Reload
8090 ; AVX512-NEXT: # zmm2 = zmm2 ^ (zmm27 & (zmm2 ^ mem))
8091 ; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
8092 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8093 ; AVX512-NEXT: vmovdqa64 %xmm22, %xmm4
8094 ; AVX512-NEXT: vpblendd {{.*#+}} xmm2 = xmm4[0,1],xmm8[2],xmm4[3]
8095 ; AVX512-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,6,7,8,9,4,5,14,15,8,9]
8096 ; AVX512-NEXT: vpshufb %xmm0, %xmm2, %xmm2
8097 ; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
8098 ; AVX512-NEXT: vpblendw {{.*#+}} ymm10 = ymm5[0],ymm13[1],ymm5[2,3],ymm13[4],ymm5[5],ymm13[6],ymm5[7,8],ymm13[9],ymm5[10,11],ymm13[12],ymm5[13],ymm13[14],ymm5[15]
8099 ; AVX512-NEXT: vmovdqa64 %ymm13, %ymm26
8100 ; AVX512-NEXT: vmovdqa64 %ymm5, %ymm22
8101 ; AVX512-NEXT: vextracti128 $1, %ymm10, %xmm11
8102 ; AVX512-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0],ymm11[1],ymm10[2],ymm11[3],ymm10[4,5,6,7]
8103 ; AVX512-NEXT: vpshufd {{.*#+}} xmm11 = xmm7[0,3,2,3]
8104 ; AVX512-NEXT: vmovdqa64 %xmm7, %xmm30
8105 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm11 = xmm11[1,2,2,3,4,5,6,7]
8106 ; AVX512-NEXT: vpsrlq $48, %xmm21, %xmm13
8107 ; AVX512-NEXT: vpunpckldq {{.*#+}} xmm11 = xmm11[0],xmm13[0],xmm11[1],xmm13[1]
8108 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm13 = [2,3,12,13,0,1,0,1,10,11,4,5,14,15,8,9,18,19,28,29,16,17,16,17,26,27,20,21,30,31,24,25]
8109 ; AVX512-NEXT: vpshufb %ymm13, %ymm10, %ymm10
8110 ; AVX512-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2],xmm10[3,4,5,6,7]
8111 ; AVX512-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0,1,2,3],ymm10[4,5,6,7]
8112 ; AVX512-NEXT: vmovdqa64 %ymm20, %ymm9
8113 ; AVX512-NEXT: vmovdqa64 %ymm17, %ymm7
8114 ; AVX512-NEXT: vpblendw {{.*#+}} ymm11 = ymm7[0],ymm9[1,2],ymm7[3],ymm9[4],ymm7[5],ymm9[6,7],ymm7[8],ymm9[9,10],ymm7[11],ymm9[12],ymm7[13],ymm9[14,15]
8115 ; AVX512-NEXT: vextracti128 $1, %ymm11, %xmm14
8116 ; AVX512-NEXT: vpblendd {{.*#+}} xmm11 = xmm14[0],xmm11[1],xmm14[2],xmm11[3]
8117 ; AVX512-NEXT: vmovdqa {{.*#+}} xmm14 = [0,1,2,3,6,7,0,1,10,11,4,5,14,15,8,9]
8118 ; AVX512-NEXT: vpshufb %xmm14, %xmm11, %xmm11
8119 ; AVX512-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
8120 ; AVX512-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3,4],ymm11[5,6,7]
8121 ; AVX512-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm27, %zmm2 # 64-byte Folded Reload
8122 ; AVX512-NEXT: # zmm2 = zmm2 ^ (zmm27 & (zmm2 ^ mem))
8123 ; AVX512-NEXT: vinserti64x4 $1, %ymm10, %zmm2, %zmm2
8124 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8125 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm1[1],ymm3[2,3],ymm1[4],ymm3[5],ymm1[6],ymm3[7,8],ymm1[9],ymm3[10,11],ymm1[12],ymm3[13],ymm1[14],ymm3[15]
8126 ; AVX512-NEXT: vmovdqa64 %ymm1, %ymm17
8127 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm10
8128 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm10[1],ymm2[2],ymm10[3],ymm2[4,5,6,7]
8129 ; AVX512-NEXT: vpshufb %ymm13, %ymm2, %ymm2
8130 ; AVX512-NEXT: vpshufd {{.*#+}} xmm10 = xmm24[0,3,2,3]
8131 ; AVX512-NEXT: vmovdqa64 %xmm24, %xmm20
8132 ; AVX512-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[1,2,2,3,4,5,6,7]
8133 ; AVX512-NEXT: vmovdqa64 %xmm16, %xmm1
8134 ; AVX512-NEXT: vpsrlq $48, %xmm16, %xmm11
8135 ; AVX512-NEXT: vpunpckldq {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1]
8136 ; AVX512-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2],xmm2[3,4,5,6,7]
8137 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm10[0,1,2,3],ymm2[4,5,6,7]
8138 ; AVX512-NEXT: vpblendw {{.*#+}} ymm10 = ymm12[0],ymm15[1,2],ymm12[3],ymm15[4],ymm12[5],ymm15[6,7],ymm12[8],ymm15[9,10],ymm12[11],ymm15[12],ymm12[13],ymm15[14,15]
8139 ; AVX512-NEXT: vmovdqa64 %ymm12, %ymm19
8140 ; AVX512-NEXT: vextracti128 $1, %ymm10, %xmm11
8141 ; AVX512-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0],xmm10[1],xmm11[2],xmm10[3]
8142 ; AVX512-NEXT: vpshufb %xmm14, %xmm10, %xmm10
8143 ; AVX512-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
8144 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm10[5,6,7]
8145 ; AVX512-NEXT: vmovdqa64 %xmm25, %xmm5
8146 ; AVX512-NEXT: vpblendd {{.*#+}} xmm10 = xmm5[0,1],xmm6[2],xmm5[3]
8147 ; AVX512-NEXT: vmovdqa64 %xmm6, %xmm23
8148 ; AVX512-NEXT: vpshufb %xmm0, %xmm10, %xmm0
8149 ; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8150 ; AVX512-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm27 & (zmm0 ^ zmm28))
8151 ; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
8152 ; AVX512-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8153 ; AVX512-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
8154 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm18 # 32-byte Reload
8155 ; AVX512-NEXT: vmovdqa64 %ymm18, %ymm0
8156 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm12[1],ymm0[2,3],ymm12[4],ymm0[5],ymm12[6],ymm0[7,8],ymm12[9],ymm0[10,11],ymm12[12],ymm0[13],ymm12[14],ymm0[15]
8157 ; AVX512-NEXT: vpermq {{.*#+}} ymm2 = ymm0[2,3,0,1]
8158 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm0[0,1,2,3,4,5],ymm2[6],ymm0[7]
8159 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm24 # 32-byte Reload
8160 ; AVX512-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Reload
8161 ; AVX512-NEXT: vmovdqa64 %ymm24, %ymm0
8162 ; AVX512-NEXT: vmovdqa64 %ymm16, %ymm5
8163 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm5[2],ymm0[3],ymm5[4],ymm0[5,6],ymm5[7],ymm0[8,9],ymm5[10],ymm0[11],ymm5[12],ymm0[13,14],ymm5[15]
8164 ; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm10
8165 ; AVX512-NEXT: vpblendw {{.*#+}} xmm10 = xmm0[0,1,2],xmm10[3,4],xmm0[5,6,7]
8166 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm0 = [4,5,14,15,8,9,2,3,12,13,10,11,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
8167 ; AVX512-NEXT: vpshufb %ymm0, %ymm2, %ymm2
8168 ; AVX512-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[4,5,14,15,8,9,2,3,12,13,6,7,12,13,14,15]
8169 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm10[0,1,2],ymm2[3,4,5,6,7]
8170 ; AVX512-NEXT: vpblendd {{.*#+}} xmm13 = xmm8[0],xmm4[1],xmm8[2,3]
8171 ; AVX512-NEXT: vmovdqa64 %xmm4, %xmm29
8172 ; AVX512-NEXT: vmovdqa64 %xmm8, %xmm28
8173 ; AVX512-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[0,1,6,7,4,5,6,7,8,9,6,7,0,1,10,11]
8174 ; AVX512-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
8175 ; AVX512-NEXT: vpternlogq {{.*#+}} zmm13 = zmm13 ^ (zmm27 & (zmm13 ^ zmm2))
8176 ; AVX512-NEXT: vmovdqa64 %ymm26, %ymm8
8177 ; AVX512-NEXT: vmovdqa64 %ymm22, %ymm4
8178 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm8[0],ymm4[1],ymm8[2],ymm4[3],ymm8[4,5],ymm4[6],ymm8[7,8],ymm4[9],ymm8[10],ymm4[11],ymm8[12,13],ymm4[14],ymm8[15]
8179 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm14
8180 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm14[1],ymm2[2,3,4,5,6,7]
8181 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm14 = [4,5,14,15,12,13,2,3,12,13,6,7,0,1,10,11,20,21,30,31,28,29,18,19,28,29,22,23,16,17,26,27]
8182 ; AVX512-NEXT: vpshufb %ymm14, %ymm2, %ymm2
8183 ; AVX512-NEXT: vmovdqa64 %xmm30, %xmm6
8184 ; AVX512-NEXT: vpshufd {{.*#+}} xmm11 = xmm30[0,1,1,3]
8185 ; AVX512-NEXT: vpshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,7,6,7]
8186 ; AVX512-NEXT: vmovdqa64 %xmm21, %xmm5
8187 ; AVX512-NEXT: vpunpckhdq {{.*#+}} xmm11 = xmm11[2],xmm21[2],xmm11[3],xmm21[3]
8188 ; AVX512-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2],xmm2[3,4,5,6,7]
8189 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm11[0,1,2,3],ymm2[4,5,6,7]
8190 ; AVX512-NEXT: vpblendw {{.*#+}} ymm11 = ymm9[0],ymm7[1],ymm9[2,3],ymm7[4],ymm9[5],ymm7[6],ymm9[7,8],ymm7[9],ymm9[10,11],ymm7[12],ymm9[13],ymm7[14],ymm9[15]
8191 ; AVX512-NEXT: vmovdqa64 %ymm7, %ymm21
8192 ; AVX512-NEXT: vextracti128 $1, %ymm11, %xmm10
8193 ; AVX512-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2],xmm11[3,4],xmm10[5,6,7]
8194 ; AVX512-NEXT: vmovdqa {{.*#+}} xmm11 = [0,1,2,3,8,9,2,3,12,13,6,7,0,1,10,11]
8195 ; AVX512-NEXT: vpshufb %xmm11, %xmm10, %xmm10
8196 ; AVX512-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
8197 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm10[5,6,7]
8198 ; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm13, %zmm2
8199 ; AVX512-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8200 ; AVX512-NEXT: vmovdqa64 %ymm17, %ymm2
8201 ; AVX512-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7,8],ymm3[9],ymm2[10],ymm3[11],ymm2[12,13],ymm3[14],ymm2[15]
8202 ; AVX512-NEXT: vmovdqa64 %ymm3, %ymm22
8203 ; AVX512-NEXT: vmovdqa64 %ymm17, %ymm26
8204 ; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm10
8205 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm10[1],ymm2[2,3,4,5,6,7]
8206 ; AVX512-NEXT: vpshufb %ymm14, %ymm2, %ymm2
8207 ; AVX512-NEXT: vpshufd {{.*#+}} xmm10 = xmm20[0,1,1,3]
8208 ; AVX512-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,7,6,7]
8209 ; AVX512-NEXT: vpunpckhdq {{.*#+}} xmm10 = xmm10[2],xmm1[2],xmm10[3],xmm1[3]
8210 ; AVX512-NEXT: vmovdqa64 %xmm1, %xmm17
8211 ; AVX512-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2],xmm2[3,4,5,6,7]
8212 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm10[0,1,2,3],ymm2[4,5,6,7]
8213 ; AVX512-NEXT: vmovdqa64 %ymm19, %ymm1
8214 ; AVX512-NEXT: vpblendw {{.*#+}} ymm10 = ymm15[0],ymm1[1],ymm15[2,3],ymm1[4],ymm15[5],ymm1[6],ymm15[7,8],ymm1[9],ymm15[10,11],ymm1[12],ymm15[13],ymm1[14],ymm15[15]
8215 ; AVX512-NEXT: vmovdqa64 %ymm15, %ymm31
8216 ; AVX512-NEXT: vextracti128 $1, %ymm10, %xmm13
8217 ; AVX512-NEXT: vpblendw {{.*#+}} xmm10 = xmm13[0,1,2],xmm10[3,4],xmm13[5,6,7]
8218 ; AVX512-NEXT: vpshufb %xmm11, %xmm10, %xmm10
8219 ; AVX512-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
8220 ; AVX512-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm10[5,6,7]
8221 ; AVX512-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8222 ; AVX512-NEXT: vpblendw $82, (%rsp), %ymm1, %ymm10 # 32-byte Folded Reload
8223 ; AVX512-NEXT: # ymm10 = ymm1[0],mem[1],ymm1[2,3],mem[4],ymm1[5],mem[6],ymm1[7,8],mem[9],ymm1[10,11],mem[12],ymm1[13],mem[14],ymm1[15]
8224 ; AVX512-NEXT: vpermq {{.*#+}} ymm11 = ymm10[2,3,0,1]
8225 ; AVX512-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],ymm11[6],ymm10[7]
8226 ; AVX512-NEXT: vpshufb %ymm0, %ymm10, %ymm0
8227 ; AVX512-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
8228 ; AVX512-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm10 # 32-byte Folded Reload
8229 ; AVX512-NEXT: # ymm10 = ymm1[0,1],mem[2],ymm1[3],mem[4],ymm1[5,6],mem[7],ymm1[8,9],mem[10],ymm1[11],mem[12],ymm1[13,14],mem[15]
8230 ; AVX512-NEXT: vextracti128 $1, %ymm10, %xmm11
8231 ; AVX512-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2],xmm11[3,4],xmm10[5,6,7]
8232 ; AVX512-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[4,5,14,15,8,9,2,3,12,13,6,7,12,13,14,15]
8233 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm10[0,1,2],ymm0[3,4,5,6,7]
8234 ; AVX512-NEXT: vmovdqa64 %xmm25, %xmm1
8235 ; AVX512-NEXT: vmovdqa64 %xmm23, %xmm3
8236 ; AVX512-NEXT: vpblendd {{.*#+}} xmm10 = xmm3[0],xmm1[1],xmm3[2,3]
8237 ; AVX512-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[0,1,6,7,4,5,6,7,8,9,6,7,0,1,10,11]
8238 ; AVX512-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
8239 ; AVX512-NEXT: vpternlogq {{.*#+}} zmm10 = zmm10 ^ (zmm27 & (zmm10 ^ zmm0))
8240 ; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm10, %zmm23
8241 ; AVX512-NEXT: vmovdqa64 %ymm18, %ymm13
8242 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm12[0],ymm13[1],ymm12[2],ymm13[3],ymm12[4,5],ymm13[6],ymm12[7,8],ymm13[9],ymm12[10],ymm13[11],ymm12[12,13],ymm13[14],ymm12[15]
8243 ; AVX512-NEXT: vpermq {{.*#+}} ymm10 = ymm0[2,3,0,1]
8244 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm10[4],ymm0[5],ymm10[6],ymm0[7]
8245 ; AVX512-NEXT: vmovdqa64 %ymm16, %ymm14
8246 ; AVX512-NEXT: vmovdqa64 %ymm24, %ymm15
8247 ; AVX512-NEXT: vpblendw {{.*#+}} ymm10 = ymm14[0],ymm15[1,2],ymm14[3],ymm15[4],ymm14[5],ymm15[6,7],ymm14[8],ymm15[9,10],ymm14[11],ymm15[12],ymm14[13],ymm15[14,15]
8248 ; AVX512-NEXT: vextracti128 $1, %ymm10, %xmm11
8249 ; AVX512-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0],xmm10[1],xmm11[2],xmm10[3]
8250 ; AVX512-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[6,7,0,1,10,11,4,5,14,15,8,9,12,13,14,15]
8251 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm1 = [6,7,0,1,10,11,4,5,14,15,14,15,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
8252 ; AVX512-NEXT: vpshufb %ymm1, %ymm0, %ymm0
8253 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm10[0,1,2],ymm0[3,4,5,6,7]
8254 ; AVX512-NEXT: vmovdqa %ymm4, %ymm3
8255 ; AVX512-NEXT: vmovdqa %ymm8, %ymm4
8256 ; AVX512-NEXT: vpblendw {{.*#+}} ymm10 = ymm8[0,1],ymm3[2],ymm8[3],ymm3[4],ymm8[5,6],ymm3[7],ymm8[8,9],ymm3[10],ymm8[11],ymm3[12],ymm8[13,14],ymm3[15]
8257 ; AVX512-NEXT: vextracti128 $1, %ymm10, %xmm11
8258 ; AVX512-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1],ymm11[2],ymm10[3,4,5,6,7]
8259 ; AVX512-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[6,7,6,7,8,9,4,5,14,15,8,9,2,3,12,13,22,23,22,23,24,25,20,21,30,31,24,25,18,19,28,29]
8260 ; AVX512-NEXT: vpblendd {{.*#+}} xmm11 = xmm5[0],xmm6[1],xmm5[2,3]
8261 ; AVX512-NEXT: vmovdqa64 %xmm30, %xmm7
8262 ; AVX512-NEXT: vmovdqa %xmm5, %xmm6
8263 ; AVX512-NEXT: vpshufb %xmm1, %xmm11, %xmm11
8264 ; AVX512-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2],xmm10[3,4,5,6,7]
8265 ; AVX512-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0,1,2,3],ymm10[4,5,6,7]
8266 ; AVX512-NEXT: vmovdqa64 %xmm29, %xmm1
8267 ; AVX512-NEXT: vmovdqa64 %xmm28, %xmm2
8268 ; AVX512-NEXT: vpblendd {{.*#+}} xmm8 = xmm2[0,1],xmm1[2],xmm2[3]
8269 ; AVX512-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,6,7,8,9,8,9,2,3,12,13]
8270 ; AVX512-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
8271 ; AVX512-NEXT: vinserti64x4 $1, %ymm10, %zmm8, %zmm8
8272 ; AVX512-NEXT: vpternlogq {{.*#+}} zmm8 = zmm8 ^ (mem & (zmm8 ^ zmm0))
8273 ; AVX512-NEXT: vmovdqa %ymm9, %ymm2
8274 ; AVX512-NEXT: vmovdqa64 %ymm21, %ymm10
8275 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm10[0],ymm9[1],ymm10[2,3],ymm9[4],ymm10[5],ymm9[6],ymm10[7,8],ymm9[9],ymm10[10,11],ymm9[12],ymm10[13],ymm9[14],ymm10[15]
8276 ; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm9
8277 ; AVX512-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm9[1,2,3],xmm0[4,5],xmm9[6,7]
8278 ; AVX512-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
8279 ; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8280 ; AVX512-NEXT: vextracti64x4 $1, %zmm8, %ymm9
8281 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0],ymm0[1,2,3,4,5,6,7],ymm9[8],ymm0[9,10,11,12,13,14,15]
8282 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
8283 ; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm25
8284 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm12[0,1],ymm13[2],ymm12[3],ymm13[4],ymm12[5,6],ymm13[7],ymm12[8,9],ymm13[10],ymm12[11],ymm13[12],ymm12[13,14],ymm13[15]
8285 ; AVX512-NEXT: vpermq {{.*#+}} ymm8 = ymm0[2,3,0,1]
8286 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm8[4],ymm0[5,6],ymm8[7]
8287 ; AVX512-NEXT: vpblendw {{.*#+}} ymm8 = ymm15[0],ymm14[1],ymm15[2,3],ymm14[4],ymm15[5],ymm14[6],ymm15[7,8],ymm14[9],ymm15[10,11],ymm14[12],ymm15[13],ymm14[14],ymm15[15]
8288 ; AVX512-NEXT: vextracti128 $1, %ymm8, %xmm9
8289 ; AVX512-NEXT: vpblendw {{.*#+}} xmm11 = xmm9[0,1,2],xmm8[3,4],xmm9[5,6,7]
8290 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm9 = [8,9,2,3,12,13,6,7,4,5,6,7,4,5,14,15,24,25,18,19,28,29,22,23,20,21,22,23,20,21,30,31]
8291 ; AVX512-NEXT: vpshufb %ymm9, %ymm0, %ymm0
8292 ; AVX512-NEXT: vmovdqa {{.*#+}} xmm8 = [8,9,2,3,12,13,6,7,0,1,10,11,0,1,6,7]
8293 ; AVX512-NEXT: vpshufb %xmm8, %xmm11, %xmm11
8294 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm11[0,1,2],ymm0[3,4,5,6,7]
8295 ; AVX512-NEXT: vpblendw {{.*#+}} ymm4 = ymm3[0],ymm4[1,2],ymm3[3],ymm4[4],ymm3[5],ymm4[6,7],ymm3[8],ymm4[9,10],ymm3[11],ymm4[12],ymm3[13],ymm4[14,15]
8296 ; AVX512-NEXT: vextracti128 $1, %ymm4, %xmm5
8297 ; AVX512-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2],ymm4[3,4,5,6,7]
8298 ; AVX512-NEXT: vpblendd {{.*#+}} xmm1 = xmm6[0,1],xmm7[2],xmm6[3]
8299 ; AVX512-NEXT: vmovdqa {{.*#+}} ymm3 = [8,9,14,15,4,5,6,7,0,1,10,11,4,5,14,15,24,25,30,31,20,21,22,23,16,17,26,27,20,21,30,31]
8300 ; AVX512-NEXT: vpshufb %ymm3, %ymm4, %ymm4
8301 ; AVX512-NEXT: vpshufb %xmm9, %xmm1, %xmm1
8302 ; AVX512-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm4[3,4,5,6,7]
8303 ; AVX512-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
8304 ; AVX512-NEXT: vinserti128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm4 # 16-byte Folded Reload
8305 ; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm4, %zmm1
8306 ; AVX512-NEXT: movb $7, %al
8307 ; AVX512-NEXT: kmovw %eax, %k1
8308 ; AVX512-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm1 {%k1}
8309 ; AVX512-NEXT: vextracti64x4 $1, %zmm1, %ymm4
8310 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0],ymm10[1],ymm2[2],ymm10[3],ymm2[4,5],ymm10[6],ymm2[7,8],ymm10[9],ymm2[10],ymm10[11],ymm2[12,13],ymm10[14],ymm2[15]
8311 ; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm5
8312 ; AVX512-NEXT: vpblendw {{.*#+}} xmm5 = xmm0[0,1],xmm5[2,3],xmm0[4,5,6],xmm5[7]
8313 ; AVX512-NEXT: vmovdqa {{.*#+}} xmm0 = [6,7,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
8314 ; AVX512-NEXT: vpshufb %xmm0, %xmm5, %xmm5
8315 ; AVX512-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8316 ; AVX512-NEXT: vpblendw {{.*#+}} ymm5 = ymm4[0],ymm5[1,2,3,4,5,6,7],ymm4[8],ymm5[9,10,11,12,13,14,15]
8317 ; AVX512-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
8318 ; AVX512-NEXT: vinserti64x4 $1, %ymm4, %zmm1, %zmm1
8319 ; AVX512-NEXT: vmovdqa64 %ymm22, %ymm2
8320 ; AVX512-NEXT: vmovdqa64 %ymm26, %ymm4
8321 ; AVX512-NEXT: vpblendw {{.*#+}} ymm4 = ymm2[0],ymm4[1,2],ymm2[3],ymm4[4],ymm2[5],ymm4[6,7],ymm2[8],ymm4[9,10],ymm2[11],ymm4[12],ymm2[13],ymm4[14,15]
8322 ; AVX512-NEXT: vextracti128 $1, %ymm4, %xmm5
8323 ; AVX512-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2],ymm4[3,4,5,6,7]
8324 ; AVX512-NEXT: vpshufb %ymm3, %ymm4, %ymm3
8325 ; AVX512-NEXT: vmovdqa64 %xmm20, %xmm2
8326 ; AVX512-NEXT: vmovdqa64 %xmm17, %xmm4
8327 ; AVX512-NEXT: vpblendd {{.*#+}} xmm4 = xmm4[0,1],xmm2[2],xmm4[3]
8328 ; AVX512-NEXT: vpshufb %xmm9, %xmm4, %xmm4
8329 ; AVX512-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2],xmm3[3,4,5,6,7]
8330 ; AVX512-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
8331 ; AVX512-NEXT: vinserti128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm4 # 16-byte Folded Reload
8332 ; AVX512-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
8333 ; AVX512-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
8334 ; AVX512-NEXT: vpblendw $107, (%rsp), %ymm2, %ymm4 # 32-byte Folded Reload
8335 ; AVX512-NEXT: # ymm4 = mem[0,1],ymm2[2],mem[3],ymm2[4],mem[5,6],ymm2[7],mem[8,9],ymm2[10],mem[11],ymm2[12],mem[13,14],ymm2[15]
8336 ; AVX512-NEXT: vpermq {{.*#+}} ymm5 = ymm4[2,3,0,1]
8337 ; AVX512-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4],ymm4[5,6],ymm5[7]
8338 ; AVX512-NEXT: vpshufb %ymm9, %ymm4, %ymm4
8339 ; AVX512-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
8340 ; AVX512-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm5 # 32-byte Folded Reload
8341 ; AVX512-NEXT: # ymm5 = ymm2[0],mem[1],ymm2[2,3],mem[4],ymm2[5],mem[6],ymm2[7,8],mem[9],ymm2[10,11],mem[12],ymm2[13],mem[14],ymm2[15]
8342 ; AVX512-NEXT: vextracti128 $1, %ymm5, %xmm6
8343 ; AVX512-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1,2],xmm5[3,4],xmm6[5,6,7]
8344 ; AVX512-NEXT: vpshufb %xmm8, %xmm5, %xmm5
8345 ; AVX512-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3,4,5,6,7]
8346 ; AVX512-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm3 {%k1}
8347 ; AVX512-NEXT: vmovdqa64 %ymm19, %ymm2
8348 ; AVX512-NEXT: vmovdqa64 %ymm31, %ymm4
8349 ; AVX512-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm2[1],ymm4[2],ymm2[3],ymm4[4,5],ymm2[6],ymm4[7,8],ymm2[9],ymm4[10],ymm2[11],ymm4[12,13],ymm2[14],ymm4[15]
8350 ; AVX512-NEXT: vextracti128 $1, %ymm4, %xmm5
8351 ; AVX512-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1],xmm5[2,3],xmm4[4,5,6],xmm5[7]
8352 ; AVX512-NEXT: vpshufb %xmm0, %xmm4, %xmm0
8353 ; AVX512-NEXT: vextracti64x4 $1, %zmm3, %ymm4
8354 ; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8355 ; AVX512-NEXT: vpblendw {{.*#+}} ymm0 = ymm4[0],ymm0[1,2,3,4,5,6,7],ymm4[8],ymm0[9,10,11,12,13,14,15]
8356 ; AVX512-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
8357 ; AVX512-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
8358 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8359 ; AVX512-NEXT: vmovaps %zmm2, (%rsi)
8360 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8361 ; AVX512-NEXT: vmovaps %zmm2, 64(%rsi)
8362 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8363 ; AVX512-NEXT: vmovaps %zmm2, 64(%rdx)
8364 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8365 ; AVX512-NEXT: vmovaps %zmm2, (%rdx)
8366 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8367 ; AVX512-NEXT: vmovaps %zmm2, 64(%rcx)
8368 ; AVX512-NEXT: vmovdqa64 %zmm23, (%rcx)
8369 ; AVX512-NEXT: vmovdqa64 %zmm25, 64(%r8)
8370 ; AVX512-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
8371 ; AVX512-NEXT: vmovaps %zmm2, (%r8)
8372 ; AVX512-NEXT: vmovdqa64 %zmm1, 64(%r9)
8373 ; AVX512-NEXT: vmovdqa64 %zmm0, (%r9)
8374 ; AVX512-NEXT: addq $552, %rsp # imm = 0x228
8375 ; AVX512-NEXT: vzeroupper
8376 ; AVX512-NEXT: retq
8377 ;
8378 ; AVX512-FCP-LABEL: load_i16_stride5_vf64:
8379 ; AVX512-FCP: # %bb.0:
8380 ; AVX512-FCP-NEXT: subq $552, %rsp # imm = 0x228
8381 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm0 = [4,5,14,15,4,5,6,7,8,9,10,11,12,13,14,15]
8382 ; AVX512-FCP-NEXT: vmovdqa 496(%rdi), %xmm1
8383 ; AVX512-FCP-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8384 ; AVX512-FCP-NEXT: vpshufb %xmm0, %xmm1, %xmm1
8385 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,10,11,8,9,10,11,8,9,10,11,12,13,14,15]
8386 ; AVX512-FCP-NEXT: vmovdqa 480(%rdi), %xmm2
8387 ; AVX512-FCP-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8388 ; AVX512-FCP-NEXT: vpshufb %xmm3, %xmm2, %xmm2
8389 ; AVX512-FCP-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
8390 ; AVX512-FCP-NEXT: vmovdqa 512(%rdi), %ymm10
8391 ; AVX512-FCP-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8392 ; AVX512-FCP-NEXT: vmovdqa 544(%rdi), %ymm11
8393 ; AVX512-FCP-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8394 ; AVX512-FCP-NEXT: vmovdqa 576(%rdi), %ymm7
8395 ; AVX512-FCP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8396 ; AVX512-FCP-NEXT: vmovdqa 608(%rdi), %ymm8
8397 ; AVX512-FCP-NEXT: vmovdqu %ymm8, (%rsp) # 32-byte Spill
8398 ; AVX512-FCP-NEXT: vmovdqa 352(%rdi), %ymm4
8399 ; AVX512-FCP-NEXT: vmovdqa 320(%rdi), %ymm5
8400 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10,11],ymm4[12],ymm5[13],ymm4[14],ymm5[15]
8401 ; AVX512-FCP-NEXT: vmovdqa64 %ymm5, %ymm28
8402 ; AVX512-FCP-NEXT: vmovdqa64 %ymm4, %ymm30
8403 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm1, %xmm4
8404 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm1[0],xmm4[1,2,3],xmm1[4,5],xmm4[6,7]
8405 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,10,11,4,5,14,15,8,9,2,3,12,13,128,128,128,128,128,128,128,128,128,128,128,128,u,u,u,u,u,u]
8406 ; AVX512-FCP-NEXT: vpshufb %ymm1, %ymm4, %ymm4
8407 ; AVX512-FCP-NEXT: vmovdqa 384(%rdi), %ymm6
8408 ; AVX512-FCP-NEXT: vmovdqa 416(%rdi), %ymm9
8409 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm5 = ymm6[0],ymm9[1,2],ymm6[3],ymm9[4],ymm6[5],ymm9[6,7],ymm6[8],ymm9[9,10],ymm6[11],ymm9[12],ymm6[13],ymm9[14,15]
8410 ; AVX512-FCP-NEXT: vmovdqa64 %ymm9, %ymm26
8411 ; AVX512-FCP-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8412 ; AVX512-FCP-NEXT: vmovdqa64 %ymm6, %ymm27
8413 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm6 = [1,3,0,2,4,6,1,3]
8414 ; AVX512-FCP-NEXT: vpermd %ymm5, %ymm6, %ymm5
8415 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm13 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,2,3,16,17,22,23,24,25,30,31,20,21,128,128,128,128,128,128]
8416 ; AVX512-FCP-NEXT: vpshufb %ymm13, %ymm5, %ymm5
8417 ; AVX512-FCP-NEXT: vpor %ymm5, %ymm4, %ymm4
8418 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm5 = ymm8[0,1],ymm7[2],ymm8[3],ymm7[4],ymm8[5,6],ymm7[7],ymm8[8,9],ymm7[10],ymm8[11],ymm7[12],ymm8[13,14],ymm7[15]
8419 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm5, %xmm7
8420 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm7[3,4],xmm5[5,6,7]
8421 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm11[0],ymm10[1],ymm11[2,3],ymm10[4],ymm11[5],ymm10[6],ymm11[7,8],ymm10[9],ymm11[10,11],ymm10[12],ymm11[13],ymm10[14],ymm11[15]
8422 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm10 = [2,4,7,1,4,6,0,0]
8423 ; AVX512-FCP-NEXT: vpermd %ymm7, %ymm10, %ymm7
8424 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm9 = [u,u,u,u,u,u,u,u,8,9,14,15,0,1,6,7,16,17,22,23,20,21,22,23,24,25,30,31,16,17,22,23]
8425 ; AVX512-FCP-NEXT: vpshufb %ymm9, %ymm7, %ymm7
8426 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm17 = [8,9,3,2,4,5,7,6]
8427 ; AVX512-FCP-NEXT: vpermt2d %ymm2, %ymm17, %ymm7
8428 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm2 = [12,13,14,15,4,5,14,15,8,9,2,3,12,13,6,7]
8429 ; AVX512-FCP-NEXT: vpshufb %xmm2, %xmm5, %xmm5
8430 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
8431 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0,1,2,3,4],ymm5[5,6,7]
8432 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm18 = [0,3,1,3,0,3,5,7]
8433 ; AVX512-FCP-NEXT: vmovdqa 448(%rdi), %ymm7
8434 ; AVX512-FCP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8435 ; AVX512-FCP-NEXT: vpermd %ymm7, %ymm18, %ymm7
8436 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm14 = [4,5,2,3,4,5,6,7,8,9,2,3,4,5,10,11,20,21,18,19,20,21,22,23,24,25,18,19,20,21,26,27]
8437 ; AVX512-FCP-NEXT: vpshufb %ymm14, %ymm7, %ymm7
8438 ; AVX512-FCP-NEXT: vmovdqa64 {{.*#+}} zmm16 = [65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,0,0,0,0,0,0,0,65535,65535,65535,65535,65535,65535]
8439 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm7 = zmm7 ^ (zmm16 & (zmm7 ^ zmm4))
8440 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm5, %zmm7, %zmm4
8441 ; AVX512-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8442 ; AVX512-FCP-NEXT: vmovdqa 176(%rdi), %xmm4
8443 ; AVX512-FCP-NEXT: vpshufb %xmm0, %xmm4, %xmm0
8444 ; AVX512-FCP-NEXT: vmovdqa64 %xmm4, %xmm31
8445 ; AVX512-FCP-NEXT: vmovdqa 160(%rdi), %xmm15
8446 ; AVX512-FCP-NEXT: vpshufb %xmm3, %xmm15, %xmm3
8447 ; AVX512-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
8448 ; AVX512-FCP-NEXT: vmovdqa 64(%rdi), %ymm11
8449 ; AVX512-FCP-NEXT: vmovdqa 96(%rdi), %ymm7
8450 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm11[0],ymm7[1,2],ymm11[3],ymm7[4],ymm11[5],ymm7[6,7],ymm11[8],ymm7[9,10],ymm11[11],ymm7[12],ymm11[13],ymm7[14,15]
8451 ; AVX512-FCP-NEXT: vpermd %ymm3, %ymm6, %ymm3
8452 ; AVX512-FCP-NEXT: vpshufb %ymm13, %ymm3, %ymm3
8453 ; AVX512-FCP-NEXT: vmovdqa (%rdi), %ymm12
8454 ; AVX512-FCP-NEXT: vmovdqa 32(%rdi), %ymm6
8455 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm12[0],ymm6[1],ymm12[2,3],ymm6[4],ymm12[5],ymm6[6],ymm12[7,8],ymm6[9],ymm12[10,11],ymm6[12],ymm12[13],ymm6[14],ymm12[15]
8456 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm4, %xmm5
8457 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm5[1,2,3],xmm4[4,5],xmm5[6,7]
8458 ; AVX512-FCP-NEXT: vpshufb %ymm1, %ymm4, %ymm1
8459 ; AVX512-FCP-NEXT: vpor %ymm3, %ymm1, %ymm1
8460 ; AVX512-FCP-NEXT: vmovdqa 192(%rdi), %ymm8
8461 ; AVX512-FCP-NEXT: vmovdqa 224(%rdi), %ymm13
8462 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm13[0],ymm8[1],ymm13[2,3],ymm8[4],ymm13[5],ymm8[6],ymm13[7,8],ymm8[9],ymm13[10,11],ymm8[12],ymm13[13],ymm8[14],ymm13[15]
8463 ; AVX512-FCP-NEXT: vpermd %ymm3, %ymm10, %ymm3
8464 ; AVX512-FCP-NEXT: vpshufb %ymm9, %ymm3, %ymm3
8465 ; AVX512-FCP-NEXT: vmovdqa 256(%rdi), %ymm5
8466 ; AVX512-FCP-NEXT: vmovdqa 288(%rdi), %ymm9
8467 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm9[0,1],ymm5[2],ymm9[3],ymm5[4],ymm9[5,6],ymm5[7],ymm9[8,9],ymm5[10],ymm9[11],ymm5[12],ymm9[13,14],ymm5[15]
8468 ; AVX512-FCP-NEXT: vmovdqa64 %ymm5, %ymm25
8469 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm4, %xmm10
8470 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2],xmm10[3,4],xmm4[5,6,7]
8471 ; AVX512-FCP-NEXT: vpshufb %xmm2, %xmm4, %xmm2
8472 ; AVX512-FCP-NEXT: vpermt2d %ymm0, %ymm17, %ymm3
8473 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
8474 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4],ymm0[5,6,7]
8475 ; AVX512-FCP-NEXT: vmovdqa64 128(%rdi), %ymm23
8476 ; AVX512-FCP-NEXT: vpermd %ymm23, %ymm18, %ymm2
8477 ; AVX512-FCP-NEXT: vpshufb %ymm14, %ymm2, %ymm2
8478 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm2 = zmm2 ^ (zmm16 & (zmm2 ^ zmm1))
8479 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
8480 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8481 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm6[0],ymm12[1],ymm6[2],ymm12[3],ymm6[4,5],ymm12[6],ymm6[7,8],ymm12[9],ymm6[10],ymm12[11],ymm6[12,13],ymm12[14],ymm6[15]
8482 ; AVX512-FCP-NEXT: vmovdqa64 %ymm12, %ymm21
8483 ; AVX512-FCP-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8484 ; AVX512-FCP-NEXT: vmovdqa64 %ymm6, %ymm29
8485 ; AVX512-FCP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8486 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
8487 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6],xmm1[7]
8488 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm17 = [2,0,0,0,4,7,1,6]
8489 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm7[0],ymm11[1],ymm7[2,3],ymm11[4],ymm7[5],ymm11[6],ymm7[7,8],ymm11[9],ymm7[10,11],ymm11[12],ymm7[13],ymm11[14],ymm7[15]
8490 ; AVX512-FCP-NEXT: vmovdqa64 %ymm11, %ymm19
8491 ; AVX512-FCP-NEXT: vmovdqa64 %ymm7, %ymm22
8492 ; AVX512-FCP-NEXT: vpermd %ymm1, %ymm17, %ymm1
8493 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm6 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,0,1,18,19,20,21,26,27,16,17,30,31,128,128,128,128,128,128]
8494 ; AVX512-FCP-NEXT: vpshufb %ymm6, %ymm1, %ymm1
8495 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm2 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,128,128,128,128,128,128,128,128,128,128,128,128,u,u,u,u,u,u]
8496 ; AVX512-FCP-NEXT: vpshufb %ymm2, %ymm0, %ymm0
8497 ; AVX512-FCP-NEXT: vpor %ymm1, %ymm0, %ymm10
8498 ; AVX512-FCP-NEXT: vpsrlq $48, %xmm31, %xmm0
8499 ; AVX512-FCP-NEXT: vpshufb %xmm2, %xmm15, %xmm1
8500 ; AVX512-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
8501 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm20 = [0,2,5,7,4,7,0,0]
8502 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm8[0],ymm13[1],ymm8[2,3],ymm13[4],ymm8[5],ymm13[6],ymm8[7,8],ymm13[9],ymm8[10,11],ymm13[12],ymm8[13],ymm13[14],ymm8[15]
8503 ; AVX512-FCP-NEXT: vmovdqa %ymm8, %ymm5
8504 ; AVX512-FCP-NEXT: vpermd %ymm0, %ymm20, %ymm4
8505 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm3 = [2,3,4,5,4,5,0,1,6,7,8,9,14,15,4,5,18,19,20,21,20,21,16,17,22,23,24,25,30,31,20,21]
8506 ; AVX512-FCP-NEXT: vpshufb %ymm3, %ymm4, %ymm4
8507 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm4[3,4,5,6,7]
8508 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm1[0,1,2,3],ymm4[4,5,6,7]
8509 ; AVX512-FCP-NEXT: vbroadcasti32x4 {{.*#+}} ymm24 = [1,4,6,3,1,4,6,3]
8510 ; AVX512-FCP-NEXT: # ymm24 = mem[0,1,2,3,0,1,2,3]
8511 ; AVX512-FCP-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8512 ; AVX512-FCP-NEXT: vmovdqa64 %ymm25, %ymm12
8513 ; AVX512-FCP-NEXT: vmovdqu64 %ymm25, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8514 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm12[0],ymm9[1,2],ymm12[3],ymm9[4],ymm12[5],ymm9[6,7],ymm12[8],ymm9[9,10],ymm12[11],ymm9[12],ymm12[13],ymm9[14,15]
8515 ; AVX512-FCP-NEXT: vpermd %ymm1, %ymm24, %ymm0
8516 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,2,3,2,3,4,5,10,11,0,1,14,15,8,9,16,17,18,19,18,19,20,21,26,27,16,17,30,31,24,25]
8517 ; AVX512-FCP-NEXT: vpshufb %ymm1, %ymm0, %ymm0
8518 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3,4],ymm0[5,6,7]
8519 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm25 = [1,3,2,3,1,3,6,7]
8520 ; AVX512-FCP-NEXT: vpermd %ymm23, %ymm25, %ymm4
8521 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm14 = [0,1,6,7,4,5,6,7,8,9,0,1,6,7,8,9,16,17,22,23,20,21,22,23,24,25,16,17,22,23,24,25]
8522 ; AVX512-FCP-NEXT: vpshufb %ymm14, %ymm4, %ymm4
8523 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm4 = zmm4 ^ (zmm16 & (zmm4 ^ zmm10))
8524 ; AVX512-FCP-NEXT: vmovdqa64 %zmm16, %zmm10
8525 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm4, %zmm0
8526 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8527 ; AVX512-FCP-NEXT: vmovdqa64 %ymm28, %ymm18
8528 ; AVX512-FCP-NEXT: vmovdqa64 %ymm30, %ymm28
8529 ; AVX512-FCP-NEXT: vmovdqa64 %ymm18, %ymm0
8530 ; AVX512-FCP-NEXT: vmovdqa64 %ymm30, %ymm4
8531 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm4[0],ymm0[1],ymm4[2],ymm0[3],ymm4[4,5],ymm0[6],ymm4[7,8],ymm0[9],ymm4[10],ymm0[11],ymm4[12,13],ymm0[14],ymm4[15]
8532 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm0, %xmm4
8533 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm4[2,3],xmm0[4,5,6],xmm4[7]
8534 ; AVX512-FCP-NEXT: vmovdqa64 %ymm27, %ymm7
8535 ; AVX512-FCP-NEXT: vmovdqa64 %ymm26, %ymm4
8536 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm7[1],ymm4[2,3],ymm7[4],ymm4[5],ymm7[6],ymm4[7,8],ymm7[9],ymm4[10,11],ymm7[12],ymm4[13],ymm7[14],ymm4[15]
8537 ; AVX512-FCP-NEXT: vpermd %ymm4, %ymm17, %ymm4
8538 ; AVX512-FCP-NEXT: vpshufb %ymm6, %ymm4, %ymm4
8539 ; AVX512-FCP-NEXT: vpshufb %ymm2, %ymm0, %ymm0
8540 ; AVX512-FCP-NEXT: vpor %ymm4, %ymm0, %ymm0
8541 ; AVX512-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
8542 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Reload
8543 ; AVX512-FCP-NEXT: vmovdqa64 %ymm16, %ymm4
8544 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm11[0],ymm4[1],ymm11[2,3],ymm4[4],ymm11[5],ymm4[6],ymm11[7,8],ymm4[9],ymm11[10,11],ymm4[12],ymm11[13],ymm4[14],ymm11[15]
8545 ; AVX512-FCP-NEXT: vpermd %ymm4, %ymm20, %ymm4
8546 ; AVX512-FCP-NEXT: vpshufb %ymm3, %ymm4, %ymm3
8547 ; AVX512-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
8548 ; AVX512-FCP-NEXT: vpshufb %xmm2, %xmm6, %xmm2
8549 ; AVX512-FCP-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm27 # 16-byte Reload
8550 ; AVX512-FCP-NEXT: vpsrlq $48, %xmm27, %xmm4
8551 ; AVX512-FCP-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
8552 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm3[3,4,5,6,7]
8553 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
8554 ; AVX512-FCP-NEXT: vmovdqu (%rsp), %ymm8 # 32-byte Reload
8555 ; AVX512-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
8556 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm8[1,2],ymm4[3],ymm8[4],ymm4[5],ymm8[6,7],ymm4[8],ymm8[9,10],ymm4[11],ymm8[12],ymm4[13],ymm8[14,15]
8557 ; AVX512-FCP-NEXT: vpermd %ymm3, %ymm24, %ymm3
8558 ; AVX512-FCP-NEXT: vpshufb %ymm1, %ymm3, %ymm1
8559 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4],ymm1[5,6,7]
8560 ; AVX512-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Reload
8561 ; AVX512-FCP-NEXT: vpermd %ymm17, %ymm25, %ymm2
8562 ; AVX512-FCP-NEXT: vpshufb %ymm14, %ymm2, %ymm2
8563 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm2 = zmm2 ^ (zmm10 & (zmm2 ^ zmm0))
8564 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm0
8565 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8566 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0],ymm12[1],ymm9[2,3],ymm12[4],ymm9[5],ymm12[6],ymm9[7,8],ymm12[9],ymm9[10,11],ymm12[12],ymm9[13],ymm12[14],ymm9[15]
8567 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
8568 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm0[3,4],xmm1[5,6,7]
8569 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm15[u,u,u,u,u,u,u,u,4,5,14,15,u,u,u,u]
8570 ; AVX512-FCP-NEXT: vmovdqa64 %xmm15, %xmm20
8571 ; AVX512-FCP-NEXT: vpunpckhdq {{.*#+}} xmm0 = xmm0[2],xmm31[2],xmm0[3],xmm31[3]
8572 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm24 = [0,3,5,2,5,7,0,0]
8573 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm13[0],ymm5[1],ymm13[2],ymm5[3],ymm13[4,5],ymm5[6],ymm13[7,8],ymm5[9],ymm13[10],ymm5[11],ymm13[12,13],ymm5[14],ymm13[15]
8574 ; AVX512-FCP-NEXT: vmovdqa64 %ymm5, %ymm30
8575 ; AVX512-FCP-NEXT: vmovdqa64 %ymm13, %ymm26
8576 ; AVX512-FCP-NEXT: vpermd %ymm2, %ymm24, %ymm3
8577 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm12 = [0,1,6,7,2,3,2,3,4,5,10,11,0,1,14,15,16,17,22,23,18,19,18,19,20,21,26,27,16,17,30,31]
8578 ; AVX512-FCP-NEXT: vpshufb %ymm12, %ymm3, %ymm3
8579 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm3[3,4,5,6,7]
8580 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm0[0,1,2,3],ymm3[4,5,6,7]
8581 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,2,3,8,9,2,3,12,13,6,7,0,1,10,11]
8582 ; AVX512-FCP-NEXT: vpshufb %xmm2, %xmm1, %xmm1
8583 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
8584 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm10 = ymm3[0,1,2,3,4],ymm1[5,6,7]
8585 ; AVX512-FCP-NEXT: vmovdqa64 %ymm21, %ymm0
8586 ; AVX512-FCP-NEXT: vmovdqa64 %ymm29, %ymm1
8587 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
8588 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm1, %xmm3
8589 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm5 = xmm1[0,1,2],xmm3[3,4],xmm1[5,6,7]
8590 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm25 = [0,2,0,0,5,7,2,4]
8591 ; AVX512-FCP-NEXT: vmovdqa64 %ymm19, %ymm0
8592 ; AVX512-FCP-NEXT: vmovdqa64 %ymm22, %ymm1
8593 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
8594 ; AVX512-FCP-NEXT: vmovdqa64 %ymm22, %ymm29
8595 ; AVX512-FCP-NEXT: vmovdqa64 %ymm19, %ymm21
8596 ; AVX512-FCP-NEXT: vpermd %ymm1, %ymm25, %ymm3
8597 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,6,7,8,9,14,15,4,5,6,7,0,1,6,7,16,17,22,23,24,25,30,31,20,21,22,23,16,17,22,23]
8598 ; AVX512-FCP-NEXT: vpshufb %ymm1, %ymm3, %ymm0
8599 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm3 = [4,5,14,15,8,9,2,3,12,13,6,7,12,13,14,15]
8600 ; AVX512-FCP-NEXT: vpshufb %xmm3, %xmm5, %xmm5
8601 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0,1,2],ymm0[3,4,5,6,7]
8602 ; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [1,4,6,0,1,4,6,0]
8603 ; AVX512-FCP-NEXT: # ymm5 = mem[0,1,0,1]
8604 ; AVX512-FCP-NEXT: vpermd %ymm23, %ymm5, %ymm14
8605 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm9 = [4,5,2,3,4,5,6,7,8,9,2,3,4,5,10,11,20,21,18,19,20,21,22,23,24,25,18,19,20,21,26,27]
8606 ; AVX512-FCP-NEXT: vpshufb %ymm9, %ymm14, %ymm14
8607 ; AVX512-FCP-NEXT: vmovdqa64 {{.*#+}} zmm15 = [65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,0,0,0,0,0,0,0,65535,65535,65535,65535,65535,65535]
8608 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm14 = zmm14 ^ (zmm15 & (zmm14 ^ zmm0))
8609 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm10, %zmm14, %zmm0
8610 ; AVX512-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8611 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm8[0],ymm4[1],ymm8[2,3],ymm4[4],ymm8[5],ymm4[6],ymm8[7,8],ymm4[9],ymm8[10,11],ymm4[12],ymm8[13],ymm4[14],ymm8[15]
8612 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm0, %xmm10
8613 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm10[0,1,2],xmm0[3,4],xmm10[5,6,7]
8614 ; AVX512-FCP-NEXT: vpshufb %xmm2, %xmm0, %xmm0
8615 ; AVX512-FCP-NEXT: vmovdqa64 %ymm16, %ymm8
8616 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm8[0],ymm11[1],ymm8[2],ymm11[3],ymm8[4,5],ymm11[6],ymm8[7,8],ymm11[9],ymm8[10],ymm11[11],ymm8[12,13],ymm11[14],ymm8[15]
8617 ; AVX512-FCP-NEXT: vmovdqa %ymm11, %ymm13
8618 ; AVX512-FCP-NEXT: vpermd %ymm2, %ymm24, %ymm2
8619 ; AVX512-FCP-NEXT: vpshufb %ymm12, %ymm2, %ymm2
8620 ; AVX512-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm6[u,u,u,u,u,u,u,u,4,5,14,15,u,u,u,u]
8621 ; AVX512-FCP-NEXT: vmovdqa %xmm6, %xmm14
8622 ; AVX512-FCP-NEXT: vpunpckhdq {{.*#+}} xmm4 = xmm4[2],xmm27[2],xmm4[3],xmm27[3]
8623 ; AVX512-FCP-NEXT: vmovdqa64 %xmm27, %xmm6
8624 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2],xmm2[3,4,5,6,7]
8625 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3],ymm2[4,5,6,7]
8626 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8627 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4],ymm0[5,6,7]
8628 ; AVX512-FCP-NEXT: vmovdqa64 %ymm18, %ymm11
8629 ; AVX512-FCP-NEXT: vmovdqa64 %ymm28, %ymm12
8630 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm12[0,1],ymm11[2],ymm12[3],ymm11[4],ymm12[5,6],ymm11[7],ymm12[8,9],ymm11[10],ymm12[11],ymm11[12],ymm12[13,14],ymm11[15]
8631 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm2, %xmm4
8632 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm4[3,4],xmm2[5,6,7]
8633 ; AVX512-FCP-NEXT: vpshufb %xmm3, %xmm2, %xmm2
8634 ; AVX512-FCP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm3 # 32-byte Folded Reload
8635 ; AVX512-FCP-NEXT: # ymm3 = ymm7[0],mem[1],ymm7[2,3],mem[4],ymm7[5],mem[6],ymm7[7,8],mem[9],ymm7[10,11],mem[12],ymm7[13],mem[14],ymm7[15]
8636 ; AVX512-FCP-NEXT: vmovdqa64 %ymm7, %ymm16
8637 ; AVX512-FCP-NEXT: vpermd %ymm3, %ymm25, %ymm3
8638 ; AVX512-FCP-NEXT: vpshufb %ymm1, %ymm3, %ymm1
8639 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3,4,5,6,7]
8640 ; AVX512-FCP-NEXT: vpermd %ymm17, %ymm5, %ymm2
8641 ; AVX512-FCP-NEXT: vpshufb %ymm9, %ymm2, %ymm2
8642 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm2 = zmm2 ^ (zmm15 & (zmm2 ^ zmm1))
8643 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm28
8644 ; AVX512-FCP-NEXT: vmovdqa64 %xmm31, %xmm15
8645 ; AVX512-FCP-NEXT: vmovdqa64 %xmm20, %xmm7
8646 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm1 = xmm15[0],xmm7[1],xmm15[2,3]
8647 ; AVX512-FCP-NEXT: vmovq {{.*#+}} xmm0 = [6,7,0,1,10,11,0,0,0,0,0,0,0,0,0,0]
8648 ; AVX512-FCP-NEXT: vpshufb %xmm0, %xmm1, %xmm2
8649 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm18 = [1,3,6,0,5,0,0,0]
8650 ; AVX512-FCP-NEXT: vmovdqa64 %ymm30, %ymm9
8651 ; AVX512-FCP-NEXT: vmovdqa64 %ymm26, %ymm10
8652 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm10[0,1],ymm9[2],ymm10[3],ymm9[4],ymm10[5,6],ymm9[7],ymm10[8,9],ymm9[10],ymm10[11],ymm9[12],ymm10[13,14],ymm9[15]
8653 ; AVX512-FCP-NEXT: vpermd %ymm1, %ymm18, %ymm4
8654 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm1 = [2,3,2,3,4,5,0,1,6,7,8,9,14,15,4,5,18,19,18,19,20,21,16,17,22,23,24,25,30,31,20,21]
8655 ; AVX512-FCP-NEXT: vpshufb %ymm1, %ymm4, %ymm4
8656 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm4[3,4,5,6,7]
8657 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
8658 ; AVX512-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [2,4,7,0,2,4,7,0]
8659 ; AVX512-FCP-NEXT: # ymm4 = mem[0,1,0,1]
8660 ; AVX512-FCP-NEXT: vpermd %ymm23, %ymm4, %ymm5
8661 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,6,7,4,5,6,7,8,9,0,1,6,7,8,9,16,17,22,23,20,21,22,23,24,25,16,17,22,23,24,25]
8662 ; AVX512-FCP-NEXT: vpshufb %ymm3, %ymm5, %ymm5
8663 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm5, %zmm25
8664 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm2 = xmm6[0],xmm14[1],xmm6[2,3]
8665 ; AVX512-FCP-NEXT: vmovdqa64 %xmm27, %xmm22
8666 ; AVX512-FCP-NEXT: vmovdqa64 %xmm14, %xmm31
8667 ; AVX512-FCP-NEXT: vpshufb %xmm0, %xmm2, %xmm0
8668 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm8[0,1],ymm13[2],ymm8[3],ymm13[4],ymm8[5,6],ymm13[7],ymm8[8,9],ymm13[10],ymm8[11],ymm13[12],ymm8[13,14],ymm13[15]
8669 ; AVX512-FCP-NEXT: vmovdqa64 %ymm13, %ymm20
8670 ; AVX512-FCP-NEXT: vmovdqa64 %ymm8, %ymm30
8671 ; AVX512-FCP-NEXT: vpermd %ymm2, %ymm18, %ymm2
8672 ; AVX512-FCP-NEXT: vpshufb %ymm1, %ymm2, %ymm1
8673 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm1[3,4,5,6,7]
8674 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
8675 ; AVX512-FCP-NEXT: vpermd %ymm17, %ymm4, %ymm1
8676 ; AVX512-FCP-NEXT: vpshufb %ymm3, %ymm1, %ymm1
8677 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm24
8678 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm15[0,1],xmm7[2],xmm15[3]
8679 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm9[0],ymm10[1,2],ymm9[3],ymm10[4],ymm9[5],ymm10[6,7],ymm9[8],ymm10[9,10],ymm9[11],ymm10[12],ymm9[13],ymm10[14,15]
8680 ; AVX512-FCP-NEXT: vmovq {{.*#+}} xmm2 = [8,9,2,3,12,13,0,0,0,0,0,0,0,0,0,0]
8681 ; AVX512-FCP-NEXT: vpshufb %xmm2, %xmm0, %xmm3
8682 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm27 = [1,4,6,3,6,0,0,0]
8683 ; AVX512-FCP-NEXT: vpermd %ymm1, %ymm27, %ymm1
8684 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm0 = [0,1,2,3,4,5,2,3,4,5,10,11,0,1,14,15,16,17,18,19,20,21,18,19,20,21,26,27,16,17,30,31]
8685 ; AVX512-FCP-NEXT: vpshufb %ymm0, %ymm1, %ymm1
8686 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm1[3,4,5,6,7]
8687 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm1[4,5,6,7]
8688 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm26 = [0,2,1,3,0,2,5,7]
8689 ; AVX512-FCP-NEXT: vpermd %ymm23, %ymm26, %ymm3
8690 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,6,7,4,5,6,7,0,1,6,7,8,9,14,15,16,17,22,23,20,21,22,23,16,17,22,23,24,25,30,31]
8691 ; AVX512-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm3
8692 ; AVX512-FCP-NEXT: vmovdqa64 %ymm4, %ymm19
8693 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm3, %zmm1
8694 ; AVX512-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
8695 ; AVX512-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
8696 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm14[1,2],ymm4[3],ymm14[4],ymm4[5],ymm14[6,7],ymm4[8],ymm14[9,10],ymm4[11],ymm14[12],ymm4[13],ymm14[14,15]
8697 ; AVX512-FCP-NEXT: vmovdqa64 %ymm4, %ymm18
8698 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm3, %xmm4
8699 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm3 = xmm4[0],xmm3[1],xmm4[2],xmm3[3]
8700 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm4 = [0,3,0,0,5,0,2,7]
8701 ; AVX512-FCP-NEXT: vmovdqa64 %ymm21, %ymm15
8702 ; AVX512-FCP-NEXT: vmovdqa64 %ymm29, %ymm10
8703 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm5 = ymm10[0],ymm15[1],ymm10[2],ymm15[3],ymm10[4,5],ymm15[6],ymm10[7,8],ymm15[9],ymm10[10],ymm15[11],ymm10[12,13],ymm15[14],ymm10[15]
8704 ; AVX512-FCP-NEXT: vpermd %ymm5, %ymm4, %ymm5
8705 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm6 = [2,3,4,5,10,11,0,1,14,15,14,15,2,3,4,5,18,19,20,21,26,27,16,17,30,31,30,31,18,19,20,21]
8706 ; AVX512-FCP-NEXT: vpshufb %ymm6, %ymm5, %ymm5
8707 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm7 = [6,7,0,1,10,11,4,5,14,15,8,9,12,13,14,15]
8708 ; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm3, %xmm3
8709 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2],ymm5[3,4,5,6,7]
8710 ; AVX512-FCP-NEXT: vpmovsxdq {{.*#+}} zmm5 = [18446744073709551615,18446744073709551615,18446744073709551615,65535,0,0,18446744073709486080,18446744073709551615]
8711 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm25 = zmm25 ^ (zmm5 & (zmm25 ^ zmm3))
8712 ; AVX512-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
8713 ; AVX512-FCP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
8714 ; AVX512-FCP-NEXT: # ymm3 = ymm3[0],mem[1],ymm3[2,3],mem[4],ymm3[5],mem[6],ymm3[7,8],mem[9],ymm3[10,11],mem[12],ymm3[13],mem[14],ymm3[15]
8715 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm3, %xmm8
8716 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm8 = xmm3[0],xmm8[1,2,3],xmm3[4,5],xmm8[6,7]
8717 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
8718 ; AVX512-FCP-NEXT: vpshufb %xmm3, %xmm8, %xmm8
8719 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
8720 ; AVX512-FCP-NEXT: vextracti64x4 $1, %zmm25, %ymm9
8721 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0],ymm8[1,2,3,4,5,6,7],ymm9[8],ymm8[9,10,11,12,13,14,15]
8722 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3],ymm8[4,5,6,7]
8723 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm8, %zmm25, %zmm21
8724 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm11[0],ymm12[1,2],ymm11[3],ymm12[4],ymm11[5],ymm12[6,7],ymm11[8],ymm12[9,10],ymm11[11],ymm12[12],ymm11[13],ymm12[14,15]
8725 ; AVX512-FCP-NEXT: vmovdqa64 %ymm12, %ymm23
8726 ; AVX512-FCP-NEXT: vmovdqa64 %ymm11, %ymm25
8727 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm8, %xmm9
8728 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0],xmm8[1],xmm9[2],xmm8[3]
8729 ; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm8, %xmm7
8730 ; AVX512-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
8731 ; AVX512-FCP-NEXT: vmovdqa64 %ymm16, %ymm12
8732 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm13[0],ymm12[1],ymm13[2],ymm12[3],ymm13[4,5],ymm12[6],ymm13[7,8],ymm12[9],ymm13[10],ymm12[11],ymm13[12,13],ymm12[14],ymm13[15]
8733 ; AVX512-FCP-NEXT: vpermd %ymm8, %ymm4, %ymm4
8734 ; AVX512-FCP-NEXT: vpshufb %ymm6, %ymm4, %ymm4
8735 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm7[0,1,2],ymm4[3,4,5,6,7]
8736 ; AVX512-FCP-NEXT: vpternlogq {{.*#+}} zmm24 = zmm24 ^ (zmm5 & (zmm24 ^ zmm4))
8737 ; AVX512-FCP-NEXT: vmovdqa64 %xmm22, %xmm4
8738 ; AVX512-FCP-NEXT: vmovdqa64 %xmm31, %xmm5
8739 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} xmm4 = xmm4[0,1],xmm5[2],xmm4[3]
8740 ; AVX512-FCP-NEXT: vpshufb %xmm2, %xmm4, %xmm2
8741 ; AVX512-FCP-NEXT: vmovdqa64 %ymm20, %ymm4
8742 ; AVX512-FCP-NEXT: vmovdqa64 %ymm30, %ymm5
8743 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm5[1,2],ymm4[3],ymm5[4],ymm4[5],ymm5[6,7],ymm4[8],ymm5[9,10],ymm4[11],ymm5[12],ymm4[13],ymm5[14,15]
8744 ; AVX512-FCP-NEXT: vpermd %ymm4, %ymm27, %ymm4
8745 ; AVX512-FCP-NEXT: vpshufb %ymm0, %ymm4, %ymm0
8746 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm0[3,4,5,6,7]
8747 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
8748 ; AVX512-FCP-NEXT: vpermd %ymm17, %ymm26, %ymm2
8749 ; AVX512-FCP-NEXT: vmovdqa64 %ymm19, %ymm4
8750 ; AVX512-FCP-NEXT: vpshufb %ymm4, %ymm2, %ymm2
8751 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
8752 ; AVX512-FCP-NEXT: vmovdqu (%rsp), %ymm9 # 32-byte Reload
8753 ; AVX512-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
8754 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm11[0],ymm9[1],ymm11[2,3],ymm9[4],ymm11[5],ymm9[6],ymm11[7,8],ymm9[9],ymm11[10,11],ymm9[12],ymm11[13],ymm9[14],ymm11[15]
8755 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm2, %xmm4
8756 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm4[1,2,3],xmm2[4,5],xmm4[6,7]
8757 ; AVX512-FCP-NEXT: vpshufb %xmm3, %xmm2, %xmm2
8758 ; AVX512-FCP-NEXT: vextracti64x4 $1, %zmm24, %ymm3
8759 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
8760 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1,2,3,4,5,6,7],ymm3[8],ymm2[9,10,11,12,13,14,15]
8761 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
8762 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm24, %zmm2
8763 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm10[0,1],ymm15[2],ymm10[3],ymm15[4],ymm10[5,6],ymm15[7],ymm10[8,9],ymm15[10],ymm10[11],ymm15[12],ymm10[13,14],ymm15[15]
8764 ; AVX512-FCP-NEXT: vpmovsxbd {{.*#+}} ymm4 = [1,3,0,0,6,0,3,5]
8765 ; AVX512-FCP-NEXT: vpermd %ymm3, %ymm4, %ymm3
8766 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,6,7,8,9,14,15,0,1,6,7,0,1,6,7,16,17,22,23,24,25,30,31,16,17,22,23,16,17,22,23]
8767 ; AVX512-FCP-NEXT: vpshufb %ymm5, %ymm3, %ymm3
8768 ; AVX512-FCP-NEXT: vmovdqa64 %ymm18, %ymm6
8769 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm14[0],ymm6[1],ymm14[2,3],ymm6[4],ymm14[5],ymm6[6],ymm14[7,8],ymm6[9],ymm14[10,11],ymm6[12],ymm14[13],ymm6[14],ymm14[15]
8770 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm6, %xmm7
8771 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm7[0,1,2],xmm6[3,4],xmm7[5,6,7]
8772 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm7 = [8,9,2,3,12,13,6,7,0,1,10,11,0,1,6,7]
8773 ; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm6, %xmm6
8774 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0,1,2],ymm3[3,4,5,6,7]
8775 ; AVX512-FCP-NEXT: movb $7, %al
8776 ; AVX512-FCP-NEXT: kmovw %eax, %k1
8777 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm1 {%k1}
8778 ; AVX512-FCP-NEXT: vextracti64x4 $1, %zmm1, %ymm3
8779 ; AVX512-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
8780 ; AVX512-FCP-NEXT: vpblendw $181, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
8781 ; AVX512-FCP-NEXT: # ymm6 = mem[0],ymm6[1],mem[2],ymm6[3],mem[4,5],ymm6[6],mem[7,8],ymm6[9],mem[10],ymm6[11],mem[12,13],ymm6[14],mem[15]
8782 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm6, %xmm8
8783 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1],xmm8[2,3],xmm6[4,5,6],xmm8[7]
8784 ; AVX512-FCP-NEXT: vmovdqa {{.*#+}} xmm8 = [6,7,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
8785 ; AVX512-FCP-NEXT: vpshufb %xmm8, %xmm6, %xmm6
8786 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
8787 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm3[0],ymm6[1,2,3,4,5,6,7],ymm3[8],ymm6[9,10,11,12,13,14,15]
8788 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5,6,7]
8789 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm3, %zmm1, %zmm1
8790 ; AVX512-FCP-NEXT: vmovdqa64 %ymm23, %ymm3
8791 ; AVX512-FCP-NEXT: vmovdqa64 %ymm25, %ymm6
8792 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0],ymm6[1],ymm3[2,3],ymm6[4],ymm3[5],ymm6[6],ymm3[7,8],ymm6[9],ymm3[10,11],ymm6[12],ymm3[13],ymm6[14],ymm3[15]
8793 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm3, %xmm6
8794 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm6[0,1,2],xmm3[3,4],xmm6[5,6,7]
8795 ; AVX512-FCP-NEXT: vpshufb %xmm7, %xmm3, %xmm3
8796 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm13[0,1],ymm12[2],ymm13[3],ymm12[4],ymm13[5,6],ymm12[7],ymm13[8,9],ymm12[10],ymm13[11],ymm12[12],ymm13[13,14],ymm12[15]
8797 ; AVX512-FCP-NEXT: vpermd %ymm6, %ymm4, %ymm4
8798 ; AVX512-FCP-NEXT: vpshufb %ymm5, %ymm4, %ymm4
8799 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2],ymm4[3,4,5,6,7]
8800 ; AVX512-FCP-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0 {%k1}
8801 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm9[0],ymm11[1],ymm9[2],ymm11[3],ymm9[4,5],ymm11[6],ymm9[7,8],ymm11[9],ymm9[10],ymm11[11],ymm9[12,13],ymm11[14],ymm9[15]
8802 ; AVX512-FCP-NEXT: vextracti128 $1, %ymm3, %xmm4
8803 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3],xmm3[4,5,6],xmm4[7]
8804 ; AVX512-FCP-NEXT: vpshufb %xmm8, %xmm3, %xmm3
8805 ; AVX512-FCP-NEXT: vextracti64x4 $1, %zmm0, %ymm4
8806 ; AVX512-FCP-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
8807 ; AVX512-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm3[1,2,3,4,5,6,7],ymm4[8],ymm3[9,10,11,12,13,14,15]
8808 ; AVX512-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
8809 ; AVX512-FCP-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
8810 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8811 ; AVX512-FCP-NEXT: vmovaps %zmm3, (%rsi)
8812 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8813 ; AVX512-FCP-NEXT: vmovaps %zmm3, 64(%rsi)
8814 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8815 ; AVX512-FCP-NEXT: vmovaps %zmm3, 64(%rdx)
8816 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8817 ; AVX512-FCP-NEXT: vmovaps %zmm3, (%rdx)
8818 ; AVX512-FCP-NEXT: vmovdqa64 %zmm28, 64(%rcx)
8819 ; AVX512-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
8820 ; AVX512-FCP-NEXT: vmovaps %zmm3, (%rcx)
8821 ; AVX512-FCP-NEXT: vmovdqa64 %zmm2, 64(%r8)
8822 ; AVX512-FCP-NEXT: vmovdqa64 %zmm21, (%r8)
8823 ; AVX512-FCP-NEXT: vmovdqa64 %zmm0, 64(%r9)
8824 ; AVX512-FCP-NEXT: vmovdqa64 %zmm1, (%r9)
8825 ; AVX512-FCP-NEXT: addq $552, %rsp # imm = 0x228
8826 ; AVX512-FCP-NEXT: vzeroupper
8827 ; AVX512-FCP-NEXT: retq
8829 ; AVX512DQ-LABEL: load_i16_stride5_vf64:
8830 ; AVX512DQ: # %bb.0:
8831 ; AVX512DQ-NEXT: subq $552, %rsp # imm = 0x228
8832 ; AVX512DQ-NEXT: vmovdqa 384(%rdi), %ymm6
8833 ; AVX512DQ-NEXT: vmovdqa 416(%rdi), %ymm11
8834 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm6[0],ymm11[1,2],ymm6[3],ymm11[4],ymm6[5],ymm11[6,7],ymm6[8],ymm11[9,10],ymm6[11],ymm11[12],ymm6[13],ymm11[14,15]
8835 ; AVX512DQ-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8836 ; AVX512DQ-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8837 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
8838 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6],ymm1[7]
8839 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,6,7,16,17,26,27,20,21,30,31,24,25,128,128,128,128,128,128]
8840 ; AVX512DQ-NEXT: vpshufb %ymm0, %ymm1, %ymm2
8841 ; AVX512DQ-NEXT: vmovdqa 352(%rdi), %ymm8
8842 ; AVX512DQ-NEXT: vmovdqa 320(%rdi), %ymm7
8843 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm1 = ymm7[0],ymm8[1],ymm7[2,3],ymm8[4],ymm7[5],ymm8[6],ymm7[7,8],ymm8[9],ymm7[10,11],ymm8[12],ymm7[13],ymm8[14],ymm7[15]
8844 ; AVX512DQ-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8845 ; AVX512DQ-NEXT: vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8846 ; AVX512DQ-NEXT: vextracti128 $1, %ymm1, %xmm3
8847 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm3 = xmm1[0],xmm3[1,2,3],xmm1[4,5],xmm3[6,7]
8848 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,10,11,4,5,14,15,8,9,2,3,12,13,128,128,128,128,128,128,128,128,128,128,128,128,u,u,u,u,u,u]
8849 ; AVX512DQ-NEXT: vpshufb %ymm1, %ymm3, %ymm3
8850 ; AVX512DQ-NEXT: vporq %ymm2, %ymm3, %ymm19
8851 ; AVX512DQ-NEXT: vmovdqa 192(%rdi), %ymm15
8852 ; AVX512DQ-NEXT: vmovdqa 224(%rdi), %ymm13
8853 ; AVX512DQ-NEXT: vmovdqa 176(%rdi), %xmm12
8854 ; AVX512DQ-NEXT: vmovdqa 160(%rdi), %xmm14
8855 ; AVX512DQ-NEXT: vmovdqa (%rdi), %ymm4
8856 ; AVX512DQ-NEXT: vmovdqa 32(%rdi), %ymm5
8857 ; AVX512DQ-NEXT: vmovdqa 64(%rdi), %ymm10
8858 ; AVX512DQ-NEXT: vmovdqa 96(%rdi), %ymm9
8859 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm10[0],ymm9[1,2],ymm10[3],ymm9[4],ymm10[5],ymm9[6,7],ymm10[8],ymm9[9,10],ymm10[11],ymm9[12],ymm10[13],ymm9[14,15]
8860 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm3 = ymm2[2,3,0,1]
8861 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm3[5],ymm2[6],ymm3[7]
8862 ; AVX512DQ-NEXT: vpshufb %ymm0, %ymm2, %ymm0
8863 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0],ymm5[1],ymm4[2,3],ymm5[4],ymm4[5],ymm5[6],ymm4[7,8],ymm5[9],ymm4[10,11],ymm5[12],ymm4[13],ymm5[14],ymm4[15]
8864 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm3
8865 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3],xmm2[4,5],xmm3[6,7]
8866 ; AVX512DQ-NEXT: vpshufb %ymm1, %ymm2, %ymm1
8867 ; AVX512DQ-NEXT: vpor %ymm0, %ymm1, %ymm0
8868 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8869 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm11[0],ymm6[1],ymm11[2,3],ymm6[4],ymm11[5],ymm6[6],ymm11[7,8],ymm6[9],ymm11[10,11],ymm6[12],ymm11[13],ymm6[14],ymm11[15]
8870 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm1 = ymm0[2,3,0,1]
8871 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm1 = ymm0[0,1,2,3,4],ymm1[5],ymm0[6,7]
8872 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,8,9,18,19,28,29,22,23,16,17,26,27,128,128,128,128,128,128]
8873 ; AVX512DQ-NEXT: vpshufb %ymm0, %ymm1, %ymm2
8874 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm1 = ymm8[0],ymm7[1],ymm8[2],ymm7[3],ymm8[4,5],ymm7[6],ymm8[7,8],ymm7[9],ymm8[10],ymm7[11],ymm8[12,13],ymm7[14],ymm8[15]
8875 ; AVX512DQ-NEXT: vextracti128 $1, %ymm1, %xmm3
8876 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm3 = xmm1[0,1],xmm3[2,3],xmm1[4,5,6],xmm3[7]
8877 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm1 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,128,128,128,128,128,128,128,128,128,128,128,128,u,u,u,u,u,u]
8878 ; AVX512DQ-NEXT: vpshufb %ymm1, %ymm3, %ymm3
8879 ; AVX512DQ-NEXT: vpor %ymm2, %ymm3, %ymm2
8880 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8881 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm9[0],ymm10[1],ymm9[2,3],ymm10[4],ymm9[5],ymm10[6],ymm9[7,8],ymm10[9],ymm9[10,11],ymm10[12],ymm9[13],ymm10[14],ymm9[15]
8882 ; AVX512DQ-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8883 ; AVX512DQ-NEXT: vmovdqu %ymm9, (%rsp) # 32-byte Spill
8884 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm3 = ymm2[2,3,0,1]
8885 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm3[5],ymm2[6,7]
8886 ; AVX512DQ-NEXT: vpshufb %ymm0, %ymm2, %ymm0
8887 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm5[0],ymm4[1],ymm5[2],ymm4[3],ymm5[4,5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10],ymm4[11],ymm5[12,13],ymm4[14],ymm5[15]
8888 ; AVX512DQ-NEXT: vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8889 ; AVX512DQ-NEXT: vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
8890 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm3
8891 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm3[2,3],xmm2[4,5,6],xmm3[7]
8892 ; AVX512DQ-NEXT: vpshufb %ymm1, %ymm2, %ymm1
8893 ; AVX512DQ-NEXT: vporq %ymm0, %ymm1, %ymm28
8894 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm6 = [6,7,0,1,10,11,4,5,14,15,14,15,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
8895 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm13[0,1],ymm15[2],ymm13[3],ymm15[4],ymm13[5,6],ymm15[7],ymm13[8,9],ymm15[10],ymm13[11],ymm15[12],ymm13[13,14],ymm15[15]
8896 ; AVX512DQ-NEXT: vmovdqa64 %ymm15, %ymm18
8897 ; AVX512DQ-NEXT: vmovdqa64 %ymm13, %ymm24
8898 ; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm1
8899 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4,5,6,7]
8900 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[6,7,6,7,8,9,4,5,14,15,8,9,2,3,12,13,22,23,22,23,24,25,20,21,30,31,24,25,18,19,28,29]
8901 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm1 = xmm12[0],xmm14[1],xmm12[2,3]
8902 ; AVX512DQ-NEXT: vmovdqa64 %xmm12, %xmm16
8903 ; AVX512DQ-NEXT: vmovdqa64 %xmm14, %xmm30
8904 ; AVX512DQ-NEXT: vpshufb %xmm6, %xmm1, %xmm1
8905 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm0[3,4,5,6,7]
8906 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
8907 ; AVX512DQ-NEXT: vmovdqa 144(%rdi), %xmm11
8908 ; AVX512DQ-NEXT: vmovdqa 128(%rdi), %xmm7
8909 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm1 = xmm11[0,1],xmm7[2],xmm11[3]
8910 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6,7,8,9,8,9,2,3,12,13]
8911 ; AVX512DQ-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
8912 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
8913 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm1 = ymm9[0],ymm10[1],ymm9[2],ymm10[3],ymm9[4,5],ymm10[6],ymm9[7,8],ymm10[9],ymm9[10],ymm10[11],ymm9[12,13],ymm10[14],ymm9[15]
8914 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm2 = ymm1[2,3,0,1]
8915 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm2[4],ymm1[5],ymm2[6],ymm1[7]
8916 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm4[0],ymm5[1,2],ymm4[3],ymm5[4],ymm4[5],ymm5[6,7],ymm4[8],ymm5[9,10],ymm4[11],ymm5[12],ymm4[13],ymm5[14,15]
8917 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm3
8918 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2],xmm2[3]
8919 ; AVX512DQ-NEXT: vpshufb %ymm6, %ymm1, %ymm1
8920 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm2 = xmm2[6,7,0,1,10,11,4,5,14,15,8,9,12,13,14,15]
8921 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3,4,5,6,7]
8922 ; AVX512DQ-NEXT: vmovdqa 256(%rdi), %ymm12
8923 ; AVX512DQ-NEXT: vmovdqa 288(%rdi), %ymm15
8924 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm12[0],ymm15[1],ymm12[2,3],ymm15[4],ymm12[5],ymm15[6],ymm12[7,8],ymm15[9],ymm12[10,11],ymm15[12],ymm12[13],ymm15[14],ymm12[15]
8925 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm3
8926 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3],xmm2[4,5],xmm3[6,7]
8927 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm1 = zmm0 ^ (mem & (zmm1 ^ zmm0))
8928 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm0 = xmm2[0,1,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
8929 ; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
8930 ; AVX512DQ-NEXT: vextracti64x4 $1, %zmm1, %ymm2
8931 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0],ymm0[1,2,3,4,5,6,7],ymm2[8],ymm0[9,10,11,12,13,14,15]
8932 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
8933 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
8934 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8935 ; AVX512DQ-NEXT: vmovdqa 464(%rdi), %xmm8
8936 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm0 = xmm8[3,1,2,3]
8937 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,2,1,4,5,6,7]
8938 ; AVX512DQ-NEXT: vmovdqa 448(%rdi), %xmm3
8939 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm1 = xmm3[0,2,2,3]
8940 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
8941 ; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
8942 ; AVX512DQ-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8943 ; AVX512DQ-NEXT: vmovdqa %xmm11, %xmm6
8944 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm0 = xmm11[3,1,2,3]
8945 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm0 = xmm0[0,1,2,1,4,5,6,7]
8946 ; AVX512DQ-NEXT: vmovdqa %xmm7, %xmm9
8947 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm1 = xmm7[0,2,2,3]
8948 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm1 = xmm1[0,1,0,3,4,5,6,7]
8949 ; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
8950 ; AVX512DQ-NEXT: vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
8951 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm0 = xmm3[0],xmm8[1],xmm3[2,3]
8952 ; AVX512DQ-NEXT: vmovdqa64 %xmm3, %xmm22
8953 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm1 = [6,7,2,3,4,5,6,7,6,7,2,3,12,13,6,7]
8954 ; AVX512DQ-NEXT: vpshufb %xmm1, %xmm0, %xmm0
8955 ; AVX512DQ-NEXT: vmovdqa64 %xmm1, %xmm23
8956 ; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm10
8957 ; AVX512DQ-NEXT: vmovdqa 576(%rdi), %ymm1
8958 ; AVX512DQ-NEXT: vmovdqa 608(%rdi), %ymm2
8959 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0,1],ymm1[2],ymm2[3],ymm1[4],ymm2[5,6],ymm1[7],ymm2[8,9],ymm1[10],ymm2[11],ymm1[12],ymm2[13,14],ymm1[15]
8960 ; AVX512DQ-NEXT: vmovdqa64 %ymm2, %ymm20
8961 ; AVX512DQ-NEXT: vmovdqa64 %ymm1, %ymm17
8962 ; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm1
8963 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm11 = xmm0[0,1,2],xmm1[3,4],xmm0[5,6,7]
8964 ; AVX512DQ-NEXT: vmovdqa 512(%rdi), %ymm5
8965 ; AVX512DQ-NEXT: vmovdqa 544(%rdi), %ymm13
8966 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm13[0],ymm5[1],ymm13[2,3],ymm5[4],ymm13[5],ymm5[6],ymm13[7,8],ymm5[9],ymm13[10,11],ymm5[12],ymm13[13],ymm5[14],ymm13[15]
8967 ; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm1
8968 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm1[0],ymm0[1,2],ymm1[3],ymm0[4,5,6,7]
8969 ; AVX512DQ-NEXT: vmovdqa64 496(%rdi), %xmm21
8970 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm1 = xmm21[3,1,2,3]
8971 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm0 = xmm1[2,1,2,3,4,5,6,7]
8972 ; AVX512DQ-NEXT: vmovdqa 480(%rdi), %xmm7
8973 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm14 = xmm7[0,2,2,3]
8974 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm14 = xmm14[0,3,2,3,4,5,6,7]
8975 ; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm14[0],xmm0[0],xmm14[1],xmm0[1]
8976 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm14 = [u,u,u,u,u,u,u,u,8,9,2,3,12,13,6,7,16,17,26,27,26,27,30,31,24,25,18,19,28,29,22,23]
8977 ; AVX512DQ-NEXT: vpshufb %ymm14, %ymm2, %ymm2
8978 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3,4,5,6,7]
8979 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm2 = [12,13,14,15,4,5,14,15,8,9,2,3,12,13,6,7]
8980 ; AVX512DQ-NEXT: vpshufb %xmm2, %xmm11, %xmm11
8981 ; AVX512DQ-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
8982 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4],ymm11[5,6,7]
8983 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm27 = [65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,0,0,0,0,0,0,0,65535,65535,65535,65535,65535,65535]
8984 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm10 = zmm10 ^ (zmm27 & (zmm10 ^ zmm19))
8985 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm10, %zmm0
8986 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
8987 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm15[0,1],ymm12[2],ymm15[3],ymm12[4],ymm15[5,6],ymm12[7],ymm15[8,9],ymm12[10],ymm15[11],ymm12[12],ymm15[13,14],ymm12[15]
8988 ; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm10
8989 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm10[3,4],xmm0[5,6,7]
8990 ; AVX512DQ-NEXT: vpshufb %xmm2, %xmm0, %xmm0
8991 ; AVX512DQ-NEXT: vmovdqa64 %ymm18, %ymm3
8992 ; AVX512DQ-NEXT: vmovdqa64 %ymm24, %ymm1
8993 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm1[0],ymm3[1],ymm1[2,3],ymm3[4],ymm1[5],ymm3[6],ymm1[7,8],ymm3[9],ymm1[10,11],ymm3[12],ymm1[13],ymm3[14],ymm1[15]
8994 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm10
8995 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm10[0],ymm2[1,2],ymm10[3],ymm2[4,5,6,7]
8996 ; AVX512DQ-NEXT: vpshufb %ymm14, %ymm2, %ymm2
8997 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm10 = xmm16[3,1,2,3]
8998 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[2,1,2,3,4,5,6,7]
8999 ; AVX512DQ-NEXT: vmovdqa64 %xmm30, %xmm24
9000 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm11 = xmm30[0,2,2,3]
9001 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm11 = xmm11[0,3,2,3,4,5,6,7]
9002 ; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1]
9003 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm10[0,1],ymm2[2,3,4,5,6,7]
9004 ; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
9005 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4],ymm0[5,6,7]
9006 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm2 = xmm9[0],xmm6[1],xmm9[2,3]
9007 ; AVX512DQ-NEXT: vmovdqa64 %xmm9, %xmm25
9008 ; AVX512DQ-NEXT: vmovdqa64 %xmm23, %xmm4
9009 ; AVX512DQ-NEXT: vpshufb %xmm4, %xmm2, %xmm2
9010 ; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
9011 ; AVX512DQ-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm27, %zmm2 # 64-byte Folded Reload
9012 ; AVX512DQ-NEXT: # zmm2 = zmm2 ^ (zmm27 & (zmm2 ^ mem))
9013 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
9014 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9015 ; AVX512DQ-NEXT: vmovdqa64 %xmm22, %xmm4
9016 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm2 = xmm4[0,1],xmm8[2],xmm4[3]
9017 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,6,7,8,9,4,5,14,15,8,9]
9018 ; AVX512DQ-NEXT: vpshufb %xmm0, %xmm2, %xmm2
9019 ; AVX512DQ-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
9020 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm10 = ymm5[0],ymm13[1],ymm5[2,3],ymm13[4],ymm5[5],ymm13[6],ymm5[7,8],ymm13[9],ymm5[10,11],ymm13[12],ymm5[13],ymm13[14],ymm5[15]
9021 ; AVX512DQ-NEXT: vmovdqa64 %ymm13, %ymm26
9022 ; AVX512DQ-NEXT: vmovdqa64 %ymm5, %ymm22
9023 ; AVX512DQ-NEXT: vextracti128 $1, %ymm10, %xmm11
9024 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0],ymm11[1],ymm10[2],ymm11[3],ymm10[4,5,6,7]
9025 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm11 = xmm7[0,3,2,3]
9026 ; AVX512DQ-NEXT: vmovdqa64 %xmm7, %xmm30
9027 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm11 = xmm11[1,2,2,3,4,5,6,7]
9028 ; AVX512DQ-NEXT: vpsrlq $48, %xmm21, %xmm13
9029 ; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm11 = xmm11[0],xmm13[0],xmm11[1],xmm13[1]
9030 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm13 = [2,3,12,13,0,1,0,1,10,11,4,5,14,15,8,9,18,19,28,29,16,17,16,17,26,27,20,21,30,31,24,25]
9031 ; AVX512DQ-NEXT: vpshufb %ymm13, %ymm10, %ymm10
9032 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2],xmm10[3,4,5,6,7]
9033 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0,1,2,3],ymm10[4,5,6,7]
9034 ; AVX512DQ-NEXT: vmovdqa64 %ymm20, %ymm9
9035 ; AVX512DQ-NEXT: vmovdqa64 %ymm17, %ymm7
9036 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm11 = ymm7[0],ymm9[1,2],ymm7[3],ymm9[4],ymm7[5],ymm9[6,7],ymm7[8],ymm9[9,10],ymm7[11],ymm9[12],ymm7[13],ymm9[14,15]
9037 ; AVX512DQ-NEXT: vextracti128 $1, %ymm11, %xmm14
9038 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm11 = xmm14[0],xmm11[1],xmm14[2],xmm11[3]
9039 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm14 = [0,1,2,3,6,7,0,1,10,11,4,5,14,15,8,9]
9040 ; AVX512DQ-NEXT: vpshufb %xmm14, %xmm11, %xmm11
9041 ; AVX512DQ-NEXT: vinserti128 $1, %xmm11, %ymm0, %ymm11
9042 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3,4],ymm11[5,6,7]
9043 ; AVX512DQ-NEXT: vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm27, %zmm2 # 64-byte Folded Reload
9044 ; AVX512DQ-NEXT: # zmm2 = zmm2 ^ (zmm27 & (zmm2 ^ mem))
9045 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm10, %zmm2, %zmm2
9046 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9047 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm1[1],ymm3[2,3],ymm1[4],ymm3[5],ymm1[6],ymm3[7,8],ymm1[9],ymm3[10,11],ymm1[12],ymm3[13],ymm1[14],ymm3[15]
9048 ; AVX512DQ-NEXT: vmovdqa64 %ymm1, %ymm17
9049 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm10
9050 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm10[1],ymm2[2],ymm10[3],ymm2[4,5,6,7]
9051 ; AVX512DQ-NEXT: vpshufb %ymm13, %ymm2, %ymm2
9052 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm10 = xmm24[0,3,2,3]
9053 ; AVX512DQ-NEXT: vmovdqa64 %xmm24, %xmm20
9054 ; AVX512DQ-NEXT: vpshuflw {{.*#+}} xmm10 = xmm10[1,2,2,3,4,5,6,7]
9055 ; AVX512DQ-NEXT: vmovdqa64 %xmm16, %xmm1
9056 ; AVX512DQ-NEXT: vpsrlq $48, %xmm16, %xmm11
9057 ; AVX512DQ-NEXT: vpunpckldq {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1]
9058 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2],xmm2[3,4,5,6,7]
9059 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm10[0,1,2,3],ymm2[4,5,6,7]
9060 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm10 = ymm12[0],ymm15[1,2],ymm12[3],ymm15[4],ymm12[5],ymm15[6,7],ymm12[8],ymm15[9,10],ymm12[11],ymm15[12],ymm12[13],ymm15[14,15]
9061 ; AVX512DQ-NEXT: vmovdqa64 %ymm12, %ymm19
9062 ; AVX512DQ-NEXT: vextracti128 $1, %ymm10, %xmm11
9063 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0],xmm10[1],xmm11[2],xmm10[3]
9064 ; AVX512DQ-NEXT: vpshufb %xmm14, %xmm10, %xmm10
9065 ; AVX512DQ-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
9066 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm10[5,6,7]
9067 ; AVX512DQ-NEXT: vmovdqa64 %xmm25, %xmm5
9068 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm10 = xmm5[0,1],xmm6[2],xmm5[3]
9069 ; AVX512DQ-NEXT: vmovdqa64 %xmm6, %xmm23
9070 ; AVX512DQ-NEXT: vpshufb %xmm0, %xmm10, %xmm0
9071 ; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
9072 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm0 = zmm0 ^ (zmm27 & (zmm0 ^ zmm28))
9073 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
9074 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9075 ; AVX512DQ-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
9076 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm18 # 32-byte Reload
9077 ; AVX512DQ-NEXT: vmovdqa64 %ymm18, %ymm0
9078 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0],ymm12[1],ymm0[2,3],ymm12[4],ymm0[5],ymm12[6],ymm0[7,8],ymm12[9],ymm0[10,11],ymm12[12],ymm0[13],ymm12[14],ymm0[15]
9079 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm2 = ymm0[2,3,0,1]
9080 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm0[0,1,2,3,4,5],ymm2[6],ymm0[7]
9081 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm24 # 32-byte Reload
9082 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Reload
9083 ; AVX512DQ-NEXT: vmovdqa64 %ymm24, %ymm0
9084 ; AVX512DQ-NEXT: vmovdqa64 %ymm16, %ymm5
9085 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm5[2],ymm0[3],ymm5[4],ymm0[5,6],ymm5[7],ymm0[8,9],ymm5[10],ymm0[11],ymm5[12],ymm0[13,14],ymm5[15]
9086 ; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm10
9087 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm10 = xmm0[0,1,2],xmm10[3,4],xmm0[5,6,7]
9088 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm0 = [4,5,14,15,8,9,2,3,12,13,10,11,0,1,10,11,20,21,30,31,24,25,18,19,28,29,26,27,16,17,26,27]
9089 ; AVX512DQ-NEXT: vpshufb %ymm0, %ymm2, %ymm2
9090 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[4,5,14,15,8,9,2,3,12,13,6,7,12,13,14,15]
9091 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm10[0,1,2],ymm2[3,4,5,6,7]
9092 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm13 = xmm8[0],xmm4[1],xmm8[2,3]
9093 ; AVX512DQ-NEXT: vmovdqa64 %xmm4, %xmm29
9094 ; AVX512DQ-NEXT: vmovdqa64 %xmm8, %xmm28
9095 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm13 = xmm13[0,1,6,7,4,5,6,7,8,9,6,7,0,1,10,11]
9096 ; AVX512DQ-NEXT: vinserti128 $1, %xmm13, %ymm0, %ymm13
9097 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm13 = zmm13 ^ (zmm27 & (zmm13 ^ zmm2))
9098 ; AVX512DQ-NEXT: vmovdqa64 %ymm26, %ymm8
9099 ; AVX512DQ-NEXT: vmovdqa64 %ymm22, %ymm4
9100 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm8[0],ymm4[1],ymm8[2],ymm4[3],ymm8[4,5],ymm4[6],ymm8[7,8],ymm4[9],ymm8[10],ymm4[11],ymm8[12,13],ymm4[14],ymm8[15]
9101 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm14
9102 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm14[1],ymm2[2,3,4,5,6,7]
9103 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm14 = [4,5,14,15,12,13,2,3,12,13,6,7,0,1,10,11,20,21,30,31,28,29,18,19,28,29,22,23,16,17,26,27]
9104 ; AVX512DQ-NEXT: vpshufb %ymm14, %ymm2, %ymm2
9105 ; AVX512DQ-NEXT: vmovdqa64 %xmm30, %xmm6
9106 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm11 = xmm30[0,1,1,3]
9107 ; AVX512DQ-NEXT: vpshufhw {{.*#+}} xmm11 = xmm11[0,1,2,3,4,7,6,7]
9108 ; AVX512DQ-NEXT: vmovdqa64 %xmm21, %xmm5
9109 ; AVX512DQ-NEXT: vpunpckhdq {{.*#+}} xmm11 = xmm11[2],xmm21[2],xmm11[3],xmm21[3]
9110 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2],xmm2[3,4,5,6,7]
9111 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm11[0,1,2,3],ymm2[4,5,6,7]
9112 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm11 = ymm9[0],ymm7[1],ymm9[2,3],ymm7[4],ymm9[5],ymm7[6],ymm9[7,8],ymm7[9],ymm9[10,11],ymm7[12],ymm9[13],ymm7[14],ymm9[15]
9113 ; AVX512DQ-NEXT: vmovdqa64 %ymm7, %ymm21
9114 ; AVX512DQ-NEXT: vextracti128 $1, %ymm11, %xmm10
9115 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2],xmm11[3,4],xmm10[5,6,7]
9116 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm11 = [0,1,2,3,8,9,2,3,12,13,6,7,0,1,10,11]
9117 ; AVX512DQ-NEXT: vpshufb %xmm11, %xmm10, %xmm10
9118 ; AVX512DQ-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
9119 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm10[5,6,7]
9120 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm13, %zmm2
9121 ; AVX512DQ-NEXT: vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9122 ; AVX512DQ-NEXT: vmovdqa64 %ymm17, %ymm2
9123 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm2 = ymm2[0],ymm3[1],ymm2[2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7,8],ymm3[9],ymm2[10],ymm3[11],ymm2[12,13],ymm3[14],ymm2[15]
9124 ; AVX512DQ-NEXT: vmovdqa64 %ymm3, %ymm22
9125 ; AVX512DQ-NEXT: vmovdqa64 %ymm17, %ymm26
9126 ; AVX512DQ-NEXT: vextracti128 $1, %ymm2, %xmm10
9127 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0],ymm10[1],ymm2[2,3,4,5,6,7]
9128 ; AVX512DQ-NEXT: vpshufb %ymm14, %ymm2, %ymm2
9129 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm10 = xmm20[0,1,1,3]
9130 ; AVX512DQ-NEXT: vpshufhw {{.*#+}} xmm10 = xmm10[0,1,2,3,4,7,6,7]
9131 ; AVX512DQ-NEXT: vpunpckhdq {{.*#+}} xmm10 = xmm10[2],xmm1[2],xmm10[3],xmm1[3]
9132 ; AVX512DQ-NEXT: vmovdqa64 %xmm1, %xmm17
9133 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2],xmm2[3,4,5,6,7]
9134 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm10[0,1,2,3],ymm2[4,5,6,7]
9135 ; AVX512DQ-NEXT: vmovdqa64 %ymm19, %ymm1
9136 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm10 = ymm15[0],ymm1[1],ymm15[2,3],ymm1[4],ymm15[5],ymm1[6],ymm15[7,8],ymm1[9],ymm15[10,11],ymm1[12],ymm15[13],ymm1[14],ymm15[15]
9137 ; AVX512DQ-NEXT: vmovdqa64 %ymm15, %ymm31
9138 ; AVX512DQ-NEXT: vextracti128 $1, %ymm10, %xmm13
9139 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm10 = xmm13[0,1,2],xmm10[3,4],xmm13[5,6,7]
9140 ; AVX512DQ-NEXT: vpshufb %xmm11, %xmm10, %xmm10
9141 ; AVX512DQ-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
9142 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3,4],ymm10[5,6,7]
9143 ; AVX512DQ-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9144 ; AVX512DQ-NEXT: vpblendw $82, (%rsp), %ymm1, %ymm10 # 32-byte Folded Reload
9145 ; AVX512DQ-NEXT: # ymm10 = ymm1[0],mem[1],ymm1[2,3],mem[4],ymm1[5],mem[6],ymm1[7,8],mem[9],ymm1[10,11],mem[12],ymm1[13],mem[14],ymm1[15]
9146 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm11 = ymm10[2,3,0,1]
9147 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3,4,5],ymm11[6],ymm10[7]
9148 ; AVX512DQ-NEXT: vpshufb %ymm0, %ymm10, %ymm0
9149 ; AVX512DQ-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
9150 ; AVX512DQ-NEXT: vpblendw $148, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm10 # 32-byte Folded Reload
9151 ; AVX512DQ-NEXT: # ymm10 = ymm1[0,1],mem[2],ymm1[3],mem[4],ymm1[5,6],mem[7],ymm1[8,9],mem[10],ymm1[11],mem[12],ymm1[13,14],mem[15]
9152 ; AVX512DQ-NEXT: vextracti128 $1, %ymm10, %xmm11
9153 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm10 = xmm10[0,1,2],xmm11[3,4],xmm10[5,6,7]
9154 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[4,5,14,15,8,9,2,3,12,13,6,7,12,13,14,15]
9155 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm10[0,1,2],ymm0[3,4,5,6,7]
9156 ; AVX512DQ-NEXT: vmovdqa64 %xmm25, %xmm1
9157 ; AVX512DQ-NEXT: vmovdqa64 %xmm23, %xmm3
9158 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm10 = xmm3[0],xmm1[1],xmm3[2,3]
9159 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[0,1,6,7,4,5,6,7,8,9,6,7,0,1,10,11]
9160 ; AVX512DQ-NEXT: vinserti128 $1, %xmm10, %ymm0, %ymm10
9161 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm10 = zmm10 ^ (zmm27 & (zmm10 ^ zmm0))
9162 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm2, %zmm10, %zmm23
9163 ; AVX512DQ-NEXT: vmovdqa64 %ymm18, %ymm13
9164 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm12[0],ymm13[1],ymm12[2],ymm13[3],ymm12[4,5],ymm13[6],ymm12[7,8],ymm13[9],ymm12[10],ymm13[11],ymm12[12,13],ymm13[14],ymm12[15]
9165 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm10 = ymm0[2,3,0,1]
9166 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm10[4],ymm0[5],ymm10[6],ymm0[7]
9167 ; AVX512DQ-NEXT: vmovdqa64 %ymm16, %ymm14
9168 ; AVX512DQ-NEXT: vmovdqa64 %ymm24, %ymm15
9169 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm10 = ymm14[0],ymm15[1,2],ymm14[3],ymm15[4],ymm14[5],ymm15[6,7],ymm14[8],ymm15[9,10],ymm14[11],ymm15[12],ymm14[13],ymm15[14,15]
9170 ; AVX512DQ-NEXT: vextracti128 $1, %ymm10, %xmm11
9171 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm10 = xmm11[0],xmm10[1],xmm11[2],xmm10[3]
9172 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm10 = xmm10[6,7,0,1,10,11,4,5,14,15,8,9,12,13,14,15]
9173 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm1 = [6,7,0,1,10,11,4,5,14,15,14,15,2,3,12,13,22,23,16,17,26,27,20,21,30,31,30,31,18,19,28,29]
9174 ; AVX512DQ-NEXT: vpshufb %ymm1, %ymm0, %ymm0
9175 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm10[0,1,2],ymm0[3,4,5,6,7]
9176 ; AVX512DQ-NEXT: vmovdqa %ymm4, %ymm3
9177 ; AVX512DQ-NEXT: vmovdqa %ymm8, %ymm4
9178 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm10 = ymm8[0,1],ymm3[2],ymm8[3],ymm3[4],ymm8[5,6],ymm3[7],ymm8[8,9],ymm3[10],ymm8[11],ymm3[12],ymm8[13,14],ymm3[15]
9179 ; AVX512DQ-NEXT: vextracti128 $1, %ymm10, %xmm11
9180 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm10 = ymm10[0,1],ymm11[2],ymm10[3,4,5,6,7]
9181 ; AVX512DQ-NEXT: vpshufb {{.*#+}} ymm10 = ymm10[6,7,6,7,8,9,4,5,14,15,8,9,2,3,12,13,22,23,22,23,24,25,20,21,30,31,24,25,18,19,28,29]
9182 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm11 = xmm5[0],xmm6[1],xmm5[2,3]
9183 ; AVX512DQ-NEXT: vmovdqa64 %xmm30, %xmm7
9184 ; AVX512DQ-NEXT: vmovdqa %xmm5, %xmm6
9185 ; AVX512DQ-NEXT: vpshufb %xmm1, %xmm11, %xmm11
9186 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm11 = xmm11[0,1,2],xmm10[3,4,5,6,7]
9187 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm10 = ymm11[0,1,2,3],ymm10[4,5,6,7]
9188 ; AVX512DQ-NEXT: vmovdqa64 %xmm29, %xmm1
9189 ; AVX512DQ-NEXT: vmovdqa64 %xmm28, %xmm2
9190 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm8 = xmm2[0,1],xmm1[2],xmm2[3]
9191 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,6,7,8,9,8,9,2,3,12,13]
9192 ; AVX512DQ-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
9193 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm10, %zmm8, %zmm8
9194 ; AVX512DQ-NEXT: vpternlogq {{.*#+}} zmm8 = zmm8 ^ (mem & (zmm8 ^ zmm0))
9195 ; AVX512DQ-NEXT: vmovdqa %ymm9, %ymm2
9196 ; AVX512DQ-NEXT: vmovdqa64 %ymm21, %ymm10
9197 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm10[0],ymm9[1],ymm10[2,3],ymm9[4],ymm10[5],ymm9[6],ymm10[7,8],ymm9[9],ymm10[10,11],ymm9[12],ymm10[13],ymm9[14],ymm10[15]
9198 ; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm9
9199 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0],xmm9[1,2,3],xmm0[4,5],xmm9[6,7]
9200 ; AVX512DQ-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
9201 ; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
9202 ; AVX512DQ-NEXT: vextracti64x4 $1, %zmm8, %ymm9
9203 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0],ymm0[1,2,3,4,5,6,7],ymm9[8],ymm0[9,10,11,12,13,14,15]
9204 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm9[0,1,2,3],ymm0[4,5,6,7]
9205 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm25
9206 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm12[0,1],ymm13[2],ymm12[3],ymm13[4],ymm12[5,6],ymm13[7],ymm12[8,9],ymm13[10],ymm12[11],ymm13[12],ymm12[13,14],ymm13[15]
9207 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm8 = ymm0[2,3,0,1]
9208 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm8[4],ymm0[5,6],ymm8[7]
9209 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm8 = ymm15[0],ymm14[1],ymm15[2,3],ymm14[4],ymm15[5],ymm14[6],ymm15[7,8],ymm14[9],ymm15[10,11],ymm14[12],ymm15[13],ymm14[14],ymm15[15]
9210 ; AVX512DQ-NEXT: vextracti128 $1, %ymm8, %xmm9
9211 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm11 = xmm9[0,1,2],xmm8[3,4],xmm9[5,6,7]
9212 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm9 = [8,9,2,3,12,13,6,7,4,5,6,7,4,5,14,15,24,25,18,19,28,29,22,23,20,21,22,23,20,21,30,31]
9213 ; AVX512DQ-NEXT: vpshufb %ymm9, %ymm0, %ymm0
9214 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm8 = [8,9,2,3,12,13,6,7,0,1,10,11,0,1,6,7]
9215 ; AVX512DQ-NEXT: vpshufb %xmm8, %xmm11, %xmm11
9216 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm11[0,1,2],ymm0[3,4,5,6,7]
9217 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm4 = ymm3[0],ymm4[1,2],ymm3[3],ymm4[4],ymm3[5],ymm4[6,7],ymm3[8],ymm4[9,10],ymm3[11],ymm4[12],ymm3[13],ymm4[14,15]
9218 ; AVX512DQ-NEXT: vextracti128 $1, %ymm4, %xmm5
9219 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2],ymm4[3,4,5,6,7]
9220 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm1 = xmm6[0,1],xmm7[2],xmm6[3]
9221 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm3 = [8,9,14,15,4,5,6,7,0,1,10,11,4,5,14,15,24,25,30,31,20,21,22,23,16,17,26,27,20,21,30,31]
9222 ; AVX512DQ-NEXT: vpshufb %ymm3, %ymm4, %ymm4
9223 ; AVX512DQ-NEXT: vpshufb %xmm9, %xmm1, %xmm1
9224 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm4[3,4,5,6,7]
9225 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm4[4,5,6,7]
9226 ; AVX512DQ-NEXT: vinserti128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm4 # 16-byte Folded Reload
9227 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm1, %zmm4, %zmm1
9228 ; AVX512DQ-NEXT: movb $7, %al
9229 ; AVX512DQ-NEXT: kmovw %eax, %k1
9230 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm0, %zmm0, %zmm1 {%k1}
9231 ; AVX512DQ-NEXT: vextracti64x4 $1, %zmm1, %ymm4
9232 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm2[0],ymm10[1],ymm2[2],ymm10[3],ymm2[4,5],ymm10[6],ymm2[7,8],ymm10[9],ymm2[10],ymm10[11],ymm2[12,13],ymm10[14],ymm2[15]
9233 ; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm5
9234 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm5 = xmm0[0,1],xmm5[2,3],xmm0[4,5,6],xmm5[7]
9235 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} xmm0 = [6,7,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
9236 ; AVX512DQ-NEXT: vpshufb %xmm0, %xmm5, %xmm5
9237 ; AVX512DQ-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
9238 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm5 = ymm4[0],ymm5[1,2,3,4,5,6,7],ymm4[8],ymm5[9,10,11,12,13,14,15]
9239 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
9240 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm4, %zmm1, %zmm1
9241 ; AVX512DQ-NEXT: vmovdqa64 %ymm22, %ymm2
9242 ; AVX512DQ-NEXT: vmovdqa64 %ymm26, %ymm4
9243 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm4 = ymm2[0],ymm4[1,2],ymm2[3],ymm4[4],ymm2[5],ymm4[6,7],ymm2[8],ymm4[9,10],ymm2[11],ymm4[12],ymm2[13],ymm4[14,15]
9244 ; AVX512DQ-NEXT: vextracti128 $1, %ymm4, %xmm5
9245 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0],ymm4[1],ymm5[2],ymm4[3,4,5,6,7]
9246 ; AVX512DQ-NEXT: vpshufb %ymm3, %ymm4, %ymm3
9247 ; AVX512DQ-NEXT: vmovdqa64 %xmm20, %xmm2
9248 ; AVX512DQ-NEXT: vmovdqa64 %xmm17, %xmm4
9249 ; AVX512DQ-NEXT: vpblendd {{.*#+}} xmm4 = xmm4[0,1],xmm2[2],xmm4[3]
9250 ; AVX512DQ-NEXT: vpshufb %xmm9, %xmm4, %xmm4
9251 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2],xmm3[3,4,5,6,7]
9252 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
9253 ; AVX512DQ-NEXT: vinserti128 $1, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm4 # 16-byte Folded Reload
9254 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm3, %zmm4, %zmm3
9255 ; AVX512DQ-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
9256 ; AVX512DQ-NEXT: vpblendw $107, (%rsp), %ymm2, %ymm4 # 32-byte Folded Reload
9257 ; AVX512DQ-NEXT: # ymm4 = mem[0,1],ymm2[2],mem[3],ymm2[4],mem[5,6],ymm2[7],mem[8,9],ymm2[10],mem[11],ymm2[12],mem[13,14],ymm2[15]
9258 ; AVX512DQ-NEXT: vpermq {{.*#+}} ymm5 = ymm4[2,3,0,1]
9259 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4],ymm4[5,6],ymm5[7]
9260 ; AVX512DQ-NEXT: vpshufb %ymm9, %ymm4, %ymm4
9261 ; AVX512DQ-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
9262 ; AVX512DQ-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2, %ymm5 # 32-byte Folded Reload
9263 ; AVX512DQ-NEXT: # ymm5 = ymm2[0],mem[1],ymm2[2,3],mem[4],ymm2[5],mem[6],ymm2[7,8],mem[9],ymm2[10,11],mem[12],ymm2[13],mem[14],ymm2[15]
9264 ; AVX512DQ-NEXT: vextracti128 $1, %ymm5, %xmm6
9265 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm5 = xmm6[0,1,2],xmm5[3,4],xmm6[5,6,7]
9266 ; AVX512DQ-NEXT: vpshufb %xmm8, %xmm5, %xmm5
9267 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm4 = ymm5[0,1,2],ymm4[3,4,5,6,7]
9268 ; AVX512DQ-NEXT: vinserti64x4 $0, %ymm4, %zmm0, %zmm3 {%k1}
9269 ; AVX512DQ-NEXT: vmovdqa64 %ymm19, %ymm2
9270 ; AVX512DQ-NEXT: vmovdqa64 %ymm31, %ymm4
9271 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm2[1],ymm4[2],ymm2[3],ymm4[4,5],ymm2[6],ymm4[7,8],ymm2[9],ymm4[10],ymm2[11],ymm4[12,13],ymm2[14],ymm4[15]
9272 ; AVX512DQ-NEXT: vextracti128 $1, %ymm4, %xmm5
9273 ; AVX512DQ-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1],xmm5[2,3],xmm4[4,5,6],xmm5[7]
9274 ; AVX512DQ-NEXT: vpshufb %xmm0, %xmm4, %xmm0
9275 ; AVX512DQ-NEXT: vextracti64x4 $1, %zmm3, %ymm4
9276 ; AVX512DQ-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
9277 ; AVX512DQ-NEXT: vpblendw {{.*#+}} ymm0 = ymm4[0],ymm0[1,2,3,4,5,6,7],ymm4[8],ymm0[9,10,11,12,13,14,15]
9278 ; AVX512DQ-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
9279 ; AVX512DQ-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
9280 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9281 ; AVX512DQ-NEXT: vmovaps %zmm2, (%rsi)
9282 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9283 ; AVX512DQ-NEXT: vmovaps %zmm2, 64(%rsi)
9284 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9285 ; AVX512DQ-NEXT: vmovaps %zmm2, 64(%rdx)
9286 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9287 ; AVX512DQ-NEXT: vmovaps %zmm2, (%rdx)
9288 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9289 ; AVX512DQ-NEXT: vmovaps %zmm2, 64(%rcx)
9290 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, (%rcx)
9291 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, 64(%r8)
9292 ; AVX512DQ-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
9293 ; AVX512DQ-NEXT: vmovaps %zmm2, (%r8)
9294 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 64(%r9)
9295 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%r9)
9296 ; AVX512DQ-NEXT: addq $552, %rsp # imm = 0x228
9297 ; AVX512DQ-NEXT: vzeroupper
9298 ; AVX512DQ-NEXT: retq
9299 ;
9300 ; AVX512DQ-FCP-LABEL: load_i16_stride5_vf64:
9301 ; AVX512DQ-FCP: # %bb.0:
9302 ; AVX512DQ-FCP-NEXT: subq $552, %rsp # imm = 0x228
9303 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm0 = [4,5,14,15,4,5,6,7,8,9,10,11,12,13,14,15]
9304 ; AVX512DQ-FCP-NEXT: vmovdqa 496(%rdi), %xmm1
9305 ; AVX512DQ-FCP-NEXT: vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9306 ; AVX512DQ-FCP-NEXT: vpshufb %xmm0, %xmm1, %xmm1
9307 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,10,11,8,9,10,11,8,9,10,11,12,13,14,15]
9308 ; AVX512DQ-FCP-NEXT: vmovdqa 480(%rdi), %xmm2
9309 ; AVX512DQ-FCP-NEXT: vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
9310 ; AVX512DQ-FCP-NEXT: vpshufb %xmm3, %xmm2, %xmm2
9311 ; AVX512DQ-FCP-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
9312 ; AVX512DQ-FCP-NEXT: vmovdqa 512(%rdi), %ymm10
9313 ; AVX512DQ-FCP-NEXT: vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9314 ; AVX512DQ-FCP-NEXT: vmovdqa 544(%rdi), %ymm11
9315 ; AVX512DQ-FCP-NEXT: vmovdqu %ymm11, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9316 ; AVX512DQ-FCP-NEXT: vmovdqa 576(%rdi), %ymm7
9317 ; AVX512DQ-FCP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9318 ; AVX512DQ-FCP-NEXT: vmovdqa 608(%rdi), %ymm8
9319 ; AVX512DQ-FCP-NEXT: vmovdqu %ymm8, (%rsp) # 32-byte Spill
9320 ; AVX512DQ-FCP-NEXT: vmovdqa 352(%rdi), %ymm4
9321 ; AVX512DQ-FCP-NEXT: vmovdqa 320(%rdi), %ymm5
9322 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5],ymm4[6],ymm5[7,8],ymm4[9],ymm5[10,11],ymm4[12],ymm5[13],ymm4[14],ymm5[15]
9323 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm5, %ymm28
9324 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm4, %ymm30
9325 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm1, %xmm4
9326 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm1[0],xmm4[1,2,3],xmm1[4,5],xmm4[6,7]
9327 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,10,11,4,5,14,15,8,9,2,3,12,13,128,128,128,128,128,128,128,128,128,128,128,128,u,u,u,u,u,u]
9328 ; AVX512DQ-FCP-NEXT: vpshufb %ymm1, %ymm4, %ymm4
9329 ; AVX512DQ-FCP-NEXT: vmovdqa 384(%rdi), %ymm6
9330 ; AVX512DQ-FCP-NEXT: vmovdqa 416(%rdi), %ymm9
9331 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm5 = ymm6[0],ymm9[1,2],ymm6[3],ymm9[4],ymm6[5],ymm9[6,7],ymm6[8],ymm9[9,10],ymm6[11],ymm9[12],ymm6[13],ymm9[14,15]
9332 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm9, %ymm26
9333 ; AVX512DQ-FCP-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9334 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm6, %ymm27
9335 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm6 = [1,3,0,2,4,6,1,3]
9336 ; AVX512DQ-FCP-NEXT: vpermd %ymm5, %ymm6, %ymm5
9337 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm13 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,2,3,16,17,22,23,24,25,30,31,20,21,128,128,128,128,128,128]
9338 ; AVX512DQ-FCP-NEXT: vpshufb %ymm13, %ymm5, %ymm5
9339 ; AVX512DQ-FCP-NEXT: vpor %ymm5, %ymm4, %ymm4
9340 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm5 = ymm8[0,1],ymm7[2],ymm8[3],ymm7[4],ymm8[5,6],ymm7[7],ymm8[8,9],ymm7[10],ymm8[11],ymm7[12],ymm8[13,14],ymm7[15]
9341 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm5, %xmm7
9342 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm5 = xmm5[0,1,2],xmm7[3,4],xmm5[5,6,7]
9343 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm7 = ymm11[0],ymm10[1],ymm11[2,3],ymm10[4],ymm11[5],ymm10[6],ymm11[7,8],ymm10[9],ymm11[10,11],ymm10[12],ymm11[13],ymm10[14],ymm11[15]
9344 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm10 = [2,4,7,1,4,6,0,0]
9345 ; AVX512DQ-FCP-NEXT: vpermd %ymm7, %ymm10, %ymm7
9346 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm9 = [u,u,u,u,u,u,u,u,8,9,14,15,0,1,6,7,16,17,22,23,20,21,22,23,24,25,30,31,16,17,22,23]
9347 ; AVX512DQ-FCP-NEXT: vpshufb %ymm9, %ymm7, %ymm7
9348 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm17 = [8,9,3,2,4,5,7,6]
9349 ; AVX512DQ-FCP-NEXT: vpermt2d %ymm2, %ymm17, %ymm7
9350 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm2 = [12,13,14,15,4,5,14,15,8,9,2,3,12,13,6,7]
9351 ; AVX512DQ-FCP-NEXT: vpshufb %xmm2, %xmm5, %xmm5
9352 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm5
9353 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm5 = ymm7[0,1,2,3,4],ymm5[5,6,7]
9354 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm18 = [0,3,1,3,0,3,5,7]
9355 ; AVX512DQ-FCP-NEXT: vmovdqa 448(%rdi), %ymm7
9356 ; AVX512DQ-FCP-NEXT: vmovdqu %ymm7, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9357 ; AVX512DQ-FCP-NEXT: vpermd %ymm7, %ymm18, %ymm7
9358 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm14 = [4,5,2,3,4,5,6,7,8,9,2,3,4,5,10,11,20,21,18,19,20,21,22,23,24,25,18,19,20,21,26,27]
9359 ; AVX512DQ-FCP-NEXT: vpshufb %ymm14, %ymm7, %ymm7
9360 ; AVX512DQ-FCP-NEXT: vmovdqa64 {{.*#+}} zmm16 = [65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,0,0,0,0,0,0,0,65535,65535,65535,65535,65535,65535]
9361 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm7 = zmm7 ^ (zmm16 & (zmm7 ^ zmm4))
9362 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm5, %zmm7, %zmm4
9363 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9364 ; AVX512DQ-FCP-NEXT: vmovdqa 176(%rdi), %xmm4
9365 ; AVX512DQ-FCP-NEXT: vpshufb %xmm0, %xmm4, %xmm0
9366 ; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm4, %xmm31
9367 ; AVX512DQ-FCP-NEXT: vmovdqa 160(%rdi), %xmm15
9368 ; AVX512DQ-FCP-NEXT: vpshufb %xmm3, %xmm15, %xmm3
9369 ; AVX512DQ-FCP-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
9370 ; AVX512DQ-FCP-NEXT: vmovdqa 64(%rdi), %ymm11
9371 ; AVX512DQ-FCP-NEXT: vmovdqa 96(%rdi), %ymm7
9372 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm11[0],ymm7[1,2],ymm11[3],ymm7[4],ymm11[5],ymm7[6,7],ymm11[8],ymm7[9,10],ymm11[11],ymm7[12],ymm11[13],ymm7[14,15]
9373 ; AVX512DQ-FCP-NEXT: vpermd %ymm3, %ymm6, %ymm3
9374 ; AVX512DQ-FCP-NEXT: vpshufb %ymm13, %ymm3, %ymm3
9375 ; AVX512DQ-FCP-NEXT: vmovdqa (%rdi), %ymm12
9376 ; AVX512DQ-FCP-NEXT: vmovdqa 32(%rdi), %ymm6
9377 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm12[0],ymm6[1],ymm12[2,3],ymm6[4],ymm12[5],ymm6[6],ymm12[7,8],ymm6[9],ymm12[10,11],ymm6[12],ymm12[13],ymm6[14],ymm12[15]
9378 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm4, %xmm5
9379 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0],xmm5[1,2,3],xmm4[4,5],xmm5[6,7]
9380 ; AVX512DQ-FCP-NEXT: vpshufb %ymm1, %ymm4, %ymm1
9381 ; AVX512DQ-FCP-NEXT: vpor %ymm3, %ymm1, %ymm1
9382 ; AVX512DQ-FCP-NEXT: vmovdqa 192(%rdi), %ymm8
9383 ; AVX512DQ-FCP-NEXT: vmovdqa 224(%rdi), %ymm13
9384 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm13[0],ymm8[1],ymm13[2,3],ymm8[4],ymm13[5],ymm8[6],ymm13[7,8],ymm8[9],ymm13[10,11],ymm8[12],ymm13[13],ymm8[14],ymm13[15]
9385 ; AVX512DQ-FCP-NEXT: vpermd %ymm3, %ymm10, %ymm3
9386 ; AVX512DQ-FCP-NEXT: vpshufb %ymm9, %ymm3, %ymm3
9387 ; AVX512DQ-FCP-NEXT: vmovdqa 256(%rdi), %ymm5
9388 ; AVX512DQ-FCP-NEXT: vmovdqa 288(%rdi), %ymm9
9389 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm9[0,1],ymm5[2],ymm9[3],ymm5[4],ymm9[5,6],ymm5[7],ymm9[8,9],ymm5[10],ymm9[11],ymm5[12],ymm9[13,14],ymm5[15]
9390 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm5, %ymm25
9391 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm4, %xmm10
9392 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2],xmm10[3,4],xmm4[5,6,7]
9393 ; AVX512DQ-FCP-NEXT: vpshufb %xmm2, %xmm4, %xmm2
9394 ; AVX512DQ-FCP-NEXT: vpermt2d %ymm0, %ymm17, %ymm3
9395 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
9396 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4],ymm0[5,6,7]
9397 ; AVX512DQ-FCP-NEXT: vmovdqa64 128(%rdi), %ymm23
9398 ; AVX512DQ-FCP-NEXT: vpermd %ymm23, %ymm18, %ymm2
9399 ; AVX512DQ-FCP-NEXT: vpshufb %ymm14, %ymm2, %ymm2
9400 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm2 = zmm2 ^ (zmm16 & (zmm2 ^ zmm1))
9401 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
9402 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9403 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm6[0],ymm12[1],ymm6[2],ymm12[3],ymm6[4,5],ymm12[6],ymm6[7,8],ymm12[9],ymm6[10],ymm12[11],ymm6[12,13],ymm12[14],ymm6[15]
9404 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm12, %ymm21
9405 ; AVX512DQ-FCP-NEXT: vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9406 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm6, %ymm29
9407 ; AVX512DQ-FCP-NEXT: vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9408 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
9409 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3],xmm0[4,5,6],xmm1[7]
9410 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm17 = [2,0,0,0,4,7,1,6]
9411 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm7[0],ymm11[1],ymm7[2,3],ymm11[4],ymm7[5],ymm11[6],ymm7[7,8],ymm11[9],ymm7[10,11],ymm11[12],ymm7[13],ymm11[14],ymm7[15]
9412 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm11, %ymm19
9413 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm7, %ymm22
9414 ; AVX512DQ-FCP-NEXT: vpermd %ymm1, %ymm17, %ymm1
9415 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm6 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,0,1,18,19,20,21,26,27,16,17,30,31,128,128,128,128,128,128]
9416 ; AVX512DQ-FCP-NEXT: vpshufb %ymm6, %ymm1, %ymm1
9417 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm2 = [2,3,12,13,6,7,0,1,10,11,4,5,14,15,128,128,128,128,128,128,128,128,128,128,128,128,u,u,u,u,u,u]
9418 ; AVX512DQ-FCP-NEXT: vpshufb %ymm2, %ymm0, %ymm0
9419 ; AVX512DQ-FCP-NEXT: vpor %ymm1, %ymm0, %ymm10
9420 ; AVX512DQ-FCP-NEXT: vpsrlq $48, %xmm31, %xmm0
9421 ; AVX512DQ-FCP-NEXT: vpshufb %xmm2, %xmm15, %xmm1
9422 ; AVX512DQ-FCP-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
9423 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm20 = [0,2,5,7,4,7,0,0]
9424 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm8[0],ymm13[1],ymm8[2,3],ymm13[4],ymm8[5],ymm13[6],ymm8[7,8],ymm13[9],ymm8[10,11],ymm13[12],ymm8[13],ymm13[14],ymm8[15]
9425 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm8, %ymm5
9426 ; AVX512DQ-FCP-NEXT: vpermd %ymm0, %ymm20, %ymm4
9427 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm3 = [2,3,4,5,4,5,0,1,6,7,8,9,14,15,4,5,18,19,20,21,20,21,16,17,22,23,24,25,30,31,20,21]
9428 ; AVX512DQ-FCP-NEXT: vpshufb %ymm3, %ymm4, %ymm4
9429 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm4[3,4,5,6,7]
9430 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm1[0,1,2,3],ymm4[4,5,6,7]
9431 ; AVX512DQ-FCP-NEXT: vbroadcasti32x4 {{.*#+}} ymm24 = [1,4,6,3,1,4,6,3]
9432 ; AVX512DQ-FCP-NEXT: # ymm24 = mem[0,1,2,3,0,1,2,3]
9433 ; AVX512DQ-FCP-NEXT: vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9434 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm25, %ymm12
9435 ; AVX512DQ-FCP-NEXT: vmovdqu64 %ymm25, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
9436 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm12[0],ymm9[1,2],ymm12[3],ymm9[4],ymm12[5],ymm9[6,7],ymm12[8],ymm9[9,10],ymm12[11],ymm9[12],ymm12[13],ymm9[14,15]
9437 ; AVX512DQ-FCP-NEXT: vpermd %ymm1, %ymm24, %ymm0
9438 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,2,3,2,3,4,5,10,11,0,1,14,15,8,9,16,17,18,19,18,19,20,21,26,27,16,17,30,31,24,25]
9439 ; AVX512DQ-FCP-NEXT: vpshufb %ymm1, %ymm0, %ymm0
9440 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3,4],ymm0[5,6,7]
9441 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm25 = [1,3,2,3,1,3,6,7]
9442 ; AVX512DQ-FCP-NEXT: vpermd %ymm23, %ymm25, %ymm4
9443 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm14 = [0,1,6,7,4,5,6,7,8,9,0,1,6,7,8,9,16,17,22,23,20,21,22,23,24,25,16,17,22,23,24,25]
9444 ; AVX512DQ-FCP-NEXT: vpshufb %ymm14, %ymm4, %ymm4
9445 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm4 = zmm4 ^ (zmm16 & (zmm4 ^ zmm10))
9446 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm16, %zmm10
9447 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm4, %zmm0
9448 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9449 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm28, %ymm18
9450 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm30, %ymm28
9451 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm18, %ymm0
9452 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm30, %ymm4
9453 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm4[0],ymm0[1],ymm4[2],ymm0[3],ymm4[4,5],ymm0[6],ymm4[7,8],ymm0[9],ymm4[10],ymm0[11],ymm4[12,13],ymm0[14],ymm4[15]
9454 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm0, %xmm4
9455 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm4[2,3],xmm0[4,5,6],xmm4[7]
9456 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm27, %ymm7
9457 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm26, %ymm4
9458 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm7[1],ymm4[2,3],ymm7[4],ymm4[5],ymm7[6],ymm4[7,8],ymm7[9],ymm4[10,11],ymm7[12],ymm4[13],ymm7[14],ymm4[15]
9459 ; AVX512DQ-FCP-NEXT: vpermd %ymm4, %ymm17, %ymm4
9460 ; AVX512DQ-FCP-NEXT: vpshufb %ymm6, %ymm4, %ymm4
9461 ; AVX512DQ-FCP-NEXT: vpshufb %ymm2, %ymm0, %ymm0
9462 ; AVX512DQ-FCP-NEXT: vpor %ymm4, %ymm0, %ymm0
9463 ; AVX512DQ-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
9464 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Reload
9465 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm16, %ymm4
9466 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm11[0],ymm4[1],ymm11[2,3],ymm4[4],ymm11[5],ymm4[6],ymm11[7,8],ymm4[9],ymm11[10,11],ymm4[12],ymm11[13],ymm4[14],ymm11[15]
9467 ; AVX512DQ-FCP-NEXT: vpermd %ymm4, %ymm20, %ymm4
9468 ; AVX512DQ-FCP-NEXT: vpshufb %ymm3, %ymm4, %ymm3
9469 ; AVX512DQ-FCP-NEXT: vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
9470 ; AVX512DQ-FCP-NEXT: vpshufb %xmm2, %xmm6, %xmm2
9471 ; AVX512DQ-FCP-NEXT: vmovdqa64 {{[-0-9]+}}(%r{{[sb]}}p), %xmm27 # 16-byte Reload
9472 ; AVX512DQ-FCP-NEXT: vpsrlq $48, %xmm27, %xmm4
9473 ; AVX512DQ-FCP-NEXT: vpunpckldq {{.*#+}} xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
9474 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm3[3,4,5,6,7]
9475 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
9476 ; AVX512DQ-FCP-NEXT: vmovdqu (%rsp), %ymm8 # 32-byte Reload
9477 ; AVX512DQ-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
9478 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm8[1,2],ymm4[3],ymm8[4],ymm4[5],ymm8[6,7],ymm4[8],ymm8[9,10],ymm4[11],ymm8[12],ymm4[13],ymm8[14,15]
9479 ; AVX512DQ-FCP-NEXT: vpermd %ymm3, %ymm24, %ymm3
9480 ; AVX512DQ-FCP-NEXT: vpshufb %ymm1, %ymm3, %ymm1
9481 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3,4],ymm1[5,6,7]
9482 ; AVX512DQ-FCP-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Reload
9483 ; AVX512DQ-FCP-NEXT: vpermd %ymm17, %ymm25, %ymm2
9484 ; AVX512DQ-FCP-NEXT: vpshufb %ymm14, %ymm2, %ymm2
9485 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm2 = zmm2 ^ (zmm10 & (zmm2 ^ zmm0))
9486 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm0
9487 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9488 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm9[0],ymm12[1],ymm9[2,3],ymm12[4],ymm9[5],ymm12[6],ymm9[7,8],ymm12[9],ymm9[10,11],ymm12[12],ymm9[13],ymm12[14],ymm9[15]
9489 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm0, %xmm1
9490 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2],xmm0[3,4],xmm1[5,6,7]
9491 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm0 = xmm15[u,u,u,u,u,u,u,u,4,5,14,15,u,u,u,u]
9492 ; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm15, %xmm20
9493 ; AVX512DQ-FCP-NEXT: vpunpckhdq {{.*#+}} xmm0 = xmm0[2],xmm31[2],xmm0[3],xmm31[3]
9494 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm24 = [0,3,5,2,5,7,0,0]
9495 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm13[0],ymm5[1],ymm13[2],ymm5[3],ymm13[4,5],ymm5[6],ymm13[7,8],ymm5[9],ymm13[10],ymm5[11],ymm13[12,13],ymm5[14],ymm13[15]
9496 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm5, %ymm30
9497 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm13, %ymm26
9498 ; AVX512DQ-FCP-NEXT: vpermd %ymm2, %ymm24, %ymm3
9499 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm12 = [0,1,6,7,2,3,2,3,4,5,10,11,0,1,14,15,16,17,22,23,18,19,18,19,20,21,26,27,16,17,30,31]
9500 ; AVX512DQ-FCP-NEXT: vpshufb %ymm12, %ymm3, %ymm3
9501 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm3[3,4,5,6,7]
9502 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm0[0,1,2,3],ymm3[4,5,6,7]
9503 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm2 = [0,1,2,3,8,9,2,3,12,13,6,7,0,1,10,11]
9504 ; AVX512DQ-FCP-NEXT: vpshufb %xmm2, %xmm1, %xmm1
9505 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm1
9506 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm10 = ymm3[0,1,2,3,4],ymm1[5,6,7]
9507 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm21, %ymm0
9508 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm29, %ymm1
9509 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm0[2],ymm1[3],ymm0[4],ymm1[5,6],ymm0[7],ymm1[8,9],ymm0[10],ymm1[11],ymm0[12],ymm1[13,14],ymm0[15]
9510 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm1, %xmm3
9511 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm5 = xmm1[0,1,2],xmm3[3,4],xmm1[5,6,7]
9512 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm25 = [0,2,0,0,5,7,2,4]
9513 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm19, %ymm0
9514 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm22, %ymm1
9515 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5],ymm1[6],ymm0[7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13],ymm1[14],ymm0[15]
9516 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm22, %ymm29
9517 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm19, %ymm21
9518 ; AVX512DQ-FCP-NEXT: vpermd %ymm1, %ymm25, %ymm3
9519 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm1 = [0,1,6,7,8,9,14,15,4,5,6,7,0,1,6,7,16,17,22,23,24,25,30,31,20,21,22,23,16,17,22,23]
9520 ; AVX512DQ-FCP-NEXT: vpshufb %ymm1, %ymm3, %ymm0
9521 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm3 = [4,5,14,15,8,9,2,3,12,13,6,7,12,13,14,15]
9522 ; AVX512DQ-FCP-NEXT: vpshufb %xmm3, %xmm5, %xmm5
9523 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm5[0,1,2],ymm0[3,4,5,6,7]
9524 ; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm5 = [1,4,6,0,1,4,6,0]
9525 ; AVX512DQ-FCP-NEXT: # ymm5 = mem[0,1,0,1]
9526 ; AVX512DQ-FCP-NEXT: vpermd %ymm23, %ymm5, %ymm14
9527 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm9 = [4,5,2,3,4,5,6,7,8,9,2,3,4,5,10,11,20,21,18,19,20,21,22,23,24,25,18,19,20,21,26,27]
9528 ; AVX512DQ-FCP-NEXT: vpshufb %ymm9, %ymm14, %ymm14
9529 ; AVX512DQ-FCP-NEXT: vmovdqa64 {{.*#+}} zmm15 = [65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,65535,0,0,0,0,0,0,0,0,0,0,0,0,0,65535,65535,65535,65535,65535,65535]
9530 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm14 = zmm14 ^ (zmm15 & (zmm14 ^ zmm0))
9531 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm10, %zmm14, %zmm0
9532 ; AVX512DQ-FCP-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
9533 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm0 = ymm8[0],ymm4[1],ymm8[2,3],ymm4[4],ymm8[5],ymm4[6],ymm8[7,8],ymm4[9],ymm8[10,11],ymm4[12],ymm8[13],ymm4[14],ymm8[15]
9534 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm0, %xmm10
9535 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm10[0,1,2],xmm0[3,4],xmm10[5,6,7]
9536 ; AVX512DQ-FCP-NEXT: vpshufb %xmm2, %xmm0, %xmm0
9537 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm16, %ymm8
9538 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm8[0],ymm11[1],ymm8[2],ymm11[3],ymm8[4,5],ymm11[6],ymm8[7,8],ymm11[9],ymm8[10],ymm11[11],ymm8[12,13],ymm11[14],ymm8[15]
9539 ; AVX512DQ-FCP-NEXT: vmovdqa %ymm11, %ymm13
9540 ; AVX512DQ-FCP-NEXT: vpermd %ymm2, %ymm24, %ymm2
9541 ; AVX512DQ-FCP-NEXT: vpshufb %ymm12, %ymm2, %ymm2
9542 ; AVX512DQ-FCP-NEXT: vpshufb {{.*#+}} xmm4 = xmm6[u,u,u,u,u,u,u,u,4,5,14,15,u,u,u,u]
9543 ; AVX512DQ-FCP-NEXT: vmovdqa %xmm6, %xmm14
9544 ; AVX512DQ-FCP-NEXT: vpunpckhdq {{.*#+}} xmm4 = xmm4[2],xmm27[2],xmm4[3],xmm27[3]
9545 ; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm27, %xmm6
9546 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm4 = xmm4[0,1,2],xmm2[3,4,5,6,7]
9547 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm4[0,1,2,3],ymm2[4,5,6,7]
9548 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm0, %ymm0, %ymm0
9549 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4],ymm0[5,6,7]
9550 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm18, %ymm11
9551 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm28, %ymm12
9552 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm12[0,1],ymm11[2],ymm12[3],ymm11[4],ymm12[5,6],ymm11[7],ymm12[8,9],ymm11[10],ymm12[11],ymm11[12],ymm12[13,14],ymm11[15]
9553 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm2, %xmm4
9554 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm4[3,4],xmm2[5,6,7]
9555 ; AVX512DQ-FCP-NEXT: vpshufb %xmm3, %xmm2, %xmm2
9556 ; AVX512DQ-FCP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7, %ymm3 # 32-byte Folded Reload
9557 ; AVX512DQ-FCP-NEXT: # ymm3 = ymm7[0],mem[1],ymm7[2,3],mem[4],ymm7[5],mem[6],ymm7[7,8],mem[9],ymm7[10,11],mem[12],ymm7[13],mem[14],ymm7[15]
9558 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm7, %ymm16
9559 ; AVX512DQ-FCP-NEXT: vpermd %ymm3, %ymm25, %ymm3
9560 ; AVX512DQ-FCP-NEXT: vpshufb %ymm1, %ymm3, %ymm1
9561 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3,4,5,6,7]
9562 ; AVX512DQ-FCP-NEXT: vpermd %ymm17, %ymm5, %ymm2
9563 ; AVX512DQ-FCP-NEXT: vpshufb %ymm9, %ymm2, %ymm2
9564 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm2 = zmm2 ^ (zmm15 & (zmm2 ^ zmm1))
9565 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm28
9566 ; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm31, %xmm15
9567 ; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm20, %xmm7
9568 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm1 = xmm15[0],xmm7[1],xmm15[2,3]
9569 ; AVX512DQ-FCP-NEXT: vmovq {{.*#+}} xmm0 = [6,7,0,1,10,11,0,0,0,0,0,0,0,0,0,0]
9570 ; AVX512DQ-FCP-NEXT: vpshufb %xmm0, %xmm1, %xmm2
9571 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm18 = [1,3,6,0,5,0,0,0]
9572 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm30, %ymm9
9573 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm26, %ymm10
9574 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm10[0,1],ymm9[2],ymm10[3],ymm9[4],ymm10[5,6],ymm9[7],ymm10[8,9],ymm9[10],ymm10[11],ymm9[12],ymm10[13,14],ymm9[15]
9575 ; AVX512DQ-FCP-NEXT: vpermd %ymm1, %ymm18, %ymm4
9576 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm1 = [2,3,2,3,4,5,0,1,6,7,8,9,14,15,4,5,18,19,18,19,20,21,16,17,22,23,24,25,30,31,20,21]
9577 ; AVX512DQ-FCP-NEXT: vpshufb %ymm1, %ymm4, %ymm4
9578 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm4[3,4,5,6,7]
9579 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
9580 ; AVX512DQ-FCP-NEXT: vbroadcasti128 {{.*#+}} ymm4 = [2,4,7,0,2,4,7,0]
9581 ; AVX512DQ-FCP-NEXT: # ymm4 = mem[0,1,0,1]
9582 ; AVX512DQ-FCP-NEXT: vpermd %ymm23, %ymm4, %ymm5
9583 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,6,7,4,5,6,7,8,9,0,1,6,7,8,9,16,17,22,23,20,21,22,23,24,25,16,17,22,23,24,25]
9584 ; AVX512DQ-FCP-NEXT: vpshufb %ymm3, %ymm5, %ymm5
9585 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm5, %zmm25
9586 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm2 = xmm6[0],xmm14[1],xmm6[2,3]
9587 ; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm27, %xmm22
9588 ; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm14, %xmm31
9589 ; AVX512DQ-FCP-NEXT: vpshufb %xmm0, %xmm2, %xmm0
9590 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm8[0,1],ymm13[2],ymm8[3],ymm13[4],ymm8[5,6],ymm13[7],ymm8[8,9],ymm13[10],ymm8[11],ymm13[12],ymm8[13,14],ymm13[15]
9591 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm13, %ymm20
9592 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm8, %ymm30
9593 ; AVX512DQ-FCP-NEXT: vpermd %ymm2, %ymm18, %ymm2
9594 ; AVX512DQ-FCP-NEXT: vpshufb %ymm1, %ymm2, %ymm1
9595 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm1[3,4,5,6,7]
9596 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
9597 ; AVX512DQ-FCP-NEXT: vpermd %ymm17, %ymm4, %ymm1
9598 ; AVX512DQ-FCP-NEXT: vpshufb %ymm3, %ymm1, %ymm1
9599 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm24
9600 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm0 = xmm15[0,1],xmm7[2],xmm15[3]
9601 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm1 = ymm9[0],ymm10[1,2],ymm9[3],ymm10[4],ymm9[5],ymm10[6,7],ymm9[8],ymm10[9,10],ymm9[11],ymm10[12],ymm9[13],ymm10[14,15]
9602 ; AVX512DQ-FCP-NEXT: vmovq {{.*#+}} xmm2 = [8,9,2,3,12,13,0,0,0,0,0,0,0,0,0,0]
9603 ; AVX512DQ-FCP-NEXT: vpshufb %xmm2, %xmm0, %xmm3
9604 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm27 = [1,4,6,3,6,0,0,0]
9605 ; AVX512DQ-FCP-NEXT: vpermd %ymm1, %ymm27, %ymm1
9606 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm0 = [0,1,2,3,4,5,2,3,4,5,10,11,0,1,14,15,16,17,18,19,20,21,18,19,20,21,26,27,16,17,30,31]
9607 ; AVX512DQ-FCP-NEXT: vpshufb %ymm0, %ymm1, %ymm1
9608 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1,2],xmm1[3,4,5,6,7]
9609 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm1[4,5,6,7]
9610 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm26 = [0,2,1,3,0,2,5,7]
9611 ; AVX512DQ-FCP-NEXT: vpermd %ymm23, %ymm26, %ymm3
9612 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,6,7,4,5,6,7,0,1,6,7,8,9,14,15,16,17,22,23,20,21,22,23,16,17,22,23,24,25,30,31]
9613 ; AVX512DQ-FCP-NEXT: vpshufb %ymm4, %ymm3, %ymm3
9614 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm4, %ymm19
9615 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm1, %zmm3, %zmm1
9616 ; AVX512DQ-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
9617 ; AVX512DQ-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
9618 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm14[1,2],ymm4[3],ymm14[4],ymm4[5],ymm14[6,7],ymm4[8],ymm14[9,10],ymm4[11],ymm14[12],ymm4[13],ymm14[14,15]
9619 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm4, %ymm18
9620 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm3, %xmm4
9621 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm3 = xmm4[0],xmm3[1],xmm4[2],xmm3[3]
9622 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm4 = [0,3,0,0,5,0,2,7]
9623 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm21, %ymm15
9624 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm29, %ymm10
9625 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm5 = ymm10[0],ymm15[1],ymm10[2],ymm15[3],ymm10[4,5],ymm15[6],ymm10[7,8],ymm15[9],ymm10[10],ymm15[11],ymm10[12,13],ymm15[14],ymm10[15]
9626 ; AVX512DQ-FCP-NEXT: vpermd %ymm5, %ymm4, %ymm5
9627 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm6 = [2,3,4,5,10,11,0,1,14,15,14,15,2,3,4,5,18,19,20,21,26,27,16,17,30,31,30,31,18,19,20,21]
9628 ; AVX512DQ-FCP-NEXT: vpshufb %ymm6, %ymm5, %ymm5
9629 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm7 = [6,7,0,1,10,11,4,5,14,15,8,9,12,13,14,15]
9630 ; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm3, %xmm3
9631 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2],ymm5[3,4,5,6,7]
9632 ; AVX512DQ-FCP-NEXT: vpmovsxdq {{.*#+}} zmm5 = [18446744073709551615,18446744073709551615,18446744073709551615,65535,0,0,18446744073709486080,18446744073709551615]
9633 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm25 = zmm25 ^ (zmm5 & (zmm25 ^ zmm3))
9634 ; AVX512DQ-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
9635 ; AVX512DQ-FCP-NEXT: vpblendw $82, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3, %ymm3 # 32-byte Folded Reload
9636 ; AVX512DQ-FCP-NEXT: # ymm3 = ymm3[0],mem[1],ymm3[2,3],mem[4],ymm3[5],mem[6],ymm3[7,8],mem[9],ymm3[10,11],mem[12],ymm3[13],mem[14],ymm3[15]
9637 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm3, %xmm8
9638 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm8 = xmm3[0],xmm8[1,2,3],xmm3[4,5],xmm8[6,7]
9639 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,0,1,10,11,4,5,14,15,8,9,2,3,12,13]
9640 ; AVX512DQ-FCP-NEXT: vpshufb %xmm3, %xmm8, %xmm8
9641 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm8, %ymm0, %ymm8
9642 ; AVX512DQ-FCP-NEXT: vextracti64x4 $1, %zmm25, %ymm9
9643 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm9[0],ymm8[1,2,3,4,5,6,7],ymm9[8],ymm8[9,10,11,12,13,14,15]
9644 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm8 = ymm9[0,1,2,3],ymm8[4,5,6,7]
9645 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm8, %zmm25, %zmm21
9646 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm11[0],ymm12[1,2],ymm11[3],ymm12[4],ymm11[5],ymm12[6,7],ymm11[8],ymm12[9,10],ymm11[11],ymm12[12],ymm11[13],ymm12[14,15]
9647 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm12, %ymm23
9648 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm11, %ymm25
9649 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm8, %xmm9
9650 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm8 = xmm9[0],xmm8[1],xmm9[2],xmm8[3]
9651 ; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm8, %xmm7
9652 ; AVX512DQ-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
9653 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm16, %ymm12
9654 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm8 = ymm13[0],ymm12[1],ymm13[2],ymm12[3],ymm13[4,5],ymm12[6],ymm13[7,8],ymm12[9],ymm13[10],ymm12[11],ymm13[12,13],ymm12[14],ymm13[15]
9655 ; AVX512DQ-FCP-NEXT: vpermd %ymm8, %ymm4, %ymm4
9656 ; AVX512DQ-FCP-NEXT: vpshufb %ymm6, %ymm4, %ymm4
9657 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm4 = ymm7[0,1,2],ymm4[3,4,5,6,7]
9658 ; AVX512DQ-FCP-NEXT: vpternlogq {{.*#+}} zmm24 = zmm24 ^ (zmm5 & (zmm24 ^ zmm4))
9659 ; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm22, %xmm4
9660 ; AVX512DQ-FCP-NEXT: vmovdqa64 %xmm31, %xmm5
9661 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} xmm4 = xmm4[0,1],xmm5[2],xmm4[3]
9662 ; AVX512DQ-FCP-NEXT: vpshufb %xmm2, %xmm4, %xmm2
9663 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm20, %ymm4
9664 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm30, %ymm5
9665 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm4 = ymm4[0],ymm5[1,2],ymm4[3],ymm5[4],ymm4[5],ymm5[6,7],ymm4[8],ymm5[9,10],ymm4[11],ymm5[12],ymm4[13],ymm5[14,15]
9666 ; AVX512DQ-FCP-NEXT: vpermd %ymm4, %ymm27, %ymm4
9667 ; AVX512DQ-FCP-NEXT: vpshufb %ymm0, %ymm4, %ymm0
9668 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm0[3,4,5,6,7]
9669 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
9670 ; AVX512DQ-FCP-NEXT: vpermd %ymm17, %ymm26, %ymm2
9671 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm19, %ymm4
9672 ; AVX512DQ-FCP-NEXT: vpshufb %ymm4, %ymm2, %ymm2
9673 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
9674 ; AVX512DQ-FCP-NEXT: vmovdqu (%rsp), %ymm9 # 32-byte Reload
9675 ; AVX512DQ-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Reload
9676 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm11[0],ymm9[1],ymm11[2,3],ymm9[4],ymm11[5],ymm9[6],ymm11[7,8],ymm9[9],ymm11[10,11],ymm9[12],ymm11[13],ymm9[14],ymm11[15]
9677 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm2, %xmm4
9678 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0],xmm4[1,2,3],xmm2[4,5],xmm4[6,7]
9679 ; AVX512DQ-FCP-NEXT: vpshufb %xmm3, %xmm2, %xmm2
9680 ; AVX512DQ-FCP-NEXT: vextracti64x4 $1, %zmm24, %ymm3
9681 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
9682 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm2 = ymm3[0],ymm2[1,2,3,4,5,6,7],ymm3[8],ymm2[9,10,11,12,13,14,15]
9683 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
9684 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm2, %zmm24, %zmm2
9685 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm10[0,1],ymm15[2],ymm10[3],ymm15[4],ymm10[5,6],ymm15[7],ymm10[8,9],ymm15[10],ymm10[11],ymm15[12],ymm10[13,14],ymm15[15]
9686 ; AVX512DQ-FCP-NEXT: vpmovsxbd {{.*#+}} ymm4 = [1,3,0,0,6,0,3,5]
9687 ; AVX512DQ-FCP-NEXT: vpermd %ymm3, %ymm4, %ymm3
9688 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,6,7,8,9,14,15,0,1,6,7,0,1,6,7,16,17,22,23,24,25,30,31,16,17,22,23,16,17,22,23]
9689 ; AVX512DQ-FCP-NEXT: vpshufb %ymm5, %ymm3, %ymm3
9690 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm18, %ymm6
9691 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm14[0],ymm6[1],ymm14[2,3],ymm6[4],ymm14[5],ymm6[6],ymm14[7,8],ymm6[9],ymm14[10,11],ymm6[12],ymm14[13],ymm6[14],ymm14[15]
9692 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm6, %xmm7
9693 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm7[0,1,2],xmm6[3,4],xmm7[5,6,7]
9694 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm7 = [8,9,2,3,12,13,6,7,0,1,10,11,0,1,6,7]
9695 ; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm6, %xmm6
9696 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm6[0,1,2],ymm3[3,4,5,6,7]
9697 ; AVX512DQ-FCP-NEXT: movb $7, %al
9698 ; AVX512DQ-FCP-NEXT: kmovw %eax, %k1
9699 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm1 {%k1}
9700 ; AVX512DQ-FCP-NEXT: vextracti64x4 $1, %zmm1, %ymm3
9701 ; AVX512DQ-FCP-NEXT: vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
9702 ; AVX512DQ-FCP-NEXT: vpblendw $181, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6, %ymm6 # 32-byte Folded Reload
9703 ; AVX512DQ-FCP-NEXT: # ymm6 = mem[0],ymm6[1],mem[2],ymm6[3],mem[4,5],ymm6[6],mem[7,8],ymm6[9],mem[10],ymm6[11],mem[12,13],ymm6[14],mem[15]
9704 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm6, %xmm8
9705 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm6 = xmm6[0,1],xmm8[2,3],xmm6[4,5,6],xmm8[7]
9706 ; AVX512DQ-FCP-NEXT: vmovdqa {{.*#+}} xmm8 = [6,7,2,3,12,13,6,7,0,1,10,11,4,5,14,15]
9707 ; AVX512DQ-FCP-NEXT: vpshufb %xmm8, %xmm6, %xmm6
9708 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm6
9709 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm3[0],ymm6[1,2,3,4,5,6,7],ymm3[8],ymm6[9,10,11,12,13,14,15]
9710 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3],ymm6[4,5,6,7]
9711 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm3, %zmm1, %zmm1
9712 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm23, %ymm3
9713 ; AVX512DQ-FCP-NEXT: vmovdqa64 %ymm25, %ymm6
9714 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm3[0],ymm6[1],ymm3[2,3],ymm6[4],ymm3[5],ymm6[6],ymm3[7,8],ymm6[9],ymm3[10,11],ymm6[12],ymm3[13],ymm6[14],ymm3[15]
9715 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm3, %xmm6
9716 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm6[0,1,2],xmm3[3,4],xmm6[5,6,7]
9717 ; AVX512DQ-FCP-NEXT: vpshufb %xmm7, %xmm3, %xmm3
9718 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm6 = ymm13[0,1],ymm12[2],ymm13[3],ymm12[4],ymm13[5,6],ymm12[7],ymm13[8,9],ymm12[10],ymm13[11],ymm12[12],ymm13[13,14],ymm12[15]
9719 ; AVX512DQ-FCP-NEXT: vpermd %ymm6, %ymm4, %ymm4
9720 ; AVX512DQ-FCP-NEXT: vpshufb %ymm5, %ymm4, %ymm4
9721 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm3[0,1,2],ymm4[3,4,5,6,7]
9722 ; AVX512DQ-FCP-NEXT: vinserti64x4 $0, %ymm3, %zmm0, %zmm0 {%k1}
9723 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm9[0],ymm11[1],ymm9[2],ymm11[3],ymm9[4,5],ymm11[6],ymm9[7,8],ymm11[9],ymm9[10],ymm11[11],ymm9[12,13],ymm11[14],ymm9[15]
9724 ; AVX512DQ-FCP-NEXT: vextracti128 $1, %ymm3, %xmm4
9725 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm4[2,3],xmm3[4,5,6],xmm4[7]
9726 ; AVX512DQ-FCP-NEXT: vpshufb %xmm8, %xmm3, %xmm3
9727 ; AVX512DQ-FCP-NEXT: vextracti64x4 $1, %zmm0, %ymm4
9728 ; AVX512DQ-FCP-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm3
9729 ; AVX512DQ-FCP-NEXT: vpblendw {{.*#+}} ymm3 = ymm4[0],ymm3[1,2,3,4,5,6,7],ymm4[8],ymm3[9,10,11,12,13,14,15]
9730 ; AVX512DQ-FCP-NEXT: vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
9731 ; AVX512DQ-FCP-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
9732 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9733 ; AVX512DQ-FCP-NEXT: vmovaps %zmm3, (%rsi)
9734 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9735 ; AVX512DQ-FCP-NEXT: vmovaps %zmm3, 64(%rsi)
9736 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9737 ; AVX512DQ-FCP-NEXT: vmovaps %zmm3, 64(%rdx)
9738 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9739 ; AVX512DQ-FCP-NEXT: vmovaps %zmm3, (%rdx)
9740 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm28, 64(%rcx)
9741 ; AVX512DQ-FCP-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
9742 ; AVX512DQ-FCP-NEXT: vmovaps %zmm3, (%rcx)
9743 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm2, 64(%r8)
9744 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm21, (%r8)
9745 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm0, 64(%r9)
9746 ; AVX512DQ-FCP-NEXT: vmovdqa64 %zmm1, (%r9)
9747 ; AVX512DQ-FCP-NEXT: addq $552, %rsp # imm = 0x228
9748 ; AVX512DQ-FCP-NEXT: vzeroupper
9749 ; AVX512DQ-FCP-NEXT: retq
9750 ;
9751 ; AVX512BW-LABEL: load_i16_stride5_vf64:
9752 ; AVX512BW: # %bb.0:
9753 ; AVX512BW-NEXT: vmovdqa64 576(%rdi), %zmm2
9754 ; AVX512BW-NEXT: vmovdqa64 384(%rdi), %zmm4
9755 ; AVX512BW-NEXT: vmovdqa64 320(%rdi), %zmm3
9756 ; AVX512BW-NEXT: vmovdqa64 512(%rdi), %zmm5
9757 ; AVX512BW-NEXT: vmovdqa64 448(%rdi), %zmm0
9758 ; AVX512BW-NEXT: vmovdqa64 256(%rdi), %zmm7
9759 ; AVX512BW-NEXT: vmovdqa64 (%rdi), %zmm9
9760 ; AVX512BW-NEXT: vmovdqa64 64(%rdi), %zmm11
9761 ; AVX512BW-NEXT: vmovdqa64 128(%rdi), %zmm1
9762 ; AVX512BW-NEXT: vmovdqa64 192(%rdi), %zmm10
9763 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11]
9764 ; AVX512BW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
9765 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm13
9766 ; AVX512BW-NEXT: vpermt2w %zmm10, %zmm12, %zmm13
9767 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
9768 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm8
9769 ; AVX512BW-NEXT: vpermt2w %zmm11, %zmm6, %zmm8
9770 ; AVX512BW-NEXT: movl $67100672, %eax # imm = 0x3FFE000
9771 ; AVX512BW-NEXT: kmovd %eax, %k1
9772 ; AVX512BW-NEXT: vmovdqu16 %zmm13, %zmm8 {%k1}
9773 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm13 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,34,39,44,49,54,59]
9774 ; AVX512BW-NEXT: vpermt2w %zmm7, %zmm13, %zmm8
9775 ; AVX512BW-NEXT: vpermi2w %zmm5, %zmm0, %zmm12
9776 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm6
9777 ; AVX512BW-NEXT: vmovdqu16 %zmm12, %zmm6 {%k1}
9778 ; AVX512BW-NEXT: vpermt2w %zmm2, %zmm13, %zmm6
9779 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44]
9780 ; AVX512BW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
9781 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm15
9782 ; AVX512BW-NEXT: vpermt2w %zmm1, %zmm14, %zmm15
9783 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm12 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
9784 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm13
9785 ; AVX512BW-NEXT: vpermt2w %zmm11, %zmm12, %zmm13
9786 ; AVX512BW-NEXT: vmovdqu16 %zmm15, %zmm13 {%k1}
9787 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm15 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,35,40,45,50,55,60]
9788 ; AVX512BW-NEXT: vpermt2w %zmm7, %zmm15, %zmm13
9789 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm5, %zmm14
9790 ; AVX512BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm12
9791 ; AVX512BW-NEXT: vmovdqu16 %zmm14, %zmm12 {%k1}
9792 ; AVX512BW-NEXT: vpermt2w %zmm2, %zmm15, %zmm12
9793 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45]
9794 ; AVX512BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
9795 ; AVX512BW-NEXT: vmovdqa64 %zmm10, %zmm17
9796 ; AVX512BW-NEXT: vpermt2w %zmm1, %zmm16, %zmm17
9797 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm14 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
9798 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm15
9799 ; AVX512BW-NEXT: vpermt2w %zmm9, %zmm14, %zmm15
9800 ; AVX512BW-NEXT: vmovdqu16 %zmm17, %zmm15 {%k1}
9801 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm17 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,36,41,46,51,56,61]
9802 ; AVX512BW-NEXT: vpermt2w %zmm7, %zmm17, %zmm15
9803 ; AVX512BW-NEXT: vpermi2w %zmm0, %zmm5, %zmm16
9804 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm4, %zmm14
9805 ; AVX512BW-NEXT: vmovdqu16 %zmm16, %zmm14 {%k1}
9806 ; AVX512BW-NEXT: vpermt2w %zmm2, %zmm17, %zmm14
9807 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14]
9808 ; AVX512BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
9809 ; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm17
9810 ; AVX512BW-NEXT: vpermt2w %zmm10, %zmm16, %zmm17
9811 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm18 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
9812 ; AVX512BW-NEXT: vmovdqa64 %zmm11, %zmm19
9813 ; AVX512BW-NEXT: vpermt2w %zmm9, %zmm18, %zmm19
9814 ; AVX512BW-NEXT: movl $33546240, %eax # imm = 0x1FFE000
9815 ; AVX512BW-NEXT: kmovd %eax, %k1
9816 ; AVX512BW-NEXT: vmovdqu16 %zmm17, %zmm19 {%k1}
9817 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm17 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,32,37,42,47,52,57,62]
9818 ; AVX512BW-NEXT: vpermt2w %zmm7, %zmm17, %zmm19
9819 ; AVX512BW-NEXT: vpermi2w %zmm5, %zmm0, %zmm16
9820 ; AVX512BW-NEXT: vpermi2w %zmm3, %zmm4, %zmm18
9821 ; AVX512BW-NEXT: vmovdqu16 %zmm16, %zmm18 {%k1}
9822 ; AVX512BW-NEXT: vpermt2w %zmm2, %zmm17, %zmm18
9823 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} ymm16 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
9824 ; AVX512BW-NEXT: vpermt2w %zmm11, %zmm16, %zmm9
9825 ; AVX512BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15]
9826 ; AVX512BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
9827 ; AVX512BW-NEXT: vpermt2w %zmm10, %zmm11, %zmm1
9828 ; AVX512BW-NEXT: movb $7, %al
9829 ; AVX512BW-NEXT: kmovd %eax, %k1
9830 ; AVX512BW-NEXT: vmovdqa64 %zmm9, %zmm1 {%k1}
9831 ; AVX512BW-NEXT: vpmovsxbw {{.*#+}} zmm9 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,33,38,43,48,53,58,63]
9832 ; AVX512BW-NEXT: vpermt2w %zmm7, %zmm9, %zmm1
9833 ; AVX512BW-NEXT: vpermt2w %zmm5, %zmm11, %zmm0
9834 ; AVX512BW-NEXT: vpermt2w %zmm4, %zmm16, %zmm3
9835 ; AVX512BW-NEXT: vmovdqa64 %zmm3, %zmm0 {%k1}
9836 ; AVX512BW-NEXT: vpermt2w %zmm2, %zmm9, %zmm0
9837 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 64(%rsi)
9838 ; AVX512BW-NEXT: vmovdqa64 %zmm8, (%rsi)
9839 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 64(%rdx)
9840 ; AVX512BW-NEXT: vmovdqa64 %zmm13, (%rdx)
9841 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 64(%rcx)
9842 ; AVX512BW-NEXT: vmovdqa64 %zmm15, (%rcx)
9843 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 64(%r8)
9844 ; AVX512BW-NEXT: vmovdqa64 %zmm19, (%r8)
9845 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 64(%r9)
9846 ; AVX512BW-NEXT: vmovdqa64 %zmm1, (%r9)
9847 ; AVX512BW-NEXT: vzeroupper
9848 ; AVX512BW-NEXT: retq
9849 ;
9850 ; AVX512BW-FCP-LABEL: load_i16_stride5_vf64:
9851 ; AVX512BW-FCP: # %bb.0:
9852 ; AVX512BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm2
9853 ; AVX512BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm4
9854 ; AVX512BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm3
9855 ; AVX512BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm5
9856 ; AVX512BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm0
9857 ; AVX512BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
9858 ; AVX512BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm9
9859 ; AVX512BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm11
9860 ; AVX512BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm1
9861 ; AVX512BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm10
9862 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11]
9863 ; AVX512BW-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
9864 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm13
9865 ; AVX512BW-FCP-NEXT: vpermt2w %zmm10, %zmm12, %zmm13
9866 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
9867 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm8
9868 ; AVX512BW-FCP-NEXT: vpermt2w %zmm11, %zmm6, %zmm8
9869 ; AVX512BW-FCP-NEXT: movl $67100672, %eax # imm = 0x3FFE000
9870 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
9871 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm13, %zmm8 {%k1}
9872 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm13 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,34,39,44,49,54,59]
9873 ; AVX512BW-FCP-NEXT: vpermt2w %zmm7, %zmm13, %zmm8
9874 ; AVX512BW-FCP-NEXT: vpermi2w %zmm5, %zmm0, %zmm12
9875 ; AVX512BW-FCP-NEXT: vpermi2w %zmm4, %zmm3, %zmm6
9876 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm12, %zmm6 {%k1}
9877 ; AVX512BW-FCP-NEXT: vpermt2w %zmm2, %zmm13, %zmm6
9878 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44]
9879 ; AVX512BW-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
9880 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm15
9881 ; AVX512BW-FCP-NEXT: vpermt2w %zmm1, %zmm14, %zmm15
9882 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm12 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
9883 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm13
9884 ; AVX512BW-FCP-NEXT: vpermt2w %zmm11, %zmm12, %zmm13
9885 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm15, %zmm13 {%k1}
9886 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm15 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,35,40,45,50,55,60]
9887 ; AVX512BW-FCP-NEXT: vpermt2w %zmm7, %zmm15, %zmm13
9888 ; AVX512BW-FCP-NEXT: vpermi2w %zmm0, %zmm5, %zmm14
9889 ; AVX512BW-FCP-NEXT: vpermi2w %zmm4, %zmm3, %zmm12
9890 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm14, %zmm12 {%k1}
9891 ; AVX512BW-FCP-NEXT: vpermt2w %zmm2, %zmm15, %zmm12
9892 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45]
9893 ; AVX512BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
9894 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm17
9895 ; AVX512BW-FCP-NEXT: vpermt2w %zmm1, %zmm16, %zmm17
9896 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm14 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
9897 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm15
9898 ; AVX512BW-FCP-NEXT: vpermt2w %zmm9, %zmm14, %zmm15
9899 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm17, %zmm15 {%k1}
9900 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm17 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,36,41,46,51,56,61]
9901 ; AVX512BW-FCP-NEXT: vpermt2w %zmm7, %zmm17, %zmm15
9902 ; AVX512BW-FCP-NEXT: vpermi2w %zmm0, %zmm5, %zmm16
9903 ; AVX512BW-FCP-NEXT: vpermi2w %zmm3, %zmm4, %zmm14
9904 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm16, %zmm14 {%k1}
9905 ; AVX512BW-FCP-NEXT: vpermt2w %zmm2, %zmm17, %zmm14
9906 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14]
9907 ; AVX512BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
9908 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm17
9909 ; AVX512BW-FCP-NEXT: vpermt2w %zmm10, %zmm16, %zmm17
9910 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm18 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
9911 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm19
9912 ; AVX512BW-FCP-NEXT: vpermt2w %zmm9, %zmm18, %zmm19
9913 ; AVX512BW-FCP-NEXT: movl $33546240, %eax # imm = 0x1FFE000
9914 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
9915 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm17, %zmm19 {%k1}
9916 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm17 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,32,37,42,47,52,57,62]
9917 ; AVX512BW-FCP-NEXT: vpermt2w %zmm7, %zmm17, %zmm19
9918 ; AVX512BW-FCP-NEXT: vpermi2w %zmm5, %zmm0, %zmm16
9919 ; AVX512BW-FCP-NEXT: vpermi2w %zmm3, %zmm4, %zmm18
9920 ; AVX512BW-FCP-NEXT: vmovdqu16 %zmm16, %zmm18 {%k1}
9921 ; AVX512BW-FCP-NEXT: vpermt2w %zmm2, %zmm17, %zmm18
9922 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm16 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
9923 ; AVX512BW-FCP-NEXT: vpermt2w %zmm11, %zmm16, %zmm9
9924 ; AVX512BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15]
9925 ; AVX512BW-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
9926 ; AVX512BW-FCP-NEXT: vpermt2w %zmm10, %zmm11, %zmm1
9927 ; AVX512BW-FCP-NEXT: movb $7, %al
9928 ; AVX512BW-FCP-NEXT: kmovd %eax, %k1
9929 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm1 {%k1}
9930 ; AVX512BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm9 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,33,38,43,48,53,58,63]
9931 ; AVX512BW-FCP-NEXT: vpermt2w %zmm7, %zmm9, %zmm1
9932 ; AVX512BW-FCP-NEXT: vpermt2w %zmm5, %zmm11, %zmm0
9933 ; AVX512BW-FCP-NEXT: vpermt2w %zmm4, %zmm16, %zmm3
9934 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm0 {%k1}
9935 ; AVX512BW-FCP-NEXT: vpermt2w %zmm2, %zmm9, %zmm0
9936 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm6, 64(%rsi)
9937 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm8, (%rsi)
9938 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm12, 64(%rdx)
9939 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm13, (%rdx)
9940 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm14, 64(%rcx)
9941 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm15, (%rcx)
9942 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%r8)
9943 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm19, (%r8)
9944 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm0, 64(%r9)
9945 ; AVX512BW-FCP-NEXT: vmovdqa64 %zmm1, (%r9)
9946 ; AVX512BW-FCP-NEXT: vzeroupper
9947 ; AVX512BW-FCP-NEXT: retq
9948 ;
9949 ; AVX512DQ-BW-LABEL: load_i16_stride5_vf64:
9950 ; AVX512DQ-BW: # %bb.0:
9951 ; AVX512DQ-BW-NEXT: vmovdqa64 576(%rdi), %zmm2
9952 ; AVX512DQ-BW-NEXT: vmovdqa64 384(%rdi), %zmm4
9953 ; AVX512DQ-BW-NEXT: vmovdqa64 320(%rdi), %zmm3
; AVX512DQ-BW-NEXT: vmovdqa64 512(%rdi), %zmm5
; AVX512DQ-BW-NEXT: vmovdqa64 448(%rdi), %zmm0
; AVX512DQ-BW-NEXT: vmovdqa64 256(%rdi), %zmm7
; AVX512DQ-BW-NEXT: vmovdqa64 (%rdi), %zmm9
; AVX512DQ-BW-NEXT: vmovdqa64 64(%rdi), %zmm11
; AVX512DQ-BW-NEXT: vmovdqa64 128(%rdi), %zmm1
; AVX512DQ-BW-NEXT: vmovdqa64 192(%rdi), %zmm10
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11]
; AVX512DQ-BW-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm13
; AVX512DQ-BW-NEXT: vpermt2w %zmm10, %zmm12, %zmm13
; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm8
; AVX512DQ-BW-NEXT: vpermt2w %zmm11, %zmm6, %zmm8
; AVX512DQ-BW-NEXT: movl $67100672, %eax # imm = 0x3FFE000
; AVX512DQ-BW-NEXT: kmovd %eax, %k1
; AVX512DQ-BW-NEXT: vmovdqu16 %zmm13, %zmm8 {%k1}
; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm13 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,34,39,44,49,54,59]
; AVX512DQ-BW-NEXT: vpermt2w %zmm7, %zmm13, %zmm8
; AVX512DQ-BW-NEXT: vpermi2w %zmm5, %zmm0, %zmm12
; AVX512DQ-BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm6
; AVX512DQ-BW-NEXT: vmovdqu16 %zmm12, %zmm6 {%k1}
; AVX512DQ-BW-NEXT: vpermt2w %zmm2, %zmm13, %zmm6
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44]
; AVX512DQ-BW-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512DQ-BW-NEXT: vpermt2w %zmm1, %zmm14, %zmm15
; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm12 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm13
; AVX512DQ-BW-NEXT: vpermt2w %zmm11, %zmm12, %zmm13
; AVX512DQ-BW-NEXT: vmovdqu16 %zmm15, %zmm13 {%k1}
; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm15 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,35,40,45,50,55,60]
; AVX512DQ-BW-NEXT: vpermt2w %zmm7, %zmm15, %zmm13
; AVX512DQ-BW-NEXT: vpermi2w %zmm0, %zmm5, %zmm14
; AVX512DQ-BW-NEXT: vpermi2w %zmm4, %zmm3, %zmm12
; AVX512DQ-BW-NEXT: vmovdqu16 %zmm14, %zmm12 {%k1}
; AVX512DQ-BW-NEXT: vpermt2w %zmm2, %zmm15, %zmm12
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45]
; AVX512DQ-BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm10, %zmm17
; AVX512DQ-BW-NEXT: vpermt2w %zmm1, %zmm16, %zmm17
; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm14 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm15
; AVX512DQ-BW-NEXT: vpermt2w %zmm9, %zmm14, %zmm15
; AVX512DQ-BW-NEXT: vmovdqu16 %zmm17, %zmm15 {%k1}
; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm17 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,36,41,46,51,56,61]
; AVX512DQ-BW-NEXT: vpermt2w %zmm7, %zmm17, %zmm15
; AVX512DQ-BW-NEXT: vpermi2w %zmm0, %zmm5, %zmm16
; AVX512DQ-BW-NEXT: vpermi2w %zmm3, %zmm4, %zmm14
; AVX512DQ-BW-NEXT: vmovdqu16 %zmm16, %zmm14 {%k1}
; AVX512DQ-BW-NEXT: vpermt2w %zmm2, %zmm17, %zmm14
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14]
; AVX512DQ-BW-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, %zmm17
; AVX512DQ-BW-NEXT: vpermt2w %zmm10, %zmm16, %zmm17
; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm18 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm11, %zmm19
; AVX512DQ-BW-NEXT: vpermt2w %zmm9, %zmm18, %zmm19
; AVX512DQ-BW-NEXT: movl $33546240, %eax # imm = 0x1FFE000
; AVX512DQ-BW-NEXT: kmovd %eax, %k1
; AVX512DQ-BW-NEXT: vmovdqu16 %zmm17, %zmm19 {%k1}
; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm17 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,32,37,42,47,52,57,62]
; AVX512DQ-BW-NEXT: vpermt2w %zmm7, %zmm17, %zmm19
; AVX512DQ-BW-NEXT: vpermi2w %zmm5, %zmm0, %zmm16
; AVX512DQ-BW-NEXT: vpermi2w %zmm3, %zmm4, %zmm18
; AVX512DQ-BW-NEXT: vmovdqu16 %zmm16, %zmm18 {%k1}
; AVX512DQ-BW-NEXT: vpermt2w %zmm2, %zmm17, %zmm18
; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} ymm16 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
; AVX512DQ-BW-NEXT: vpermt2w %zmm11, %zmm16, %zmm9
; AVX512DQ-BW-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15]
; AVX512DQ-BW-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-NEXT: vpermt2w %zmm10, %zmm11, %zmm1
; AVX512DQ-BW-NEXT: movb $7, %al
; AVX512DQ-BW-NEXT: kmovd %eax, %k1
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm9, %zmm1 {%k1}
; AVX512DQ-BW-NEXT: vpmovsxbw {{.*#+}} zmm9 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,33,38,43,48,53,58,63]
; AVX512DQ-BW-NEXT: vpermt2w %zmm7, %zmm9, %zmm1
; AVX512DQ-BW-NEXT: vpermt2w %zmm5, %zmm11, %zmm0
; AVX512DQ-BW-NEXT: vpermt2w %zmm4, %zmm16, %zmm3
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm3, %zmm0 {%k1}
; AVX512DQ-BW-NEXT: vpermt2w %zmm2, %zmm9, %zmm0
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm6, 64(%rsi)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm8, (%rsi)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm12, 64(%rdx)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm13, (%rdx)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm14, 64(%rcx)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm15, (%rcx)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm18, 64(%r8)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm19, (%r8)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm0, 64(%r9)
; AVX512DQ-BW-NEXT: vmovdqa64 %zmm1, (%r9)
; AVX512DQ-BW-NEXT: vzeroupper
; AVX512DQ-BW-NEXT: retq
;
; AVX512DQ-BW-FCP-LABEL: load_i16_stride5_vf64:
; AVX512DQ-BW-FCP: # %bb.0:
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 576(%rdi), %zmm2
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 384(%rdi), %zmm4
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 320(%rdi), %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 512(%rdi), %zmm5
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 448(%rdi), %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 256(%rdi), %zmm7
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 (%rdi), %zmm9
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 64(%rdi), %zmm11
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 128(%rdi), %zmm1
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 192(%rdi), %zmm10
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm12 = [16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0,1,6,11]
; AVX512DQ-BW-FCP-NEXT: # zmm12 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm13
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm10, %zmm12, %zmm13
; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm6 = [0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm11, %zmm6, %zmm8
; AVX512DQ-BW-FCP-NEXT: movl $67100672, %eax # imm = 0x3FFE000
; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm13, %zmm8 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm13 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,34,39,44,49,54,59]
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm7, %zmm13, %zmm8
; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm5, %zmm0, %zmm12
; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm4, %zmm3, %zmm6
; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm12, %zmm6 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm2, %zmm13, %zmm6
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm14 = [49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0,34,39,44]
; AVX512DQ-BW-FCP-NEXT: # zmm14 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm15
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm1, %zmm14, %zmm15
; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm12 = [1,6,11,16,21,26,31,36,41,46,51,56,61,0,0,0]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm13
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm11, %zmm12, %zmm13
; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm15, %zmm13 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm15 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,35,40,45,50,55,60]
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm7, %zmm15, %zmm13
; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm0, %zmm5, %zmm14
; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm4, %zmm3, %zmm12
; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm14, %zmm12 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm2, %zmm15, %zmm12
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0,35,40,45]
; AVX512DQ-BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm10, %zmm17
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm1, %zmm16, %zmm17
; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm14 = [34,39,44,49,54,59,0,5,10,15,20,25,30,0,0,0]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm15
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm9, %zmm14, %zmm15
; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm17, %zmm15 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm17 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,36,41,46,51,56,61]
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm7, %zmm17, %zmm15
; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm0, %zmm5, %zmm16
; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm3, %zmm4, %zmm14
; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm16, %zmm14 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm2, %zmm17, %zmm14
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm16 = [19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0,4,9,14]
; AVX512DQ-BW-FCP-NEXT: # zmm16 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, %zmm17
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm10, %zmm16, %zmm17
; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm18 = [35,40,45,50,55,60,1,6,11,16,21,26,31,0,0,0]
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm11, %zmm19
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm9, %zmm18, %zmm19
; AVX512DQ-BW-FCP-NEXT: movl $33546240, %eax # imm = 0x1FFE000
; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm17, %zmm19 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm17 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,32,37,42,47,52,57,62]
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm7, %zmm17, %zmm19
; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm5, %zmm0, %zmm16
; AVX512DQ-BW-FCP-NEXT: vpermi2w %zmm3, %zmm4, %zmm18
; AVX512DQ-BW-FCP-NEXT: vmovdqu16 %zmm16, %zmm18 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm2, %zmm17, %zmm18
; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} ymm16 = [4,9,14,19,24,29,34,39,44,49,54,59,0,0,0,0]
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm11, %zmm16, %zmm9
; AVX512DQ-BW-FCP-NEXT: vbroadcasti64x4 {{.*#+}} zmm11 = [20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15,20,25,30,35,40,45,50,55,60,0,0,0,0,5,10,15]
; AVX512DQ-BW-FCP-NEXT: # zmm11 = mem[0,1,2,3,0,1,2,3]
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm10, %zmm11, %zmm1
; AVX512DQ-BW-FCP-NEXT: movb $7, %al
; AVX512DQ-BW-FCP-NEXT: kmovd %eax, %k1
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm9, %zmm1 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpmovsxbw {{.*#+}} zmm9 = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,33,38,43,48,53,58,63]
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm7, %zmm9, %zmm1
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm5, %zmm11, %zmm0
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm4, %zmm16, %zmm3
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm3, %zmm0 {%k1}
; AVX512DQ-BW-FCP-NEXT: vpermt2w %zmm2, %zmm9, %zmm0
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm6, 64(%rsi)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm8, (%rsi)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm12, 64(%rdx)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm13, (%rdx)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm14, 64(%rcx)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm15, (%rcx)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm18, 64(%r8)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm19, (%r8)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm0, 64(%r9)
; AVX512DQ-BW-FCP-NEXT: vmovdqa64 %zmm1, (%r9)
; AVX512DQ-BW-FCP-NEXT: vzeroupper
; AVX512DQ-BW-FCP-NEXT: retq
  %wide.vec = load <320 x i16>, ptr %in.vec, align 64
  %strided.vec0 = shufflevector <320 x i16> %wide.vec, <320 x i16> poison, <64 x i32> <i32 0, i32 5, i32 10, i32 15, i32 20, i32 25, i32 30, i32 35, i32 40, i32 45, i32 50, i32 55, i32 60, i32 65, i32 70, i32 75, i32 80, i32 85, i32 90, i32 95, i32 100, i32 105, i32 110, i32 115, i32 120, i32 125, i32 130, i32 135, i32 140, i32 145, i32 150, i32 155, i32 160, i32 165, i32 170, i32 175, i32 180, i32 185, i32 190, i32 195, i32 200, i32 205, i32 210, i32 215, i32 220, i32 225, i32 230, i32 235, i32 240, i32 245, i32 250, i32 255, i32 260, i32 265, i32 270, i32 275, i32 280, i32 285, i32 290, i32 295, i32 300, i32 305, i32 310, i32 315>
  %strided.vec1 = shufflevector <320 x i16> %wide.vec, <320 x i16> poison, <64 x i32> <i32 1, i32 6, i32 11, i32 16, i32 21, i32 26, i32 31, i32 36, i32 41, i32 46, i32 51, i32 56, i32 61, i32 66, i32 71, i32 76, i32 81, i32 86, i32 91, i32 96, i32 101, i32 106, i32 111, i32 116, i32 121, i32 126, i32 131, i32 136, i32 141, i32 146, i32 151, i32 156, i32 161, i32 166, i32 171, i32 176, i32 181, i32 186, i32 191, i32 196, i32 201, i32 206, i32 211, i32 216, i32 221, i32 226, i32 231, i32 236, i32 241, i32 246, i32 251, i32 256, i32 261, i32 266, i32 271, i32 276, i32 281, i32 286, i32 291, i32 296, i32 301, i32 306, i32 311, i32 316>
  %strided.vec2 = shufflevector <320 x i16> %wide.vec, <320 x i16> poison, <64 x i32> <i32 2, i32 7, i32 12, i32 17, i32 22, i32 27, i32 32, i32 37, i32 42, i32 47, i32 52, i32 57, i32 62, i32 67, i32 72, i32 77, i32 82, i32 87, i32 92, i32 97, i32 102, i32 107, i32 112, i32 117, i32 122, i32 127, i32 132, i32 137, i32 142, i32 147, i32 152, i32 157, i32 162, i32 167, i32 172, i32 177, i32 182, i32 187, i32 192, i32 197, i32 202, i32 207, i32 212, i32 217, i32 222, i32 227, i32 232, i32 237, i32 242, i32 247, i32 252, i32 257, i32 262, i32 267, i32 272, i32 277, i32 282, i32 287, i32 292, i32 297, i32 302, i32 307, i32 312, i32 317>
  %strided.vec3 = shufflevector <320 x i16> %wide.vec, <320 x i16> poison, <64 x i32> <i32 3, i32 8, i32 13, i32 18, i32 23, i32 28, i32 33, i32 38, i32 43, i32 48, i32 53, i32 58, i32 63, i32 68, i32 73, i32 78, i32 83, i32 88, i32 93, i32 98, i32 103, i32 108, i32 113, i32 118, i32 123, i32 128, i32 133, i32 138, i32 143, i32 148, i32 153, i32 158, i32 163, i32 168, i32 173, i32 178, i32 183, i32 188, i32 193, i32 198, i32 203, i32 208, i32 213, i32 218, i32 223, i32 228, i32 233, i32 238, i32 243, i32 248, i32 253, i32 258, i32 263, i32 268, i32 273, i32 278, i32 283, i32 288, i32 293, i32 298, i32 303, i32 308, i32 313, i32 318>
  %strided.vec4 = shufflevector <320 x i16> %wide.vec, <320 x i16> poison, <64 x i32> <i32 4, i32 9, i32 14, i32 19, i32 24, i32 29, i32 34, i32 39, i32 44, i32 49, i32 54, i32 59, i32 64, i32 69, i32 74, i32 79, i32 84, i32 89, i32 94, i32 99, i32 104, i32 109, i32 114, i32 119, i32 124, i32 129, i32 134, i32 139, i32 144, i32 149, i32 154, i32 159, i32 164, i32 169, i32 174, i32 179, i32 184, i32 189, i32 194, i32 199, i32 204, i32 209, i32 214, i32 219, i32 224, i32 229, i32 234, i32 239, i32 244, i32 249, i32 254, i32 259, i32 264, i32 269, i32 274, i32 279, i32 284, i32 289, i32 294, i32 299, i32 304, i32 309, i32 314, i32 319>
  store <64 x i16> %strided.vec0, ptr %out.vec0, align 64
  store <64 x i16> %strided.vec1, ptr %out.vec1, align 64
  store <64 x i16> %strided.vec2, ptr %out.vec2, align 64
  store <64 x i16> %strided.vec3, ptr %out.vec3, align 64
  store <64 x i16> %strided.vec4, ptr %out.vec4, align 64