; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX512,AVX512F-ONLY,AVX512F-SLOW,FALLBACK0
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512,AVX512F-ONLY,AVX512F-FAST,FALLBACK1
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq | FileCheck %s --check-prefixes=AVX512,AVX512DQ,AVX512DQ-SLOW,FALLBACK2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512dq,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512,AVX512DQ,AVX512DQ-FAST,FALLBACK3
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw | FileCheck %s --check-prefixes=AVX512,AVX512BW,AVX512BW-ONLY,AVX512BW-SLOW,FALLBACK4
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512bw,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512,AVX512BW,AVX512BW-ONLY,AVX512BW-FAST,FALLBACK5
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512vbmi | FileCheck %s --check-prefixes=AVX512,AVX512BW,AVX512VBMI-ONLY,AVX512VBMI-SLOW,FALLBACK6
; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512vl,+avx512vbmi,+fast-variable-crosslane-shuffle,+fast-variable-perlane-shuffle | FileCheck %s --check-prefixes=AVX512,AVX512BW,AVX512VBMI-ONLY,AVX512VBMI-FAST,FALLBACK7
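
; Each test loads a mask from %in.maskvec, replicates every mask bit
; "factor" times, masked-loads i32 data from %in.vec under the widened
; mask, and stores the loaded data to %out.vec.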
define void @mask_replication_factor2_vf2(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
; AVX512F-ONLY-LABEL: mask_replication_factor2_vf2:
; AVX512F-ONLY: # %bb.0:
; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
; AVX512F-ONLY-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512F-ONLY-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vpmovsxdq %xmm0, %xmm0
; AVX512F-ONLY-NEXT: vptestmd %xmm0, %xmm0, %k1
; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %xmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa %xmm0, (%rdx)
; AVX512F-ONLY-NEXT: retq
;
; AVX512DQ-LABEL: mask_replication_factor2_vf2:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: kmovw (%rdi), %k0
; AVX512DQ-NEXT: vpmovm2d %k0, %xmm0
; AVX512DQ-NEXT: vpmovsxdq %xmm0, %xmm0
; AVX512DQ-NEXT: vpmovd2m %xmm0, %k1
; AVX512DQ-NEXT: vmovdqa32 (%rsi), %xmm0 {%k1} {z}
; AVX512DQ-NEXT: vmovdqa %xmm0, (%rdx)
; AVX512DQ-NEXT: retq
;
; AVX512BW-LABEL: mask_replication_factor2_vf2:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: kmovq (%rdi), %k1
; AVX512BW-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
; AVX512BW-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
; AVX512BW-NEXT: vpmovsxdq %xmm0, %xmm0
; AVX512BW-NEXT: vptestmd %xmm0, %xmm0, %k1
; AVX512BW-NEXT: vmovdqa32 (%rsi), %xmm0 {%k1} {z}
; AVX512BW-NEXT: vmovdqa %xmm0, (%rdx)
; AVX512BW-NEXT: retq
  %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
  %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <2 x i32> <i32 0, i32 1>
  %tgt.mask = shufflevector <2 x i1> %src.mask, <2 x i1> poison, <4 x i32> <i32 0, i32 0, i32 1, i32 1>
  %data = call <4 x i32> @llvm.masked.load.v4i32.p0(ptr %in.vec, i32 64, <4 x i1> %tgt.mask, <4 x i32> poison)
  %data.padded = shufflevector <4 x i32> %data, <4 x i32> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  store <4 x i32> %data, ptr %out.vec, align 64
  ret void
}
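
; Replication factor 2, <4 x i1> source mask -> <8 x i1> mask, <8 x i32> load.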
define void @mask_replication_factor2_vf4(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
; AVX512F-ONLY-LABEL: mask_replication_factor2_vf4:
; AVX512F-ONLY: # %bb.0:
; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
; AVX512F-ONLY-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
; AVX512F-ONLY-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vpmovsxdq %xmm0, %ymm0
; AVX512F-ONLY-NEXT: vptestmd %ymm0, %ymm0, %k1
; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa %ymm0, (%rdx)
; AVX512F-ONLY-NEXT: vzeroupper
; AVX512F-ONLY-NEXT: retq
;
; AVX512DQ-LABEL: mask_replication_factor2_vf4:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: kmovb (%rdi), %k0
; AVX512DQ-NEXT: vpmovm2d %k0, %ymm0
; AVX512DQ-NEXT: vpmovsxdq %xmm0, %ymm0
; AVX512DQ-NEXT: vpmovd2m %ymm0, %k1
; AVX512DQ-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
; AVX512DQ-NEXT: vmovdqa %ymm0, (%rdx)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512BW-LABEL: mask_replication_factor2_vf4:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: kmovw (%rdi), %k1
; AVX512BW-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
; AVX512BW-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
; AVX512BW-NEXT: vpmovsxdq %xmm0, %ymm0
; AVX512BW-NEXT: vptestmd %ymm0, %ymm0, %k1
; AVX512BW-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
; AVX512BW-NEXT: vmovdqa %ymm0, (%rdx)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
  %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %tgt.mask = shufflevector <4 x i1> %src.mask, <4 x i1> poison, <8 x i32> <i32 0, i32 0, i32 1, i32 1, i32 2, i32 2, i32 3, i32 3>
  %data = call <8 x i32> @llvm.masked.load.v8i32.p0(ptr %in.vec, i32 64, <8 x i1> %tgt.mask, <8 x i32> poison)
  %data.padded = shufflevector <8 x i32> %data, <8 x i32> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  store <8 x i32> %data, ptr %out.vec, align 64
  ret void
}
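
; Replication factor 2, <8 x i1> source mask -> <16 x i1> mask, <16 x i32> load.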
define void @mask_replication_factor2_vf8(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
; AVX512F-ONLY-LABEL: mask_replication_factor2_vf8:
; AVX512F-ONLY: # %bb.0:
; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512F-ONLY-NEXT: vzeroupper
; AVX512F-ONLY-NEXT: retq
;
; AVX512DQ-LABEL: mask_replication_factor2_vf8:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: kmovb (%rdi), %k0
; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512BW-LABEL: mask_replication_factor2_vf8:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: kmovw (%rdi), %k1
; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm0
; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1
; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
  %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %tgt.mask = shufflevector <8 x i1> %src.mask, <8 x i1> poison, <16 x i32> <i32 0, i32 0, i32 1, i32 1, i32 2, i32 2, i32 3, i32 3, i32 4, i32 4, i32 5, i32 5, i32 6, i32 6, i32 7, i32 7>
  %data = call <16 x i32> @llvm.masked.load.v16i32.p0(ptr %in.vec, i32 64, <16 x i1> %tgt.mask, <16 x i32> poison)
  store <16 x i32> %data, ptr %out.vec, align 64
  ret void
}
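
; Replication factor 2, <16 x i1> source mask -> <32 x i1> mask, <32 x i32> load.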
define void @mask_replication_factor2_vf16(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
; AVX512F-ONLY-LABEL: mask_replication_factor2_vf16:
; AVX512F-ONLY: # %bb.0:
; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k2
; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 64(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512F-ONLY-NEXT: vzeroupper
; AVX512F-ONLY-NEXT: retq
;
; AVX512DQ-LABEL: mask_replication_factor2_vf16:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: kmovw (%rdi), %k0
; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
; AVX512DQ-NEXT: vpmovd2m %zmm0, %k2
; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
; AVX512DQ-NEXT: vmovdqa64 %zmm1, 64(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512BW-LABEL: mask_replication_factor2_vf16:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: kmovw (%rdi), %k0
; AVX512BW-NEXT: vpmovm2w %k0, %zmm0
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512BW-NEXT: vpermw %zmm0, %zmm1, %zmm0
; AVX512BW-NEXT: vpmovw2m %zmm0, %k1
; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
; AVX512BW-NEXT: kshiftrd $16, %k1, %k1
; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%rdx)
; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
  %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %tgt.mask = shufflevector <16 x i1> %src.mask, <16 x i1> poison, <32 x i32> <i32 0, i32 0, i32 1, i32 1, i32 2, i32 2, i32 3, i32 3, i32 4, i32 4, i32 5, i32 5, i32 6, i32 6, i32 7, i32 7, i32 8, i32 8, i32 9, i32 9, i32 10, i32 10, i32 11, i32 11, i32 12, i32 12, i32 13, i32 13, i32 14, i32 14, i32 15, i32 15>
  %data = call <32 x i32> @llvm.masked.load.v32i32.p0(ptr %in.vec, i32 64, <32 x i1> %tgt.mask, <32 x i32> poison)
  store <32 x i32> %data, ptr %out.vec, align 64
  ret void
}
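
; Replication factor 2, <32 x i1> source mask -> <64 x i1> mask, <64 x i32> load.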
define void @mask_replication_factor2_vf32(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
; AVX512F-ONLY-LABEL: mask_replication_factor2_vf32:
; AVX512F-ONLY: # %bb.0:
; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k2
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k2} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm2
; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k2
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm2 = [8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k3
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k4
; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k4} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k3} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k2} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
; AVX512F-ONLY-NEXT: vzeroupper
; AVX512F-ONLY-NEXT: retq
;
; AVX512DQ-LABEL: mask_replication_factor2_vf32:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: kmovw (%rdi), %k0
; AVX512DQ-NEXT: kmovw 2(%rdi), %k1
; AVX512DQ-NEXT: vpmovm2d %k1, %zmm0
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm2
; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm2 = [8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512DQ-NEXT: vpmovd2m %zmm0, %k2
; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512DQ-NEXT: vpmovd2m %zmm1, %k3
; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512DQ-NEXT: vpmovd2m %zmm0, %k4
; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k4} {z}
; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm1 {%k3} {z}
; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k2} {z}
; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k1} {z}
; AVX512DQ-NEXT: vmovdqa64 %zmm3, 128(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm2, 192(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm1, (%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm0, 64(%rdx)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512BW-ONLY-LABEL: mask_replication_factor2_vf32:
; AVX512BW-ONLY: # %bb.0:
; AVX512BW-ONLY-NEXT: kmovq (%rdi), %k0
; AVX512BW-ONLY-NEXT: vpmovm2b %k0, %zmm0
; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,2,3,2,3]
; AVX512BW-ONLY-NEXT: vpshufb {{.*#+}} zmm0 = zmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,56,56,57,57,58,58,59,59,60,60,61,61,62,62,63,63]
; AVX512BW-ONLY-NEXT: vpmovb2m %zmm0, %k1
; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
; AVX512BW-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k2} {z}
; AVX512BW-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
; AVX512BW-ONLY-NEXT: kshiftrq $32, %k1, %k1
; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
; AVX512BW-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k2} {z}
; AVX512BW-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k1} {z}
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
; AVX512BW-ONLY-NEXT: vzeroupper
; AVX512BW-ONLY-NEXT: retq
;
; AVX512VBMI-ONLY-LABEL: mask_replication_factor2_vf32:
; AVX512VBMI-ONLY: # %bb.0:
; AVX512VBMI-ONLY-NEXT: kmovq (%rdi), %k0
; AVX512VBMI-ONLY-NEXT: vpmovm2b %k0, %zmm0
; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31]
; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm0
; AVX512VBMI-ONLY-NEXT: vpmovb2m %zmm0, %k1
; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
; AVX512VBMI-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k2} {z}
; AVX512VBMI-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k1, %k1
; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
; AVX512VBMI-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k2} {z}
; AVX512VBMI-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k1} {z}
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
; AVX512VBMI-ONLY-NEXT: vzeroupper
; AVX512VBMI-ONLY-NEXT: retq
  %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
  %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
  %tgt.mask = shufflevector <32 x i1> %src.mask, <32 x i1> poison, <64 x i32> <i32 0, i32 0, i32 1, i32 1, i32 2, i32 2, i32 3, i32 3, i32 4, i32 4, i32 5, i32 5, i32 6, i32 6, i32 7, i32 7, i32 8, i32 8, i32 9, i32 9, i32 10, i32 10, i32 11, i32 11, i32 12, i32 12, i32 13, i32 13, i32 14, i32 14, i32 15, i32 15, i32 16, i32 16, i32 17, i32 17, i32 18, i32 18, i32 19, i32 19, i32 20, i32 20, i32 21, i32 21, i32 22, i32 22, i32 23, i32 23, i32 24, i32 24, i32 25, i32 25, i32 26, i32 26, i32 27, i32 27, i32 28, i32 28, i32 29, i32 29, i32 30, i32 30, i32 31, i32 31>
  %data = call <64 x i32> @llvm.masked.load.v64i32.p0(ptr %in.vec, i32 64, <64 x i1> %tgt.mask, <64 x i32> poison)
  store <64 x i32> %data, ptr %out.vec, align 64
  ret void
}
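
; Replication factor 2, <64 x i1> source mask -> <128 x i1> mask, <128 x i32> load.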
define void @mask_replication_factor2_vf64(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
; AVX512F-ONLY-LABEL: mask_replication_factor2_vf64:
; AVX512F-ONLY: # %bb.0:
; AVX512F-ONLY-NEXT: kmovw (%rdi), %k3
; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k5
; AVX512F-ONLY-NEXT: kmovw 4(%rdi), %k4
; AVX512F-ONLY-NEXT: kmovw 6(%rdi), %k1
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm2
; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
; AVX512F-ONLY-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm2 = [8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k2
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k4} {z}
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm3
; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k4
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k6
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k5} {z}
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm3
; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k5
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k7
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k3} {z}
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k3
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k3} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k7} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k5} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k6} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k4} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k2} {z}
; AVX512F-ONLY-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 384(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 448(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 256(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 320(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
; AVX512F-ONLY-NEXT: vzeroupper
; AVX512F-ONLY-NEXT: retq
;
; AVX512DQ-LABEL: mask_replication_factor2_vf64:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: kmovw (%rdi), %k0
; AVX512DQ-NEXT: kmovw 2(%rdi), %k5
; AVX512DQ-NEXT: kmovw 4(%rdi), %k3
; AVX512DQ-NEXT: kmovw 6(%rdi), %k1
; AVX512DQ-NEXT: vpmovm2d %k1, %zmm0
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm2
; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
; AVX512DQ-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm2 = [8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512DQ-NEXT: vpmovd2m %zmm0, %k2
; AVX512DQ-NEXT: vpmovm2d %k3, %zmm0
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm3
; AVX512DQ-NEXT: vpmovd2m %zmm3, %k3
; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512DQ-NEXT: vpmovd2m %zmm0, %k4
; AVX512DQ-NEXT: vpmovm2d %k5, %zmm0
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm3
; AVX512DQ-NEXT: vpmovd2m %zmm3, %k5
; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512DQ-NEXT: vpmovd2m %zmm0, %k6
; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512DQ-NEXT: vpmovd2m %zmm1, %k7
; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm0
; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm1 {%k7} {z}
; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k6} {z}
; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k5} {z}
; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k4} {z}
; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k3} {z}
; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k2} {z}
; AVX512DQ-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k1} {z}
; AVX512DQ-NEXT: vmovdqa64 %zmm7, 384(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm6, 448(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm5, 256(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm4, 320(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm3, 128(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm2, 192(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm1, (%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm0, 64(%rdx)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512BW-ONLY-LABEL: mask_replication_factor2_vf64:
; AVX512BW-ONLY: # %bb.0:
; AVX512BW-ONLY-NEXT: kmovq (%rdi), %k0
; AVX512BW-ONLY-NEXT: vpmovm2b %k0, %zmm0
; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm0[4,5,4,5,6,7,6,7]
; AVX512BW-ONLY-NEXT: vbroadcasti64x4 {{.*#+}} zmm2 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
; AVX512BW-ONLY-NEXT: # zmm2 = mem[0,1,2,3,0,1,2,3]
; AVX512BW-ONLY-NEXT: vpshufb %zmm2, %zmm1, %zmm1
; AVX512BW-ONLY-NEXT: vpmovb2m %zmm1, %k1
; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,2,3,2,3]
; AVX512BW-ONLY-NEXT: vpshufb %zmm2, %zmm0, %zmm0
; AVX512BW-ONLY-NEXT: vpmovb2m %zmm0, %k2
; AVX512BW-ONLY-NEXT: kshiftrd $16, %k2, %k3
; AVX512BW-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k3} {z}
; AVX512BW-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
; AVX512BW-ONLY-NEXT: kshiftrq $32, %k2, %k2
; AVX512BW-ONLY-NEXT: kshiftrd $16, %k2, %k3
; AVX512BW-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k3} {z}
; AVX512BW-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k2} {z}
; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
; AVX512BW-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k2} {z}
; AVX512BW-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k1} {z}
; AVX512BW-ONLY-NEXT: kshiftrq $32, %k1, %k1
; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
; AVX512BW-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k2} {z}
; AVX512BW-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k1} {z}
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm7, 384(%rdx)
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm6, 448(%rdx)
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm5, 256(%rdx)
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm4, 320(%rdx)
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
; AVX512BW-ONLY-NEXT: vzeroupper
; AVX512BW-ONLY-NEXT: retq
;
; AVX512VBMI-ONLY-LABEL: mask_replication_factor2_vf64:
; AVX512VBMI-ONLY: # %bb.0:
; AVX512VBMI-ONLY-NEXT: kmovq (%rdi), %k0
; AVX512VBMI-ONLY-NEXT: vpmovm2b %k0, %zmm0
; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,43,43,44,44,45,45,46,46,47,47,48,48,49,49,50,50,51,51,52,52,53,53,54,54,55,55,56,56,57,57,58,58,59,59,60,60,61,61,62,62,63,63]
; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm1
; AVX512VBMI-ONLY-NEXT: vpmovb2m %zmm1, %k1
; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31]
; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm0
; AVX512VBMI-ONLY-NEXT: vpmovb2m %zmm0, %k2
; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k2, %k3
; AVX512VBMI-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k3} {z}
; AVX512VBMI-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k2, %k2
; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k2, %k3
; AVX512VBMI-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k3} {z}
; AVX512VBMI-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k2} {z}
; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
; AVX512VBMI-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k2} {z}
; AVX512VBMI-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k1} {z}
; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k1, %k1
; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
; AVX512VBMI-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k2} {z}
; AVX512VBMI-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k1} {z}
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm7, 384(%rdx)
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm6, 448(%rdx)
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm5, 256(%rdx)
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm4, 320(%rdx)
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
; AVX512VBMI-ONLY-NEXT: vzeroupper
; AVX512VBMI-ONLY-NEXT: retq
  %src.mask = load <64 x i1>, ptr %in.maskvec, align 64
  %tgt.mask = shufflevector <64 x i1> %src.mask, <64 x i1> poison, <128 x i32> <i32 0, i32 0, i32 1, i32 1, i32 2, i32 2, i32 3, i32 3, i32 4, i32 4, i32 5, i32 5, i32 6, i32 6, i32 7, i32 7, i32 8, i32 8, i32 9, i32 9, i32 10, i32 10, i32 11, i32 11, i32 12, i32 12, i32 13, i32 13, i32 14, i32 14, i32 15, i32 15, i32 16, i32 16, i32 17, i32 17, i32 18, i32 18, i32 19, i32 19, i32 20, i32 20, i32 21, i32 21, i32 22, i32 22, i32 23, i32 23, i32 24, i32 24, i32 25, i32 25, i32 26, i32 26, i32 27, i32 27, i32 28, i32 28, i32 29, i32 29, i32 30, i32 30, i32 31, i32 31, i32 32, i32 32, i32 33, i32 33, i32 34, i32 34, i32 35, i32 35, i32 36, i32 36, i32 37, i32 37, i32 38, i32 38, i32 39, i32 39, i32 40, i32 40, i32 41, i32 41, i32 42, i32 42, i32 43, i32 43, i32 44, i32 44, i32 45, i32 45, i32 46, i32 46, i32 47, i32 47, i32 48, i32 48, i32 49, i32 49, i32 50, i32 50, i32 51, i32 51, i32 52, i32 52, i32 53, i32 53, i32 54, i32 54, i32 55, i32 55, i32 56, i32 56, i32 57, i32 57, i32 58, i32 58, i32 59, i32 59, i32 60, i32 60, i32 61, i32 61, i32 62, i32 62, i32 63, i32 63>
  %data = call <128 x i32> @llvm.masked.load.v128i32.p0(ptr %in.vec, i32 64, <128 x i1> %tgt.mask, <128 x i32> poison)
  store <128 x i32> %data, ptr %out.vec, align 64
  ret void
}
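
; Replication factor 3, <2 x i1> source mask -> <6 x i1> mask, <6 x i32> load.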
define void @mask_replication_factor3_vf2(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
; AVX512F-ONLY-LABEL: mask_replication_factor3_vf2:
; AVX512F-ONLY: # %bb.0:
; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
; AVX512F-ONLY-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
; AVX512F-ONLY-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa {{.*#+}} ymm1 = <0,0,0,1,1,1,u,u>
; AVX512F-ONLY-NEXT: vpermd %ymm0, %ymm1, %ymm0
; AVX512F-ONLY-NEXT: vpslld $31, %ymm0, %ymm0
; AVX512F-ONLY-NEXT: movb $63, %al
; AVX512F-ONLY-NEXT: kmovw %eax, %k1
; AVX512F-ONLY-NEXT: vptestmd %ymm0, %ymm0, %k1 {%k1}
; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512F-ONLY-NEXT: vmovq %xmm1, 16(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa %xmm0, (%rdx)
; AVX512F-ONLY-NEXT: vzeroupper
; AVX512F-ONLY-NEXT: retq
;
; AVX512DQ-LABEL: mask_replication_factor3_vf2:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: kmovb (%rdi), %k0
; AVX512DQ-NEXT: vpmovm2d %k0, %ymm0
; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm1 = <0,0,0,1,1,1,u,u>
; AVX512DQ-NEXT: vpermd %ymm0, %ymm1, %ymm0
; AVX512DQ-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX512DQ-NEXT: movb $63, %al
; AVX512DQ-NEXT: kmovw %eax, %k1
; AVX512DQ-NEXT: vpcmpgtd %ymm0, %ymm1, %k1 {%k1}
; AVX512DQ-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
; AVX512DQ-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512DQ-NEXT: vmovq %xmm1, 16(%rdx)
; AVX512DQ-NEXT: vmovdqa %xmm0, (%rdx)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512BW-LABEL: mask_replication_factor3_vf2:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: kmovw (%rdi), %k1
; AVX512BW-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
; AVX512BW-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
; AVX512BW-NEXT: vmovdqa {{.*#+}} ymm1 = <0,0,0,1,1,1,u,u>
; AVX512BW-NEXT: vpermd %ymm0, %ymm1, %ymm0
; AVX512BW-NEXT: vpslld $31, %ymm0, %ymm0
; AVX512BW-NEXT: movb $63, %al
; AVX512BW-NEXT: kmovd %eax, %k1
; AVX512BW-NEXT: vptestmd %ymm0, %ymm0, %k1 {%k1}
; AVX512BW-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
; AVX512BW-NEXT: vextracti128 $1, %ymm0, %xmm1
; AVX512BW-NEXT: vmovq %xmm1, 16(%rdx)
; AVX512BW-NEXT: vmovdqa %xmm0, (%rdx)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
  %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <2 x i32> <i32 0, i32 1>
  %tgt.mask = shufflevector <2 x i1> %src.mask, <2 x i1> poison, <6 x i32> <i32 0, i32 0, i32 0, i32 1, i32 1, i32 1>
  %data = call <6 x i32> @llvm.masked.load.v6i32.p0(ptr %in.vec, i32 64, <6 x i1> %tgt.mask, <6 x i32> poison)
  %data.padded = shufflevector <6 x i32> %data, <6 x i32> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  store <6 x i32> %data, ptr %out.vec, align 64
  ret void
}
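
; Replication factor 3, <4 x i1> source mask -> <12 x i1> mask, <12 x i32> load.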
define void @mask_replication_factor3_vf4(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
; AVX512F-ONLY-LABEL: mask_replication_factor3_vf4:
; AVX512F-ONLY: # %bb.0:
; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,1,1,1,2,2,2,3,3,3,u,u,u,u>
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
; AVX512F-ONLY-NEXT: vpslld $31, %zmm0, %zmm0
; AVX512F-ONLY-NEXT: movw $4095, %ax # imm = 0xFFF
; AVX512F-ONLY-NEXT: kmovw %eax, %k1
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1 {%k1}
; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vextracti32x4 $2, %zmm0, 32(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa %ymm0, (%rdx)
; AVX512F-ONLY-NEXT: vzeroupper
; AVX512F-ONLY-NEXT: retq
;
; AVX512DQ-LABEL: mask_replication_factor3_vf4:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: kmovw (%rdi), %k0
; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,1,1,1,2,2,2,3,3,3,u,u,u,u>
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
; AVX512DQ-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX512DQ-NEXT: movw $4095, %ax # imm = 0xFFF
; AVX512DQ-NEXT: kmovw %eax, %k1
; AVX512DQ-NEXT: vpcmpgtd %zmm0, %zmm1, %k1 {%k1}
; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
; AVX512DQ-NEXT: vextracti32x4 $2, %zmm0, 32(%rdx)
; AVX512DQ-NEXT: vmovdqa %ymm0, (%rdx)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512BW-LABEL: mask_replication_factor3_vf4:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: kmovw (%rdi), %k1
; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,1,1,1,2,2,2,3,3,3,u,u,u,u>
; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm0
; AVX512BW-NEXT: vpslld $31, %zmm0, %zmm0
; AVX512BW-NEXT: movw $4095, %ax # imm = 0xFFF
; AVX512BW-NEXT: kmovd %eax, %k1
; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1 {%k1}
; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
; AVX512BW-NEXT: vextracti32x4 $2, %zmm0, 32(%rdx)
; AVX512BW-NEXT: vmovdqa %ymm0, (%rdx)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
  %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
  %tgt.mask = shufflevector <4 x i1> %src.mask, <4 x i1> poison, <12 x i32> <i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3>
  %data = call <12 x i32> @llvm.masked.load.v12i32.p0(ptr %in.vec, i32 64, <12 x i1> %tgt.mask, <12 x i32> poison)
  %data.padded = shufflevector <12 x i32> %data, <12 x i32> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 undef, i32 undef, i32 undef, i32 undef>
  store <12 x i32> %data, ptr %out.vec, align 64
  ret void
}
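
; Replication factor 3, <8 x i1> source mask -> <24 x i1> mask, <24 x i32> load.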
define void @mask_replication_factor3_vf8(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
; AVX512F-ONLY-LABEL: mask_replication_factor3_vf8:
; AVX512F-ONLY: # %bb.0:
; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,1,1,1,2,2,2,3,3,3,4,4,4,5]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k2} {z}
; AVX512F-ONLY-NEXT: movw $1, %ax
; AVX512F-ONLY-NEXT: kmovw %eax, %k2
; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm1 {%k2}
; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
; AVX512F-ONLY-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
; AVX512F-ONLY-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,6,6,6,7,7,7]
; AVX512F-ONLY-NEXT: vpermd %ymm0, %ymm1, %ymm0
; AVX512F-ONLY-NEXT: vptestmd %ymm0, %ymm0, %k1
; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa %ymm1, 64(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512F-ONLY-NEXT: vzeroupper
; AVX512F-ONLY-NEXT: retq
;
; AVX512DQ-LABEL: mask_replication_factor3_vf8:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: kmovb (%rdi), %k0
; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,1,1,1,2,2,2,3,3,3,4,4,4,5]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
; AVX512DQ-NEXT: vpmovm2d %k1, %zmm1
; AVX512DQ-NEXT: movw $1, %ax
; AVX512DQ-NEXT: kmovw %eax, %k1
; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
; AVX512DQ-NEXT: vpmovm2d %k0, %ymm0
; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,6,6,6,7,7,7]
; AVX512DQ-NEXT: vpermd %ymm0, %ymm1, %ymm0
; AVX512DQ-NEXT: vpmovd2m %ymm0, %k2
; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k2} {z}
; AVX512DQ-NEXT: vmovdqa %ymm1, 64(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512BW-LABEL: mask_replication_factor3_vf8:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: kmovw (%rdi), %k0
; AVX512BW-NEXT: vpmovm2w %k0, %zmm0
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,1,1,1,2,2,2,3,3,3,4,4,4,5,5,5,6,6,6,7,7,7,u,u,u,u,u,u,u,u>
; AVX512BW-NEXT: vpermw %zmm0, %zmm1, %zmm0
; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1
; AVX512BW-NEXT: movl $16777215, %eax # imm = 0xFFFFFF
; AVX512BW-NEXT: kmovd %eax, %k1
; AVX512BW-NEXT: vpcmpgtw %zmm0, %zmm1, %k1 {%k1}
; AVX512BW-NEXT: kshiftrd $16, %k1, %k2
; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k2} {z}
; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rdx)
; AVX512BW-NEXT: vmovdqa %ymm0, 64(%rdx)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
  %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  %tgt.mask = shufflevector <8 x i1> %src.mask, <8 x i1> poison, <24 x i32> <i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7>
  %data = call <24 x i32> @llvm.masked.load.v24i32.p0(ptr %in.vec, i32 64, <24 x i1> %tgt.mask, <24 x i32> poison)
  %data.padded = shufflevector <24 x i32> %data, <24 x i32> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
  store <24 x i32> %data, ptr %out.vec, align 64
  ret void
}
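
; Replication factor 3, <16 x i1> source mask -> <48 x i1> mask, <48 x i32> load.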
define void @mask_replication_factor3_vf16(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
; AVX512F-ONLY-LABEL: mask_replication_factor3_vf16:
; AVX512F-ONLY: # %bb.0:
; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,1,1,1,2,2,2,3,3,3,4,4,4,5]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
; AVX512F-ONLY-NEXT: movw $1, %ax
; AVX512F-ONLY-NEXT: kmovw %eax, %k1
; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [5,5,6,6,6,7,7,7,8,8,8,9,9,9,10,10]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [10,11,11,11,12,12,12,13,13,13,14,14,14,15,15,15]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k3
; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm1 {%k3} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm2 {%k2} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 64(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 128(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512F-ONLY-NEXT: vzeroupper
; AVX512F-ONLY-NEXT: retq
;
; AVX512DQ-LABEL: mask_replication_factor3_vf16:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: kmovw (%rdi), %k0
; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,1,1,1,2,2,2,3,3,3,4,4,4,5]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512DQ-NEXT: vpmovd2m %zmm1, %k0
; AVX512DQ-NEXT: vpmovm2d %k0, %zmm1
; AVX512DQ-NEXT: movw $1, %ax
; AVX512DQ-NEXT: kmovw %eax, %k1
; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [5,5,6,6,6,7,7,7,8,8,8,9,9,9,10,10]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512DQ-NEXT: vpmovd2m %zmm1, %k2
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [10,11,11,11,12,12,12,13,13,13,14,14,14,15,15,15]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
; AVX512DQ-NEXT: vpmovd2m %zmm0, %k3
; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm1 {%k3} {z}
; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm2 {%k2} {z}
; AVX512DQ-NEXT: vmovdqa64 %zmm2, 64(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm1, 128(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512BW-LABEL: mask_replication_factor3_vf16:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: kmovw (%rdi), %k1
; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,1,1,1,2,2,2,3,3,3,4,4,4,5]
; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512BW-NEXT: vptestmd %zmm1, %zmm1, %k1
; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [10,11,11,11,12,12,12,13,13,13,14,14,14,15,15,15]
; AVX512BW-NEXT: vpermd %zmm0, %zmm2, %zmm2
; AVX512BW-NEXT: vptestmd %zmm2, %zmm2, %k1
; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k1} {z}
; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [5,5,6,6,6,7,7,7,8,8,8,9,9,9,10,10]
; AVX512BW-NEXT: vpermd %zmm0, %zmm3, %zmm0
; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1
; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
; AVX512BW-NEXT: vmovdqa64 %zmm0, 64(%rdx)
; AVX512BW-NEXT: vmovdqa64 %zmm2, 128(%rdx)
; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rdx)
; AVX512BW-NEXT: vzeroupper
; AVX512BW-NEXT: retq
  %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
  %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
  %tgt.mask = shufflevector <16 x i1> %src.mask, <16 x i1> poison, <48 x i32> <i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15>
  %data = call <48 x i32> @llvm.masked.load.v48i32.p0(ptr %in.vec, i32 64, <48 x i1> %tgt.mask, <48 x i32> poison)
  store <48 x i32> %data, ptr %out.vec, align 64
  ret void
}
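
; Replication factor 3, <32 x i1> source mask -> <96 x i1> mask, <96 x i32> load.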
define void @mask_replication_factor3_vf32(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
; AVX512F-ONLY-LABEL: mask_replication_factor3_vf32:
; AVX512F-ONLY: # %bb.0:
; AVX512F-ONLY-NEXT: kmovw (%rdi), %k2
; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k2} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,1,1,1,2,2,2,3,3,3,4,4,4,5]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm2
; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k2
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k2} {z}
; AVX512F-ONLY-NEXT: movw $1, %ax
; AVX512F-ONLY-NEXT: kmovw %eax, %k2
; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm2 {%k2}
; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k3
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm2 = [5,5,6,6,6,7,7,7,8,8,8,9,9,9,10,10]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm3
; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k2
; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm3 = [10,11,11,11,12,12,12,13,13,13,14,14,14,15,15,15]
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm3, %zmm0
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k4
; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm1
; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k5
; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm3, %zmm0
; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k6
; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k3} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm1 {%k6} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm2 {%k5} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k1} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm4 {%k4} {z}
; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm5 {%k2} {z}
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 64(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 128(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 192(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 256(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 320(%rdx)
; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512F-ONLY-NEXT: vzeroupper
; AVX512F-ONLY-NEXT: retq
;
; AVX512DQ-LABEL: mask_replication_factor3_vf32:
; AVX512DQ: # %bb.0:
; AVX512DQ-NEXT: kmovw (%rdi), %k1
; AVX512DQ-NEXT: kmovw 2(%rdi), %k0
; AVX512DQ-NEXT: vpmovm2d %k1, %zmm0
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,1,1,1,2,2,2,3,3,3,4,4,4,5]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm2
; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
; AVX512DQ-NEXT: vpmovm2d %k1, %zmm2
; AVX512DQ-NEXT: movw $1, %ax
; AVX512DQ-NEXT: kmovw %eax, %k1
; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
; AVX512DQ-NEXT: vpmovd2m %zmm2, %k2
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm2 = [5,5,6,6,6,7,7,7,8,8,8,9,9,9,10,10]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm3
; AVX512DQ-NEXT: vpmovd2m %zmm3, %k1
; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm3 = [10,11,11,11,12,12,12,13,13,13,14,14,14,15,15,15]
; AVX512DQ-NEXT: vpermd %zmm0, %zmm3, %zmm0
; AVX512DQ-NEXT: vpmovd2m %zmm0, %k3
; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
; AVX512DQ-NEXT: vpmovd2m %zmm1, %k4
; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm1
; AVX512DQ-NEXT: vpmovd2m %zmm1, %k5
; AVX512DQ-NEXT: vpermd %zmm0, %zmm3, %zmm0
; AVX512DQ-NEXT: vpmovd2m %zmm0, %k6
; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm1 {%k6} {z}
; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm2 {%k5} {z}
; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k4} {z}
; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm4 {%k3} {z}
; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm5 {%k1} {z}
; AVX512DQ-NEXT: vmovdqa64 %zmm5, 64(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm4, 128(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm3, 192(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm2, 256(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm1, 320(%rdx)
; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
; AVX512BW-LABEL: mask_replication_factor3_vf32:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: kmovd (%rdi), %k0
; AVX512BW-NEXT: kshiftrd $1, %k0, %k1
; AVX512BW-NEXT: movw $-3, %ax
; AVX512BW-NEXT: kmovd %eax, %k4
; AVX512BW-NEXT: kmovw (%rdi), %k2
; AVX512BW-NEXT: kandw %k4, %k2, %k3
; AVX512BW-NEXT: kmovq %k4, %k7
; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
; AVX512BW-NEXT: kshiftrw $14, %k2, %k4
; AVX512BW-NEXT: korw %k4, %k3, %k3
; AVX512BW-NEXT: movw $-5, %ax
; AVX512BW-NEXT: kmovd %eax, %k4
; AVX512BW-NEXT: kmovw %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k4, %k3, %k3
; AVX512BW-NEXT: kshiftrw $13, %k2, %k2
; AVX512BW-NEXT: korw %k2, %k3, %k2
; AVX512BW-NEXT: movw $-9, %ax
; AVX512BW-NEXT: kmovd %eax, %k3
; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k3, %k2, %k2
; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
; AVX512BW-NEXT: kshiftrw $12, %k1, %k3
; AVX512BW-NEXT: korw %k3, %k2, %k2
; AVX512BW-NEXT: movw $-17, %ax
; AVX512BW-NEXT: kmovd %eax, %k5
; AVX512BW-NEXT: kandw %k5, %k2, %k2
; AVX512BW-NEXT: kmovw %k5, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kshiftrw $11, %k1, %k3
; AVX512BW-NEXT: korw %k3, %k2, %k2
; AVX512BW-NEXT: movw $-33, %ax
; AVX512BW-NEXT: kmovd %eax, %k3
; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k3, %k2, %k2
; AVX512BW-NEXT: kshiftrw $10, %k1, %k1
; AVX512BW-NEXT: korw %k1, %k2, %k1
; AVX512BW-NEXT: movw $-65, %ax
; AVX512BW-NEXT: kmovd %eax, %k2
; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrd $2, %k0, %k2
; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
; AVX512BW-NEXT: kshiftrw $9, %k2, %k3
; AVX512BW-NEXT: korw %k3, %k1, %k1
; AVX512BW-NEXT: movw $-129, %ax
; AVX512BW-NEXT: kmovd %eax, %k3
; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $8, %k2, %k3
; AVX512BW-NEXT: korw %k3, %k1, %k1
; AVX512BW-NEXT: movw $-257, %ax # imm = 0xFEFF
; AVX512BW-NEXT: kmovd %eax, %k3
; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $7, %k2, %k2
; AVX512BW-NEXT: korw %k2, %k1, %k1
; AVX512BW-NEXT: movw $-513, %ax # imm = 0xFDFF
; AVX512BW-NEXT: kmovd %eax, %k2
; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrd $3, %k0, %k2
; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
; AVX512BW-NEXT: kshiftrw $6, %k2, %k3
; AVX512BW-NEXT: korw %k3, %k1, %k1
; AVX512BW-NEXT: movw $-1025, %ax # imm = 0xFBFF
; AVX512BW-NEXT: kmovd %eax, %k3
; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $5, %k2, %k3
; AVX512BW-NEXT: korw %k3, %k1, %k1
; AVX512BW-NEXT: movw $-2049, %ax # imm = 0xF7FF
; AVX512BW-NEXT: kmovd %eax, %k3
; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $4, %k2, %k2
; AVX512BW-NEXT: korw %k2, %k1, %k1
; AVX512BW-NEXT: movw $-4097, %ax # imm = 0xEFFF
; AVX512BW-NEXT: kmovd %eax, %k2
; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrd $4, %k0, %k4
; AVX512BW-NEXT: kshiftlw $15, %k4, %k2
; AVX512BW-NEXT: kshiftrw $3, %k2, %k3
; AVX512BW-NEXT: korw %k3, %k1, %k1
; AVX512BW-NEXT: movw $-8193, %ax # imm = 0xDFFF
; AVX512BW-NEXT: kmovd %eax, %k6
; AVX512BW-NEXT: kandw %k6, %k1, %k1
; AVX512BW-NEXT: kmovw %k6, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kshiftrw $2, %k2, %k2
; AVX512BW-NEXT: korw %k2, %k1, %k1
; AVX512BW-NEXT: movw $-16385, %ax # imm = 0xBFFF
; AVX512BW-NEXT: kmovd %eax, %k2
; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftlw $14, %k4, %k4
; AVX512BW-NEXT: korw %k4, %k1, %k1
; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
; AVX512BW-NEXT: kshiftrd $5, %k0, %k2
; AVX512BW-NEXT: kmovd %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: korw %k2, %k1, %k1
; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
; AVX512BW-NEXT: kshiftrd $27, %k0, %k1
; AVX512BW-NEXT: kshiftlw $15, %k1, %k4
; AVX512BW-NEXT: kshiftrd $26, %k0, %k1
; AVX512BW-NEXT: kmovd %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
; AVX512BW-NEXT: kmovq %k7, %k2
; AVX512BW-NEXT: kmovw %k7, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kandw %k7, %k1, %k1
; AVX512BW-NEXT: kshiftrw $14, %k4, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $13, %k4, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $12, %k4, %k4
; AVX512BW-NEXT: korw %k4, %k1, %k1
; AVX512BW-NEXT: kandw %k5, %k1, %k1
; AVX512BW-NEXT: kshiftrd $28, %k0, %k4
; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
; AVX512BW-NEXT: kshiftrw $11, %k4, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $10, %k4, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
; AVX512BW-NEXT: kandw %k5, %k1, %k1
; AVX512BW-NEXT: kshiftrw $9, %k4, %k4
; AVX512BW-NEXT: korw %k4, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrd $29, %k0, %k4
; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
; AVX512BW-NEXT: kshiftrw $8, %k4, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $7, %k4, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $6, %k4, %k4
; AVX512BW-NEXT: korw %k4, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrd $30, %k0, %k4
; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
; AVX512BW-NEXT: kshiftrw $5, %k4, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $4, %k4, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $3, %k4, %k4
; AVX512BW-NEXT: korw %k4, %k1, %k1
; AVX512BW-NEXT: kandw %k6, %k1, %k1
; AVX512BW-NEXT: kshiftrd $31, %k0, %k4
; AVX512BW-NEXT: kshiftlw $15, %k4, %k7
; AVX512BW-NEXT: kshiftrw $2, %k7, %k6
; AVX512BW-NEXT: korw %k6, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftlw $14, %k4, %k4
; AVX512BW-NEXT: korw %k4, %k1, %k1
; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm1 {%k1} {z}
; AVX512BW-NEXT: kshiftrd $21, %k0, %k1
; AVX512BW-NEXT: kandw %k2, %k1, %k6
; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
; AVX512BW-NEXT: kshiftrw $14, %k1, %k1
; AVX512BW-NEXT: korw %k1, %k6, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrd $22, %k0, %k6
; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
; AVX512BW-NEXT: kshiftrw $13, %k6, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $12, %k6, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrw $11, %k6, %k6
; AVX512BW-NEXT: korw %k6, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
; AVX512BW-NEXT: kandw %k4, %k1, %k1
; AVX512BW-NEXT: kshiftrd $23, %k0, %k6
; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
; AVX512BW-NEXT: kshiftrw $10, %k6, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kandw %k5, %k1, %k1
; AVX512BW-NEXT: kshiftrw $9, %k6, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
; AVX512BW-NEXT: kandw %k5, %k1, %k1
; AVX512BW-NEXT: kshiftrw $8, %k6, %k6
; AVX512BW-NEXT: korw %k6, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrd $24, %k0, %k6
; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrw $5, %k6, %k6
; AVX512BW-NEXT: korw %k6, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrd $25, %k0, %k6
; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
; AVX512BW-NEXT: korw %k7, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
; AVX512BW-NEXT: korw %k6, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 4-byte Reload
; AVX512BW-NEXT: kshiftlw $14, %k2, %k6
; AVX512BW-NEXT: korw %k6, %k1, %k1
; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
; AVX512BW-NEXT: korw %k2, %k1, %k1
; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm2 {%k1} {z}
; AVX512BW-NEXT: kshiftrd $16, %k0, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k2
; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
; AVX512BW-NEXT: kshiftrw $14, %k1, %k6
; AVX512BW-NEXT: korw %k6, %k2, %k2
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
; AVX512BW-NEXT: kandw %k6, %k2, %k2
; AVX512BW-NEXT: kshiftrw $13, %k1, %k1
; AVX512BW-NEXT: korw %k1, %k2, %k1
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrd $17, %k0, %k2
; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
; AVX512BW-NEXT: kshiftrw $12, %k2, %k6
; AVX512BW-NEXT: korw %k6, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
; AVX512BW-NEXT: kandw %k3, %k1, %k1
; AVX512BW-NEXT: kshiftrw $11, %k2, %k6
; AVX512BW-NEXT: korw %k6, %k1, %k1
; AVX512BW-NEXT: kandw %k4, %k1, %k1
; AVX512BW-NEXT: kshiftrw $10, %k2, %k2
; AVX512BW-NEXT: korw %k2, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrd $18, %k0, %k2
; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
; AVX512BW-NEXT: kshiftrw $9, %k2, %k6
; AVX512BW-NEXT: korw %k6, %k1, %k1
; AVX512BW-NEXT: kandw %k5, %k1, %k1
; AVX512BW-NEXT: kshiftrw $8, %k2, %k6
; AVX512BW-NEXT: korw %k6, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
; AVX512BW-NEXT: kandw %k5, %k1, %k1
; AVX512BW-NEXT: kshiftrw $7, %k2, %k2
; AVX512BW-NEXT: korw %k2, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
; AVX512BW-NEXT: kandw %k2, %k1, %k1
; AVX512BW-NEXT: kshiftrd $19, %k0, %k2
; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
; AVX512BW-NEXT: kshiftrw $6, %k2, %k6
; AVX512BW-NEXT: korw %k6, %k1, %k1
; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
; AVX512BW-NEXT: kandw %k4, %k1, %k1
; AVX512BW-NEXT: kshiftrw $5, %k2, %k6
1129 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1130 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1131 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
1132 ; AVX512BW-NEXT: kshiftrw $4, %k2, %k2
1133 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1134 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1135 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1136 ; AVX512BW-NEXT: kshiftrd $20, %k0, %k2
1137 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k6
1138 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
1139 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1140 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
1141 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
1142 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
1143 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1144 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1145 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
1146 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k2
1147 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1148 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
1149 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
1150 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1151 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1152 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k1} {z}
1153 ; AVX512BW-NEXT: kshiftrd $11, %k0, %k1
1154 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k2
1155 ; AVX512BW-NEXT: kshiftrd $10, %k0, %k4
1156 ; AVX512BW-NEXT: kmovd %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
1157 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1158 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
1159 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k6
1160 ; AVX512BW-NEXT: korw %k6, %k4, %k4
1161 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1162 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
1163 ; AVX512BW-NEXT: kshiftrw $13, %k2, %k6
1164 ; AVX512BW-NEXT: korw %k6, %k4, %k4
1165 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1166 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
1167 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k2
1168 ; AVX512BW-NEXT: korw %k2, %k4, %k2
1169 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
1170 ; AVX512BW-NEXT: kshiftrd $12, %k0, %k4
1171 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
1172 ; AVX512BW-NEXT: kshiftrw $11, %k4, %k6
1173 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1174 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1175 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
1176 ; AVX512BW-NEXT: kshiftrw $10, %k4, %k6
1177 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1178 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1179 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
1180 ; AVX512BW-NEXT: kshiftrw $9, %k4, %k4
1181 ; AVX512BW-NEXT: korw %k4, %k2, %k2
1182 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1183 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
1184 ; AVX512BW-NEXT: kshiftrd $13, %k0, %k4
1185 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
1186 ; AVX512BW-NEXT: kshiftrw $8, %k4, %k6
1187 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1188 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
1189 ; AVX512BW-NEXT: kshiftrw $7, %k4, %k6
1190 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1191 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
1192 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
1193 ; AVX512BW-NEXT: kshiftrw $6, %k4, %k4
1194 ; AVX512BW-NEXT: korw %k4, %k2, %k2
1195 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1196 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
1197 ; AVX512BW-NEXT: kshiftrd $14, %k0, %k4
1198 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
1199 ; AVX512BW-NEXT: kshiftrw $5, %k4, %k6
1200 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1201 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1202 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
1203 ; AVX512BW-NEXT: kshiftrw $4, %k4, %k6
1204 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1205 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
1206 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
1207 ; AVX512BW-NEXT: kshiftrw $3, %k4, %k4
1208 ; AVX512BW-NEXT: korw %k4, %k2, %k2
1209 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
1210 ; AVX512BW-NEXT: kshiftrd $15, %k0, %k4
1211 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k6
1212 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
1213 ; AVX512BW-NEXT: korw %k7, %k2, %k2
1214 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
1215 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
1216 ; AVX512BW-NEXT: kshiftlw $14, %k4, %k4
1217 ; AVX512BW-NEXT: korw %k4, %k2, %k2
1218 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
1219 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
1220 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1221 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm4 {%k2} {z}
1222 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 4-byte Reload
1223 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1224 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
1225 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1226 ; AVX512BW-NEXT: kshiftrw $14, %k4, %k4
1227 ; AVX512BW-NEXT: korw %k4, %k2, %k2
1228 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1229 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
1230 ; AVX512BW-NEXT: kshiftrd $6, %k0, %k4
1231 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
1232 ; AVX512BW-NEXT: kshiftrw $13, %k4, %k6
1233 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1234 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1235 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
1236 ; AVX512BW-NEXT: kshiftrw $12, %k4, %k6
1237 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1238 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1239 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
1240 ; AVX512BW-NEXT: kshiftrw $11, %k4, %k4
1241 ; AVX512BW-NEXT: korw %k4, %k2, %k2
1242 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1243 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
1244 ; AVX512BW-NEXT: kshiftrd $7, %k0, %k4
1245 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
1246 ; AVX512BW-NEXT: kshiftrw $10, %k4, %k6
1247 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1248 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1249 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
1250 ; AVX512BW-NEXT: kshiftrw $9, %k4, %k6
1251 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1252 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1253 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
1254 ; AVX512BW-NEXT: kshiftrw $8, %k4, %k4
1255 ; AVX512BW-NEXT: korw %k4, %k2, %k2
1256 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1257 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
1258 ; AVX512BW-NEXT: kshiftrd $8, %k0, %k4
1259 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
1260 ; AVX512BW-NEXT: kshiftrw $7, %k4, %k6
1261 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1262 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
1263 ; AVX512BW-NEXT: kshiftrw $6, %k4, %k6
1264 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1265 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
1266 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
1267 ; AVX512BW-NEXT: kshiftrw $5, %k4, %k4
1268 ; AVX512BW-NEXT: korw %k4, %k2, %k2
1269 ; AVX512BW-NEXT: kshiftrd $9, %k0, %k0
1270 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
1271 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
1272 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k4
1273 ; AVX512BW-NEXT: korw %k4, %k2, %k2
1274 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
1275 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k4
1276 ; AVX512BW-NEXT: korw %k4, %k2, %k2
1277 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1278 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
1279 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k0
1280 ; AVX512BW-NEXT: korw %k0, %k2, %k0
1281 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
1282 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
1283 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k2
1284 ; AVX512BW-NEXT: korw %k2, %k0, %k0
1285 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
1286 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
1287 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
1288 ; AVX512BW-NEXT: korw %k1, %k0, %k1
1289 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm5 {%k1} {z}
1290 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 64(%rdx)
1291 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 128(%rdx)
1292 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 192(%rdx)
1293 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 256(%rdx)
1294 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 320(%rdx)
1295 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
1296 ; AVX512BW-NEXT: vzeroupper
1297 ; AVX512BW-NEXT: retq
1298 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
1299 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
1300 %tgt.mask = shufflevector <32 x i1> %src.mask, <32 x i1> poison, <96 x i32> <i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31>
1301 %data = call <96 x i32> @llvm.masked.load.v96i32.p0(ptr %in.vec, i32 64, <96 x i1> %tgt.mask, <96 x i32> poison)
1302 store <96 x i32> %data, ptr %out.vec, align 64
1303 ret void
1304 }
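; Replication factor 3 with 64 mask elements: following the same pattern as the
; vf32 case above, each input mask bit should be broadcast to three consecutive
; lanes, giving a predicate wide enough for twelve 16 x i32 zeroing loads
; (768 bytes of data). The AVX512F-only lowering below materializes the mask as
; a vector with vpternlogd, replicates it with three vpermd index patterns, and
; turns each replicated chunk back into a predicate with vptestmd.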
1306 define void @mask_replication_factor3_vf64(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
1307 ; AVX512F-ONLY-LABEL: mask_replication_factor3_vf64:
1308 ; AVX512F-ONLY: # %bb.0:
1309 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
1310 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
1311 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,1,1,1,2,2,2,3,3,3,4,4,4,5]
1312 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm2
1313 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
1314 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k1} {z}
1315 ; AVX512F-ONLY-NEXT: movw $1, %ax
1316 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
1317 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
1318 ; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
1319 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k1} {z}
1320 ; AVX512F-ONLY-NEXT: kmovw 4(%rdi), %k1
1321 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm4, %zmm4, %zmm4 {%k1} {z}
1322 ; AVX512F-ONLY-NEXT: kmovw 6(%rdi), %k1
1323 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm5, %zmm5, %zmm5 {%k1} {z}
1324 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
1325 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm2 = [5,5,6,6,6,7,7,7,8,8,8,9,9,9,10,10]
1326 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm6
1327 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm7 = [10,11,11,11,12,12,12,13,13,13,14,14,14,15,15,15]
1328 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm7, %zmm0
1329 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm1, %zmm8
1330 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm2, %zmm9
1331 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm7, %zmm3
1332 ; AVX512F-ONLY-NEXT: vpermd %zmm4, %zmm1, %zmm10
1333 ; AVX512F-ONLY-NEXT: vpermd %zmm4, %zmm2, %zmm11
1334 ; AVX512F-ONLY-NEXT: vpermd %zmm4, %zmm7, %zmm4
1335 ; AVX512F-ONLY-NEXT: vpermd %zmm5, %zmm1, %zmm1
1336 ; AVX512F-ONLY-NEXT: vpermd %zmm5, %zmm2, %zmm2
1337 ; AVX512F-ONLY-NEXT: vpermd %zmm5, %zmm7, %zmm5
1338 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm7 {%k1} {z}
1339 ; AVX512F-ONLY-NEXT: vptestmd %zmm5, %zmm5, %k1
1340 ; AVX512F-ONLY-NEXT: vmovdqa32 704(%rsi), %zmm5 {%k1} {z}
1341 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
1342 ; AVX512F-ONLY-NEXT: vmovdqa32 640(%rsi), %zmm2 {%k1} {z}
1343 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
1344 ; AVX512F-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm1 {%k1} {z}
1345 ; AVX512F-ONLY-NEXT: vptestmd %zmm4, %zmm4, %k1
1346 ; AVX512F-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm4 {%k1} {z}
1347 ; AVX512F-ONLY-NEXT: vptestmd %zmm11, %zmm11, %k1
1348 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm11 {%k1} {z}
1349 ; AVX512F-ONLY-NEXT: vptestmd %zmm10, %zmm10, %k1
1350 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm10 {%k1} {z}
1351 ; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k1
1352 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm3 {%k1} {z}
1353 ; AVX512F-ONLY-NEXT: vptestmd %zmm9, %zmm9, %k1
1354 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm9 {%k1} {z}
1355 ; AVX512F-ONLY-NEXT: vptestmd %zmm8, %zmm8, %k1
1356 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm8 {%k1} {z}
1357 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
1358 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm0 {%k1} {z}
1359 ; AVX512F-ONLY-NEXT: vptestmd %zmm6, %zmm6, %k1
1360 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm6 {%k1} {z}
1361 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 64(%rdx)
1362 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 128(%rdx)
1363 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm8, 192(%rdx)
1364 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm9, 256(%rdx)
1365 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 320(%rdx)
1366 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm10, 384(%rdx)
1367 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm11, 448(%rdx)
1368 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 512(%rdx)
1369 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 576(%rdx)
1370 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 640(%rdx)
1371 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 704(%rdx)
1372 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, (%rdx)
1373 ; AVX512F-ONLY-NEXT: vzeroupper
1374 ; AVX512F-ONLY-NEXT: retq
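; The AVX512DQ lowering uses the same three vpermd replication patterns but
; relies on vpmovm2d/vpmovd2m for the mask <-> vector round trips instead of
; the vpternlogd/vptestmd pairs used on plain AVX512F.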
1376 ; AVX512DQ-LABEL: mask_replication_factor3_vf64:
1377 ; AVX512DQ: # %bb.0:
1378 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
1379 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
1380 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,1,1,1,2,2,2,3,3,3,4,4,4,5]
1381 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm2
1382 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k0
1383 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm2
1384 ; AVX512DQ-NEXT: movw $1, %ax
1385 ; AVX512DQ-NEXT: kmovw %eax, %k1
1386 ; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
1387 ; AVX512DQ-NEXT: kmovw 2(%rdi), %k0
1388 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm3
1389 ; AVX512DQ-NEXT: kmovw 4(%rdi), %k0
1390 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm4
1391 ; AVX512DQ-NEXT: kmovw 6(%rdi), %k0
1392 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm5
1393 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
1394 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm2 = [5,5,6,6,6,7,7,7,8,8,8,9,9,9,10,10]
1395 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm6
1396 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm7 = [10,11,11,11,12,12,12,13,13,13,14,14,14,15,15,15]
1397 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm7, %zmm0
1398 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm1, %zmm8
1399 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm2, %zmm9
1400 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm7, %zmm3
1401 ; AVX512DQ-NEXT: vpermd %zmm4, %zmm1, %zmm10
1402 ; AVX512DQ-NEXT: vpermd %zmm4, %zmm2, %zmm11
1403 ; AVX512DQ-NEXT: vpermd %zmm4, %zmm7, %zmm4
1404 ; AVX512DQ-NEXT: vpermd %zmm5, %zmm1, %zmm1
1405 ; AVX512DQ-NEXT: vpermd %zmm5, %zmm2, %zmm2
1406 ; AVX512DQ-NEXT: vpermd %zmm5, %zmm7, %zmm5
1407 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm7 {%k1} {z}
1408 ; AVX512DQ-NEXT: vpmovd2m %zmm5, %k1
1409 ; AVX512DQ-NEXT: vmovdqa32 704(%rsi), %zmm5 {%k1} {z}
1410 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
1411 ; AVX512DQ-NEXT: vmovdqa32 640(%rsi), %zmm2 {%k1} {z}
1412 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
1413 ; AVX512DQ-NEXT: vmovdqa32 576(%rsi), %zmm1 {%k1} {z}
1414 ; AVX512DQ-NEXT: vpmovd2m %zmm4, %k1
1415 ; AVX512DQ-NEXT: vmovdqa32 512(%rsi), %zmm4 {%k1} {z}
1416 ; AVX512DQ-NEXT: vpmovd2m %zmm11, %k1
1417 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm11 {%k1} {z}
1418 ; AVX512DQ-NEXT: vpmovd2m %zmm10, %k1
1419 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm10 {%k1} {z}
1420 ; AVX512DQ-NEXT: vpmovd2m %zmm3, %k1
1421 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm3 {%k1} {z}
1422 ; AVX512DQ-NEXT: vpmovd2m %zmm9, %k1
1423 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm9 {%k1} {z}
1424 ; AVX512DQ-NEXT: vpmovd2m %zmm8, %k1
1425 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm8 {%k1} {z}
1426 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
1427 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm0 {%k1} {z}
1428 ; AVX512DQ-NEXT: vpmovd2m %zmm6, %k1
1429 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm6 {%k1} {z}
1430 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 64(%rdx)
1431 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 128(%rdx)
1432 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 192(%rdx)
1433 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 256(%rdx)
1434 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 320(%rdx)
1435 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 384(%rdx)
1436 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 448(%rdx)
1437 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 512(%rdx)
1438 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 576(%rdx)
1439 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 640(%rdx)
1440 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 704(%rdx)
1441 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, (%rdx)
1442 ; AVX512DQ-NEXT: vzeroupper
1443 ; AVX512DQ-NEXT: retq
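; The AVX512BW path keeps the mask in k-registers throughout: kmovq loads all
; 64 source bits, and each 16-lane predicate is assembled bit by bit. For every
; destination bit, kandw with a single-bit-clear constant (-3, -5, -9, ...,
; materialized once and spilled/reloaded from the stack) clears the slot, a
; kshiftrq/kshiftlw/kshiftrw sequence positions a copy of the source bit, and
; korw merges it in; each finished predicate then gates a zeroing vmovdqa32
; load of the corresponding 64-byte chunk of (%rsi).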
1445 ; AVX512BW-LABEL: mask_replication_factor3_vf64:
1446 ; AVX512BW: # %bb.0:
1447 ; AVX512BW-NEXT: kmovq (%rdi), %k0
1448 ; AVX512BW-NEXT: kshiftrq $1, %k0, %k1
1449 ; AVX512BW-NEXT: movw $-3, %ax
1450 ; AVX512BW-NEXT: kmovd %eax, %k4
1451 ; AVX512BW-NEXT: kmovw (%rdi), %k2
1452 ; AVX512BW-NEXT: kandw %k4, %k2, %k3
1453 ; AVX512BW-NEXT: kmovq %k4, %k7
1454 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
1455 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k4
1456 ; AVX512BW-NEXT: korw %k4, %k3, %k3
1457 ; AVX512BW-NEXT: movw $-5, %ax
1458 ; AVX512BW-NEXT: kmovd %eax, %k4
1459 ; AVX512BW-NEXT: kmovw %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1460 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
1461 ; AVX512BW-NEXT: kshiftrw $13, %k2, %k2
1462 ; AVX512BW-NEXT: korw %k2, %k3, %k2
1463 ; AVX512BW-NEXT: movw $-9, %ax
1464 ; AVX512BW-NEXT: kmovd %eax, %k3
1465 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1466 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
1467 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
1468 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k3
1469 ; AVX512BW-NEXT: korw %k3, %k2, %k2
1470 ; AVX512BW-NEXT: movw $-17, %ax
1471 ; AVX512BW-NEXT: kmovd %eax, %k3
1472 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1473 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
1474 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k3
1475 ; AVX512BW-NEXT: korw %k3, %k2, %k2
1476 ; AVX512BW-NEXT: movw $-33, %ax
1477 ; AVX512BW-NEXT: kmovd %eax, %k3
1478 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1479 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
1480 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k1
1481 ; AVX512BW-NEXT: korw %k1, %k2, %k1
1482 ; AVX512BW-NEXT: movw $-65, %ax
1483 ; AVX512BW-NEXT: kmovd %eax, %k2
1484 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1485 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1486 ; AVX512BW-NEXT: kshiftrq $2, %k0, %k2
1487 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
1488 ; AVX512BW-NEXT: kshiftrw $9, %k2, %k3
1489 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1490 ; AVX512BW-NEXT: movw $-129, %ax
1491 ; AVX512BW-NEXT: kmovd %eax, %k3
1492 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
1493 ; AVX512BW-NEXT: kmovq %k3, %k5
1494 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1495 ; AVX512BW-NEXT: kshiftrw $8, %k2, %k3
1496 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1497 ; AVX512BW-NEXT: movw $-257, %ax # imm = 0xFEFF
1498 ; AVX512BW-NEXT: kmovd %eax, %k3
1499 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1500 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
1501 ; AVX512BW-NEXT: kshiftrw $7, %k2, %k2
1502 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1503 ; AVX512BW-NEXT: movw $-513, %ax # imm = 0xFDFF
1504 ; AVX512BW-NEXT: kmovd %eax, %k2
1505 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1506 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1507 ; AVX512BW-NEXT: kshiftrq $3, %k0, %k2
1508 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
1509 ; AVX512BW-NEXT: kshiftrw $6, %k2, %k3
1510 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1511 ; AVX512BW-NEXT: movw $-1025, %ax # imm = 0xFBFF
1512 ; AVX512BW-NEXT: kmovd %eax, %k3
1513 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1514 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
1515 ; AVX512BW-NEXT: kshiftrw $5, %k2, %k3
1516 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1517 ; AVX512BW-NEXT: movw $-2049, %ax # imm = 0xF7FF
1518 ; AVX512BW-NEXT: kmovd %eax, %k3
1519 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1520 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
1521 ; AVX512BW-NEXT: kshiftrw $4, %k2, %k2
1522 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1523 ; AVX512BW-NEXT: movw $-4097, %ax # imm = 0xEFFF
1524 ; AVX512BW-NEXT: kmovd %eax, %k2
1525 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1526 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1527 ; AVX512BW-NEXT: kshiftrq $4, %k0, %k2
1528 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k3
1529 ; AVX512BW-NEXT: kshiftrw $3, %k3, %k4
1530 ; AVX512BW-NEXT: korw %k4, %k1, %k1
1531 ; AVX512BW-NEXT: movw $-8193, %ax # imm = 0xDFFF
1532 ; AVX512BW-NEXT: kmovd %eax, %k6
1533 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
1534 ; AVX512BW-NEXT: kmovw %k6, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1535 ; AVX512BW-NEXT: kshiftrw $2, %k3, %k3
1536 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1537 ; AVX512BW-NEXT: movw $-16385, %ax # imm = 0xBFFF
1538 ; AVX512BW-NEXT: kmovd %eax, %k3
1539 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1540 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
1541 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k2
1542 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1543 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
1544 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
1545 ; AVX512BW-NEXT: kshiftrq $5, %k0, %k2
1546 ; AVX512BW-NEXT: kmovq %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
1547 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
1548 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1549 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1550 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
1551 ; AVX512BW-NEXT: kshiftrq $59, %k0, %k1
1552 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k2
1553 ; AVX512BW-NEXT: kshiftrq $58, %k0, %k1
1554 ; AVX512BW-NEXT: kmovq %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
1555 ; AVX512BW-NEXT: kmovq %k7, %k3
1556 ; AVX512BW-NEXT: kmovw %k7, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1557 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
1558 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k7
1559 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1560 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1561 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
1562 ; AVX512BW-NEXT: kshiftrw $13, %k2, %k7
1563 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1564 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1565 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
1566 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k2
1567 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1568 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1569 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
1570 ; AVX512BW-NEXT: kshiftrq $60, %k0, %k2
1571 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
1572 ; AVX512BW-NEXT: kshiftrw $11, %k2, %k7
1573 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1574 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
1575 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
1576 ; AVX512BW-NEXT: kshiftrw $10, %k2, %k7
1577 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1578 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
1579 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
1580 ; AVX512BW-NEXT: kshiftrw $9, %k2, %k2
1581 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1582 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
1583 ; AVX512BW-NEXT: kshiftrq $61, %k0, %k2
1584 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
1585 ; AVX512BW-NEXT: kshiftrw $8, %k2, %k7
1586 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1587 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
1588 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
1589 ; AVX512BW-NEXT: kshiftrw $7, %k2, %k7
1590 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1591 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
1592 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
1593 ; AVX512BW-NEXT: kshiftrw $6, %k2, %k2
1594 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1595 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1596 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1597 ; AVX512BW-NEXT: kshiftrq $62, %k0, %k2
1598 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
1599 ; AVX512BW-NEXT: kshiftrw $5, %k2, %k7
1600 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1601 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
1602 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
1603 ; AVX512BW-NEXT: kshiftrw $4, %k2, %k7
1604 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1605 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
1606 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
1607 ; AVX512BW-NEXT: kshiftrw $3, %k2, %k2
1608 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1609 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
1610 ; AVX512BW-NEXT: kshiftrq $63, %k0, %k2
1611 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k7
1612 ; AVX512BW-NEXT: kshiftrw $2, %k7, %k6
1613 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1614 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1615 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
1616 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k2
1617 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1618 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
1619 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
1620 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1621 ; AVX512BW-NEXT: vmovdqa32 704(%rsi), %zmm1 {%k1} {z}
1622 ; AVX512BW-NEXT: kshiftrq $53, %k0, %k1
1623 ; AVX512BW-NEXT: kandw %k3, %k1, %k6
1624 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
1625 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1626 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k1
1627 ; AVX512BW-NEXT: korw %k1, %k6, %k1
1628 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1629 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1630 ; AVX512BW-NEXT: kshiftrq $54, %k0, %k6
1631 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
1632 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k7
1633 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1634 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1635 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1636 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k7
1637 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1638 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
1639 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k6
1640 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1641 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1642 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1643 ; AVX512BW-NEXT: kshiftrq $55, %k0, %k6
1644 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
1645 ; AVX512BW-NEXT: kshiftrw $10, %k6, %k7
1646 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1647 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1648 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1649 ; AVX512BW-NEXT: kshiftrw $9, %k6, %k7
1650 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1651 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1652 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1653 ; AVX512BW-NEXT: kshiftrw $8, %k6, %k6
1654 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1655 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
1656 ; AVX512BW-NEXT: kshiftrq $56, %k0, %k6
1657 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
1658 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
1659 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1660 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
1661 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
1662 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
1663 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1664 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
1665 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
1666 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k6
1667 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1668 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
1669 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
1670 ; AVX512BW-NEXT: kshiftrq $57, %k0, %k6
1671 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
1672 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
1673 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1674 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
1675 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
1676 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
1677 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1678 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
1679 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
1680 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
1681 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1682 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1683 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
1684 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 8-byte Reload
1685 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k6
1686 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1687 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
1688 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
1689 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
1690 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1691 ; AVX512BW-NEXT: vmovdqa32 640(%rsi), %zmm2 {%k1} {z}
1692 ; AVX512BW-NEXT: kshiftrq $48, %k0, %k1
1693 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
1694 ; AVX512BW-NEXT: kandw %k3, %k1, %k3
1695 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
1696 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k6
1697 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1698 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1699 ; AVX512BW-NEXT: kandw %k6, %k3, %k3
1700 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k1
1701 ; AVX512BW-NEXT: korw %k1, %k3, %k1
1702 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
1703 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
1704 ; AVX512BW-NEXT: kshiftrq $49, %k0, %k3
1705 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
1706 ; AVX512BW-NEXT: kshiftrw $12, %k3, %k6
1707 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1708 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1709 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
1710 ; AVX512BW-NEXT: kshiftrw $11, %k3, %k6
1711 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1712 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1713 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
1714 ; AVX512BW-NEXT: kshiftrw $10, %k3, %k3
1715 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1716 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
1717 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
1718 ; AVX512BW-NEXT: kshiftrq $50, %k0, %k3
1719 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
1720 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k6
1721 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1722 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1723 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k6
1724 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1725 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1726 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1727 ; AVX512BW-NEXT: kshiftrw $7, %k3, %k3
1728 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1729 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
1730 ; AVX512BW-NEXT: kshiftrq $51, %k0, %k3
1731 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
1732 ; AVX512BW-NEXT: kshiftrw $6, %k3, %k6
1733 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1734 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
1735 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
1736 ; AVX512BW-NEXT: kshiftrw $5, %k3, %k6
1737 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1738 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1739 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1740 ; AVX512BW-NEXT: kshiftrw $4, %k3, %k3
1741 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1742 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1743 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1744 ; AVX512BW-NEXT: kshiftrq $52, %k0, %k3
1745 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k6
1746 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
1747 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1748 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
1749 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
1750 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
1751 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1752 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
1753 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k3
1754 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1755 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
1756 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
1757 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1758 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1759 ; AVX512BW-NEXT: vmovdqa32 576(%rsi), %zmm3 {%k1} {z}
1760 ; AVX512BW-NEXT: kshiftrq $43, %k0, %k1
1761 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k2
1762 ; AVX512BW-NEXT: kshiftrq $42, %k0, %k1
1763 ; AVX512BW-NEXT: kmovq %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
1764 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1765 ; AVX512BW-NEXT: kandw %k4, %k1, %k3
1766 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k6
1767 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1768 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1769 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1770 ; AVX512BW-NEXT: kshiftrw $13, %k2, %k6
1771 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1772 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1773 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1774 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k2
1775 ; AVX512BW-NEXT: korw %k2, %k3, %k2
1776 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1777 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
1778 ; AVX512BW-NEXT: kshiftrq $44, %k0, %k3
1779 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
1780 ; AVX512BW-NEXT: kshiftrw $11, %k3, %k6
1781 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1782 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1783 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
1784 ; AVX512BW-NEXT: kshiftrw $10, %k3, %k6
1785 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1786 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1787 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
1788 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k3
1789 ; AVX512BW-NEXT: korw %k3, %k2, %k2
1790 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
1791 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
1792 ; AVX512BW-NEXT: kshiftrq $45, %k0, %k3
1793 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
1794 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k6
1795 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1796 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1797 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
1798 ; AVX512BW-NEXT: kshiftrw $7, %k3, %k6
1799 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1800 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1801 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
1802 ; AVX512BW-NEXT: kshiftrw $6, %k3, %k3
1803 ; AVX512BW-NEXT: korw %k3, %k2, %k2
1804 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
1805 ; AVX512BW-NEXT: kshiftrq $46, %k0, %k3
1806 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
1807 ; AVX512BW-NEXT: kshiftrw $5, %k3, %k6
1808 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1809 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
1810 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
1811 ; AVX512BW-NEXT: kshiftrw $4, %k3, %k6
1812 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1813 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1814 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
1815 ; AVX512BW-NEXT: kshiftrw $3, %k3, %k3
1816 ; AVX512BW-NEXT: korw %k3, %k2, %k2
1817 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
1818 ; AVX512BW-NEXT: kshiftrq $47, %k0, %k3
1819 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k6
1820 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
1821 ; AVX512BW-NEXT: korw %k7, %k2, %k2
1822 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
1823 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
1824 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k3
1825 ; AVX512BW-NEXT: korw %k3, %k2, %k2
1826 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
1827 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
1828 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1829 ; AVX512BW-NEXT: vmovdqa32 512(%rsi), %zmm4 {%k2} {z}
1830 ; AVX512BW-NEXT: kshiftrq $37, %k0, %k2
1831 ; AVX512BW-NEXT: kandw %k4, %k2, %k3
1832 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
1833 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
1834 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k6
1835 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1836 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1837 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
1838 ; AVX512BW-NEXT: kshiftrq $38, %k0, %k6
1839 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
1840 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k7
1841 ; AVX512BW-NEXT: korw %k7, %k3, %k3
1842 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1843 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
1844 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k7
1845 ; AVX512BW-NEXT: korw %k7, %k3, %k3
1846 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1847 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
1848 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k6
1849 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1850 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1851 ; AVX512BW-NEXT: kshiftrq $39, %k0, %k6
1852 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
1853 ; AVX512BW-NEXT: kshiftrw $10, %k6, %k7
1854 ; AVX512BW-NEXT: korw %k7, %k3, %k3
1855 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1856 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
1857 ; AVX512BW-NEXT: kshiftrw $9, %k6, %k7
1858 ; AVX512BW-NEXT: korw %k7, %k3, %k3
1859 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1860 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1861 ; AVX512BW-NEXT: kshiftrw $8, %k6, %k6
1862 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1863 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1864 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1865 ; AVX512BW-NEXT: kshiftrq $40, %k0, %k6
1866 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
1867 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
1868 ; AVX512BW-NEXT: korw %k7, %k3, %k3
1869 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1870 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1871 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
1872 ; AVX512BW-NEXT: korw %k7, %k3, %k3
1873 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1874 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1875 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k6
1876 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1877 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
1878 ; AVX512BW-NEXT: kshiftrq $41, %k0, %k6
1879 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
1880 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
1881 ; AVX512BW-NEXT: korw %k7, %k3, %k3
1882 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
1883 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
1884 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
1885 ; AVX512BW-NEXT: korw %k7, %k3, %k3
1886 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1887 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1888 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
1889 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1890 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1891 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1892 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 8-byte Reload
1893 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
1894 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1895 ; AVX512BW-NEXT: kshiftlw $1, %k3, %k3
1896 ; AVX512BW-NEXT: kshiftrw $1, %k3, %k3
1897 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
1898 ; AVX512BW-NEXT: korw %k1, %k3, %k1
1899 ; AVX512BW-NEXT: vmovdqa32 448(%rsi), %zmm5 {%k1} {z}
1900 ; AVX512BW-NEXT: kshiftrq $32, %k0, %k1
1901 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
1902 ; AVX512BW-NEXT: kandw %k3, %k1, %k3
1903 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
1904 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k6
1905 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1906 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1907 ; AVX512BW-NEXT: kandw %k6, %k3, %k3
1908 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k1
1909 ; AVX512BW-NEXT: korw %k1, %k3, %k1
1910 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
1911 ; AVX512BW-NEXT: kshiftrq $33, %k0, %k3
1912 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
1913 ; AVX512BW-NEXT: kshiftrw $12, %k3, %k6
1914 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1915 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1916 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
1917 ; AVX512BW-NEXT: kshiftrw $11, %k3, %k6
1918 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1919 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
1920 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
1921 ; AVX512BW-NEXT: kshiftrw $10, %k3, %k3
1922 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1923 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1924 ; AVX512BW-NEXT: kshiftrq $34, %k0, %k3
1925 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
1926 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k6
1927 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1928 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1929 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1930 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k6
1931 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1932 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1933 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1934 ; AVX512BW-NEXT: kshiftrw $7, %k3, %k3
1935 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1936 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1937 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1938 ; AVX512BW-NEXT: kshiftrq $35, %k0, %k3
1939 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
1940 ; AVX512BW-NEXT: kshiftrw $6, %k3, %k6
1941 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1942 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1943 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1944 ; AVX512BW-NEXT: kshiftrw $5, %k3, %k6
1945 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1946 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1947 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
1948 ; AVX512BW-NEXT: kshiftrw $4, %k3, %k3
1949 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1950 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
1951 ; AVX512BW-NEXT: kshiftrq $36, %k0, %k3
1952 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k6
1953 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
1954 ; AVX512BW-NEXT: korw %k7, %k1, %k1
1955 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
1956 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
1957 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
1958 ; AVX512BW-NEXT: korw %k6, %k1, %k1
1959 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
1960 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
1961 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k3
1962 ; AVX512BW-NEXT: korw %k3, %k1, %k1
1963 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
1964 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
1965 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
1966 ; AVX512BW-NEXT: korw %k2, %k1, %k1
1967 ; AVX512BW-NEXT: vmovdqa32 384(%rsi), %zmm6 {%k1} {z}
1968 ; AVX512BW-NEXT: kshiftrq $27, %k0, %k1
1969 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k2
1970 ; AVX512BW-NEXT: kshiftrq $26, %k0, %k3
1971 ; AVX512BW-NEXT: kmovq %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
1972 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1973 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1974 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k6
1975 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1976 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1977 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1978 ; AVX512BW-NEXT: kshiftrw $13, %k2, %k6
1979 ; AVX512BW-NEXT: korw %k6, %k3, %k3
1980 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1981 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
1982 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k2
1983 ; AVX512BW-NEXT: korw %k2, %k3, %k2
1984 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
1985 ; AVX512BW-NEXT: kshiftrq $28, %k0, %k3
1986 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
1987 ; AVX512BW-NEXT: kshiftrw $11, %k3, %k6
1988 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1989 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
1990 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
1991 ; AVX512BW-NEXT: kshiftrw $10, %k3, %k6
1992 ; AVX512BW-NEXT: korw %k6, %k2, %k2
1993 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1994 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
1995 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k3
1996 ; AVX512BW-NEXT: korw %k3, %k2, %k2
1997 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
1998 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
1999 ; AVX512BW-NEXT: kshiftrq $29, %k0, %k3
2000 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
2001 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k6
2002 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2003 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2004 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
2005 ; AVX512BW-NEXT: kshiftrw $7, %k3, %k6
2006 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2007 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2008 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
2009 ; AVX512BW-NEXT: kshiftrw $6, %k3, %k3
2010 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2011 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2012 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
2013 ; AVX512BW-NEXT: kshiftrq $30, %k0, %k3
2014 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
2015 ; AVX512BW-NEXT: kshiftrw $5, %k3, %k6
2016 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2017 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
2018 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
2019 ; AVX512BW-NEXT: kshiftrw $4, %k3, %k6
2020 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2021 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
2022 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
2023 ; AVX512BW-NEXT: kshiftrw $3, %k3, %k3
2024 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2025 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
2026 ; AVX512BW-NEXT: kshiftrq $31, %k0, %k3
2027 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k6
2028 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
2029 ; AVX512BW-NEXT: korw %k7, %k2, %k2
2030 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
2031 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k3
2032 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2033 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
2034 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
2035 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2036 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm7 {%k2} {z}
2037 ; AVX512BW-NEXT: kshiftrq $21, %k0, %k2
2038 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
2039 ; AVX512BW-NEXT: kandw %k5, %k2, %k3
2040 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
2041 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
2042 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k6
2043 ; AVX512BW-NEXT: korw %k6, %k3, %k3
2044 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
2045 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
2046 ; AVX512BW-NEXT: kshiftrq $22, %k0, %k6
2047 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
2048 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k7
2049 ; AVX512BW-NEXT: korw %k7, %k3, %k3
2050 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
2051 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
2052 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k7
2053 ; AVX512BW-NEXT: korw %k7, %k3, %k3
2054 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
2055 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
2056 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k6
2057 ; AVX512BW-NEXT: korw %k6, %k3, %k3
2058 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
2059 ; AVX512BW-NEXT: kshiftrq $23, %k0, %k6
2060 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
2061 ; AVX512BW-NEXT: kshiftrw $10, %k6, %k7
2062 ; AVX512BW-NEXT: korw %k7, %k3, %k3
2063 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
2064 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
2065 ; AVX512BW-NEXT: kshiftrw $9, %k6, %k7
2066 ; AVX512BW-NEXT: korw %k7, %k3, %k3
2067 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
2068 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
2069 ; AVX512BW-NEXT: kshiftrw $8, %k6, %k6
2070 ; AVX512BW-NEXT: korw %k6, %k3, %k3
2071 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
2072 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
2073 ; AVX512BW-NEXT: kshiftrq $24, %k0, %k6
2074 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
2075 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
2076 ; AVX512BW-NEXT: korw %k7, %k3, %k3
2077 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
2078 ; AVX512BW-NEXT: kandw %k7, %k3, %k3
2079 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
2080 ; AVX512BW-NEXT: korw %k7, %k3, %k3
2081 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
2082 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k6
2083 ; AVX512BW-NEXT: korw %k6, %k3, %k3
2084 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2085 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
2086 ; AVX512BW-NEXT: kshiftrq $25, %k0, %k6
2087 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
2088 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
2089 ; AVX512BW-NEXT: korw %k7, %k3, %k3
2090 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2091 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
2092 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
2093 ; AVX512BW-NEXT: korw %k7, %k3, %k3
2094 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2095 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
2096 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
2097 ; AVX512BW-NEXT: korw %k6, %k3, %k3
2098 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2099 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
2100 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 8-byte Reload
2101 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
2102 ; AVX512BW-NEXT: korw %k6, %k3, %k3
2103 ; AVX512BW-NEXT: kshiftlw $1, %k3, %k3
2104 ; AVX512BW-NEXT: kshiftrw $1, %k3, %k3
2105 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
2106 ; AVX512BW-NEXT: korw %k1, %k3, %k1
2107 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm8 {%k1} {z}
2108 ; AVX512BW-NEXT: kshiftrq $16, %k0, %k1
2109 ; AVX512BW-NEXT: kandw %k5, %k1, %k3
2110 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
2111 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k6
2112 ; AVX512BW-NEXT: korw %k6, %k3, %k3
2113 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
2114 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
2115 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k1
2116 ; AVX512BW-NEXT: korw %k1, %k3, %k1
2117 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
2118 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
2119 ; AVX512BW-NEXT: kshiftrq $17, %k0, %k3
2120 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
2121 ; AVX512BW-NEXT: kshiftrw $12, %k3, %k6
2122 ; AVX512BW-NEXT: korw %k6, %k1, %k1
2123 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
2124 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
2125 ; AVX512BW-NEXT: kshiftrw $11, %k3, %k6
2126 ; AVX512BW-NEXT: korw %k6, %k1, %k1
2127 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
2128 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
2129 ; AVX512BW-NEXT: kshiftrw $10, %k3, %k3
2130 ; AVX512BW-NEXT: korw %k3, %k1, %k1
2131 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
2132 ; AVX512BW-NEXT: kshiftrq $18, %k0, %k3
2133 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
2134 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k6
2135 ; AVX512BW-NEXT: korw %k6, %k1, %k1
2136 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
2137 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
2138 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k6
2139 ; AVX512BW-NEXT: korw %k6, %k1, %k1
2140 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
2141 ; AVX512BW-NEXT: kshiftrw $7, %k3, %k3
2142 ; AVX512BW-NEXT: korw %k3, %k1, %k1
2143 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
2144 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
2145 ; AVX512BW-NEXT: kshiftrq $19, %k0, %k3
2146 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
2147 ; AVX512BW-NEXT: kshiftrw $6, %k3, %k6
2148 ; AVX512BW-NEXT: korw %k6, %k1, %k1
2149 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
2150 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
2151 ; AVX512BW-NEXT: kshiftrw $5, %k3, %k6
2152 ; AVX512BW-NEXT: korw %k6, %k1, %k1
2153 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
2154 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
2155 ; AVX512BW-NEXT: kshiftrw $4, %k3, %k3
2156 ; AVX512BW-NEXT: korw %k3, %k1, %k1
2157 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
2158 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
2159 ; AVX512BW-NEXT: kshiftrq $20, %k0, %k3
2160 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k6
2161 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
2162 ; AVX512BW-NEXT: korw %k7, %k1, %k1
2163 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
2164 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
2165 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
2166 ; AVX512BW-NEXT: korw %k6, %k1, %k1
2167 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
2168 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
2169 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k3
2170 ; AVX512BW-NEXT: korw %k3, %k1, %k1
2171 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
2172 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
2173 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
2174 ; AVX512BW-NEXT: korw %k2, %k1, %k1
2175 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm9 {%k1} {z}
2176 ; AVX512BW-NEXT: kshiftrq $11, %k0, %k1
2177 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k2
2178 ; AVX512BW-NEXT: kshiftrq $10, %k0, %k3
2179 ; AVX512BW-NEXT: kmovq %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
2180 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2181 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
2182 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k6
2183 ; AVX512BW-NEXT: korw %k6, %k3, %k3
2184 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2185 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
2186 ; AVX512BW-NEXT: kshiftrw $13, %k2, %k6
2187 ; AVX512BW-NEXT: korw %k6, %k3, %k3
2188 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2189 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
2190 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k2
2191 ; AVX512BW-NEXT: korw %k2, %k3, %k2
2192 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2193 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
2194 ; AVX512BW-NEXT: kshiftrq $12, %k0, %k3
2195 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
2196 ; AVX512BW-NEXT: kshiftrw $11, %k3, %k6
2197 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2198 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2199 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
2200 ; AVX512BW-NEXT: kshiftrw $10, %k3, %k6
2201 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2202 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2203 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
2204 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k3
2205 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2206 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
2207 ; AVX512BW-NEXT: kshiftrq $13, %k0, %k3
2208 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
2209 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k6
2210 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2211 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2212 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
2213 ; AVX512BW-NEXT: kshiftrw $7, %k3, %k6
2214 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2215 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
2216 ; AVX512BW-NEXT: kshiftrw $6, %k3, %k3
2217 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2218 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
2219 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
2220 ; AVX512BW-NEXT: kshiftrq $14, %k0, %k3
2221 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
2222 ; AVX512BW-NEXT: kshiftrw $5, %k3, %k6
2223 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2224 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2225 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
2226 ; AVX512BW-NEXT: kshiftrw $4, %k3, %k6
2227 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2228 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
2229 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
2230 ; AVX512BW-NEXT: kshiftrw $3, %k3, %k3
2231 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2232 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
2233 ; AVX512BW-NEXT: kshiftrq $15, %k0, %k3
2234 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k6
2235 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
2236 ; AVX512BW-NEXT: korw %k7, %k2, %k2
2237 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
2238 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
2239 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k3
2240 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2241 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
2242 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
2243 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2244 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm10 {%k2} {z}
2245 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 8-byte Reload
2246 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
2247 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
2248 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
2249 ; AVX512BW-NEXT: kshiftrw $14, %k3, %k3
2250 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2251 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
2252 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
2253 ; AVX512BW-NEXT: kshiftrq $6, %k0, %k3
2254 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
2255 ; AVX512BW-NEXT: kshiftrw $13, %k3, %k6
2256 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2257 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
2258 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
2259 ; AVX512BW-NEXT: kshiftrw $12, %k3, %k6
2260 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2261 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
2262 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
2263 ; AVX512BW-NEXT: kshiftrw $11, %k3, %k3
2264 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2265 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
2266 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
2267 ; AVX512BW-NEXT: kshiftrq $7, %k0, %k3
2268 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
2269 ; AVX512BW-NEXT: kshiftrw $10, %k3, %k6
2270 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2271 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
2272 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
2273 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k6
2274 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2275 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
2276 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
2277 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k3
2278 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2279 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
2280 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
2281 ; AVX512BW-NEXT: kshiftrq $8, %k0, %k3
2282 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
2283 ; AVX512BW-NEXT: kshiftrw $7, %k3, %k6
2284 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2285 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
2286 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
2287 ; AVX512BW-NEXT: kshiftrw $6, %k3, %k6
2288 ; AVX512BW-NEXT: korw %k6, %k2, %k2
2289 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
2290 ; AVX512BW-NEXT: kshiftrw $5, %k3, %k3
2291 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2292 ; AVX512BW-NEXT: kshiftrq $9, %k0, %k0
2293 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
2294 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
2295 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k3
2296 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2297 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
2298 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k3
2299 ; AVX512BW-NEXT: korw %k3, %k2, %k2
2300 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2301 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
2302 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k0
2303 ; AVX512BW-NEXT: korw %k0, %k2, %k0
2304 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
2305 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 8-byte Reload
2306 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k2
2307 ; AVX512BW-NEXT: korw %k2, %k0, %k0
2308 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
2309 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
2310 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
2311 ; AVX512BW-NEXT: korw %k1, %k0, %k1
2312 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm11 {%k1} {z}
2313 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 64(%rdx)
2314 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 128(%rdx)
2315 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 192(%rdx)
2316 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 256(%rdx)
2317 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 320(%rdx)
2318 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 384(%rdx)
2319 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 448(%rdx)
2320 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 512(%rdx)
2321 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 576(%rdx)
2322 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 640(%rdx)
2323 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 704(%rdx)
2324 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
2325 ; AVX512BW-NEXT: vzeroupper
2326 ; AVX512BW-NEXT: retq
2327 %src.mask = load <64 x i1>, ptr %in.maskvec, align 64
2328 %tgt.mask = shufflevector <64 x i1> %src.mask, <64 x i1> poison, <192 x i32> <i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31, i32 32, i32 32, i32 32, i32 33, i32 33, i32 33, i32 34, i32 34, i32 34, i32 35, i32 35, i32 35, i32 36, i32 36, i32 36, i32 37, i32 37, i32 37, i32 38, i32 38, i32 38, i32 39, i32 39, i32 39, i32 40, i32 40, i32 40, i32 41, i32 41, i32 41, i32 42, i32 42, i32 42, i32 43, i32 43, i32 43, i32 44, i32 44, i32 44, i32 45, i32 45, i32 45, i32 46, i32 46, i32 46, i32 47, i32 47, i32 47, i32 48, i32 48, i32 48, i32 49, i32 49, i32 49, i32 50, i32 50, i32 50, i32 51, i32 51, i32 51, i32 52, i32 52, i32 52, i32 53, i32 53, i32 53, i32 54, i32 54, i32 54, i32 55, i32 55, i32 55, i32 56, i32 56, i32 56, i32 57, i32 57, i32 57, i32 58, i32 58, i32 58, i32 59, i32 59, i32 59, i32 60, i32 60, i32 60, i32 61, i32 61, i32 61, i32 62, i32 62, i32 62, i32 63, i32 63, i32 63>
2329 %data = call <192 x i32> @llvm.masked.load.v192i32.p0(ptr %in.vec, i32 64, <192 x i1> %tgt.mask, <192 x i32> poison)
2330 store <192 x i32> %data, ptr %out.vec, align 64
2331 ret void
2332 }
2334 define void @mask_replication_factor4_vf2(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
2335 ; AVX512F-SLOW-LABEL: mask_replication_factor4_vf2:
2336 ; AVX512F-SLOW: # %bb.0:
2337 ; AVX512F-SLOW-NEXT: kmovw (%rdi), %k1
2338 ; AVX512F-SLOW-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
2339 ; AVX512F-SLOW-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
2340 ; AVX512F-SLOW-NEXT: vpmovsxdq %xmm0, %xmm0
2341 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
2342 ; AVX512F-SLOW-NEXT: vptestmd %ymm0, %ymm0, %k1
2343 ; AVX512F-SLOW-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
2344 ; AVX512F-SLOW-NEXT: vmovdqa %ymm0, (%rdx)
2345 ; AVX512F-SLOW-NEXT: vzeroupper
2346 ; AVX512F-SLOW-NEXT: retq
2348 ; AVX512F-FAST-LABEL: mask_replication_factor4_vf2:
2349 ; AVX512F-FAST: # %bb.0:
2350 ; AVX512F-FAST-NEXT: kmovw (%rdi), %k1
2351 ; AVX512F-FAST-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
2352 ; AVX512F-FAST-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
2353 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [0,0,0,0,1,1,1,1]
2354 ; AVX512F-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
2355 ; AVX512F-FAST-NEXT: vptestmd %ymm0, %ymm0, %k1
2356 ; AVX512F-FAST-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
2357 ; AVX512F-FAST-NEXT: vmovdqa %ymm0, (%rdx)
2358 ; AVX512F-FAST-NEXT: vzeroupper
2359 ; AVX512F-FAST-NEXT: retq
2361 ; AVX512DQ-SLOW-LABEL: mask_replication_factor4_vf2:
2362 ; AVX512DQ-SLOW: # %bb.0:
2363 ; AVX512DQ-SLOW-NEXT: kmovb (%rdi), %k0
2364 ; AVX512DQ-SLOW-NEXT: vpmovm2d %k0, %ymm0
2365 ; AVX512DQ-SLOW-NEXT: vpmovsxdq %xmm0, %xmm0
2366 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
2367 ; AVX512DQ-SLOW-NEXT: vpmovd2m %ymm0, %k1
2368 ; AVX512DQ-SLOW-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
2369 ; AVX512DQ-SLOW-NEXT: vmovdqa %ymm0, (%rdx)
2370 ; AVX512DQ-SLOW-NEXT: vzeroupper
2371 ; AVX512DQ-SLOW-NEXT: retq
2373 ; AVX512DQ-FAST-LABEL: mask_replication_factor4_vf2:
2374 ; AVX512DQ-FAST: # %bb.0:
2375 ; AVX512DQ-FAST-NEXT: kmovb (%rdi), %k0
2376 ; AVX512DQ-FAST-NEXT: vpmovm2d %k0, %ymm0
2377 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [0,0,0,0,1,1,1,1]
2378 ; AVX512DQ-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
2379 ; AVX512DQ-FAST-NEXT: vpmovd2m %ymm0, %k1
2380 ; AVX512DQ-FAST-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
2381 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm0, (%rdx)
2382 ; AVX512DQ-FAST-NEXT: vzeroupper
2383 ; AVX512DQ-FAST-NEXT: retq
2385 ; AVX512BW-SLOW-LABEL: mask_replication_factor4_vf2:
2386 ; AVX512BW-SLOW: # %bb.0:
2387 ; AVX512BW-SLOW-NEXT: kmovw (%rdi), %k1
2388 ; AVX512BW-SLOW-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
2389 ; AVX512BW-SLOW-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
2390 ; AVX512BW-SLOW-NEXT: vpmovsxdq %xmm0, %xmm0
2391 ; AVX512BW-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
2392 ; AVX512BW-SLOW-NEXT: vptestmd %ymm0, %ymm0, %k1
2393 ; AVX512BW-SLOW-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
2394 ; AVX512BW-SLOW-NEXT: vmovdqa %ymm0, (%rdx)
2395 ; AVX512BW-SLOW-NEXT: vzeroupper
2396 ; AVX512BW-SLOW-NEXT: retq
2398 ; AVX512BW-FAST-LABEL: mask_replication_factor4_vf2:
2399 ; AVX512BW-FAST: # %bb.0:
2400 ; AVX512BW-FAST-NEXT: kmovw (%rdi), %k1
2401 ; AVX512BW-FAST-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
2402 ; AVX512BW-FAST-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
2403 ; AVX512BW-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [0,0,0,0,1,1,1,1]
2404 ; AVX512BW-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
2405 ; AVX512BW-FAST-NEXT: vptestmd %ymm0, %ymm0, %k1
2406 ; AVX512BW-FAST-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
2407 ; AVX512BW-FAST-NEXT: vmovdqa %ymm0, (%rdx)
2408 ; AVX512BW-FAST-NEXT: vzeroupper
2409 ; AVX512BW-FAST-NEXT: retq
2411 ; AVX512VBMI-SLOW-LABEL: mask_replication_factor4_vf2:
2412 ; AVX512VBMI-SLOW: # %bb.0:
2413 ; AVX512VBMI-SLOW-NEXT: kmovw (%rdi), %k1
2414 ; AVX512VBMI-SLOW-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
2415 ; AVX512VBMI-SLOW-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
2416 ; AVX512VBMI-SLOW-NEXT: vpmovsxdq %xmm0, %xmm0
2417 ; AVX512VBMI-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,0,1,1]
2418 ; AVX512VBMI-SLOW-NEXT: vptestmd %ymm0, %ymm0, %k1
2419 ; AVX512VBMI-SLOW-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
2420 ; AVX512VBMI-SLOW-NEXT: vmovdqa %ymm0, (%rdx)
2421 ; AVX512VBMI-SLOW-NEXT: vzeroupper
2422 ; AVX512VBMI-SLOW-NEXT: retq
2424 ; AVX512VBMI-FAST-LABEL: mask_replication_factor4_vf2:
2425 ; AVX512VBMI-FAST: # %bb.0:
2426 ; AVX512VBMI-FAST-NEXT: kmovw (%rdi), %k1
2427 ; AVX512VBMI-FAST-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
2428 ; AVX512VBMI-FAST-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
2429 ; AVX512VBMI-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [0,0,0,0,1,1,1,1]
2430 ; AVX512VBMI-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
2431 ; AVX512VBMI-FAST-NEXT: vptestmd %ymm0, %ymm0, %k1
2432 ; AVX512VBMI-FAST-NEXT: vmovdqa32 (%rsi), %ymm0 {%k1} {z}
2433 ; AVX512VBMI-FAST-NEXT: vmovdqa %ymm0, (%rdx)
2434 ; AVX512VBMI-FAST-NEXT: vzeroupper
2435 ; AVX512VBMI-FAST-NEXT: retq
2436 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
2437 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <2 x i32> <i32 0, i32 1>
2438 %tgt.mask = shufflevector <2 x i1> %src.mask, <2 x i1> poison, <8 x i32> <i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1>
2439 %data = call <8 x i32> @llvm.masked.load.v8i32.p0(ptr %in.vec, i32 64, <8 x i1> %tgt.mask, <8 x i32> poison)
2440 %data.padded = shufflevector <8 x i32> %data, <8 x i32> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
2441 store <8 x i32> %data, ptr %out.vec, align 64
2442 ret void
2443 }
2445 define void @mask_replication_factor4_vf4(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
2446 ; AVX512F-ONLY-LABEL: mask_replication_factor4_vf4:
2447 ; AVX512F-ONLY: # %bb.0:
2448 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
2449 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
2450 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3]
2451 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
2452 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
2453 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
2454 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
2455 ; AVX512F-ONLY-NEXT: vzeroupper
2456 ; AVX512F-ONLY-NEXT: retq
2458 ; AVX512DQ-LABEL: mask_replication_factor4_vf4:
2459 ; AVX512DQ: # %bb.0:
2460 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
2461 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
2462 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3]
2463 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
2464 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
2465 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
2466 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
2467 ; AVX512DQ-NEXT: vzeroupper
2468 ; AVX512DQ-NEXT: retq
2470 ; AVX512BW-LABEL: mask_replication_factor4_vf4:
2471 ; AVX512BW: # %bb.0:
2472 ; AVX512BW-NEXT: kmovw (%rdi), %k1
2473 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
2474 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3]
2475 ; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm0
2476 ; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1
2477 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
2478 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
2479 ; AVX512BW-NEXT: vzeroupper
2480 ; AVX512BW-NEXT: retq
2481 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
2482 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
2483 %tgt.mask = shufflevector <4 x i1> %src.mask, <4 x i1> poison, <16 x i32> <i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3>
2484 %data = call <16 x i32> @llvm.masked.load.v16i32.p0(ptr %in.vec, i32 64, <16 x i1> %tgt.mask, <16 x i32> poison)
2485 store <16 x i32> %data, ptr %out.vec, align 64
2486 ret void
2487 }
2489 define void @mask_replication_factor4_vf8(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
2490 ; AVX512F-ONLY-LABEL: mask_replication_factor4_vf8:
2491 ; AVX512F-ONLY: # %bb.0:
2492 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
2493 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
2494 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7]
2495 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
2496 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
2497 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3]
2498 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
2499 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k2
2500 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
2501 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
2502 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 64(%rdx)
2503 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
2504 ; AVX512F-ONLY-NEXT: vzeroupper
2505 ; AVX512F-ONLY-NEXT: retq
2507 ; AVX512DQ-LABEL: mask_replication_factor4_vf8:
2508 ; AVX512DQ: # %bb.0:
2509 ; AVX512DQ-NEXT: kmovb (%rdi), %k0
2510 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
2511 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7]
2512 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
2513 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
2514 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3]
2515 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
2516 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k2
2517 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
2518 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
2519 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 64(%rdx)
2520 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
2521 ; AVX512DQ-NEXT: vzeroupper
2522 ; AVX512DQ-NEXT: retq
2524 ; AVX512BW-LABEL: mask_replication_factor4_vf8:
2525 ; AVX512BW: # %bb.0:
2526 ; AVX512BW-NEXT: kmovw (%rdi), %k0
2527 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0
2528 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7]
2529 ; AVX512BW-NEXT: vpermw %zmm0, %zmm1, %zmm0
2530 ; AVX512BW-NEXT: vpmovw2m %zmm0, %k1
2531 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
2532 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k1
2533 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
2534 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%rdx)
2535 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
2536 ; AVX512BW-NEXT: vzeroupper
2537 ; AVX512BW-NEXT: retq
2538 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
2539 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
2540 %tgt.mask = shufflevector <8 x i1> %src.mask, <8 x i1> poison, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7>
2541 %data = call <32 x i32> @llvm.masked.load.v32i32.p0(ptr %in.vec, i32 64, <32 x i1> %tgt.mask, <32 x i32> poison)
2542 store <32 x i32> %data, ptr %out.vec, align 64
2543 ret void
2544 }
2546 define void @mask_replication_factor4_vf16(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
2547 ; AVX512F-ONLY-LABEL: mask_replication_factor4_vf16:
2548 ; AVX512F-ONLY: # %bb.0:
2549 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
2550 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
2551 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [8,8,8,8,9,9,9,9,10,10,10,10,11,11,11,11]
2552 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
2553 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
2554 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [12,12,12,12,13,13,13,13,14,14,14,14,15,15,15,15]
2555 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
2556 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
2557 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3]
2558 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
2559 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k3
2560 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7]
2561 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
2562 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k4
2563 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k4} {z}
2564 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k3} {z}
2565 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k2} {z}
2566 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k1} {z}
2567 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
2568 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
2569 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
2570 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
2571 ; AVX512F-ONLY-NEXT: vzeroupper
2572 ; AVX512F-ONLY-NEXT: retq
2574 ; AVX512DQ-LABEL: mask_replication_factor4_vf16:
2575 ; AVX512DQ: # %bb.0:
2576 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
2577 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
2578 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [8,8,8,8,9,9,9,9,10,10,10,10,11,11,11,11]
2579 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
2580 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
2581 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [12,12,12,12,13,13,13,13,14,14,14,14,15,15,15,15]
2582 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
2583 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k2
2584 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3]
2585 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
2586 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k3
2587 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7]
2588 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
2589 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k4
2590 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k4} {z}
2591 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm1 {%k3} {z}
2592 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k2} {z}
2593 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k1} {z}
2594 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 128(%rdx)
2595 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 192(%rdx)
2596 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, (%rdx)
2597 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 64(%rdx)
2598 ; AVX512DQ-NEXT: vzeroupper
2599 ; AVX512DQ-NEXT: retq
2601 ; AVX512BW-ONLY-LABEL: mask_replication_factor4_vf16:
2602 ; AVX512BW-ONLY: # %bb.0:
2603 ; AVX512BW-ONLY-NEXT: kmovq (%rdi), %k0
2604 ; AVX512BW-ONLY-NEXT: vpmovm2b %k0, %zmm0
2605 ; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
2606 ; AVX512BW-ONLY-NEXT: vpshufb {{.*#+}} zmm0 = zmm0[0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,20,20,20,20,21,21,21,21,22,22,22,22,23,23,23,23,40,40,40,40,41,41,41,41,42,42,42,42,43,43,43,43,60,60,60,60,61,61,61,61,62,62,62,62,63,63,63,63]
2607 ; AVX512BW-ONLY-NEXT: vpmovb2m %zmm0, %k1
2608 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
2609 ; AVX512BW-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k2} {z}
2610 ; AVX512BW-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
2611 ; AVX512BW-ONLY-NEXT: kshiftrq $32, %k1, %k1
2612 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
2613 ; AVX512BW-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k2} {z}
2614 ; AVX512BW-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k1} {z}
2615 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
2616 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
2617 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
2618 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
2619 ; AVX512BW-ONLY-NEXT: vzeroupper
2620 ; AVX512BW-ONLY-NEXT: retq
2622 ; AVX512VBMI-ONLY-LABEL: mask_replication_factor4_vf16:
2623 ; AVX512VBMI-ONLY: # %bb.0:
2624 ; AVX512VBMI-ONLY-NEXT: kmovq (%rdi), %k0
2625 ; AVX512VBMI-ONLY-NEXT: vpmovm2b %k0, %zmm0
2626 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,11,11,11,11,12,12,12,12,13,13,13,13,14,14,14,14,15,15,15,15]
2627 ; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm0
2628 ; AVX512VBMI-ONLY-NEXT: vpmovb2m %zmm0, %k1
2629 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
2630 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k2} {z}
2631 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
2632 ; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k1, %k1
2633 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
2634 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k2} {z}
2635 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k1} {z}
2636 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
2637 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
2638 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
2639 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
2640 ; AVX512VBMI-ONLY-NEXT: vzeroupper
2641 ; AVX512VBMI-ONLY-NEXT: retq
2642 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
2643 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
2644 %tgt.mask = shufflevector <16 x i1> %src.mask, <16 x i1> poison, <64 x i32> <i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15>
2645 %data = call <64 x i32> @llvm.masked.load.v64i32.p0(ptr %in.vec, i32 64, <64 x i1> %tgt.mask, <64 x i32> poison)
2646 store <64 x i32> %data, ptr %out.vec, align 64
2647 ret void
2648 }
2650 define void @mask_replication_factor4_vf32(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
2651 ; AVX512F-ONLY-LABEL: mask_replication_factor4_vf32:
2652 ; AVX512F-ONLY: # %bb.0:
2653 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k4
2654 ; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
2655 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
2656 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [8,8,8,8,9,9,9,9,10,10,10,10,11,11,11,11]
2657 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm2
2658 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
2659 ; AVX512F-ONLY-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
2660 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm2 = [12,12,12,12,13,13,13,13,14,14,14,14,15,15,15,15]
2661 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm3
2662 ; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k2
2663 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3]
2664 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm3, %zmm4
2665 ; AVX512F-ONLY-NEXT: vptestmd %zmm4, %zmm4, %k3
2666 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm4 = [4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7]
2667 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm4, %zmm0
2668 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k5
2669 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k4} {z}
2670 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
2671 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k4
2672 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm1
2673 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k6
2674 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm3, %zmm1
2675 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k7
2676 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm4, %zmm0
2677 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
2678 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
2679 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k7} {z}
2680 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k6} {z}
2681 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k4} {z}
2682 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k5} {z}
2683 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k3} {z}
2684 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k2} {z}
2685 ; AVX512F-ONLY-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2686 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k1} {z}
2687 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 384(%rdx)
2688 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 448(%rdx)
2689 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 256(%rdx)
2690 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 320(%rdx)
2691 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
2692 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
2693 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
2694 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
2695 ; AVX512F-ONLY-NEXT: vzeroupper
2696 ; AVX512F-ONLY-NEXT: retq
2698 ; AVX512DQ-LABEL: mask_replication_factor4_vf32:
2699 ; AVX512DQ: # %bb.0:
2700 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
2701 ; AVX512DQ-NEXT: kmovw 2(%rdi), %k1
2702 ; AVX512DQ-NEXT: vpmovm2d %k1, %zmm0
2703 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [8,8,8,8,9,9,9,9,10,10,10,10,11,11,11,11]
2704 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm2
2705 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
2706 ; AVX512DQ-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
2707 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm2 = [12,12,12,12,13,13,13,13,14,14,14,14,15,15,15,15]
2708 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm3
2709 ; AVX512DQ-NEXT: vpmovd2m %zmm3, %k2
2710 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3]
2711 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm3, %zmm4
2712 ; AVX512DQ-NEXT: vpmovd2m %zmm4, %k3
2713 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm4 = [4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7]
2714 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm4, %zmm0
2715 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k4
2716 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
2717 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
2718 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k5
2719 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm1
2720 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k6
2721 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm3, %zmm1
2722 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k7
2723 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm4, %zmm0
2724 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
2725 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
2726 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm1 {%k7} {z}
2727 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k6} {z}
2728 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k5} {z}
2729 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k4} {z}
2730 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k3} {z}
2731 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k2} {z}
2732 ; AVX512DQ-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
2733 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k1} {z}
2734 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 384(%rdx)
2735 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 448(%rdx)
2736 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 256(%rdx)
2737 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 320(%rdx)
2738 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 128(%rdx)
2739 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 192(%rdx)
2740 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, (%rdx)
2741 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 64(%rdx)
2742 ; AVX512DQ-NEXT: vzeroupper
2743 ; AVX512DQ-NEXT: retq
2745 ; AVX512BW-ONLY-LABEL: mask_replication_factor4_vf32:
2746 ; AVX512BW-ONLY: # %bb.0:
2747 ; AVX512BW-ONLY-NEXT: kmovd (%rdi), %k0
2748 ; AVX512BW-ONLY-NEXT: vpmovm2b %k0, %zmm0
2749 ; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm0[2,3,2,3,2,3,2,3]
2750 ; AVX512BW-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,11,11,11,11,12,12,12,12,13,13,13,13,14,14,14,14,15,15,15,15]
2751 ; AVX512BW-ONLY-NEXT: vpshufb %zmm2, %zmm1, %zmm1
2752 ; AVX512BW-ONLY-NEXT: vpmovb2m %zmm1, %k1
2753 ; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
2754 ; AVX512BW-ONLY-NEXT: vpshufb %zmm2, %zmm0, %zmm0
2755 ; AVX512BW-ONLY-NEXT: vpmovb2m %zmm0, %k2
2756 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k2, %k3
2757 ; AVX512BW-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k3} {z}
2758 ; AVX512BW-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
2759 ; AVX512BW-ONLY-NEXT: kshiftrq $32, %k2, %k2
2760 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k2, %k3
2761 ; AVX512BW-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k3} {z}
2762 ; AVX512BW-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k2} {z}
2763 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
2764 ; AVX512BW-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k2} {z}
2765 ; AVX512BW-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k1} {z}
2766 ; AVX512BW-ONLY-NEXT: kshiftrq $32, %k1, %k1
2767 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
2768 ; AVX512BW-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k2} {z}
2769 ; AVX512BW-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k1} {z}
2770 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm7, 384(%rdx)
2771 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm6, 448(%rdx)
2772 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm5, 256(%rdx)
2773 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm4, 320(%rdx)
2774 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
2775 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
2776 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
2777 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
2778 ; AVX512BW-ONLY-NEXT: vzeroupper
2779 ; AVX512BW-ONLY-NEXT: retq
2781 ; AVX512VBMI-ONLY-LABEL: mask_replication_factor4_vf32:
2782 ; AVX512VBMI-ONLY: # %bb.0:
2783 ; AVX512VBMI-ONLY-NEXT: kmovd (%rdi), %k0
2784 ; AVX512VBMI-ONLY-NEXT: vpmovm2b %k0, %zmm0
2785 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [16,16,16,16,17,17,17,17,18,18,18,18,19,19,19,19,20,20,20,20,21,21,21,21,22,22,22,22,23,23,23,23,24,24,24,24,25,25,25,25,26,26,26,26,27,27,27,27,28,28,28,28,29,29,29,29,30,30,30,30,31,31,31,31]
2786 ; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm1
2787 ; AVX512VBMI-ONLY-NEXT: vpmovb2m %zmm1, %k1
2788 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,11,11,11,11,12,12,12,12,13,13,13,13,14,14,14,14,15,15,15,15]
2789 ; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm0
2790 ; AVX512VBMI-ONLY-NEXT: vpmovb2m %zmm0, %k2
2791 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k2, %k3
2792 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k3} {z}
2793 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
2794 ; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k2, %k2
2795 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k2, %k3
2796 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k3} {z}
2797 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k2} {z}
2798 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
2799 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k2} {z}
2800 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k1} {z}
2801 ; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k1, %k1
2802 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
2803 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k2} {z}
2804 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k1} {z}
2805 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm7, 384(%rdx)
2806 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm6, 448(%rdx)
2807 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm5, 256(%rdx)
2808 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm4, 320(%rdx)
2809 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
2810 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
2811 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
2812 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
2813 ; AVX512VBMI-ONLY-NEXT: vzeroupper
2814 ; AVX512VBMI-ONLY-NEXT: retq
2815 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
2816 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
2817 %tgt.mask = shufflevector <32 x i1> %src.mask, <32 x i1> poison, <128 x i32> <i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31, i32 31>
2818 %data = call <128 x i32> @llvm.masked.load.v128i32.p0(ptr %in.vec, i32 64, <128 x i1> %tgt.mask, <128 x i32> poison)
2819 store <128 x i32> %data, ptr %out.vec, align 64
2820 ret void
2821 }
2823 define void @mask_replication_factor4_vf64(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
2824 ; AVX512F-ONLY-LABEL: mask_replication_factor4_vf64:
2825 ; AVX512F-ONLY: # %bb.0:
2826 ; AVX512F-ONLY-NEXT: kmovw 6(%rdi), %k1
2827 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
2828 ; AVX512F-ONLY-NEXT: kmovw 4(%rdi), %k1
2829 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
2830 ; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
2831 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k1} {z}
2832 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
2833 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm3 = [12,12,12,12,13,13,13,13,14,14,14,14,15,15,15,15]
2834 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm3, %zmm4
2835 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm5 = [8,8,8,8,9,9,9,9,10,10,10,10,11,11,11,11]
2836 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm5, %zmm6
2837 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm7 = [4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7]
2838 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm7, %zmm8
2839 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm9 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3]
2840 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm9, %zmm0
2841 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm3, %zmm10
2842 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm5, %zmm11
2843 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm7, %zmm12
2844 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm9, %zmm1
2845 ; AVX512F-ONLY-NEXT: vpermd %zmm2, %zmm3, %zmm13
2846 ; AVX512F-ONLY-NEXT: vpermd %zmm2, %zmm5, %zmm14
2847 ; AVX512F-ONLY-NEXT: vpermd %zmm2, %zmm7, %zmm15
2848 ; AVX512F-ONLY-NEXT: vpermd %zmm2, %zmm9, %zmm2
2849 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm16, %zmm16, %zmm16 {%k1} {z}
2850 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm3, %zmm3
2851 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm5, %zmm5
2852 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm7, %zmm7
2853 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm9, %zmm9
2854 ; AVX512F-ONLY-NEXT: vptestmd %zmm9, %zmm9, %k1
2855 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm9 {%k1} {z}
2856 ; AVX512F-ONLY-NEXT: vptestmd %zmm7, %zmm7, %k1
2857 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm7 {%k1} {z}
2858 ; AVX512F-ONLY-NEXT: vptestmd %zmm5, %zmm5, %k1
2859 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm5 {%k1} {z}
2860 ; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k1
2861 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k1} {z}
2862 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
2863 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm2 {%k1} {z}
2864 ; AVX512F-ONLY-NEXT: vptestmd %zmm15, %zmm15, %k1
2865 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm15 {%k1} {z}
2866 ; AVX512F-ONLY-NEXT: vptestmd %zmm14, %zmm14, %k1
2867 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm14 {%k1} {z}
2868 ; AVX512F-ONLY-NEXT: vptestmd %zmm13, %zmm13, %k1
2869 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm13 {%k1} {z}
2870 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
2871 ; AVX512F-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm1 {%k1} {z}
2872 ; AVX512F-ONLY-NEXT: vptestmd %zmm12, %zmm12, %k1
2873 ; AVX512F-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm12 {%k1} {z}
2874 ; AVX512F-ONLY-NEXT: vptestmd %zmm11, %zmm11, %k1
2875 ; AVX512F-ONLY-NEXT: vmovdqa32 640(%rsi), %zmm11 {%k1} {z}
2876 ; AVX512F-ONLY-NEXT: vptestmd %zmm10, %zmm10, %k1
2877 ; AVX512F-ONLY-NEXT: vmovdqa32 704(%rsi), %zmm10 {%k1} {z}
2878 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
2879 ; AVX512F-ONLY-NEXT: vmovdqa32 768(%rsi), %zmm0 {%k1} {z}
2880 ; AVX512F-ONLY-NEXT: vptestmd %zmm8, %zmm8, %k1
2881 ; AVX512F-ONLY-NEXT: vmovdqa32 832(%rsi), %zmm8 {%k1} {z}
2882 ; AVX512F-ONLY-NEXT: vptestmd %zmm6, %zmm6, %k1
2883 ; AVX512F-ONLY-NEXT: vmovdqa32 896(%rsi), %zmm6 {%k1} {z}
2884 ; AVX512F-ONLY-NEXT: vptestmd %zmm4, %zmm4, %k1
2885 ; AVX512F-ONLY-NEXT: vmovdqa32 960(%rsi), %zmm4 {%k1} {z}
2886 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 960(%rdx)
2887 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 896(%rdx)
2888 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm8, 832(%rdx)
2889 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 768(%rdx)
2890 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm10, 704(%rdx)
2891 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm11, 640(%rdx)
2892 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm12, 576(%rdx)
2893 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 512(%rdx)
2894 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm13, 448(%rdx)
2895 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm14, 384(%rdx)
2896 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm15, 320(%rdx)
2897 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 256(%rdx)
2898 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 192(%rdx)
2899 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 128(%rdx)
2900 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 64(%rdx)
2901 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm9, (%rdx)
2902 ; AVX512F-ONLY-NEXT: vzeroupper
2903 ; AVX512F-ONLY-NEXT: retq
2905 ; AVX512DQ-LABEL: mask_replication_factor4_vf64:
2906 ; AVX512DQ: # %bb.0:
2907 ; AVX512DQ-NEXT: kmovw 6(%rdi), %k0
2908 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
2909 ; AVX512DQ-NEXT: kmovw 4(%rdi), %k0
2910 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm1
2911 ; AVX512DQ-NEXT: kmovw 2(%rdi), %k0
2912 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm2
2913 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
2914 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm3 = [12,12,12,12,13,13,13,13,14,14,14,14,15,15,15,15]
2915 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm3, %zmm4
2916 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm5 = [8,8,8,8,9,9,9,9,10,10,10,10,11,11,11,11]
2917 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm5, %zmm6
2918 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm7 = [4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7]
2919 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm7, %zmm8
2920 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm9 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3]
2921 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm9, %zmm0
2922 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm3, %zmm10
2923 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm5, %zmm11
2924 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm7, %zmm12
2925 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm9, %zmm1
2926 ; AVX512DQ-NEXT: vpermd %zmm2, %zmm3, %zmm13
2927 ; AVX512DQ-NEXT: vpermd %zmm2, %zmm5, %zmm14
2928 ; AVX512DQ-NEXT: vpermd %zmm2, %zmm7, %zmm15
2929 ; AVX512DQ-NEXT: vpermd %zmm2, %zmm9, %zmm2
2930 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm16
2931 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm3, %zmm3
2932 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm5, %zmm5
2933 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm7, %zmm7
2934 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm9, %zmm9
2935 ; AVX512DQ-NEXT: vpmovd2m %zmm9, %k1
2936 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm9 {%k1} {z}
2937 ; AVX512DQ-NEXT: vpmovd2m %zmm7, %k1
2938 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm7 {%k1} {z}
2939 ; AVX512DQ-NEXT: vpmovd2m %zmm5, %k1
2940 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm5 {%k1} {z}
2941 ; AVX512DQ-NEXT: vpmovd2m %zmm3, %k1
2942 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k1} {z}
2943 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
2944 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm2 {%k1} {z}
2945 ; AVX512DQ-NEXT: vpmovd2m %zmm15, %k1
2946 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm15 {%k1} {z}
2947 ; AVX512DQ-NEXT: vpmovd2m %zmm14, %k1
2948 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm14 {%k1} {z}
2949 ; AVX512DQ-NEXT: vpmovd2m %zmm13, %k1
2950 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm13 {%k1} {z}
2951 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
2952 ; AVX512DQ-NEXT: vmovdqa32 512(%rsi), %zmm1 {%k1} {z}
2953 ; AVX512DQ-NEXT: vpmovd2m %zmm12, %k1
2954 ; AVX512DQ-NEXT: vmovdqa32 576(%rsi), %zmm12 {%k1} {z}
2955 ; AVX512DQ-NEXT: vpmovd2m %zmm11, %k1
2956 ; AVX512DQ-NEXT: vmovdqa32 640(%rsi), %zmm11 {%k1} {z}
2957 ; AVX512DQ-NEXT: vpmovd2m %zmm10, %k1
2958 ; AVX512DQ-NEXT: vmovdqa32 704(%rsi), %zmm10 {%k1} {z}
2959 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
2960 ; AVX512DQ-NEXT: vmovdqa32 768(%rsi), %zmm0 {%k1} {z}
2961 ; AVX512DQ-NEXT: vpmovd2m %zmm8, %k1
2962 ; AVX512DQ-NEXT: vmovdqa32 832(%rsi), %zmm8 {%k1} {z}
2963 ; AVX512DQ-NEXT: vpmovd2m %zmm6, %k1
2964 ; AVX512DQ-NEXT: vmovdqa32 896(%rsi), %zmm6 {%k1} {z}
2965 ; AVX512DQ-NEXT: vpmovd2m %zmm4, %k1
2966 ; AVX512DQ-NEXT: vmovdqa32 960(%rsi), %zmm4 {%k1} {z}
2967 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 960(%rdx)
2968 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 896(%rdx)
2969 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 832(%rdx)
2970 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 768(%rdx)
2971 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 704(%rdx)
2972 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 640(%rdx)
2973 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 576(%rdx)
2974 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 512(%rdx)
2975 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 448(%rdx)
2976 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 384(%rdx)
2977 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 320(%rdx)
2978 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 256(%rdx)
2979 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 192(%rdx)
2980 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 128(%rdx)
2981 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 64(%rdx)
2982 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, (%rdx)
2983 ; AVX512DQ-NEXT: vzeroupper
2984 ; AVX512DQ-NEXT: retq
2986 ; AVX512BW-ONLY-LABEL: mask_replication_factor4_vf64:
2987 ; AVX512BW-ONLY: # %bb.0:
2988 ; AVX512BW-ONLY-NEXT: kmovq (%rdi), %k0
2989 ; AVX512BW-ONLY-NEXT: vpmovm2b %k0, %zmm0
2990 ; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm0[6,7,6,7,6,7,6,7]
2991 ; AVX512BW-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm2 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,11,11,11,11,12,12,12,12,13,13,13,13,14,14,14,14,15,15,15,15]
2992 ; AVX512BW-ONLY-NEXT: vpshufb %zmm2, %zmm1, %zmm1
2993 ; AVX512BW-ONLY-NEXT: vpmovb2m %zmm1, %k1
2994 ; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm0[4,5,4,5,4,5,4,5]
2995 ; AVX512BW-ONLY-NEXT: vpshufb %zmm2, %zmm1, %zmm1
2996 ; AVX512BW-ONLY-NEXT: vpmovb2m %zmm1, %k2
2997 ; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm0[2,3,2,3,2,3,2,3]
2998 ; AVX512BW-ONLY-NEXT: vpshufb %zmm2, %zmm1, %zmm1
2999 ; AVX512BW-ONLY-NEXT: vpmovb2m %zmm1, %k3
3000 ; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
3001 ; AVX512BW-ONLY-NEXT: vpshufb %zmm2, %zmm0, %zmm0
3002 ; AVX512BW-ONLY-NEXT: vpmovb2m %zmm0, %k4
3003 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k4, %k5
3004 ; AVX512BW-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k5} {z}
3005 ; AVX512BW-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k4} {z}
3006 ; AVX512BW-ONLY-NEXT: kshiftrq $32, %k4, %k4
3007 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k4, %k5
3008 ; AVX512BW-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k5} {z}
3009 ; AVX512BW-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k4} {z}
3010 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k3, %k4
3011 ; AVX512BW-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k4} {z}
3012 ; AVX512BW-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k3} {z}
3013 ; AVX512BW-ONLY-NEXT: kshiftrq $32, %k3, %k3
3014 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k3, %k4
3015 ; AVX512BW-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k4} {z}
3016 ; AVX512BW-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k3} {z}
3017 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k2, %k3
3018 ; AVX512BW-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm8 {%k3} {z}
3019 ; AVX512BW-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm9 {%k2} {z}
3020 ; AVX512BW-ONLY-NEXT: kshiftrq $32, %k2, %k2
3021 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k2, %k3
3022 ; AVX512BW-ONLY-NEXT: vmovdqa32 704(%rsi), %zmm10 {%k3} {z}
3023 ; AVX512BW-ONLY-NEXT: vmovdqa32 640(%rsi), %zmm11 {%k2} {z}
3024 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
3025 ; AVX512BW-ONLY-NEXT: vmovdqa32 832(%rsi), %zmm12 {%k2} {z}
3026 ; AVX512BW-ONLY-NEXT: vmovdqa32 768(%rsi), %zmm13 {%k1} {z}
3027 ; AVX512BW-ONLY-NEXT: kshiftrq $32, %k1, %k1
3028 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
3029 ; AVX512BW-ONLY-NEXT: vmovdqa32 960(%rsi), %zmm14 {%k2} {z}
3030 ; AVX512BW-ONLY-NEXT: vmovdqa32 896(%rsi), %zmm15 {%k1} {z}
3031 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm15, 896(%rdx)
3032 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm14, 960(%rdx)
3033 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm13, 768(%rdx)
3034 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm12, 832(%rdx)
3035 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm11, 640(%rdx)
3036 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm10, 704(%rdx)
3037 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm9, 512(%rdx)
3038 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm8, 576(%rdx)
3039 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm7, 384(%rdx)
3040 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm6, 448(%rdx)
3041 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm5, 256(%rdx)
3042 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm4, 320(%rdx)
3043 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
3044 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
3045 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
3046 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
3047 ; AVX512BW-ONLY-NEXT: vzeroupper
3048 ; AVX512BW-ONLY-NEXT: retq
3050 ; AVX512VBMI-ONLY-LABEL: mask_replication_factor4_vf64:
3051 ; AVX512VBMI-ONLY: # %bb.0:
3052 ; AVX512VBMI-ONLY-NEXT: kmovq (%rdi), %k0
3053 ; AVX512VBMI-ONLY-NEXT: vpmovm2b %k0, %zmm0
3054 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [48,48,48,48,49,49,49,49,50,50,50,50,51,51,51,51,52,52,52,52,53,53,53,53,54,54,54,54,55,55,55,55,56,56,56,56,57,57,57,57,58,58,58,58,59,59,59,59,60,60,60,60,61,61,61,61,62,62,62,62,63,63,63,63]
3055 ; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm1
3056 ; AVX512VBMI-ONLY-NEXT: vpmovb2m %zmm1, %k1
3057 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [32,32,32,32,33,33,33,33,34,34,34,34,35,35,35,35,36,36,36,36,37,37,37,37,38,38,38,38,39,39,39,39,40,40,40,40,41,41,41,41,42,42,42,42,43,43,43,43,44,44,44,44,45,45,45,45,46,46,46,46,47,47,47,47]
3058 ; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm1
3059 ; AVX512VBMI-ONLY-NEXT: vpmovb2m %zmm1, %k2
3060 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [16,16,16,16,17,17,17,17,18,18,18,18,19,19,19,19,20,20,20,20,21,21,21,21,22,22,22,22,23,23,23,23,24,24,24,24,25,25,25,25,26,26,26,26,27,27,27,27,28,28,28,28,29,29,29,29,30,30,30,30,31,31,31,31]
3061 ; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm1
3062 ; AVX512VBMI-ONLY-NEXT: vpmovb2m %zmm1, %k3
3063 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,11,11,11,11,12,12,12,12,13,13,13,13,14,14,14,14,15,15,15,15]
3064 ; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm0
3065 ; AVX512VBMI-ONLY-NEXT: vpmovb2m %zmm0, %k4
3066 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k4, %k5
3067 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k5} {z}
3068 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k4} {z}
3069 ; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k4, %k4
3070 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k4, %k5
3071 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k5} {z}
3072 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k4} {z}
3073 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k3, %k4
3074 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k4} {z}
3075 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k3} {z}
3076 ; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k3, %k3
3077 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k3, %k4
3078 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k4} {z}
3079 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k3} {z}
3080 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k2, %k3
3081 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm8 {%k3} {z}
3082 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm9 {%k2} {z}
3083 ; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k2, %k2
3084 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k2, %k3
3085 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 704(%rsi), %zmm10 {%k3} {z}
3086 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 640(%rsi), %zmm11 {%k2} {z}
3087 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
3088 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 832(%rsi), %zmm12 {%k2} {z}
3089 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 768(%rsi), %zmm13 {%k1} {z}
3090 ; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k1, %k1
3091 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
3092 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 960(%rsi), %zmm14 {%k2} {z}
3093 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 896(%rsi), %zmm15 {%k1} {z}
3094 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm15, 896(%rdx)
3095 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm14, 960(%rdx)
3096 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm13, 768(%rdx)
3097 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm12, 832(%rdx)
3098 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm11, 640(%rdx)
3099 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm10, 704(%rdx)
3100 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm9, 512(%rdx)
3101 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm8, 576(%rdx)
3102 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm7, 384(%rdx)
3103 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm6, 448(%rdx)
3104 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm5, 256(%rdx)
3105 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm4, 320(%rdx)
3106 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
3107 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
3108 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
3109 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
3110 ; AVX512VBMI-ONLY-NEXT: vzeroupper
3111 ; AVX512VBMI-ONLY-NEXT: retq
3112 %src.mask = load <64 x i1>, ptr %in.maskvec, align 64
3113 %tgt.mask = shufflevector <64 x i1> %src.mask, <64 x i1> poison, <256 x i32> <i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31, i32 31, i32 32, i32 32, i32 32, i32 32, i32 33, i32 33, i32 33, i32 33, i32 34, i32 34, i32 34, i32 34, i32 35, i32 35, i32 35, i32 35, i32 36, i32 36, i32 36, i32 36, i32 37, i32 37, i32 37, i32 37, i32 38, i32 38, i32 38, i32 38, i32 39, i32 39, i32 39, i32 39, i32 40, i32 40, i32 40, i32 40, i32 41, i32 41, i32 41, i32 41, i32 42, i32 42, i32 42, i32 42, i32 43, i32 43, i32 43, i32 43, i32 44, i32 44, i32 44, i32 44, i32 45, i32 45, i32 45, i32 45, i32 46, i32 46, i32 46, i32 46, i32 47, i32 47, i32 47, i32 47, i32 48, i32 48, i32 48, i32 48, i32 49, i32 49, i32 49, i32 49, i32 50, i32 50, i32 50, i32 50, i32 51, i32 51, i32 51, i32 51, i32 52, i32 52, i32 52, i32 52, i32 53, i32 53, i32 53, i32 53, i32 54, i32 54, i32 54, i32 54, i32 55, i32 55, i32 55, i32 55, i32 56, i32 56, i32 56, i32 56, i32 57, i32 57, i32 57, i32 57, i32 58, i32 58, i32 58, i32 58, i32 59, i32 59, i32 59, i32 59, i32 60, i32 60, i32 60, i32 60, i32 61, i32 61, i32 61, i32 61, i32 62, i32 62, i32 62, i32 62, i32 63, i32 63, i32 63, i32 63>
3114 %data = call <256 x i32> @llvm.masked.load.v256i32.p0(ptr %in.vec, i32 64, <256 x i1> %tgt.mask, <256 x i32> poison)
3115 store <256 x i32> %data, ptr %out.vec, align 64
3116 ret void
3117 }
3119 define void @mask_replication_factor5_vf2(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
3120 ; AVX512F-ONLY-LABEL: mask_replication_factor5_vf2:
3121 ; AVX512F-ONLY: # %bb.0:
3122 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
3123 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
3124 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,1,1,1,1,1,u,u,u,u,u,u>
3125 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
3126 ; AVX512F-ONLY-NEXT: vpslld $31, %zmm0, %zmm0
3127 ; AVX512F-ONLY-NEXT: movw $1023, %ax # imm = 0x3FF
3128 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
3129 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1 {%k1}
3130 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
3131 ; AVX512F-ONLY-NEXT: vextracti32x4 $2, %zmm0, %xmm1
3132 ; AVX512F-ONLY-NEXT: vmovq %xmm1, 32(%rdx)
3133 ; AVX512F-ONLY-NEXT: vmovdqa %ymm0, (%rdx)
3134 ; AVX512F-ONLY-NEXT: vzeroupper
3135 ; AVX512F-ONLY-NEXT: retq
3137 ; AVX512DQ-LABEL: mask_replication_factor5_vf2:
3138 ; AVX512DQ: # %bb.0:
3139 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
3140 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
3141 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,1,1,1,1,1,u,u,u,u,u,u>
3142 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
3143 ; AVX512DQ-NEXT: vpxor %xmm1, %xmm1, %xmm1
3144 ; AVX512DQ-NEXT: movw $1023, %ax # imm = 0x3FF
3145 ; AVX512DQ-NEXT: kmovw %eax, %k1
3146 ; AVX512DQ-NEXT: vpcmpgtd %zmm0, %zmm1, %k1 {%k1}
3147 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
3148 ; AVX512DQ-NEXT: vextracti32x4 $2, %zmm0, %xmm1
3149 ; AVX512DQ-NEXT: vmovq %xmm1, 32(%rdx)
3150 ; AVX512DQ-NEXT: vmovdqa %ymm0, (%rdx)
3151 ; AVX512DQ-NEXT: vzeroupper
3152 ; AVX512DQ-NEXT: retq
3154 ; AVX512BW-LABEL: mask_replication_factor5_vf2:
3155 ; AVX512BW: # %bb.0:
3156 ; AVX512BW-NEXT: kmovw (%rdi), %k1
3157 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
3158 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,1,1,1,1,1,u,u,u,u,u,u>
3159 ; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm0
3160 ; AVX512BW-NEXT: vpslld $31, %zmm0, %zmm0
3161 ; AVX512BW-NEXT: movw $1023, %ax # imm = 0x3FF
3162 ; AVX512BW-NEXT: kmovd %eax, %k1
3163 ; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1 {%k1}
3164 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
3165 ; AVX512BW-NEXT: vextracti32x4 $2, %zmm0, %xmm1
3166 ; AVX512BW-NEXT: vmovq %xmm1, 32(%rdx)
3167 ; AVX512BW-NEXT: vmovdqa %ymm0, (%rdx)
3168 ; AVX512BW-NEXT: vzeroupper
3169 ; AVX512BW-NEXT: retq
3170 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
3171 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <2 x i32> <i32 0, i32 1>
3172 %tgt.mask = shufflevector <2 x i1> %src.mask, <2 x i1> poison, <10 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1>
3173 %data = call <10 x i32> @llvm.masked.load.v10i32.p0(ptr %in.vec, i32 64, <10 x i1> %tgt.mask, <10 x i32> poison)
3174 %data.padded = shufflevector <10 x i32> %data, <10 x i32> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
3175 store <10 x i32> %data, ptr %out.vec, align 64
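; Same factor-5 replication with a 4-bit source mask, producing a 20-bit mask for 20 x i32.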
3179 define void @mask_replication_factor5_vf4(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
3180 ; AVX512F-ONLY-LABEL: mask_replication_factor5_vf4:
3181 ; AVX512F-ONLY: # %bb.0:
3182 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
3183 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
3184 ; AVX512F-ONLY-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[3,3,3,3]
3185 ; AVX512F-ONLY-NEXT: vpslld $31, %zmm1, %zmm1
3186 ; AVX512F-ONLY-NEXT: movw $15, %ax
3187 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
3188 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1 {%k1}
3189 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3]
3190 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
3191 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k2
3192 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
3193 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
3194 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
3195 ; AVX512F-ONLY-NEXT: vmovdqa %xmm0, 64(%rdx)
3196 ; AVX512F-ONLY-NEXT: vzeroupper
3197 ; AVX512F-ONLY-NEXT: retq
3199 ; AVX512DQ-LABEL: mask_replication_factor5_vf4:
3200 ; AVX512DQ: # %bb.0:
3201 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
3202 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
3203 ; AVX512DQ-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[3,3,3,3]
3204 ; AVX512DQ-NEXT: vpxor %xmm2, %xmm2, %xmm2
3205 ; AVX512DQ-NEXT: movw $15, %ax
3206 ; AVX512DQ-NEXT: kmovw %eax, %k1
3207 ; AVX512DQ-NEXT: vpcmpgtd %zmm1, %zmm2, %k1 {%k1}
3208 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3]
3209 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
3210 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k2
3211 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
3212 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
3213 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, (%rdx)
3214 ; AVX512DQ-NEXT: vmovdqa %xmm0, 64(%rdx)
3215 ; AVX512DQ-NEXT: vzeroupper
3216 ; AVX512DQ-NEXT: retq
3218 ; AVX512BW-LABEL: mask_replication_factor5_vf4:
3219 ; AVX512BW: # %bb.0:
3220 ; AVX512BW-NEXT: kmovd (%rdi), %k0
3221 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0
3222 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3,3,3,3,3,u,u,u,u,u,u,u,u,u,u,u,u>
3223 ; AVX512BW-NEXT: vpermw %zmm0, %zmm1, %zmm0
3224 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1
3225 ; AVX512BW-NEXT: movl $1048575, %eax # imm = 0xFFFFF
3226 ; AVX512BW-NEXT: kmovd %eax, %k1
3227 ; AVX512BW-NEXT: vpcmpgtw %zmm0, %zmm1, %k1 {%k1}
3228 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k2
3229 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k2} {z}
3230 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
3231 ; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rdx)
3232 ; AVX512BW-NEXT: vmovdqa %xmm0, 64(%rdx)
3233 ; AVX512BW-NEXT: vzeroupper
3234 ; AVX512BW-NEXT: retq
3235 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
3236 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
3237 %tgt.mask = shufflevector <4 x i1> %src.mask, <4 x i1> poison, <20 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3>
3238 %data = call <20 x i32> @llvm.masked.load.v20i32.p0(ptr %in.vec, i32 64, <20 x i1> %tgt.mask, <20 x i32> poison)
3239 %data.padded = shufflevector <20 x i32> %data, <20 x i32> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
3240 store <20 x i32> %data, ptr %out.vec, align 64
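; 8-bit source mask replicated 5x into a 40-bit mask for 40 x i32.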
3244 define void @mask_replication_factor5_vf8(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
3245 ; AVX512F-ONLY-LABEL: mask_replication_factor5_vf8:
3246 ; AVX512F-ONLY: # %bb.0:
3247 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
3248 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
3249 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3]
3250 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
3251 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
3252 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k2} {z}
3253 ; AVX512F-ONLY-NEXT: movw $1, %ax
3254 ; AVX512F-ONLY-NEXT: kmovw %eax, %k2
3255 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm1 {%k2}
3256 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
3257 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [3,3,3,3,4,4,4,4,4,5,5,5,5,5,6,6]
3258 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
3259 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k3
3260 ; AVX512F-ONLY-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
3261 ; AVX512F-ONLY-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
3262 ; AVX512F-ONLY-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,7,7,7,7,7]
3263 ; AVX512F-ONLY-NEXT: vpermd %ymm0, %ymm1, %ymm0
3264 ; AVX512F-ONLY-NEXT: vptestmd %ymm0, %ymm0, %k1
3265 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
3266 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm1 {%k1} {z}
3267 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm2 {%k3} {z}
3268 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 64(%rdx)
3269 ; AVX512F-ONLY-NEXT: vmovdqa %ymm1, 128(%rdx)
3270 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
3271 ; AVX512F-ONLY-NEXT: vzeroupper
3272 ; AVX512F-ONLY-NEXT: retq
3274 ; AVX512DQ-LABEL: mask_replication_factor5_vf8:
3275 ; AVX512DQ: # %bb.0:
3276 ; AVX512DQ-NEXT: kmovb (%rdi), %k0
3277 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
3278 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3]
3279 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
3280 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
3281 ; AVX512DQ-NEXT: vpmovm2d %k1, %zmm1
3282 ; AVX512DQ-NEXT: movw $1, %ax
3283 ; AVX512DQ-NEXT: kmovw %eax, %k1
3284 ; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
3285 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
3286 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [3,3,3,3,4,4,4,4,4,5,5,5,5,5,6,6]
3287 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
3288 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k2
3289 ; AVX512DQ-NEXT: vpmovm2d %k0, %ymm0
3290 ; AVX512DQ-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,7,7,7,7,7]
3291 ; AVX512DQ-NEXT: vpermd %ymm0, %ymm1, %ymm0
3292 ; AVX512DQ-NEXT: vpmovd2m %ymm0, %k3
3293 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
3294 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm1 {%k3} {z}
3295 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm2 {%k2} {z}
3296 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 64(%rdx)
3297 ; AVX512DQ-NEXT: vmovdqa %ymm1, 128(%rdx)
3298 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
3299 ; AVX512DQ-NEXT: vzeroupper
3300 ; AVX512DQ-NEXT: retq
3302 ; AVX512BW-ONLY-LABEL: mask_replication_factor5_vf8:
3303 ; AVX512BW-ONLY: # %bb.0:
3304 ; AVX512BW-ONLY-NEXT: kmovw (%rdi), %k0
3305 ; AVX512BW-ONLY-NEXT: vpmovm2b %k0, %zmm0
3306 ; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
3307 ; AVX512BW-ONLY-NEXT: vpshufb {{.*#+}} zmm0 = zmm0[0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3,19,19,19,19,20,20,20,20,20,21,21,21,21,21,22,22,38,38,38,39,39,39,39,39,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u]
3308 ; AVX512BW-ONLY-NEXT: vpxor %xmm1, %xmm1, %xmm1
3309 ; AVX512BW-ONLY-NEXT: movabsq $1099511627775, %rax # imm = 0xFFFFFFFFFF
3310 ; AVX512BW-ONLY-NEXT: kmovq %rax, %k1
3311 ; AVX512BW-ONLY-NEXT: vpcmpgtb %zmm0, %zmm1, %k1 {%k1}
3312 ; AVX512BW-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
3313 ; AVX512BW-ONLY-NEXT: kshiftrq $32, %k1, %k2
3314 ; AVX512BW-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm1 {%k2} {z}
3315 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k1
3316 ; AVX512BW-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm2 {%k1} {z}
3317 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm2, 64(%rdx)
3318 ; AVX512BW-ONLY-NEXT: vmovdqa %ymm1, 128(%rdx)
3319 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
3320 ; AVX512BW-ONLY-NEXT: vzeroupper
3321 ; AVX512BW-ONLY-NEXT: retq
3323 ; AVX512VBMI-ONLY-LABEL: mask_replication_factor5_vf8:
3324 ; AVX512VBMI-ONLY: # %bb.0:
3325 ; AVX512VBMI-ONLY-NEXT: kmovw (%rdi), %k0
3326 ; AVX512VBMI-ONLY-NEXT: vpmovm2b %k0, %zmm0
3327 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3,3,3,3,3,4,4,4,4,4,5,5,5,5,5,6,6,6,6,6,7,7,7,7,7,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u>
3328 ; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm0
3329 ; AVX512VBMI-ONLY-NEXT: vpxor %xmm1, %xmm1, %xmm1
3330 ; AVX512VBMI-ONLY-NEXT: movabsq $1099511627775, %rax # imm = 0xFFFFFFFFFF
3331 ; AVX512VBMI-ONLY-NEXT: kmovq %rax, %k1
3332 ; AVX512VBMI-ONLY-NEXT: vpcmpgtb %zmm0, %zmm1, %k1 {%k1}
3333 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
3334 ; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k1, %k2
3335 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm1 {%k2} {z}
3336 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k1
3337 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm2 {%k1} {z}
3338 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm2, 64(%rdx)
3339 ; AVX512VBMI-ONLY-NEXT: vmovdqa %ymm1, 128(%rdx)
3340 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
3341 ; AVX512VBMI-ONLY-NEXT: vzeroupper
3342 ; AVX512VBMI-ONLY-NEXT: retq
3343 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
3344 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
3345 %tgt.mask = shufflevector <8 x i1> %src.mask, <8 x i1> poison, <40 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7>
3346 %data = call <40 x i32> @llvm.masked.load.v40i32.p0(ptr %in.vec, i32 64, <40 x i1> %tgt.mask, <40 x i32> poison)
3347 %data.padded = shufflevector <40 x i32> %data, <40 x i32> poison, <48 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
3348 store <40 x i32> %data, ptr %out.vec, align 64
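; 16-bit source mask replicated 5x into an 80-bit mask for 80 x i32.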
3352 define void @mask_replication_factor5_vf16(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
3353 ; AVX512F-ONLY-LABEL: mask_replication_factor5_vf16:
3354 ; AVX512F-ONLY: # %bb.0:
3355 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
3356 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
3357 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3]
3358 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
3359 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
3360 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
3361 ; AVX512F-ONLY-NEXT: movw $1, %ax
3362 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
3363 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
3364 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
3365 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [3,3,3,3,4,4,4,4,4,5,5,5,5,5,6,6]
3366 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
3367 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
3368 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [6,6,6,7,7,7,7,7,8,8,8,8,8,9,9,9]
3369 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
3370 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k3
3371 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [9,9,10,10,10,10,10,11,11,11,11,11,12,12,12,12]
3372 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
3373 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k4
3374 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [12,13,13,13,13,13,14,14,14,14,14,15,15,15,15,15]
3375 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
3376 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k5
3377 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
3378 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm1 {%k5} {z}
3379 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k4} {z}
3380 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k3} {z}
3381 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm4 {%k2} {z}
3382 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 64(%rdx)
3383 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
3384 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
3385 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 256(%rdx)
3386 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
3387 ; AVX512F-ONLY-NEXT: vzeroupper
3388 ; AVX512F-ONLY-NEXT: retq
3390 ; AVX512DQ-LABEL: mask_replication_factor5_vf16:
3391 ; AVX512DQ: # %bb.0:
3392 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
3393 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
3394 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3]
3395 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
3396 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k0
3397 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm1
3398 ; AVX512DQ-NEXT: movw $1, %ax
3399 ; AVX512DQ-NEXT: kmovw %eax, %k1
3400 ; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
3401 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
3402 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [3,3,3,3,4,4,4,4,4,5,5,5,5,5,6,6]
3403 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
3404 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k2
3405 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [6,6,6,7,7,7,7,7,8,8,8,8,8,9,9,9]
3406 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
3407 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k3
3408 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [9,9,10,10,10,10,10,11,11,11,11,11,12,12,12,12]
3409 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
3410 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k4
3411 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [12,13,13,13,13,13,14,14,14,14,14,15,15,15,15,15]
3412 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
3413 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k5
3414 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
3415 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm1 {%k5} {z}
3416 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k4} {z}
3417 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k3} {z}
3418 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm4 {%k2} {z}
3419 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 64(%rdx)
3420 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 128(%rdx)
3421 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 192(%rdx)
3422 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 256(%rdx)
3423 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
3424 ; AVX512DQ-NEXT: vzeroupper
3425 ; AVX512DQ-NEXT: retq
3427 ; AVX512BW-LABEL: mask_replication_factor5_vf16:
3428 ; AVX512BW: # %bb.0:
3429 ; AVX512BW-NEXT: kmovw (%rdi), %k1
3430 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
3431 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3]
3432 ; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm1
3433 ; AVX512BW-NEXT: vptestmd %zmm1, %zmm1, %k1
3434 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
3435 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [12,13,13,13,13,13,14,14,14,14,14,15,15,15,15,15]
3436 ; AVX512BW-NEXT: vpermd %zmm0, %zmm2, %zmm2
3437 ; AVX512BW-NEXT: vptestmd %zmm2, %zmm2, %k1
3438 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm2 {%k1} {z}
3439 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [9,9,10,10,10,10,10,11,11,11,11,11,12,12,12,12]
3440 ; AVX512BW-NEXT: vpermd %zmm0, %zmm3, %zmm3
3441 ; AVX512BW-NEXT: vptestmd %zmm3, %zmm3, %k1
3442 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k1} {z}
3443 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [6,6,6,7,7,7,7,7,8,8,8,8,8,9,9,9]
3444 ; AVX512BW-NEXT: vpermd %zmm0, %zmm4, %zmm4
3445 ; AVX512BW-NEXT: vptestmd %zmm4, %zmm4, %k1
3446 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm4 {%k1} {z}
3447 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [3,3,3,3,4,4,4,4,4,5,5,5,5,5,6,6]
3448 ; AVX512BW-NEXT: vpermd %zmm0, %zmm5, %zmm0
3449 ; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1
3450 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
3451 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 64(%rdx)
3452 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 128(%rdx)
3453 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 192(%rdx)
3454 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 256(%rdx)
3455 ; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rdx)
3456 ; AVX512BW-NEXT: vzeroupper
3457 ; AVX512BW-NEXT: retq
3458 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
3459 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
3460 %tgt.mask = shufflevector <16 x i1> %src.mask, <16 x i1> poison, <80 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15>
3461 %data = call <80 x i32> @llvm.masked.load.v80i32.p0(ptr %in.vec, i32 64, <80 x i1> %tgt.mask, <80 x i32> poison)
3462 store <80 x i32> %data, ptr %out.vec, align 64
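; 32-bit source mask replicated 5x into a 160-bit mask for 160 x i32.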
3466 define void @mask_replication_factor5_vf32(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
3467 ; AVX512F-ONLY-LABEL: mask_replication_factor5_vf32:
3468 ; AVX512F-ONLY: # %bb.0:
3469 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
3470 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
3471 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3]
3472 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm2
3473 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
3474 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k1} {z}
3475 ; AVX512F-ONLY-NEXT: movw $1, %ax
3476 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
3477 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
3478 ; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
3479 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k1} {z}
3480 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
3481 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm2 = [3,3,3,3,4,4,4,4,4,5,5,5,5,5,6,6]
3482 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm4
3483 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm5 = [6,6,6,7,7,7,7,7,8,8,8,8,8,9,9,9]
3484 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm5, %zmm6
3485 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm7 = [9,9,10,10,10,10,10,11,11,11,11,11,12,12,12,12]
3486 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm7, %zmm8
3487 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm9 = [12,13,13,13,13,13,14,14,14,14,14,15,15,15,15,15]
3488 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm9, %zmm0
3489 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm1, %zmm1
3490 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm2, %zmm2
3491 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm5, %zmm5
3492 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm7, %zmm7
3493 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm9, %zmm3
3494 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm9 {%k1} {z}
3495 ; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k1
3496 ; AVX512F-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm3 {%k1} {z}
3497 ; AVX512F-ONLY-NEXT: vptestmd %zmm7, %zmm7, %k1
3498 ; AVX512F-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm7 {%k1} {z}
3499 ; AVX512F-ONLY-NEXT: vptestmd %zmm5, %zmm5, %k1
3500 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm5 {%k1} {z}
3501 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
3502 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm2 {%k1} {z}
3503 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
3504 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm1 {%k1} {z}
3505 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
3506 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm0 {%k1} {z}
3507 ; AVX512F-ONLY-NEXT: vptestmd %zmm8, %zmm8, %k1
3508 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm8 {%k1} {z}
3509 ; AVX512F-ONLY-NEXT: vptestmd %zmm6, %zmm6, %k1
3510 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm6 {%k1} {z}
3511 ; AVX512F-ONLY-NEXT: vptestmd %zmm4, %zmm4, %k1
3512 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm4 {%k1} {z}
3513 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 64(%rdx)
3514 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 128(%rdx)
3515 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm8, 192(%rdx)
3516 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 256(%rdx)
3517 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 320(%rdx)
3518 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 384(%rdx)
3519 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 448(%rdx)
3520 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 512(%rdx)
3521 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 576(%rdx)
3522 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm9, (%rdx)
3523 ; AVX512F-ONLY-NEXT: vzeroupper
3524 ; AVX512F-ONLY-NEXT: retq
3526 ; AVX512DQ-LABEL: mask_replication_factor5_vf32:
3527 ; AVX512DQ: # %bb.0:
3528 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
3529 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
3530 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3]
3531 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm2
3532 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k0
3533 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm2
3534 ; AVX512DQ-NEXT: movw $1, %ax
3535 ; AVX512DQ-NEXT: kmovw %eax, %k1
3536 ; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
3537 ; AVX512DQ-NEXT: kmovw 2(%rdi), %k0
3538 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm3
3539 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
3540 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm2 = [3,3,3,3,4,4,4,4,4,5,5,5,5,5,6,6]
3541 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm4
3542 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm5 = [6,6,6,7,7,7,7,7,8,8,8,8,8,9,9,9]
3543 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm5, %zmm6
3544 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm7 = [9,9,10,10,10,10,10,11,11,11,11,11,12,12,12,12]
3545 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm7, %zmm8
3546 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm9 = [12,13,13,13,13,13,14,14,14,14,14,15,15,15,15,15]
3547 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm9, %zmm0
3548 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm1, %zmm1
3549 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm2, %zmm2
3550 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm5, %zmm5
3551 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm7, %zmm7
3552 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm9, %zmm3
3553 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm9 {%k1} {z}
3554 ; AVX512DQ-NEXT: vpmovd2m %zmm3, %k1
3555 ; AVX512DQ-NEXT: vmovdqa32 576(%rsi), %zmm3 {%k1} {z}
3556 ; AVX512DQ-NEXT: vpmovd2m %zmm7, %k1
3557 ; AVX512DQ-NEXT: vmovdqa32 512(%rsi), %zmm7 {%k1} {z}
3558 ; AVX512DQ-NEXT: vpmovd2m %zmm5, %k1
3559 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm5 {%k1} {z}
3560 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
3561 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm2 {%k1} {z}
3562 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
3563 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm1 {%k1} {z}
3564 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
3565 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm0 {%k1} {z}
3566 ; AVX512DQ-NEXT: vpmovd2m %zmm8, %k1
3567 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm8 {%k1} {z}
3568 ; AVX512DQ-NEXT: vpmovd2m %zmm6, %k1
3569 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm6 {%k1} {z}
3570 ; AVX512DQ-NEXT: vpmovd2m %zmm4, %k1
3571 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm4 {%k1} {z}
3572 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 64(%rdx)
3573 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 128(%rdx)
3574 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 192(%rdx)
3575 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 256(%rdx)
3576 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 320(%rdx)
3577 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 384(%rdx)
3578 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 448(%rdx)
3579 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 512(%rdx)
3580 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 576(%rdx)
3581 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, (%rdx)
3582 ; AVX512DQ-NEXT: vzeroupper
3583 ; AVX512DQ-NEXT: retq
3585 ; AVX512BW-LABEL: mask_replication_factor5_vf32:
3586 ; AVX512BW: # %bb.0:
3587 ; AVX512BW-NEXT: kmovd (%rdi), %k5
3588 ; AVX512BW-NEXT: kshiftrd $1, %k5, %k1
3589 ; AVX512BW-NEXT: movw $-3, %ax
3590 ; AVX512BW-NEXT: kmovd %eax, %k6
3591 ; AVX512BW-NEXT: kmovw (%rdi), %k2
3592 ; AVX512BW-NEXT: kandw %k6, %k2, %k3
3593 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
3594 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k4
3595 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3596 ; AVX512BW-NEXT: movw $-5, %ax
3597 ; AVX512BW-NEXT: kmovd %eax, %k4
3598 ; AVX512BW-NEXT: kmovw %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3599 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
3600 ; AVX512BW-NEXT: kshiftrw $13, %k2, %k4
3601 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3602 ; AVX512BW-NEXT: movw $-9, %ax
3603 ; AVX512BW-NEXT: kmovd %eax, %k4
3604 ; AVX512BW-NEXT: kmovw %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3605 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
3606 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k4
3607 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3608 ; AVX512BW-NEXT: movw $-17, %ax
3609 ; AVX512BW-NEXT: kmovd %eax, %k4
3610 ; AVX512BW-NEXT: kmovw %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3611 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
3612 ; AVX512BW-NEXT: kshiftrw $11, %k2, %k2
3613 ; AVX512BW-NEXT: korw %k2, %k3, %k2
3614 ; AVX512BW-NEXT: movw $-33, %ax
3615 ; AVX512BW-NEXT: kmovd %eax, %k3
3616 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3617 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
3618 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
3619 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k3
3620 ; AVX512BW-NEXT: korw %k3, %k2, %k2
3621 ; AVX512BW-NEXT: movw $-65, %ax
3622 ; AVX512BW-NEXT: kmovd %eax, %k3
3623 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3624 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
3625 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k3
3626 ; AVX512BW-NEXT: korw %k3, %k2, %k2
3627 ; AVX512BW-NEXT: movw $-129, %ax
3628 ; AVX512BW-NEXT: kmovd %eax, %k3
3629 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3630 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
3631 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k3
3632 ; AVX512BW-NEXT: korw %k3, %k2, %k2
3633 ; AVX512BW-NEXT: movw $-257, %ax # imm = 0xFEFF
3634 ; AVX512BW-NEXT: kmovd %eax, %k3
3635 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3636 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
3637 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k3
3638 ; AVX512BW-NEXT: korw %k3, %k2, %k2
3639 ; AVX512BW-NEXT: movw $-513, %ax # imm = 0xFDFF
3640 ; AVX512BW-NEXT: kmovd %eax, %k7
3641 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
3642 ; AVX512BW-NEXT: kmovw %k7, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3643 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
3644 ; AVX512BW-NEXT: korw %k1, %k2, %k1
3645 ; AVX512BW-NEXT: movw $-1025, %ax # imm = 0xFBFF
3646 ; AVX512BW-NEXT: kmovd %eax, %k2
3647 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3648 ; AVX512BW-NEXT: kandw %k2, %k1, %k3
3649 ; AVX512BW-NEXT: kshiftrd $2, %k5, %k1
3650 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k2
3651 ; AVX512BW-NEXT: kshiftrw $5, %k2, %k4
3652 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3653 ; AVX512BW-NEXT: movw $-2049, %ax # imm = 0xF7FF
3654 ; AVX512BW-NEXT: kmovd %eax, %k4
3655 ; AVX512BW-NEXT: kmovw %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3656 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
3657 ; AVX512BW-NEXT: kshiftrw $4, %k2, %k4
3658 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3659 ; AVX512BW-NEXT: movw $-4097, %ax # imm = 0xEFFF
3660 ; AVX512BW-NEXT: kmovd %eax, %k4
3661 ; AVX512BW-NEXT: kmovw %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3662 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
3663 ; AVX512BW-NEXT: kshiftrw $3, %k2, %k4
3664 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3665 ; AVX512BW-NEXT: movw $-8193, %ax # imm = 0xDFFF
3666 ; AVX512BW-NEXT: kmovd %eax, %k4
3667 ; AVX512BW-NEXT: kmovw %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3668 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
3669 ; AVX512BW-NEXT: kshiftrw $2, %k2, %k2
3670 ; AVX512BW-NEXT: korw %k2, %k3, %k2
3671 ; AVX512BW-NEXT: movw $-16385, %ax # imm = 0xBFFF
3672 ; AVX512BW-NEXT: kmovd %eax, %k3
3673 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3674 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
3675 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
3676 ; AVX512BW-NEXT: korw %k1, %k2, %k1
3677 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
3678 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
3679 ; AVX512BW-NEXT: kshiftrd $3, %k5, %k2
3680 ; AVX512BW-NEXT: kmovd %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
3681 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
3682 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3683 ; AVX512BW-NEXT: korw %k2, %k1, %k1
3684 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
3685 ; AVX512BW-NEXT: kshiftrd $29, %k5, %k1
3686 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k2
3687 ; AVX512BW-NEXT: kshiftrd $28, %k5, %k1
3688 ; AVX512BW-NEXT: kmovd %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
3689 ; AVX512BW-NEXT: kandw %k6, %k1, %k3
3690 ; AVX512BW-NEXT: kmovw %k6, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3691 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k4
3692 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3693 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
3694 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
3695 ; AVX512BW-NEXT: kshiftrw $13, %k2, %k4
3696 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3697 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3698 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
3699 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k4
3700 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3701 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3702 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
3703 ; AVX512BW-NEXT: kshiftrw $11, %k2, %k4
3704 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3705 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3706 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
3707 ; AVX512BW-NEXT: kshiftrw $10, %k2, %k2
3708 ; AVX512BW-NEXT: korw %k2, %k3, %k2
3709 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3710 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
3711 ; AVX512BW-NEXT: kshiftrd $30, %k5, %k3
3712 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
3713 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k4
3714 ; AVX512BW-NEXT: korw %k4, %k2, %k2
3715 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3716 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
3717 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k4
3718 ; AVX512BW-NEXT: korw %k4, %k2, %k2
3719 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3720 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
3721 ; AVX512BW-NEXT: kshiftrw $7, %k3, %k4
3722 ; AVX512BW-NEXT: korw %k4, %k2, %k2
3723 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
3724 ; AVX512BW-NEXT: kshiftrw $6, %k3, %k4
3725 ; AVX512BW-NEXT: korw %k4, %k2, %k2
3726 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3727 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
3728 ; AVX512BW-NEXT: kshiftrw $5, %k3, %k3
3729 ; AVX512BW-NEXT: korw %k3, %k2, %k2
3730 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3731 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
3732 ; AVX512BW-NEXT: kshiftrd $31, %k5, %k3
3733 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k4
3734 ; AVX512BW-NEXT: kshiftrw $4, %k4, %k7
3735 ; AVX512BW-NEXT: korw %k7, %k2, %k2
3736 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
3737 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
3738 ; AVX512BW-NEXT: kshiftrw $3, %k4, %k7
3739 ; AVX512BW-NEXT: korw %k7, %k2, %k2
3740 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
3741 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
3742 ; AVX512BW-NEXT: kshiftrw $2, %k4, %k7
3743 ; AVX512BW-NEXT: korw %k7, %k2, %k2
3744 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
3745 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
3746 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k3
3747 ; AVX512BW-NEXT: korw %k3, %k2, %k2
3748 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
3749 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
3750 ; AVX512BW-NEXT: korw %k4, %k2, %k2
3751 ; AVX512BW-NEXT: vmovdqa32 576(%rsi), %zmm1 {%k2} {z}
3752 ; AVX512BW-NEXT: kshiftrd $25, %k5, %k2
3753 ; AVX512BW-NEXT: kmovd %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
3754 ; AVX512BW-NEXT: kandw %k6, %k2, %k3
3755 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
3756 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3757 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k7
3758 ; AVX512BW-NEXT: korw %k7, %k3, %k3
3759 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
3760 ; AVX512BW-NEXT: kshiftrd $26, %k5, %k7
3761 ; AVX512BW-NEXT: kshiftlw $15, %k7, %k7
3762 ; AVX512BW-NEXT: kshiftrw $13, %k7, %k6
3763 ; AVX512BW-NEXT: korw %k6, %k3, %k3
3764 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
3765 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
3766 ; AVX512BW-NEXT: kshiftrw $12, %k7, %k6
3767 ; AVX512BW-NEXT: korw %k6, %k3, %k3
3768 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3769 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
3770 ; AVX512BW-NEXT: kshiftrw $11, %k7, %k6
3771 ; AVX512BW-NEXT: korw %k6, %k3, %k3
3772 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3773 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
3774 ; AVX512BW-NEXT: kshiftrw $10, %k7, %k6
3775 ; AVX512BW-NEXT: korw %k6, %k3, %k3
3776 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
3777 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
3778 ; AVX512BW-NEXT: kshiftrw $9, %k7, %k6
3779 ; AVX512BW-NEXT: korw %k6, %k3, %k3
3780 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3781 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
3782 ; AVX512BW-NEXT: kshiftrd $27, %k5, %k6
3783 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
3784 ; AVX512BW-NEXT: kshiftrw $8, %k6, %k7
3785 ; AVX512BW-NEXT: korw %k7, %k3, %k3
3786 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3787 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
3788 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
3789 ; AVX512BW-NEXT: korw %k7, %k3, %k3
3790 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
3791 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
3792 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
3793 ; AVX512BW-NEXT: korw %k7, %k3, %k3
3794 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
3795 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
3796 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
3797 ; AVX512BW-NEXT: korw %k7, %k3, %k3
3798 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
3799 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k6
3800 ; AVX512BW-NEXT: korw %k6, %k3, %k3
3801 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3802 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
3803 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 4-byte Reload
3804 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k6
3805 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
3806 ; AVX512BW-NEXT: korw %k7, %k3, %k3
3807 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3808 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
3809 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
3810 ; AVX512BW-NEXT: korw %k7, %k3, %k3
3811 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
3812 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
3813 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k1
3814 ; AVX512BW-NEXT: korw %k1, %k3, %k1
3815 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
3816 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
3817 ; AVX512BW-NEXT: korw %k6, %k1, %k1
3818 ; AVX512BW-NEXT: vmovdqa32 512(%rsi), %zmm2 {%k1} {z}
3819 ; AVX512BW-NEXT: kshiftrd $22, %k5, %k0
3820 ; AVX512BW-NEXT: kmovd %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
3821 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3822 ; AVX512BW-NEXT: kandw %k1, %k0, %k6
3823 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
3824 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3825 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k7
3826 ; AVX512BW-NEXT: korw %k7, %k6, %k6
3827 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3828 ; AVX512BW-NEXT: kandw %k1, %k6, %k6
3829 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k7
3830 ; AVX512BW-NEXT: korw %k7, %k6, %k6
3831 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
3832 ; AVX512BW-NEXT: kandw %k0, %k6, %k6
3833 ; AVX512BW-NEXT: kshiftrd $23, %k5, %k7
3834 ; AVX512BW-NEXT: kmovq %k5, %k0
3835 ; AVX512BW-NEXT: kshiftlw $15, %k7, %k7
3836 ; AVX512BW-NEXT: kshiftrw $12, %k7, %k5
3837 ; AVX512BW-NEXT: korw %k5, %k6, %k5
3838 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3839 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
3840 ; AVX512BW-NEXT: kshiftrw $11, %k7, %k6
3841 ; AVX512BW-NEXT: korw %k6, %k5, %k5
3842 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3843 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
3844 ; AVX512BW-NEXT: kshiftrw $10, %k7, %k6
3845 ; AVX512BW-NEXT: korw %k6, %k5, %k5
3846 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3847 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
3848 ; AVX512BW-NEXT: kshiftrw $9, %k7, %k6
3849 ; AVX512BW-NEXT: korw %k6, %k5, %k5
3850 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3851 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
3852 ; AVX512BW-NEXT: kshiftrw $8, %k7, %k6
3853 ; AVX512BW-NEXT: korw %k6, %k5, %k5
3854 ; AVX512BW-NEXT: kandw %k2, %k5, %k5
3855 ; AVX512BW-NEXT: kshiftrd $24, %k0, %k6
3856 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
3857 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
3858 ; AVX512BW-NEXT: korw %k7, %k5, %k5
3859 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3860 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
3861 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
3862 ; AVX512BW-NEXT: korw %k7, %k5, %k5
3863 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
3864 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
3865 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
3866 ; AVX512BW-NEXT: korw %k7, %k5, %k5
3867 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3868 ; AVX512BW-NEXT: kandw %k2, %k5, %k5
3869 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
3870 ; AVX512BW-NEXT: korw %k7, %k5, %k5
3871 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3872 ; AVX512BW-NEXT: kandw %k2, %k5, %k5
3873 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k6
3874 ; AVX512BW-NEXT: korw %k6, %k5, %k5
3875 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3876 ; AVX512BW-NEXT: kandw %k2, %k5, %k5
3877 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
3878 ; AVX512BW-NEXT: kshiftrw $2, %k7, %k6
3879 ; AVX512BW-NEXT: korw %k6, %k5, %k5
3880 ; AVX512BW-NEXT: kandw %k4, %k5, %k5
3881 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 4-byte Reload
3882 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k2
3883 ; AVX512BW-NEXT: korw %k2, %k5, %k2
3884 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
3885 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
3886 ; AVX512BW-NEXT: korw %k7, %k2, %k2
3887 ; AVX512BW-NEXT: vmovdqa32 448(%rsi), %zmm3 {%k2} {z}
3888 ; AVX512BW-NEXT: kshiftrd $19, %k0, %k2
3889 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
3890 ; AVX512BW-NEXT: kandw %k7, %k2, %k4
3891 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k6
3892 ; AVX512BW-NEXT: kmovw %k6, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
3893 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k5
3894 ; AVX512BW-NEXT: korw %k5, %k4, %k4
3895 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3896 ; AVX512BW-NEXT: kandw %k2, %k4, %k4
3897 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k5
3898 ; AVX512BW-NEXT: korw %k5, %k4, %k4
3899 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3900 ; AVX512BW-NEXT: kandw %k2, %k4, %k4
3901 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k5
3902 ; AVX512BW-NEXT: korw %k5, %k4, %k4
3903 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3904 ; AVX512BW-NEXT: kandw %k2, %k4, %k4
3905 ; AVX512BW-NEXT: kshiftrd $20, %k0, %k5
3906 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k5
3907 ; AVX512BW-NEXT: kshiftrw $11, %k5, %k6
3908 ; AVX512BW-NEXT: korw %k6, %k4, %k4
3909 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3910 ; AVX512BW-NEXT: kandw %k2, %k4, %k4
3911 ; AVX512BW-NEXT: kshiftrw $10, %k5, %k6
3912 ; AVX512BW-NEXT: korw %k6, %k4, %k4
3913 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
3914 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
3915 ; AVX512BW-NEXT: kshiftrw $9, %k5, %k6
3916 ; AVX512BW-NEXT: korw %k6, %k4, %k4
3917 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
3918 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
3919 ; AVX512BW-NEXT: kshiftrw $8, %k5, %k6
3920 ; AVX512BW-NEXT: korw %k6, %k4, %k4
3921 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
3922 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
3923 ; AVX512BW-NEXT: kshiftrw $7, %k5, %k5
3924 ; AVX512BW-NEXT: korw %k5, %k4, %k4
3925 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
3926 ; AVX512BW-NEXT: kshiftrd $21, %k0, %k5
3927 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k5
3928 ; AVX512BW-NEXT: kshiftrw $6, %k5, %k6
3929 ; AVX512BW-NEXT: korw %k6, %k4, %k4
3930 ; AVX512BW-NEXT: kandw %k3, %k4, %k4
3931 ; AVX512BW-NEXT: kshiftrw $5, %k5, %k6
3932 ; AVX512BW-NEXT: korw %k6, %k4, %k4
3933 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3934 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
3935 ; AVX512BW-NEXT: kshiftrw $4, %k5, %k6
3936 ; AVX512BW-NEXT: korw %k6, %k4, %k4
3937 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3938 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
3939 ; AVX512BW-NEXT: kshiftrw $3, %k5, %k6
3940 ; AVX512BW-NEXT: korw %k6, %k4, %k4
3941 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3942 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
3943 ; AVX512BW-NEXT: kshiftrw $2, %k5, %k5
3944 ; AVX512BW-NEXT: korw %k5, %k4, %k4
3945 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
3946 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
3947 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
3948 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
3949 ; AVX512BW-NEXT: korw %k1, %k4, %k1
3950 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
3951 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
3952 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
3953 ; AVX512BW-NEXT: korw %k3, %k1, %k1
3954 ; AVX512BW-NEXT: vmovdqa32 384(%rsi), %zmm4 {%k1} {z}
3955 ; AVX512BW-NEXT: kshiftrd $16, %k0, %k1
3956 ; AVX512BW-NEXT: kandw %k7, %k1, %k3
3957 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
3958 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k4
3959 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3960 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
3961 ; AVX512BW-NEXT: kandw %k6, %k3, %k3
3962 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k4
3963 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3964 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
3965 ; AVX512BW-NEXT: kandw %k7, %k3, %k3
3966 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k4
3967 ; AVX512BW-NEXT: korw %k4, %k3, %k3
3968 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
3969 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
3970 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k1
3971 ; AVX512BW-NEXT: korw %k1, %k3, %k1
3972 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
3973 ; AVX512BW-NEXT: kshiftrd $17, %k0, %k3
3974 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
3975 ; AVX512BW-NEXT: kshiftrw $10, %k3, %k4
3976 ; AVX512BW-NEXT: korw %k4, %k1, %k1
3977 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3978 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
3979 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k4
3980 ; AVX512BW-NEXT: korw %k4, %k1, %k1
3981 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3982 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
3983 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k4
3984 ; AVX512BW-NEXT: korw %k4, %k1, %k1
3985 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3986 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
3987 ; AVX512BW-NEXT: kshiftrw $7, %k3, %k4
3988 ; AVX512BW-NEXT: korw %k4, %k1, %k1
3989 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3990 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
3991 ; AVX512BW-NEXT: kshiftrw $6, %k3, %k3
3992 ; AVX512BW-NEXT: korw %k3, %k1, %k1
3993 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
3994 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
3995 ; AVX512BW-NEXT: kshiftrd $18, %k0, %k3
3996 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k4
3997 ; AVX512BW-NEXT: kshiftrw $5, %k4, %k5
3998 ; AVX512BW-NEXT: korw %k5, %k1, %k1
3999 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4000 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4001 ; AVX512BW-NEXT: kshiftrw $4, %k4, %k5
4002 ; AVX512BW-NEXT: korw %k5, %k1, %k1
4003 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4004 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4005 ; AVX512BW-NEXT: kshiftrw $3, %k4, %k5
4006 ; AVX512BW-NEXT: korw %k5, %k1, %k1
4007 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4008 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4009 ; AVX512BW-NEXT: kshiftrw $2, %k4, %k4
4010 ; AVX512BW-NEXT: korw %k4, %k1, %k1
4011 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4012 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4013 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k3
4014 ; AVX512BW-NEXT: korw %k3, %k1, %k1
4015 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
4016 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
4017 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4018 ; AVX512BW-NEXT: korw %k2, %k1, %k1
4019 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm5 {%k1} {z}
4020 ; AVX512BW-NEXT: kshiftrd $13, %k0, %k1
4021 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4022 ; AVX512BW-NEXT: kshiftrd $12, %k0, %k3
4023 ; AVX512BW-NEXT: kmovd %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
4024 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4025 ; AVX512BW-NEXT: kandw %k2, %k3, %k2
4026 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k4
4027 ; AVX512BW-NEXT: korw %k4, %k2, %k2
4028 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
4029 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k4
4030 ; AVX512BW-NEXT: korw %k4, %k2, %k2
4031 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
4032 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k4
4033 ; AVX512BW-NEXT: korw %k4, %k2, %k2
4034 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4035 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
4036 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k4
4037 ; AVX512BW-NEXT: korw %k4, %k2, %k2
4038 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4039 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
4040 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k1
4041 ; AVX512BW-NEXT: korw %k1, %k2, %k1
4042 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4043 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4044 ; AVX512BW-NEXT: kshiftrd $14, %k0, %k2
4045 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
4046 ; AVX512BW-NEXT: kshiftrw $9, %k2, %k4
4047 ; AVX512BW-NEXT: korw %k4, %k1, %k1
4048 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4049 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
4050 ; AVX512BW-NEXT: kshiftrw $8, %k2, %k4
4051 ; AVX512BW-NEXT: korw %k4, %k1, %k1
4052 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4053 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
4054 ; AVX512BW-NEXT: kshiftrw $7, %k2, %k4
4055 ; AVX512BW-NEXT: korw %k4, %k1, %k1
4056 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4057 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
4058 ; AVX512BW-NEXT: kshiftrw $6, %k2, %k4
4059 ; AVX512BW-NEXT: korw %k4, %k1, %k1
4060 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4061 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
4062 ; AVX512BW-NEXT: kshiftrw $5, %k2, %k2
4063 ; AVX512BW-NEXT: korw %k2, %k1, %k1
4064 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4065 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4066 ; AVX512BW-NEXT: kshiftrd $15, %k0, %k2
4067 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k4
4068 ; AVX512BW-NEXT: kshiftrw $4, %k4, %k5
4069 ; AVX512BW-NEXT: korw %k5, %k1, %k1
4070 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4071 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
4072 ; AVX512BW-NEXT: kshiftrw $3, %k4, %k5
4073 ; AVX512BW-NEXT: korw %k5, %k1, %k1
4074 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
4075 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
4076 ; AVX512BW-NEXT: kshiftrw $2, %k4, %k5
4077 ; AVX512BW-NEXT: korw %k5, %k1, %k1
4078 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
4079 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
4080 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k2
4081 ; AVX512BW-NEXT: korw %k2, %k1, %k1
4082 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
4083 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
4084 ; AVX512BW-NEXT: korw %k4, %k1, %k1
4085 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm6 {%k1} {z}
4086 ; AVX512BW-NEXT: kshiftrd $9, %k0, %k2
4087 ; AVX512BW-NEXT: kmovd %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
4088 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4089 ; AVX512BW-NEXT: kandw %k1, %k2, %k4
4090 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k1
4091 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4092 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k5
4093 ; AVX512BW-NEXT: korw %k5, %k4, %k4
4094 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
4095 ; AVX512BW-NEXT: kshiftrd $10, %k0, %k5
4096 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k5
4097 ; AVX512BW-NEXT: kshiftrw $13, %k5, %k6
4098 ; AVX512BW-NEXT: korw %k6, %k4, %k4
4099 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4100 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
4101 ; AVX512BW-NEXT: kshiftrw $12, %k5, %k6
4102 ; AVX512BW-NEXT: korw %k6, %k4, %k4
4103 ; AVX512BW-NEXT: kandw %k7, %k4, %k4
4104 ; AVX512BW-NEXT: kshiftrw $11, %k5, %k6
4105 ; AVX512BW-NEXT: korw %k6, %k4, %k4
4106 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4107 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
4108 ; AVX512BW-NEXT: kshiftrw $10, %k5, %k6
4109 ; AVX512BW-NEXT: korw %k6, %k4, %k4
4110 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4111 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
4112 ; AVX512BW-NEXT: kshiftrw $9, %k5, %k5
4113 ; AVX512BW-NEXT: korw %k5, %k4, %k4
4114 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4115 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
4116 ; AVX512BW-NEXT: kshiftrd $11, %k0, %k5
4117 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k5
4118 ; AVX512BW-NEXT: kshiftrw $8, %k5, %k6
4119 ; AVX512BW-NEXT: korw %k6, %k4, %k4
4120 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4121 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
4122 ; AVX512BW-NEXT: kshiftrw $7, %k5, %k6
4123 ; AVX512BW-NEXT: korw %k6, %k4, %k4
4124 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4125 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
4126 ; AVX512BW-NEXT: kshiftrw $6, %k5, %k6
4127 ; AVX512BW-NEXT: korw %k6, %k4, %k4
4128 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4129 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
4130 ; AVX512BW-NEXT: kshiftrw $5, %k5, %k6
4131 ; AVX512BW-NEXT: korw %k6, %k4, %k4
4132 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4133 ; AVX512BW-NEXT: kandw %k2, %k4, %k4
4134 ; AVX512BW-NEXT: kshiftrw $4, %k5, %k5
4135 ; AVX512BW-NEXT: korw %k5, %k4, %k4
4136 ; AVX512BW-NEXT: kandw %k3, %k4, %k4
4137 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 4-byte Reload
4138 ; AVX512BW-NEXT: kshiftlw $15, %k7, %k5
4139 ; AVX512BW-NEXT: kshiftrw $3, %k5, %k6
4140 ; AVX512BW-NEXT: korw %k6, %k4, %k4
4141 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4142 ; AVX512BW-NEXT: kandw %k1, %k4, %k4
4143 ; AVX512BW-NEXT: kshiftrw $2, %k5, %k6
4144 ; AVX512BW-NEXT: korw %k6, %k4, %k4
4145 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4146 ; AVX512BW-NEXT: kandw %k3, %k4, %k4
4147 ; AVX512BW-NEXT: kshiftlw $14, %k7, %k3
4148 ; AVX512BW-NEXT: korw %k3, %k4, %k3
4149 ; AVX512BW-NEXT: kshiftlw $1, %k3, %k3
4150 ; AVX512BW-NEXT: kshiftrw $1, %k3, %k3
4151 ; AVX512BW-NEXT: korw %k5, %k3, %k3
4152 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm7 {%k3} {z}
4153 ; AVX512BW-NEXT: kshiftrd $6, %k0, %k4
4154 ; AVX512BW-NEXT: kmovd %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
4155 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4156 ; AVX512BW-NEXT: kandw %k3, %k4, %k5
4157 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
4158 ; AVX512BW-NEXT: kmovw %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4159 ; AVX512BW-NEXT: kshiftrw $14, %k4, %k6
4160 ; AVX512BW-NEXT: korw %k6, %k5, %k5
4161 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4162 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
4163 ; AVX512BW-NEXT: kshiftrw $13, %k4, %k6
4164 ; AVX512BW-NEXT: korw %k6, %k5, %k5
4165 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4166 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
4167 ; AVX512BW-NEXT: kshiftrd $7, %k0, %k6
4168 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
4169 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k7
4170 ; AVX512BW-NEXT: korw %k7, %k5, %k5
4171 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4172 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
4173 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k7
4174 ; AVX512BW-NEXT: korw %k7, %k5, %k5
4175 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4176 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
4177 ; AVX512BW-NEXT: kshiftrw $10, %k6, %k7
4178 ; AVX512BW-NEXT: korw %k7, %k5, %k5
4179 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4180 ; AVX512BW-NEXT: kandw %k4, %k5, %k5
4181 ; AVX512BW-NEXT: kshiftrw $9, %k6, %k7
4182 ; AVX512BW-NEXT: korw %k7, %k5, %k5
4183 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4184 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
4185 ; AVX512BW-NEXT: kshiftrw $8, %k6, %k6
4186 ; AVX512BW-NEXT: korw %k6, %k5, %k5
4187 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4188 ; AVX512BW-NEXT: kandw %k6, %k5, %k5
4189 ; AVX512BW-NEXT: kshiftrd $8, %k0, %k6
4190 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
4191 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
4192 ; AVX512BW-NEXT: korw %k7, %k5, %k5
4193 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4194 ; AVX512BW-NEXT: kandw %k7, %k5, %k5
4195 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
4196 ; AVX512BW-NEXT: korw %k7, %k5, %k5
4197 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4198 ; AVX512BW-NEXT: kandw %k7, %k5, %k5
4199 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
4200 ; AVX512BW-NEXT: korw %k7, %k5, %k5
4201 ; AVX512BW-NEXT: kandw %k2, %k5, %k5
4202 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
4203 ; AVX512BW-NEXT: korw %k7, %k5, %k5
4204 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4205 ; AVX512BW-NEXT: kandw %k7, %k5, %k5
4206 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k6
4207 ; AVX512BW-NEXT: korw %k6, %k5, %k5
4208 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
4209 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4210 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k6
4211 ; AVX512BW-NEXT: korw %k6, %k5, %k5
4212 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4213 ; AVX512BW-NEXT: kandw %k6, %k5, %k5
4214 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 4-byte Reload
4215 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k2
4216 ; AVX512BW-NEXT: korw %k2, %k5, %k2
4217 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
4218 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
4219 ; AVX512BW-NEXT: korw %k1, %k2, %k1
4220 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm8 {%k1} {z}
4221 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
4222 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4223 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4224 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
4225 ; AVX512BW-NEXT: kshiftrw $14, %k5, %k2
4226 ; AVX512BW-NEXT: korw %k2, %k1, %k1
4227 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4228 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4229 ; AVX512BW-NEXT: kshiftrw $13, %k5, %k2
4230 ; AVX512BW-NEXT: korw %k2, %k1, %k1
4231 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4232 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4233 ; AVX512BW-NEXT: kshiftrw $12, %k5, %k2
4234 ; AVX512BW-NEXT: korw %k2, %k1, %k1
4235 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4236 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4237 ; AVX512BW-NEXT: kshiftrd $4, %k0, %k2
4238 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
4239 ; AVX512BW-NEXT: kshiftrw $11, %k2, %k5
4240 ; AVX512BW-NEXT: korw %k5, %k1, %k1
4241 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
4242 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
4243 ; AVX512BW-NEXT: kshiftrw $10, %k2, %k5
4244 ; AVX512BW-NEXT: korw %k5, %k1, %k1
4245 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
4246 ; AVX512BW-NEXT: kshiftrw $9, %k2, %k5
4247 ; AVX512BW-NEXT: korw %k5, %k1, %k1
4248 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
4249 ; AVX512BW-NEXT: kshiftrw $8, %k2, %k5
4250 ; AVX512BW-NEXT: korw %k5, %k1, %k1
4251 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4252 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
4253 ; AVX512BW-NEXT: kshiftrw $7, %k2, %k2
4254 ; AVX512BW-NEXT: korw %k2, %k1, %k1
4255 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4256 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4257 ; AVX512BW-NEXT: kshiftrd $5, %k0, %k0
4258 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
4259 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k2
4260 ; AVX512BW-NEXT: korw %k2, %k1, %k1
4261 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4262 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4263 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k2
4264 ; AVX512BW-NEXT: korw %k2, %k1, %k1
4265 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4266 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4267 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k2
4268 ; AVX512BW-NEXT: korw %k2, %k1, %k1
4269 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
4270 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k2
4271 ; AVX512BW-NEXT: korw %k2, %k1, %k1
4272 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4273 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4274 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k0
4275 ; AVX512BW-NEXT: korw %k0, %k1, %k0
4276 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4277 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
4278 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
4279 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4280 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
4281 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
4282 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4283 ; AVX512BW-NEXT: korw %k1, %k0, %k1
4284 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm9 {%k1} {z}
4285 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 64(%rdx)
4286 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 128(%rdx)
4287 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 192(%rdx)
4288 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 256(%rdx)
4289 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 320(%rdx)
4290 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 384(%rdx)
4291 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 448(%rdx)
4292 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 512(%rdx)
4293 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 576(%rdx)
4294 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
4295 ; AVX512BW-NEXT: vzeroupper
4296 ; AVX512BW-NEXT: retq
4297 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
4298 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
4299 %tgt.mask = shufflevector <32 x i1> %src.mask, <32 x i1> poison, <160 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31, i32 31, i32 31>
4300 %data = call <160 x i32> @llvm.masked.load.v160i32.p0(ptr %in.vec, i32 64, <160 x i1> %tgt.mask, <160 x i32> poison)
4301 store <160 x i32> %data, ptr %out.vec, align 64
4302 ret void
4303 }
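; The vf64 variant below repeats the same factor-5 replication for all 64 bits
; of the source mask.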
4305 define void @mask_replication_factor5_vf64(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
4306 ; AVX512F-ONLY-LABEL: mask_replication_factor5_vf64:
4307 ; AVX512F-ONLY: # %bb.0:
4308 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
4309 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
4310 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3]
4311 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm3, %zmm1
4312 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
4313 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
4314 ; AVX512F-ONLY-NEXT: movw $1, %ax
4315 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
4316 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
4317 ; AVX512F-ONLY-NEXT: kmovw 6(%rdi), %k1
4318 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm4, %zmm4, %zmm4 {%k1} {z}
4319 ; AVX512F-ONLY-NEXT: kmovw 4(%rdi), %k1
4320 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm5, %zmm5, %zmm5 {%k1} {z}
4321 ; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
4322 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm6, %zmm6, %zmm6 {%k1} {z}
4323 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
4324 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm7 = [12,13,13,13,13,13,14,14,14,14,14,15,15,15,15,15]
4325 ; AVX512F-ONLY-NEXT: vpermd %zmm4, %zmm7, %zmm1
4326 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm8 = [9,9,10,10,10,10,10,11,11,11,11,11,12,12,12,12]
4327 ; AVX512F-ONLY-NEXT: vpermd %zmm4, %zmm8, %zmm2
4328 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm9 = [6,6,6,7,7,7,7,7,8,8,8,8,8,9,9,9]
4329 ; AVX512F-ONLY-NEXT: vpermd %zmm4, %zmm9, %zmm10
4330 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm11 = [3,3,3,3,4,4,4,4,4,5,5,5,5,5,6,6]
4331 ; AVX512F-ONLY-NEXT: vpermd %zmm4, %zmm11, %zmm12
4332 ; AVX512F-ONLY-NEXT: vpermd %zmm4, %zmm3, %zmm4
4333 ; AVX512F-ONLY-NEXT: vpermd %zmm5, %zmm7, %zmm13
4334 ; AVX512F-ONLY-NEXT: vpermd %zmm5, %zmm8, %zmm14
4335 ; AVX512F-ONLY-NEXT: vpermd %zmm5, %zmm9, %zmm15
4336 ; AVX512F-ONLY-NEXT: vpermd %zmm5, %zmm11, %zmm16
4337 ; AVX512F-ONLY-NEXT: vpermd %zmm5, %zmm3, %zmm5
4338 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm7, %zmm17
4339 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm8, %zmm18
4340 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm9, %zmm19
4341 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm3, %zmm3
4342 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm11, %zmm6
4343 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm7, %zmm7
4344 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm8, %zmm8
4345 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm9, %zmm9
4346 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm11, %zmm0
4347 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm11 {%k1} {z}
4348 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
4349 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
4350 ; AVX512F-ONLY-NEXT: vptestmd %zmm9, %zmm9, %k1
4351 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm9 {%k1} {z}
4352 ; AVX512F-ONLY-NEXT: vptestmd %zmm8, %zmm8, %k1
4353 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm8 {%k1} {z}
4354 ; AVX512F-ONLY-NEXT: vptestmd %zmm7, %zmm7, %k1
4355 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm7 {%k1} {z}
4356 ; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k1
4357 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm3 {%k1} {z}
4358 ; AVX512F-ONLY-NEXT: vptestmd %zmm6, %zmm6, %k1
4359 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm6 {%k1} {z}
4360 ; AVX512F-ONLY-NEXT: vptestmd %zmm19, %zmm19, %k1
4361 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm19 {%k1} {z}
4362 ; AVX512F-ONLY-NEXT: vptestmd %zmm18, %zmm18, %k1
4363 ; AVX512F-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm18 {%k1} {z}
4364 ; AVX512F-ONLY-NEXT: vptestmd %zmm17, %zmm17, %k1
4365 ; AVX512F-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm17 {%k1} {z}
4366 ; AVX512F-ONLY-NEXT: vptestmd %zmm5, %zmm5, %k1
4367 ; AVX512F-ONLY-NEXT: vmovdqa32 640(%rsi), %zmm5 {%k1} {z}
4368 ; AVX512F-ONLY-NEXT: vptestmd %zmm16, %zmm16, %k1
4369 ; AVX512F-ONLY-NEXT: vmovdqa32 704(%rsi), %zmm16 {%k1} {z}
4370 ; AVX512F-ONLY-NEXT: vptestmd %zmm15, %zmm15, %k1
4371 ; AVX512F-ONLY-NEXT: vmovdqa32 768(%rsi), %zmm15 {%k1} {z}
4372 ; AVX512F-ONLY-NEXT: vptestmd %zmm14, %zmm14, %k1
4373 ; AVX512F-ONLY-NEXT: vmovdqa32 832(%rsi), %zmm14 {%k1} {z}
4374 ; AVX512F-ONLY-NEXT: vptestmd %zmm13, %zmm13, %k1
4375 ; AVX512F-ONLY-NEXT: vmovdqa32 896(%rsi), %zmm13 {%k1} {z}
4376 ; AVX512F-ONLY-NEXT: vptestmd %zmm4, %zmm4, %k1
4377 ; AVX512F-ONLY-NEXT: vmovdqa32 960(%rsi), %zmm4 {%k1} {z}
4378 ; AVX512F-ONLY-NEXT: vptestmd %zmm12, %zmm12, %k1
4379 ; AVX512F-ONLY-NEXT: vmovdqa32 1024(%rsi), %zmm12 {%k1} {z}
4380 ; AVX512F-ONLY-NEXT: vptestmd %zmm10, %zmm10, %k1
4381 ; AVX512F-ONLY-NEXT: vmovdqa32 1088(%rsi), %zmm10 {%k1} {z}
4382 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
4383 ; AVX512F-ONLY-NEXT: vmovdqa32 1152(%rsi), %zmm2 {%k1} {z}
4384 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
4385 ; AVX512F-ONLY-NEXT: vmovdqa32 1216(%rsi), %zmm1 {%k1} {z}
4386 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 1216(%rdx)
4387 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 1152(%rdx)
4388 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm10, 1088(%rdx)
4389 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm12, 1024(%rdx)
4390 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 960(%rdx)
4391 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm13, 896(%rdx)
4392 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm14, 832(%rdx)
4393 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm15, 768(%rdx)
4394 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm16, 704(%rdx)
4395 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 640(%rdx)
4396 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm17, 576(%rdx)
4397 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm18, 512(%rdx)
4398 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm19, 448(%rdx)
4399 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 384(%rdx)
4400 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 320(%rdx)
4401 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 256(%rdx)
4402 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm8, 192(%rdx)
4403 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm9, 128(%rdx)
4404 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
4405 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm11, (%rdx)
4406 ; AVX512F-ONLY-NEXT: vzeroupper
4407 ; AVX512F-ONLY-NEXT: retq
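; The AVX512DQ lowering uses vpmovm2d/vpmovd2m to move the mask between
; k-registers and vector lanes instead of the vpternlogd/vptestmd pair above.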
4409 ; AVX512DQ-LABEL: mask_replication_factor5_vf64:
4410 ; AVX512DQ: # %bb.0:
4411 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
4412 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
4413 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,0,0,0,0,1,1,1,1,1,2,2,2,2,2,3]
4414 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm3, %zmm1
4415 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k0
4416 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm1
4417 ; AVX512DQ-NEXT: movw $1, %ax
4418 ; AVX512DQ-NEXT: kmovw %eax, %k1
4419 ; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
4420 ; AVX512DQ-NEXT: kmovw 6(%rdi), %k0
4421 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm4
4422 ; AVX512DQ-NEXT: kmovw 4(%rdi), %k0
4423 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm5
4424 ; AVX512DQ-NEXT: kmovw 2(%rdi), %k0
4425 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm6
4426 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
4427 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm7 = [12,13,13,13,13,13,14,14,14,14,14,15,15,15,15,15]
4428 ; AVX512DQ-NEXT: vpermd %zmm4, %zmm7, %zmm1
4429 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm8 = [9,9,10,10,10,10,10,11,11,11,11,11,12,12,12,12]
4430 ; AVX512DQ-NEXT: vpermd %zmm4, %zmm8, %zmm2
4431 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm9 = [6,6,6,7,7,7,7,7,8,8,8,8,8,9,9,9]
4432 ; AVX512DQ-NEXT: vpermd %zmm4, %zmm9, %zmm10
4433 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm11 = [3,3,3,3,4,4,4,4,4,5,5,5,5,5,6,6]
4434 ; AVX512DQ-NEXT: vpermd %zmm4, %zmm11, %zmm12
4435 ; AVX512DQ-NEXT: vpermd %zmm4, %zmm3, %zmm4
4436 ; AVX512DQ-NEXT: vpermd %zmm5, %zmm7, %zmm13
4437 ; AVX512DQ-NEXT: vpermd %zmm5, %zmm8, %zmm14
4438 ; AVX512DQ-NEXT: vpermd %zmm5, %zmm9, %zmm15
4439 ; AVX512DQ-NEXT: vpermd %zmm5, %zmm11, %zmm16
4440 ; AVX512DQ-NEXT: vpermd %zmm5, %zmm3, %zmm5
4441 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm7, %zmm17
4442 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm8, %zmm18
4443 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm9, %zmm19
4444 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm3, %zmm3
4445 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm11, %zmm6
4446 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm7, %zmm7
4447 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm8, %zmm8
4448 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm9, %zmm9
4449 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm11, %zmm0
4450 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm11 {%k1} {z}
4451 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
4452 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
4453 ; AVX512DQ-NEXT: vpmovd2m %zmm9, %k1
4454 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm9 {%k1} {z}
4455 ; AVX512DQ-NEXT: vpmovd2m %zmm8, %k1
4456 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm8 {%k1} {z}
4457 ; AVX512DQ-NEXT: vpmovd2m %zmm7, %k1
4458 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm7 {%k1} {z}
4459 ; AVX512DQ-NEXT: vpmovd2m %zmm3, %k1
4460 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm3 {%k1} {z}
4461 ; AVX512DQ-NEXT: vpmovd2m %zmm6, %k1
4462 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm6 {%k1} {z}
4463 ; AVX512DQ-NEXT: vpmovd2m %zmm19, %k1
4464 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm19 {%k1} {z}
4465 ; AVX512DQ-NEXT: vpmovd2m %zmm18, %k1
4466 ; AVX512DQ-NEXT: vmovdqa32 512(%rsi), %zmm18 {%k1} {z}
4467 ; AVX512DQ-NEXT: vpmovd2m %zmm17, %k1
4468 ; AVX512DQ-NEXT: vmovdqa32 576(%rsi), %zmm17 {%k1} {z}
4469 ; AVX512DQ-NEXT: vpmovd2m %zmm5, %k1
4470 ; AVX512DQ-NEXT: vmovdqa32 640(%rsi), %zmm5 {%k1} {z}
4471 ; AVX512DQ-NEXT: vpmovd2m %zmm16, %k1
4472 ; AVX512DQ-NEXT: vmovdqa32 704(%rsi), %zmm16 {%k1} {z}
4473 ; AVX512DQ-NEXT: vpmovd2m %zmm15, %k1
4474 ; AVX512DQ-NEXT: vmovdqa32 768(%rsi), %zmm15 {%k1} {z}
4475 ; AVX512DQ-NEXT: vpmovd2m %zmm14, %k1
4476 ; AVX512DQ-NEXT: vmovdqa32 832(%rsi), %zmm14 {%k1} {z}
4477 ; AVX512DQ-NEXT: vpmovd2m %zmm13, %k1
4478 ; AVX512DQ-NEXT: vmovdqa32 896(%rsi), %zmm13 {%k1} {z}
4479 ; AVX512DQ-NEXT: vpmovd2m %zmm4, %k1
4480 ; AVX512DQ-NEXT: vmovdqa32 960(%rsi), %zmm4 {%k1} {z}
4481 ; AVX512DQ-NEXT: vpmovd2m %zmm12, %k1
4482 ; AVX512DQ-NEXT: vmovdqa32 1024(%rsi), %zmm12 {%k1} {z}
4483 ; AVX512DQ-NEXT: vpmovd2m %zmm10, %k1
4484 ; AVX512DQ-NEXT: vmovdqa32 1088(%rsi), %zmm10 {%k1} {z}
4485 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
4486 ; AVX512DQ-NEXT: vmovdqa32 1152(%rsi), %zmm2 {%k1} {z}
4487 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
4488 ; AVX512DQ-NEXT: vmovdqa32 1216(%rsi), %zmm1 {%k1} {z}
4489 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 1216(%rdx)
4490 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 1152(%rdx)
4491 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 1088(%rdx)
4492 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 1024(%rdx)
4493 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 960(%rdx)
4494 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 896(%rdx)
4495 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 832(%rdx)
4496 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 768(%rdx)
4497 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 704(%rdx)
4498 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 640(%rdx)
4499 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 576(%rdx)
4500 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 512(%rdx)
4501 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, 448(%rdx)
4502 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 384(%rdx)
4503 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 320(%rdx)
4504 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 256(%rdx)
4505 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 192(%rdx)
4506 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 128(%rdx)
4507 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 64(%rdx)
4508 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, (%rdx)
4509 ; AVX512DQ-NEXT: vzeroupper
4510 ; AVX512DQ-NEXT: retq
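; The AVX512BW lowering keeps the 64-bit mask in k-registers and assembles each
; replicated 16-bit chunk with kshift/korw/kandw, reloading the constant bit
; masks from 2-byte spill slots.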
4512 ; AVX512BW-LABEL: mask_replication_factor5_vf64:
4513 ; AVX512BW: # %bb.0:
4514 ; AVX512BW-NEXT: kmovq (%rdi), %k5
4515 ; AVX512BW-NEXT: kshiftrq $1, %k5, %k0
4516 ; AVX512BW-NEXT: movw $-3, %ax
4517 ; AVX512BW-NEXT: kmovd %eax, %k1
4518 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4519 ; AVX512BW-NEXT: kmovw (%rdi), %k2
4520 ; AVX512BW-NEXT: kandw %k1, %k2, %k3
4521 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
4522 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k4
4523 ; AVX512BW-NEXT: korw %k4, %k3, %k3
4524 ; AVX512BW-NEXT: movw $-5, %ax
4525 ; AVX512BW-NEXT: kmovd %eax, %k1
4526 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4527 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
4528 ; AVX512BW-NEXT: kshiftrw $13, %k2, %k4
4529 ; AVX512BW-NEXT: korw %k4, %k3, %k3
4530 ; AVX512BW-NEXT: movw $-9, %ax
4531 ; AVX512BW-NEXT: kmovd %eax, %k1
4532 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4533 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
4534 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k4
4535 ; AVX512BW-NEXT: korw %k4, %k3, %k3
4536 ; AVX512BW-NEXT: movw $-17, %ax
4537 ; AVX512BW-NEXT: kmovd %eax, %k1
4538 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4539 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
4540 ; AVX512BW-NEXT: kshiftrw $11, %k2, %k2
4541 ; AVX512BW-NEXT: korw %k2, %k3, %k2
4542 ; AVX512BW-NEXT: movw $-33, %ax
4543 ; AVX512BW-NEXT: kmovd %eax, %k1
4544 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4545 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
4546 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
4547 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k3
4548 ; AVX512BW-NEXT: korw %k3, %k2, %k2
4549 ; AVX512BW-NEXT: movw $-65, %ax
4550 ; AVX512BW-NEXT: kmovd %eax, %k1
4551 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4552 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
4553 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k3
4554 ; AVX512BW-NEXT: korw %k3, %k2, %k2
4555 ; AVX512BW-NEXT: movw $-129, %ax
4556 ; AVX512BW-NEXT: kmovd %eax, %k1
4557 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4558 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
4559 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k3
4560 ; AVX512BW-NEXT: korw %k3, %k2, %k2
4561 ; AVX512BW-NEXT: movw $-257, %ax # imm = 0xFEFF
4562 ; AVX512BW-NEXT: kmovd %eax, %k1
4563 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4564 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
4565 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k3
4566 ; AVX512BW-NEXT: korw %k3, %k2, %k2
4567 ; AVX512BW-NEXT: movw $-513, %ax # imm = 0xFDFF
4568 ; AVX512BW-NEXT: kmovd %eax, %k1
4569 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4570 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
4571 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k0
4572 ; AVX512BW-NEXT: korw %k0, %k2, %k0
4573 ; AVX512BW-NEXT: movw $-1025, %ax # imm = 0xFBFF
4574 ; AVX512BW-NEXT: kmovd %eax, %k1
4575 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4576 ; AVX512BW-NEXT: kandw %k1, %k0, %k3
4577 ; AVX512BW-NEXT: kshiftrq $2, %k5, %k0
4578 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k2
4579 ; AVX512BW-NEXT: kshiftrw $5, %k2, %k4
4580 ; AVX512BW-NEXT: korw %k4, %k3, %k3
4581 ; AVX512BW-NEXT: movw $-2049, %ax # imm = 0xF7FF
4582 ; AVX512BW-NEXT: kmovd %eax, %k1
4583 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4584 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
4585 ; AVX512BW-NEXT: kshiftrw $4, %k2, %k4
4586 ; AVX512BW-NEXT: korw %k4, %k3, %k3
4587 ; AVX512BW-NEXT: movw $-4097, %ax # imm = 0xEFFF
4588 ; AVX512BW-NEXT: kmovd %eax, %k1
4589 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4590 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
4591 ; AVX512BW-NEXT: kshiftrw $3, %k2, %k7
4592 ; AVX512BW-NEXT: korw %k7, %k3, %k7
4593 ; AVX512BW-NEXT: movw $-8193, %ax # imm = 0xDFFF
4594 ; AVX512BW-NEXT: kmovd %eax, %k1
4595 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4596 ; AVX512BW-NEXT: kandw %k1, %k7, %k7
4597 ; AVX512BW-NEXT: kshiftrw $2, %k2, %k2
4598 ; AVX512BW-NEXT: korw %k2, %k7, %k7
4599 ; AVX512BW-NEXT: movw $-16385, %ax # imm = 0xBFFF
4600 ; AVX512BW-NEXT: kmovd %eax, %k6
4601 ; AVX512BW-NEXT: kandw %k6, %k7, %k7
4602 ; AVX512BW-NEXT: kmovw %k6, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
4603 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k0
4604 ; AVX512BW-NEXT: korw %k0, %k7, %k0
4605 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
4606 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k1
4607 ; AVX512BW-NEXT: kshiftrq $3, %k5, %k7
4608 ; AVX512BW-NEXT: kshiftlw $15, %k7, %k0
4609 ; AVX512BW-NEXT: korw %k0, %k1, %k1
4610 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
4611 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4612 ; AVX512BW-NEXT: kandw %k2, %k7, %k1
4613 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k7
4614 ; AVX512BW-NEXT: korw %k7, %k1, %k1
4615 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4616 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
4617 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k7
4618 ; AVX512BW-NEXT: korw %k7, %k1, %k1
4619 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4620 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
4621 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k0
4622 ; AVX512BW-NEXT: korw %k0, %k1, %k0
4623 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4624 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4625 ; AVX512BW-NEXT: kshiftrq $4, %k5, %k1
4626 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4627 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k7
4628 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4629 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4630 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
4631 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k7
4632 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4633 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4634 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4635 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k7
4636 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4637 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4638 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4639 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k7
4640 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4641 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4642 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4643 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k1
4644 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4645 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4646 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4647 ; AVX512BW-NEXT: kshiftrq $5, %k5, %k1
4648 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4649 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k7
4650 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4651 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4652 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4653 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k7
4654 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4655 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4656 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4657 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k7
4658 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4659 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4660 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
4661 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k7
4662 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4663 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4664 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
4665 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
4666 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4667 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4668 ; AVX512BW-NEXT: kshiftrq $6, %k5, %k1
4669 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
4670 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4671 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
4672 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
4673 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k7
4674 ; AVX512BW-NEXT: korw %k7, %k0, %k6
4675 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k6} {z}
4676 ; AVX512BW-NEXT: kandw %k2, %k1, %k0
4677 ; AVX512BW-NEXT: kshiftrw $14, %k7, %k1
4678 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4679 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4680 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
4681 ; AVX512BW-NEXT: kshiftrw $13, %k7, %k1
4682 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4683 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4684 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4685 ; AVX512BW-NEXT: kshiftrq $7, %k5, %k1
4686 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4687 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
4688 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4689 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4690 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4691 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
4692 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4693 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
4694 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
4695 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4696 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4697 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
4698 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
4699 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4700 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4701 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4702 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
4703 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4704 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4705 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4706 ; AVX512BW-NEXT: kshiftrq $8, %k5, %k1
4707 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4708 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
4709 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4710 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4711 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4712 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
4713 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4714 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4715 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4716 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
4717 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4718 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4719 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
4720 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4721 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4722 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4723 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k1
4724 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4725 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4726 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4727 ; AVX512BW-NEXT: kshiftrq $9, %k5, %k1
4728 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
4729 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
4730 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4731 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4732 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
4733 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
4734 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4735 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
4736 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
4737 ; AVX512BW-NEXT: korw %k6, %k0, %k7
4738 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k7} {z}
4739 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
4740 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
4741 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
4742 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4743 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
4744 ; AVX512BW-NEXT: kshiftrq $10, %k5, %k1
4745 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4746 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
4747 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4748 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4749 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
4750 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
4751 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4752 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4753 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4754 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
4755 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4756 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4757 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4758 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
4759 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4760 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
4761 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k1
4762 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4763 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4764 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4765 ; AVX512BW-NEXT: kshiftrq $11, %k5, %k1
4766 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4767 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
4768 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4769 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4770 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
4771 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
4772 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4773 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4774 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4775 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
4776 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4777 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4778 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4779 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
4780 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4781 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4782 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4783 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
4784 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4785 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4786 ; AVX512BW-NEXT: kshiftrq $12, %k5, %k1
4787 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
4788 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
4789 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4790 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4791 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4792 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
4793 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4794 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4795 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
4796 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
4797 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4798 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
4799 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
4800 ; AVX512BW-NEXT: korw %k6, %k0, %k6
4801 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k6} {z}
4802 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
4803 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
4804 ; AVX512BW-NEXT: kshiftrq $13, %k5, %k1
4805 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4806 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k6
4807 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4808 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4809 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4810 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
4811 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4812 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
4813 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
4814 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4815 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4816 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
4817 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
4818 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4819 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4820 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4821 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k1
4822 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4823 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4824 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4825 ; AVX512BW-NEXT: kshiftrq $14, %k5, %k1
4826 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4827 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
4828 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4829 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4830 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4831 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
4832 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4833 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
4834 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
4835 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4836 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4837 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
4838 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
4839 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4840 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4841 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4842 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k1
4843 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4844 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4845 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4846 ; AVX512BW-NEXT: kshiftrq $15, %k5, %k1
4847 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
4848 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
4849 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4850 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4851 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
4852 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
4853 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4854 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4855 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
4856 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4857 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4858 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4859 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
4860 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4861 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
4862 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
4863 ; AVX512BW-NEXT: korw %k6, %k0, %k1
4864 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm4 {%k1} {z}
4865 ; AVX512BW-NEXT: kshiftrq $16, %k5, %k0
4866 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4867 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
4868 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
4869 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
4870 ; AVX512BW-NEXT: korw %k6, %k1, %k1
4871 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4872 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
4873 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
4874 ; AVX512BW-NEXT: korw %k6, %k1, %k1
4875 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4876 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
4877 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
4878 ; AVX512BW-NEXT: korw %k6, %k1, %k1
4879 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
4880 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k0
4881 ; AVX512BW-NEXT: korw %k0, %k1, %k0
4882 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4883 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4884 ; AVX512BW-NEXT: kshiftrq $17, %k5, %k1
4885 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4886 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
4887 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4888 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4889 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
4890 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
4891 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4892 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4893 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
4894 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
4895 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4896 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
4897 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
4898 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
4899 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4900 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
4901 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
4902 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4903 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4904 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
4905 ; AVX512BW-NEXT: kshiftrq $18, %k5, %k1
4906 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
4907 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
4908 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4909 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4910 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
4911 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
4912 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4913 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4914 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
4915 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
4916 ; AVX512BW-NEXT: korw %k7, %k0, %k0
4917 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
4918 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
4919 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
4920 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4921 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4922 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
4923 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4924 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
4925 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
4926 ; AVX512BW-NEXT: kshiftrq $19, %k5, %k1
4927 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
4928 ; AVX512BW-NEXT: korw %k6, %k0, %k7
4929 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm5 {%k7} {z}
4930 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
4931 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
4932 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
4933 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4934 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4935 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4936 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
4937 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4938 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4939 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4940 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
4941 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4942 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4943 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4944 ; AVX512BW-NEXT: kshiftrq $20, %k5, %k1
4945 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4946 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
4947 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4948 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4949 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4950 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
4951 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4952 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4953 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4954 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
4955 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4956 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
4957 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
4958 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4959 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4960 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
4961 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k1
4962 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4963 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4964 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4965 ; AVX512BW-NEXT: kshiftrq $21, %k5, %k1
4966 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
4967 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
4968 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4969 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
4970 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
4971 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4972 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
4973 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
4974 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
4975 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4976 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
4977 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
4978 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
4979 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4980 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4981 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
4982 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
4983 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4984 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
4985 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
4986 ; AVX512BW-NEXT: kshiftrq $22, %k5, %k1
4987 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
4988 ; AVX512BW-NEXT: korw %k6, %k0, %k0
4989 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
4990 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
4991 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
4992 ; AVX512BW-NEXT: korw %k6, %k0, %k7
4993 ; AVX512BW-NEXT: vmovdqa32 384(%rsi), %zmm6 {%k7} {z}
4994 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
4995 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
4996 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
4997 ; AVX512BW-NEXT: korw %k1, %k0, %k0
4998 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
4999 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5000 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
5001 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5002 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5003 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5004 ; AVX512BW-NEXT: kshiftrq $23, %k5, %k1
5005 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5006 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
5007 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5008 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5009 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5010 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
5011 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5012 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5013 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5014 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
5015 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5016 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5017 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5018 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
5019 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5020 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5021 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5022 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
5023 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5024 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5025 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5026 ; AVX512BW-NEXT: kshiftrq $24, %k5, %k1
5027 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5028 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
5029 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5030 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5031 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5032 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
5033 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5034 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5035 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5036 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
5037 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5038 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5039 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
5040 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5041 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5042 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k1
5043 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5044 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5045 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5046 ; AVX512BW-NEXT: kshiftrq $25, %k5, %k1
5047 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5048 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
5049 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5050 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5051 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5052 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
5053 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5054 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5055 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5056 ; AVX512BW-NEXT: korw %k6, %k0, %k7
5057 ; AVX512BW-NEXT: vmovdqa32 448(%rsi), %zmm7 {%k7} {z}
5058 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5059 ; AVX512BW-NEXT: kandw %k2, %k1, %k0
5060 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
5061 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5062 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5063 ; AVX512BW-NEXT: kshiftrq $26, %k5, %k1
5064 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5065 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
5066 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5067 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5068 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5069 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
5070 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5071 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5072 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5073 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
5074 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5075 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5076 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5077 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
5078 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5079 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5080 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5081 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k1
5082 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5083 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5084 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5085 ; AVX512BW-NEXT: kshiftrq $27, %k5, %k1
5086 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5087 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
5088 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5089 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5090 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5091 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
5092 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5093 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5094 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5095 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
5096 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5097 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5098 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5099 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
5100 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5101 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5102 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5103 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
5104 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5105 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5106 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5107 ; AVX512BW-NEXT: kshiftrq $28, %k5, %k1
5108 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5109 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
5110 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5111 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
5112 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
5113 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
5114 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5115 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
5116 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
5117 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
5118 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5119 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5120 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5121 ; AVX512BW-NEXT: korw %k6, %k0, %k6
5122 ; AVX512BW-NEXT: vmovdqa32 512(%rsi), %zmm8 {%k6} {z}
5123 ; AVX512BW-NEXT: kandw %k2, %k1, %k0
5124 ; AVX512BW-NEXT: kshiftrq $29, %k5, %k1
5125 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5126 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k6
5127 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5128 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5129 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5130 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
5131 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5132 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5133 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5134 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
5135 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5136 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5137 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5138 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
5139 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5140 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5141 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k1
5142 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5143 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5144 ; AVX512BW-NEXT: kshiftrq $30, %k5, %k1
5145 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5146 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
5147 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5148 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5149 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5150 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
5151 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5152 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5153 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5154 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
5155 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5156 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5157 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5158 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
5159 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5160 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5161 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5162 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k1
5163 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5164 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5165 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5166 ; AVX512BW-NEXT: kshiftrq $31, %k5, %k1
5167 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5168 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
5169 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5170 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5171 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5172 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
5173 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5174 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5175 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5176 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
5177 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5178 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5179 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5180 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
5181 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5182 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5183 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5184 ; AVX512BW-NEXT: korw %k6, %k0, %k1
5185 ; AVX512BW-NEXT: vmovdqa32 576(%rsi), %zmm9 {%k1} {z}
5186 ; AVX512BW-NEXT: kshiftrq $32, %k5, %k0
5187 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5188 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
5189 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
5190 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
5191 ; AVX512BW-NEXT: korw %k6, %k1, %k1
5192 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
5193 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
5194 ; AVX512BW-NEXT: korw %k6, %k1, %k1
5195 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5196 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
5197 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
5198 ; AVX512BW-NEXT: korw %k6, %k1, %k1
5199 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5200 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
5201 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k0
5202 ; AVX512BW-NEXT: korw %k0, %k1, %k0
5203 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5204 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5205 ; AVX512BW-NEXT: kshiftrq $33, %k5, %k1
5206 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5207 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
5208 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5209 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5210 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5211 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
5212 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5213 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5214 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5215 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
5216 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5217 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5218 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
5219 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5220 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5221 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5222 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
5223 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5224 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5225 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5226 ; AVX512BW-NEXT: kshiftrq $34, %k5, %k1
5227 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5228 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
5229 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5230 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5231 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5232 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
5233 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5234 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5235 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5236 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
5237 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5238 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5239 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5240 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
5241 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5242 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5243 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5244 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
5245 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5246 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5247 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5248 ; AVX512BW-NEXT: kshiftrq $35, %k5, %k1
5249 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5250 ; AVX512BW-NEXT: korw %k6, %k0, %k7
5251 ; AVX512BW-NEXT: vmovdqa32 640(%rsi), %zmm10 {%k7} {z}
5252 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
5253 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
5254 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
5255 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5256 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5257 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5258 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
5259 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5260 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5261 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
5262 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5263 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5264 ; AVX512BW-NEXT: kshiftrq $36, %k5, %k1
5265 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5266 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
5267 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5268 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5269 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5270 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
5271 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5272 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5273 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5274 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
5275 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5276 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5277 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5278 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
5279 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5280 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5281 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5282 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k1
5283 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5284 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5285 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5286 ; AVX512BW-NEXT: kshiftrq $37, %k5, %k1
5287 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5288 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
5289 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5290 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5291 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5292 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
5293 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5294 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5295 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5296 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
5297 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5298 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5299 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5300 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
5301 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5302 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5303 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
5304 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5305 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5306 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5307 ; AVX512BW-NEXT: kshiftrq $38, %k5, %k1
5308 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
5309 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5310 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5311 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5312 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5313 ; AVX512BW-NEXT: korw %k6, %k0, %k7
5314 ; AVX512BW-NEXT: vmovdqa32 704(%rsi), %zmm11 {%k7} {z}
5315 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5316 ; AVX512BW-NEXT: kandw %k3, %k1, %k0
5317 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
5318 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5319 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5320 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5321 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
5322 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5323 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5324 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5325 ; AVX512BW-NEXT: kshiftrq $39, %k5, %k1
5326 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5327 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
5328 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5329 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5330 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5331 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
5332 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5333 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5334 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
5335 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5336 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5337 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
5338 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5339 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5340 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5341 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
5342 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5343 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5344 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5345 ; AVX512BW-NEXT: kshiftrq $40, %k5, %k1
5346 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5347 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
5348 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5349 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5350 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5351 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
5352 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5353 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5354 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5355 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
5356 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5357 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5358 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5359 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
5360 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5361 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5362 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5363 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k1
5364 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5365 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5366 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5367 ; AVX512BW-NEXT: kshiftrq $41, %k5, %k1
5368 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5369 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
5370 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5371 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
5372 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
5373 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
5374 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5375 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5376 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5377 ; AVX512BW-NEXT: korw %k6, %k0, %k7
5378 ; AVX512BW-NEXT: vmovdqa32 768(%rsi), %zmm12 {%k7} {z}
5379 ; AVX512BW-NEXT: kandw %k3, %k1, %k0
5380 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
5381 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5382 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5383 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5384 ; AVX512BW-NEXT: kshiftrq $42, %k5, %k1
5385 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5386 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
5387 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5388 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5389 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5390 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
5391 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5392 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5393 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5394 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
5395 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5396 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5397 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5398 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
5399 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5400 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5401 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5402 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k1
5403 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5404 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5405 ; AVX512BW-NEXT: kshiftrq $43, %k5, %k1
5406 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5407 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
5408 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5409 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5410 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5411 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
5412 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5413 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5414 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
5415 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5416 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5417 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5418 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
5419 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5420 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5421 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5422 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
5423 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5424 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5425 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5426 ; AVX512BW-NEXT: kshiftrq $44, %k5, %k1
5427 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5428 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
5429 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5430 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5431 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5432 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
5433 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5434 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5435 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5436 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
5437 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5438 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5439 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5440 ; AVX512BW-NEXT: korw %k6, %k0, %k6
5441 ; AVX512BW-NEXT: vmovdqa32 832(%rsi), %zmm13 {%k6} {z}
5442 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
5443 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
5444 ; AVX512BW-NEXT: kshiftrq $45, %k5, %k1
5445 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5446 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k6
5447 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5448 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5449 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5450 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
5451 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5452 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5453 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5454 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
5455 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5456 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5457 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5458 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
5459 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5460 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5461 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5462 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k1
5463 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5464 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5465 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5466 ; AVX512BW-NEXT: kshiftrq $46, %k5, %k1
5467 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5468 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
5469 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5470 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5471 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5472 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
5473 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5474 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5475 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
5476 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5477 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5478 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5479 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
5480 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5481 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5482 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k1
5483 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5484 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5485 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5486 ; AVX512BW-NEXT: kshiftrq $47, %k5, %k1
5487 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5488 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
5489 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5490 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5491 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5492 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
5493 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5494 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
5495 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
5496 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
5497 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5498 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5499 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
5500 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5501 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5502 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5503 ; AVX512BW-NEXT: korw %k6, %k0, %k1
5504 ; AVX512BW-NEXT: vmovdqa32 896(%rsi), %zmm14 {%k1} {z}
5505 ; AVX512BW-NEXT: kshiftrq $48, %k5, %k0
5506 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5507 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
5508 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
5509 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
5510 ; AVX512BW-NEXT: korw %k6, %k1, %k1
5511 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5512 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
5513 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
5514 ; AVX512BW-NEXT: korw %k6, %k1, %k1
5515 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5516 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
5517 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
5518 ; AVX512BW-NEXT: korw %k6, %k1, %k1
5519 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5520 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
5521 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k0
5522 ; AVX512BW-NEXT: korw %k0, %k1, %k0
5523 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5524 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5525 ; AVX512BW-NEXT: kshiftrq $49, %k5, %k1
5526 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5527 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
5528 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5529 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5530 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5531 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
5532 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5533 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5534 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5535 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
5536 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5537 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5538 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5539 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
5540 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5541 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5542 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5543 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
5544 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5545 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5546 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5547 ; AVX512BW-NEXT: kshiftrq $50, %k5, %k1
5548 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5549 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
5550 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5551 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5552 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
5553 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5554 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5555 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
5556 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5557 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5558 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5559 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
5560 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5561 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5562 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5563 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
5564 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5565 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5566 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5567 ; AVX512BW-NEXT: kshiftrq $51, %k5, %k1
5568 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5569 ; AVX512BW-NEXT: korw %k6, %k0, %k7
5570 ; AVX512BW-NEXT: vmovdqa32 960(%rsi), %zmm15 {%k7} {z}
5571 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
5572 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
5573 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
5574 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5575 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5576 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
5577 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5578 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5579 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5580 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
5581 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5582 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5583 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5584 ; AVX512BW-NEXT: kshiftrq $52, %k5, %k1
5585 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5586 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
5587 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5588 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5589 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5590 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
5591 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5592 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5593 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5594 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
5595 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5596 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5597 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5598 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
5599 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5600 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5601 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5602 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k1
5603 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5604 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5605 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5606 ; AVX512BW-NEXT: kshiftrq $53, %k5, %k1
5607 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5608 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
5609 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5610 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5611 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5612 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
5613 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5614 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5615 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5616 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
5617 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5618 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5619 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5620 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
5621 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5622 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5623 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5624 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
5625 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5626 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5627 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5628 ; AVX512BW-NEXT: kshiftrq $54, %k5, %k1
5629 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
5630 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5631 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5632 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5633 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5634 ; AVX512BW-NEXT: korw %k6, %k0, %k7
5635 ; AVX512BW-NEXT: vmovdqa32 1024(%rsi), %zmm16 {%k7} {z}
5636 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
5637 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
5638 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
5639 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5640 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5641 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5642 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
5643 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5644 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5645 ; AVX512BW-NEXT: kshiftrq $55, %k5, %k1
5646 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5647 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
5648 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5649 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5650 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5651 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
5652 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5653 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5654 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5655 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
5656 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5657 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5658 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5659 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
5660 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5661 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5662 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5663 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
5664 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5665 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5666 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5667 ; AVX512BW-NEXT: kshiftrq $56, %k5, %k1
5668 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5669 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
5670 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5671 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5672 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5673 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
5674 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5675 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5676 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5677 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
5678 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5679 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5680 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
5681 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5682 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5683 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5684 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k1
5685 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5686 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5687 ; AVX512BW-NEXT: kshiftrq $57, %k5, %k1
5688 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5689 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
5690 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5691 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5692 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5693 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
5694 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5695 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5696 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5697 ; AVX512BW-NEXT: korw %k6, %k0, %k7
5698 ; AVX512BW-NEXT: vmovdqa32 1088(%rsi), %zmm17 {%k7} {z}
5699 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
5700 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
5701 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
5702 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5703 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5704 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5705 ; AVX512BW-NEXT: kshiftrq $58, %k5, %k1
5706 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5707 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
5708 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5709 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5710 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5711 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
5712 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5713 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5714 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
5715 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5716 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
5717 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5718 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
5719 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5720 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5721 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5722 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k1
5723 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5724 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5725 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5726 ; AVX512BW-NEXT: kshiftrq $59, %k5, %k1
5727 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5728 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
5729 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5730 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5731 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5732 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
5733 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5734 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5735 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5736 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
5737 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5738 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5739 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5740 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
5741 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5742 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
5743 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
5744 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
5745 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5746 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5747 ; AVX512BW-NEXT: kshiftrq $60, %k5, %k1
5748 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
5749 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
5750 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5751 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
5752 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5753 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
5754 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5755 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5756 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
5757 ; AVX512BW-NEXT: korw %k7, %k0, %k0
5758 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5759 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5760 ; AVX512BW-NEXT: korw %k6, %k0, %k6
5761 ; AVX512BW-NEXT: vmovdqa32 1152(%rsi), %zmm18 {%k6} {z}
5762 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
5763 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
5764 ; AVX512BW-NEXT: kshiftrq $61, %k5, %k1
5765 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5766 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k6
5767 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5768 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5769 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5770 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
5771 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5772 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5773 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5774 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
5775 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5776 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5777 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5778 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
5779 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5780 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
5781 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k1
5782 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5783 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5784 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5785 ; AVX512BW-NEXT: kshiftrq $62, %k5, %k1
5786 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
5787 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
5788 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5789 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5790 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5791 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
5792 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5793 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5794 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5795 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
5796 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5797 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5798 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5799 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
5800 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5801 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5802 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5803 ; AVX512BW-NEXT: kshiftrq $63, %k5, %k5
5804 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k1
5805 ; AVX512BW-NEXT: korw %k1, %k0, %k0
5806 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
5807 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
5808 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k1
5809 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
5810 ; AVX512BW-NEXT: korw %k6, %k0, %k0
5811 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5812 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5813 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k4
5814 ; AVX512BW-NEXT: korw %k4, %k0, %k0
5815 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
5816 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k3
5817 ; AVX512BW-NEXT: korw %k3, %k0, %k0
5818 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
5819 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
5820 ; AVX512BW-NEXT: kshiftlw $14, %k5, %k2
5821 ; AVX512BW-NEXT: korw %k2, %k0, %k0
5822 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
5823 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
5824 ; AVX512BW-NEXT: korw %k1, %k0, %k1
5825 ; AVX512BW-NEXT: vmovdqa32 1216(%rsi), %zmm19 {%k1} {z}
5826 ; AVX512BW-NEXT: vmovdqa64 %zmm19, 1216(%rdx)
5827 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 1152(%rdx)
5828 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 1088(%rdx)
5829 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 1024(%rdx)
5830 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 960(%rdx)
5831 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 896(%rdx)
5832 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 832(%rdx)
5833 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 768(%rdx)
5834 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 704(%rdx)
5835 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 640(%rdx)
5836 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 576(%rdx)
5837 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 512(%rdx)
5838 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 448(%rdx)
5839 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 384(%rdx)
5840 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 320(%rdx)
5841 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 256(%rdx)
5842 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 192(%rdx)
5843 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 128(%rdx)
5844 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%rdx)
5845 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
5846 ; AVX512BW-NEXT: vzeroupper
5847 ; AVX512BW-NEXT: retq
5848 %src.mask = load <64 x i1>, ptr %in.maskvec, align 64
5849 %tgt.mask = shufflevector <64 x i1> %src.mask, <64 x i1> poison, <320 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31, i32 31, i32 31, i32 32, i32 32, i32 32, i32 32, i32 32, i32 33, i32 33, i32 33, i32 33, i32 33, i32 34, i32 34, i32 34, i32 34, i32 34, i32 35, i32 35, i32 35, i32 35, i32 35, i32 36, i32 36, i32 36, i32 36, i32 36, i32 37, i32 37, i32 37, i32 37, i32 37, i32 38, i32 38, i32 38, i32 38, i32 38, i32 39, i32 39, i32 39, i32 39, i32 39, i32 40, i32 40, i32 40, i32 40, i32 40, i32 41, i32 41, i32 41, i32 41, i32 41, i32 42, i32 42, i32 42, i32 42, i32 42, i32 43, i32 43, i32 43, i32 43, i32 43, i32 44, i32 44, i32 44, i32 44, i32 44, i32 45, i32 45, i32 45, i32 45, i32 45, i32 46, i32 46, i32 46, i32 46, i32 46, i32 47, i32 47, i32 47, i32 47, i32 47, i32 48, i32 48, i32 48, i32 48, i32 48, i32 49, i32 49, i32 49, i32 49, i32 49, i32 50, i32 50, i32 50, i32 50, i32 50, i32 51, i32 51, i32 51, i32 51, i32 51, i32 52, i32 52, i32 52, i32 52, i32 52, i32 53, i32 53, i32 53, i32 53, i32 53, i32 54, i32 54, i32 54, i32 54, i32 54, i32 55, i32 55, i32 55, i32 55, i32 55, i32 56, i32 56, i32 56, i32 56, i32 56, i32 57, i32 57, i32 57, i32 57, i32 57, i32 58, i32 58, i32 58, i32 58, i32 58, i32 59, i32 59, i32 59, i32 59, i32 59, i32 60, i32 60, i32 60, i32 60, i32 60, i32 61, i32 61, i32 61, i32 61, i32 61, i32 62, i32 62, i32 62, i32 62, i32 62, i32 63, i32 63, i32 63, i32 63, i32 63>
5850 %data = call <320 x i32> @llvm.masked.load.v320i32.p0(ptr %in.vec, i32 64, <320 x i1> %tgt.mask, <320 x i32> poison)
5851 store <320 x i32> %data, ptr %out.vec, align 64
5852 ret void
5853 }
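; A rough reading of the AVX512BW sequence above (replication factor 5, vf 64):
; each kshiftrq pulls the next of the 64 source mask bits out of %k5, the
; kshiftlw/kshiftrw pair positions it within a 16-bit lane, korw ORs it into the
; store mask being assembled, and the kandw steps use 16-bit single-bit-clear
; constants reloaded from the stack (set up earlier in the function, analogous to
; the movw $-3/$-5/... spills visible in the factor-6 vf32 body further down) to
; clear that slot first. 64 bits x factor 5 = 320 i32 lanes, which matches the
; 20 zmm stores at offsets 0 through 1216(%rdx) above.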
5855 define void @mask_replication_factor6_vf2(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
5856 ; AVX512F-ONLY-LABEL: mask_replication_factor6_vf2:
5857 ; AVX512F-ONLY: # %bb.0:
5858 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
5859 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
5860 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,0,1,1,1,1,1,1,u,u,u,u>
5861 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
5862 ; AVX512F-ONLY-NEXT: vpslld $31, %zmm0, %zmm0
5863 ; AVX512F-ONLY-NEXT: movw $4095, %ax # imm = 0xFFF
5864 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
5865 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1 {%k1}
5866 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
5867 ; AVX512F-ONLY-NEXT: vextracti32x4 $2, %zmm0, 32(%rdx)
5868 ; AVX512F-ONLY-NEXT: vmovdqa %ymm0, (%rdx)
5869 ; AVX512F-ONLY-NEXT: vzeroupper
5870 ; AVX512F-ONLY-NEXT: retq
5872 ; AVX512DQ-LABEL: mask_replication_factor6_vf2:
5873 ; AVX512DQ: # %bb.0:
5874 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
5875 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
5876 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,0,1,1,1,1,1,1,u,u,u,u>
5877 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
5878 ; AVX512DQ-NEXT: vpxor %xmm1, %xmm1, %xmm1
5879 ; AVX512DQ-NEXT: movw $4095, %ax # imm = 0xFFF
5880 ; AVX512DQ-NEXT: kmovw %eax, %k1
5881 ; AVX512DQ-NEXT: vpcmpgtd %zmm0, %zmm1, %k1 {%k1}
5882 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
5883 ; AVX512DQ-NEXT: vextracti32x4 $2, %zmm0, 32(%rdx)
5884 ; AVX512DQ-NEXT: vmovdqa %ymm0, (%rdx)
5885 ; AVX512DQ-NEXT: vzeroupper
5886 ; AVX512DQ-NEXT: retq
5888 ; AVX512BW-LABEL: mask_replication_factor6_vf2:
5889 ; AVX512BW: # %bb.0:
5890 ; AVX512BW-NEXT: kmovw (%rdi), %k1
5891 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
5892 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,0,1,1,1,1,1,1,u,u,u,u>
5893 ; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm0
5894 ; AVX512BW-NEXT: vpslld $31, %zmm0, %zmm0
5895 ; AVX512BW-NEXT: movw $4095, %ax # imm = 0xFFF
5896 ; AVX512BW-NEXT: kmovd %eax, %k1
5897 ; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1 {%k1}
5898 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
5899 ; AVX512BW-NEXT: vextracti32x4 $2, %zmm0, 32(%rdx)
5900 ; AVX512BW-NEXT: vmovdqa %ymm0, (%rdx)
5901 ; AVX512BW-NEXT: vzeroupper
5902 ; AVX512BW-NEXT: retq
5903 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
5904 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <2 x i32> <i32 0, i32 1>
5905 %tgt.mask = shufflevector <2 x i1> %src.mask, <2 x i1> poison, <12 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1>
5906 %data = call <12 x i32> @llvm.masked.load.v12i32.p0(ptr %in.vec, i32 64, <12 x i1> %tgt.mask, <12 x i32> poison)
5907 %data.padded = shufflevector <12 x i32> %data, <12 x i32> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 undef, i32 undef, i32 undef, i32 undef>
5908 store <12 x i32> %data, ptr %out.vec, align 64
5909 ret void
5910 }
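; The factor-6 vf2 case above produces a <12 x i1> mask and 12 i32 lanes
; (48 bytes), which is why the result is written back as a full ymm at (%rdx)
; plus the third 128-bit lane of the zmm, extracted with vextracti32x4 $2, at
; 32(%rdx).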
5912 define void @mask_replication_factor6_vf4(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
5913 ; AVX512F-SLOW-LABEL: mask_replication_factor6_vf4:
5914 ; AVX512F-SLOW: # %bb.0:
5915 ; AVX512F-SLOW-NEXT: kmovw (%rdi), %k1
5916 ; AVX512F-SLOW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
5917 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,2,3,3]
5918 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,1]
5919 ; AVX512F-SLOW-NEXT: vpslld $31, %zmm1, %zmm1
5920 ; AVX512F-SLOW-NEXT: movw $255, %ax
5921 ; AVX512F-SLOW-NEXT: kmovw %eax, %k1
5922 ; AVX512F-SLOW-NEXT: vptestmd %zmm1, %zmm1, %k1 {%k1}
5923 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
5924 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm1, %zmm0
5925 ; AVX512F-SLOW-NEXT: vptestmd %zmm0, %zmm0, %k2
5926 ; AVX512F-SLOW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
5927 ; AVX512F-SLOW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
5928 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm1, (%rdx)
5929 ; AVX512F-SLOW-NEXT: vmovdqa %ymm0, 64(%rdx)
5930 ; AVX512F-SLOW-NEXT: vzeroupper
5931 ; AVX512F-SLOW-NEXT: retq
5933 ; AVX512F-FAST-LABEL: mask_replication_factor6_vf4:
5934 ; AVX512F-FAST: # %bb.0:
5935 ; AVX512F-FAST-NEXT: kmovw (%rdi), %k1
5936 ; AVX512F-FAST-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
5937 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [2,2,3,3,3,3,3,3]
5938 ; AVX512F-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm1
5939 ; AVX512F-FAST-NEXT: vpslld $31, %zmm1, %zmm1
5940 ; AVX512F-FAST-NEXT: movw $255, %ax
5941 ; AVX512F-FAST-NEXT: kmovw %eax, %k1
5942 ; AVX512F-FAST-NEXT: vptestmd %zmm1, %zmm1, %k1 {%k1}
5943 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
5944 ; AVX512F-FAST-NEXT: vpermd %zmm0, %zmm1, %zmm0
5945 ; AVX512F-FAST-NEXT: vptestmd %zmm0, %zmm0, %k2
5946 ; AVX512F-FAST-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
5947 ; AVX512F-FAST-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
5948 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm1, (%rdx)
5949 ; AVX512F-FAST-NEXT: vmovdqa %ymm0, 64(%rdx)
5950 ; AVX512F-FAST-NEXT: vzeroupper
5951 ; AVX512F-FAST-NEXT: retq
5953 ; AVX512DQ-SLOW-LABEL: mask_replication_factor6_vf4:
5954 ; AVX512DQ-SLOW: # %bb.0:
5955 ; AVX512DQ-SLOW-NEXT: kmovw (%rdi), %k0
5956 ; AVX512DQ-SLOW-NEXT: vpmovm2d %k0, %zmm0
5957 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} xmm1 = xmm0[2,2,3,3]
5958 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,1,1,1]
5959 ; AVX512DQ-SLOW-NEXT: vpxor %xmm2, %xmm2, %xmm2
5960 ; AVX512DQ-SLOW-NEXT: movw $255, %ax
5961 ; AVX512DQ-SLOW-NEXT: kmovw %eax, %k1
5962 ; AVX512DQ-SLOW-NEXT: vpcmpgtd %zmm1, %zmm2, %k1 {%k1}
5963 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
5964 ; AVX512DQ-SLOW-NEXT: vpermd %zmm0, %zmm1, %zmm0
5965 ; AVX512DQ-SLOW-NEXT: vpmovd2m %zmm0, %k2
5966 ; AVX512DQ-SLOW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
5967 ; AVX512DQ-SLOW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
5968 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm1, (%rdx)
5969 ; AVX512DQ-SLOW-NEXT: vmovdqa %ymm0, 64(%rdx)
5970 ; AVX512DQ-SLOW-NEXT: vzeroupper
5971 ; AVX512DQ-SLOW-NEXT: retq
5973 ; AVX512DQ-FAST-LABEL: mask_replication_factor6_vf4:
5974 ; AVX512DQ-FAST: # %bb.0:
5975 ; AVX512DQ-FAST-NEXT: kmovw (%rdi), %k0
5976 ; AVX512DQ-FAST-NEXT: vpmovm2d %k0, %zmm0
5977 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [2,2,3,3,3,3,3,3]
5978 ; AVX512DQ-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm1
5979 ; AVX512DQ-FAST-NEXT: vpxor %xmm2, %xmm2, %xmm2
5980 ; AVX512DQ-FAST-NEXT: movw $255, %ax
5981 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
5982 ; AVX512DQ-FAST-NEXT: vpcmpgtd %zmm1, %zmm2, %k1 {%k1}
5983 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
5984 ; AVX512DQ-FAST-NEXT: vpermd %zmm0, %zmm1, %zmm0
5985 ; AVX512DQ-FAST-NEXT: vpmovd2m %zmm0, %k2
5986 ; AVX512DQ-FAST-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
5987 ; AVX512DQ-FAST-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
5988 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm1, (%rdx)
5989 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm0, 64(%rdx)
5990 ; AVX512DQ-FAST-NEXT: vzeroupper
5991 ; AVX512DQ-FAST-NEXT: retq
5993 ; AVX512BW-LABEL: mask_replication_factor6_vf4:
5994 ; AVX512BW: # %bb.0:
5995 ; AVX512BW-NEXT: kmovd (%rdi), %k0
5996 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0
5997 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2,2,2,3,3,3,3,3,3,u,u,u,u,u,u,u,u>
5998 ; AVX512BW-NEXT: vpermw %zmm0, %zmm1, %zmm0
5999 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1
6000 ; AVX512BW-NEXT: movl $16777215, %eax # imm = 0xFFFFFF
6001 ; AVX512BW-NEXT: kmovd %eax, %k1
6002 ; AVX512BW-NEXT: vpcmpgtw %zmm0, %zmm1, %k1 {%k1}
6003 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k2
6004 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k2} {z}
6005 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
6006 ; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rdx)
6007 ; AVX512BW-NEXT: vmovdqa %ymm0, 64(%rdx)
6008 ; AVX512BW-NEXT: vzeroupper
6009 ; AVX512BW-NEXT: retq
6010 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
6011 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
6012 %tgt.mask = shufflevector <4 x i1> %src.mask, <4 x i1> poison, <24 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
6013 %data = call <24 x i32> @llvm.masked.load.v24i32.p0(ptr %in.vec, i32 64, <24 x i1> %tgt.mask, <24 x i32> poison)
6014 %data.padded = shufflevector <24 x i32> %data, <24 x i32> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
6015 store <24 x i32> %data, ptr %out.vec, align 64
6016 ret void
6017 }
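; The factor-6 vf4 case above covers 24 i32 lanes (96 bytes): a full zmm at
; (%rdx) plus a ymm at 64(%rdx). The AVX512BW variant widens the mask to
; <32 x i16> with vpmovm2w, replicates it with a single vpermw, converts it back
; to a k-register with the masked vpcmpgtw (0xFFFFFF keeps the 24 live bits),
; and splits it with kshiftrd $16 so the two masked loads can use %k1 and %k2.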
6019 define void @mask_replication_factor6_vf8(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
6020 ; AVX512F-ONLY-LABEL: mask_replication_factor6_vf8:
6021 ; AVX512F-ONLY: # %bb.0:
6022 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
6023 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
6024 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
6025 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
6026 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
6027 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
6028 ; AVX512F-ONLY-NEXT: movw $1, %ax
6029 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
6030 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
6031 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
6032 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,3,3,3,3,3,3,4,4,4,4,4,4,5,5]
6033 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
6034 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
6035 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7]
6036 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
6037 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k3
6038 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
6039 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm1 {%k3} {z}
6040 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm2 {%k2} {z}
6041 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 64(%rdx)
6042 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 128(%rdx)
6043 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
6044 ; AVX512F-ONLY-NEXT: vzeroupper
6045 ; AVX512F-ONLY-NEXT: retq
6047 ; AVX512DQ-LABEL: mask_replication_factor6_vf8:
6048 ; AVX512DQ: # %bb.0:
6049 ; AVX512DQ-NEXT: kmovb (%rdi), %k0
6050 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
6051 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
6052 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
6053 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k0
6054 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm1
6055 ; AVX512DQ-NEXT: movw $1, %ax
6056 ; AVX512DQ-NEXT: kmovw %eax, %k1
6057 ; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
6058 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
6059 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,3,3,3,3,3,3,4,4,4,4,4,4,5,5]
6060 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
6061 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k2
6062 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7]
6063 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
6064 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k3
6065 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
6066 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm1 {%k3} {z}
6067 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm2 {%k2} {z}
6068 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 64(%rdx)
6069 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 128(%rdx)
6070 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
6071 ; AVX512DQ-NEXT: vzeroupper
6072 ; AVX512DQ-NEXT: retq
6074 ; AVX512BW-LABEL: mask_replication_factor6_vf8:
6075 ; AVX512BW: # %bb.0:
6076 ; AVX512BW-NEXT: kmovw (%rdi), %k1
6077 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
6078 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
6079 ; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm1
6080 ; AVX512BW-NEXT: vptestmd %zmm1, %zmm1, %k1
6081 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
6082 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7]
6083 ; AVX512BW-NEXT: vpermd %zmm0, %zmm2, %zmm2
6084 ; AVX512BW-NEXT: vptestmd %zmm2, %zmm2, %k1
6085 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k1} {z}
6086 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [2,2,3,3,3,3,3,3,4,4,4,4,4,4,5,5]
6087 ; AVX512BW-NEXT: vpermd %zmm0, %zmm3, %zmm0
6088 ; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1
6089 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
6090 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 64(%rdx)
6091 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 128(%rdx)
6092 ; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rdx)
6093 ; AVX512BW-NEXT: vzeroupper
6094 ; AVX512BW-NEXT: retq
6095 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
6096 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
6097 %tgt.mask = shufflevector <8 x i1> %src.mask, <8 x i1> poison, <48 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7>
6098 %data = call <48 x i32> @llvm.masked.load.v48i32.p0(ptr %in.vec, i32 64, <48 x i1> %tgt.mask, <48 x i32> poison)
6099 store <48 x i32> %data, ptr %out.vec, align 64
6100 ret void
6101 }
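; The factor-6 vf8 case above covers 48 i32 lanes, i.e. three full zmm
; loads/stores at offsets 0, 64 and 128. In the AVX512F-ONLY/AVX512DQ variants
; the movw $1 / kmovw / vmovdqa32 %zmm0, %zmm1 {%k1} step merges element 0 of
; the original mask vector back into the permuted copy under a one-bit write
; mask before that copy is converted into %k1 (via vptestmd or vpmovd2m).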
6103 define void @mask_replication_factor6_vf16(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
6104 ; AVX512F-ONLY-LABEL: mask_replication_factor6_vf16:
6105 ; AVX512F-ONLY: # %bb.0:
6106 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
6107 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
6108 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
6109 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
6110 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
6111 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
6112 ; AVX512F-ONLY-NEXT: movw $1, %ax
6113 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
6114 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
6115 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
6116 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,3,3,3,3,3,3,4,4,4,4,4,4,5,5]
6117 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
6118 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
6119 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7]
6120 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
6121 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k3
6122 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [8,8,8,8,8,8,9,9,9,9,9,9,10,10,10,10]
6123 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
6124 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k4
6125 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [10,10,11,11,11,11,11,11,12,12,12,12,12,12,13,13]
6126 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
6127 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k5
6128 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [13,13,13,13,14,14,14,14,14,14,15,15,15,15,15,15]
6129 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
6130 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k6
6131 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
6132 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm1 {%k6} {z}
6133 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm2 {%k5} {z}
6134 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k4} {z}
6135 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm4 {%k3} {z}
6136 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm5 {%k1} {z}
6137 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 64(%rdx)
6138 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 128(%rdx)
6139 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 192(%rdx)
6140 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 256(%rdx)
6141 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 320(%rdx)
6142 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
6143 ; AVX512F-ONLY-NEXT: vzeroupper
6144 ; AVX512F-ONLY-NEXT: retq
6146 ; AVX512DQ-LABEL: mask_replication_factor6_vf16:
6147 ; AVX512DQ: # %bb.0:
6148 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
6149 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
6150 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
6151 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
6152 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k0
6153 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm1
6154 ; AVX512DQ-NEXT: movw $1, %ax
6155 ; AVX512DQ-NEXT: kmovw %eax, %k1
6156 ; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
6157 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k2
6158 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,3,3,3,3,3,3,4,4,4,4,4,4,5,5]
6159 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
6160 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
6161 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7]
6162 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
6163 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k3
6164 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [8,8,8,8,8,8,9,9,9,9,9,9,10,10,10,10]
6165 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
6166 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k4
6167 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [10,10,11,11,11,11,11,11,12,12,12,12,12,12,13,13]
6168 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
6169 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k5
6170 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [13,13,13,13,14,14,14,14,14,14,15,15,15,15,15,15]
6171 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
6172 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k6
6173 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
6174 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm1 {%k6} {z}
6175 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm2 {%k5} {z}
6176 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k4} {z}
6177 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm4 {%k3} {z}
6178 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm5 {%k1} {z}
6179 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 64(%rdx)
6180 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 128(%rdx)
6181 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 192(%rdx)
6182 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 256(%rdx)
6183 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 320(%rdx)
6184 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
6185 ; AVX512DQ-NEXT: vzeroupper
6186 ; AVX512DQ-NEXT: retq
6188 ; AVX512BW-LABEL: mask_replication_factor6_vf16:
6189 ; AVX512BW: # %bb.0:
6190 ; AVX512BW-NEXT: kmovw (%rdi), %k1
6191 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
6192 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
6193 ; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm1
6194 ; AVX512BW-NEXT: vptestmd %zmm1, %zmm1, %k1
6195 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
6196 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [13,13,13,13,14,14,14,14,14,14,15,15,15,15,15,15]
6197 ; AVX512BW-NEXT: vpermd %zmm0, %zmm2, %zmm2
6198 ; AVX512BW-NEXT: vptestmd %zmm2, %zmm2, %k1
6199 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm2 {%k1} {z}
6200 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [10,10,11,11,11,11,11,11,12,12,12,12,12,12,13,13]
6201 ; AVX512BW-NEXT: vpermd %zmm0, %zmm3, %zmm3
6202 ; AVX512BW-NEXT: vptestmd %zmm3, %zmm3, %k1
6203 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm3 {%k1} {z}
6204 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [8,8,8,8,8,8,9,9,9,9,9,9,10,10,10,10]
6205 ; AVX512BW-NEXT: vpermd %zmm0, %zmm4, %zmm4
6206 ; AVX512BW-NEXT: vptestmd %zmm4, %zmm4, %k1
6207 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm4 {%k1} {z}
6208 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7]
6209 ; AVX512BW-NEXT: vpermd %zmm0, %zmm5, %zmm5
6210 ; AVX512BW-NEXT: vptestmd %zmm5, %zmm5, %k1
6211 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm5 {%k1} {z}
6212 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = [2,2,3,3,3,3,3,3,4,4,4,4,4,4,5,5]
6213 ; AVX512BW-NEXT: vpermd %zmm0, %zmm6, %zmm0
6214 ; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1
6215 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
6216 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 64(%rdx)
6217 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 128(%rdx)
6218 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 192(%rdx)
6219 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 256(%rdx)
6220 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 320(%rdx)
6221 ; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rdx)
6222 ; AVX512BW-NEXT: vzeroupper
6223 ; AVX512BW-NEXT: retq
6224 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
6225 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
6226 %tgt.mask = shufflevector <16 x i1> %src.mask, <16 x i1> poison, <96 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15>
6227 %data = call <96 x i32> @llvm.masked.load.v96i32.p0(ptr %in.vec, i32 64, <96 x i1> %tgt.mask, <96 x i32> poison)
6228 store <96 x i32> %data, ptr %out.vec, align 64
6229 ret void
6230 }
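; The factor-6 vf16 case above covers 96 i32 lanes, i.e. six zmm loads/stores at
; offsets 0 through 320. Each of the six vpermd index vectors maps 16 consecutive
; output lanes back to their source mask element, with every source index 0..15
; appearing six times in total across the six vectors.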
6232 define void @mask_replication_factor6_vf32(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
6233 ; AVX512F-ONLY-LABEL: mask_replication_factor6_vf32:
6234 ; AVX512F-ONLY: # %bb.0:
6235 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
6236 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
6237 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
6238 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm2
6239 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
6240 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k1} {z}
6241 ; AVX512F-ONLY-NEXT: movw $1, %ax
6242 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
6243 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
6244 ; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
6245 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k1} {z}
6246 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
6247 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm2 = [2,2,3,3,3,3,3,3,4,4,4,4,4,4,5,5]
6248 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm4
6249 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm5 = [5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7]
6250 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm5, %zmm6
6251 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm7 = [8,8,8,8,8,8,9,9,9,9,9,9,10,10,10,10]
6252 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm7, %zmm8
6253 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm9 = [10,10,11,11,11,11,11,11,12,12,12,12,12,12,13,13]
6254 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm9, %zmm10
6255 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm11 = [13,13,13,13,14,14,14,14,14,14,15,15,15,15,15,15]
6256 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm11, %zmm0
6257 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm1, %zmm1
6258 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm2, %zmm2
6259 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm5, %zmm5
6260 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm7, %zmm7
6261 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm9, %zmm9
6262 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm11, %zmm3
6263 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm11 {%k1} {z}
6264 ; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k1
6265 ; AVX512F-ONLY-NEXT: vmovdqa32 704(%rsi), %zmm3 {%k1} {z}
6266 ; AVX512F-ONLY-NEXT: vptestmd %zmm9, %zmm9, %k1
6267 ; AVX512F-ONLY-NEXT: vmovdqa32 640(%rsi), %zmm9 {%k1} {z}
6268 ; AVX512F-ONLY-NEXT: vptestmd %zmm7, %zmm7, %k1
6269 ; AVX512F-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm7 {%k1} {z}
6270 ; AVX512F-ONLY-NEXT: vptestmd %zmm5, %zmm5, %k1
6271 ; AVX512F-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm5 {%k1} {z}
6272 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
6273 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm2 {%k1} {z}
6274 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
6275 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm1 {%k1} {z}
6276 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
6277 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm0 {%k1} {z}
6278 ; AVX512F-ONLY-NEXT: vptestmd %zmm10, %zmm10, %k1
6279 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm10 {%k1} {z}
6280 ; AVX512F-ONLY-NEXT: vptestmd %zmm8, %zmm8, %k1
6281 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm8 {%k1} {z}
6282 ; AVX512F-ONLY-NEXT: vptestmd %zmm6, %zmm6, %k1
6283 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm6 {%k1} {z}
6284 ; AVX512F-ONLY-NEXT: vptestmd %zmm4, %zmm4, %k1
6285 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm4 {%k1} {z}
6286 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 64(%rdx)
6287 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 128(%rdx)
6288 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm8, 192(%rdx)
6289 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm10, 256(%rdx)
6290 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 320(%rdx)
6291 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 384(%rdx)
6292 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 448(%rdx)
6293 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 512(%rdx)
6294 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 576(%rdx)
6295 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm9, 640(%rdx)
6296 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 704(%rdx)
6297 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm11, (%rdx)
6298 ; AVX512F-ONLY-NEXT: vzeroupper
6299 ; AVX512F-ONLY-NEXT: retq
6301 ; AVX512DQ-LABEL: mask_replication_factor6_vf32:
6302 ; AVX512DQ: # %bb.0:
6303 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
6304 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
6305 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
6306 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm2
6307 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k0
6308 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm2
6309 ; AVX512DQ-NEXT: movw $1, %ax
6310 ; AVX512DQ-NEXT: kmovw %eax, %k1
6311 ; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
6312 ; AVX512DQ-NEXT: kmovw 2(%rdi), %k0
6313 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm3
6314 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
6315 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm2 = [2,2,3,3,3,3,3,3,4,4,4,4,4,4,5,5]
6316 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm4
6317 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm5 = [5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7]
6318 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm5, %zmm6
6319 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm7 = [8,8,8,8,8,8,9,9,9,9,9,9,10,10,10,10]
6320 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm7, %zmm8
6321 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm9 = [10,10,11,11,11,11,11,11,12,12,12,12,12,12,13,13]
6322 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm9, %zmm10
6323 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm11 = [13,13,13,13,14,14,14,14,14,14,15,15,15,15,15,15]
6324 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm11, %zmm0
6325 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm1, %zmm1
6326 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm2, %zmm2
6327 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm5, %zmm5
6328 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm7, %zmm7
6329 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm9, %zmm9
6330 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm11, %zmm3
6331 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm11 {%k1} {z}
6332 ; AVX512DQ-NEXT: vpmovd2m %zmm3, %k1
6333 ; AVX512DQ-NEXT: vmovdqa32 704(%rsi), %zmm3 {%k1} {z}
6334 ; AVX512DQ-NEXT: vpmovd2m %zmm9, %k1
6335 ; AVX512DQ-NEXT: vmovdqa32 640(%rsi), %zmm9 {%k1} {z}
6336 ; AVX512DQ-NEXT: vpmovd2m %zmm7, %k1
6337 ; AVX512DQ-NEXT: vmovdqa32 576(%rsi), %zmm7 {%k1} {z}
6338 ; AVX512DQ-NEXT: vpmovd2m %zmm5, %k1
6339 ; AVX512DQ-NEXT: vmovdqa32 512(%rsi), %zmm5 {%k1} {z}
6340 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
6341 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm2 {%k1} {z}
6342 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
6343 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm1 {%k1} {z}
6344 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
6345 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm0 {%k1} {z}
6346 ; AVX512DQ-NEXT: vpmovd2m %zmm10, %k1
6347 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm10 {%k1} {z}
6348 ; AVX512DQ-NEXT: vpmovd2m %zmm8, %k1
6349 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm8 {%k1} {z}
6350 ; AVX512DQ-NEXT: vpmovd2m %zmm6, %k1
6351 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm6 {%k1} {z}
6352 ; AVX512DQ-NEXT: vpmovd2m %zmm4, %k1
6353 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm4 {%k1} {z}
6354 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 64(%rdx)
6355 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 128(%rdx)
6356 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 192(%rdx)
6357 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 256(%rdx)
6358 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 320(%rdx)
6359 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 384(%rdx)
6360 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 448(%rdx)
6361 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 512(%rdx)
6362 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 576(%rdx)
6363 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 640(%rdx)
6364 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 704(%rdx)
6365 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, (%rdx)
6366 ; AVX512DQ-NEXT: vzeroupper
6367 ; AVX512DQ-NEXT: retq
6369 ; AVX512BW-LABEL: mask_replication_factor6_vf32:
6370 ; AVX512BW: # %bb.0:
6371 ; AVX512BW-NEXT: kmovd (%rdi), %k5
6372 ; AVX512BW-NEXT: movw $-3, %ax
6373 ; AVX512BW-NEXT: kmovd %eax, %k0
6374 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6375 ; AVX512BW-NEXT: kmovw (%rdi), %k1
6376 ; AVX512BW-NEXT: kandw %k0, %k1, %k2
6377 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
6378 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k3
6379 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6380 ; AVX512BW-NEXT: movw $-5, %ax
6381 ; AVX512BW-NEXT: kmovd %eax, %k0
6382 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6383 ; AVX512BW-NEXT: kandw %k0, %k2, %k2
6384 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k3
6385 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6386 ; AVX512BW-NEXT: movw $-9, %ax
6387 ; AVX512BW-NEXT: kmovd %eax, %k0
6388 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6389 ; AVX512BW-NEXT: kandw %k0, %k2, %k2
6390 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k3
6391 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6392 ; AVX512BW-NEXT: movw $-17, %ax
6393 ; AVX512BW-NEXT: kmovd %eax, %k7
6394 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
6395 ; AVX512BW-NEXT: kmovw %k7, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6396 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k3
6397 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6398 ; AVX512BW-NEXT: movw $-33, %ax
6399 ; AVX512BW-NEXT: kmovd %eax, %k0
6400 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6401 ; AVX512BW-NEXT: kandw %k0, %k2, %k2
6402 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k1
6403 ; AVX512BW-NEXT: korw %k1, %k2, %k1
6404 ; AVX512BW-NEXT: movw $-65, %ax
6405 ; AVX512BW-NEXT: kmovd %eax, %k0
6406 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6407 ; AVX512BW-NEXT: kandw %k0, %k1, %k2
6408 ; AVX512BW-NEXT: kshiftrd $1, %k5, %k1
6409 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
6410 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k3
6411 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6412 ; AVX512BW-NEXT: movw $-129, %ax
6413 ; AVX512BW-NEXT: kmovd %eax, %k0
6414 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6415 ; AVX512BW-NEXT: kandw %k0, %k2, %k2
6416 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k3
6417 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6418 ; AVX512BW-NEXT: movw $-257, %ax # imm = 0xFEFF
6419 ; AVX512BW-NEXT: kmovd %eax, %k0
6420 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6421 ; AVX512BW-NEXT: kandw %k0, %k2, %k2
6422 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k3
6423 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6424 ; AVX512BW-NEXT: movw $-513, %ax # imm = 0xFDFF
6425 ; AVX512BW-NEXT: kmovd %eax, %k0
6426 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6427 ; AVX512BW-NEXT: kandw %k0, %k2, %k2
6428 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k3
6429 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6430 ; AVX512BW-NEXT: movw $-1025, %ax # imm = 0xFBFF
6431 ; AVX512BW-NEXT: kmovd %eax, %k6
6432 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
6433 ; AVX512BW-NEXT: kmovw %k6, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6434 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k3
6435 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6436 ; AVX512BW-NEXT: movw $-2049, %ax # imm = 0xF7FF
6437 ; AVX512BW-NEXT: kmovd %eax, %k0
6438 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6439 ; AVX512BW-NEXT: kandw %k0, %k2, %k2
6440 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
6441 ; AVX512BW-NEXT: korw %k1, %k2, %k1
6442 ; AVX512BW-NEXT: movw $-4097, %ax # imm = 0xEFFF
6443 ; AVX512BW-NEXT: kmovd %eax, %k0
6444 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6445 ; AVX512BW-NEXT: kandw %k0, %k1, %k1
6446 ; AVX512BW-NEXT: kshiftrd $2, %k5, %k2
6447 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k3
6448 ; AVX512BW-NEXT: kmovq %k2, %k4
6449 ; AVX512BW-NEXT: kmovd %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
6450 ; AVX512BW-NEXT: kshiftrw $3, %k3, %k2
6451 ; AVX512BW-NEXT: korw %k2, %k1, %k1
6452 ; AVX512BW-NEXT: movw $-8193, %ax # imm = 0xDFFF
6453 ; AVX512BW-NEXT: kmovd %eax, %k2
6454 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6455 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
6456 ; AVX512BW-NEXT: kshiftrw $2, %k3, %k2
6457 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6458 ; AVX512BW-NEXT: korw %k2, %k1, %k1
6459 ; AVX512BW-NEXT: movw $-16385, %ax # imm = 0xBFFF
6460 ; AVX512BW-NEXT: kmovd %eax, %k2
6461 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6462 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
6463 ; AVX512BW-NEXT: kshiftlw $14, %k4, %k2
6464 ; AVX512BW-NEXT: korw %k2, %k1, %k1
6465 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
6466 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
6467 ; AVX512BW-NEXT: korw %k3, %k1, %k1
6468 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
6469 ; AVX512BW-NEXT: kshiftrd $29, %k5, %k0
6470 ; AVX512BW-NEXT: kmovd %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
6471 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6472 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
6473 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
6474 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k4
6475 ; AVX512BW-NEXT: korw %k4, %k1, %k1
6476 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
6477 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
6478 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k4
6479 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6480 ; AVX512BW-NEXT: korw %k4, %k1, %k1
6481 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6482 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
6483 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k4
6484 ; AVX512BW-NEXT: korw %k4, %k1, %k1
6485 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
6486 ; AVX512BW-NEXT: kshiftrd $30, %k5, %k4
6487 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
6488 ; AVX512BW-NEXT: kshiftrw $11, %k4, %k7
6489 ; AVX512BW-NEXT: korw %k7, %k1, %k1
6490 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6491 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
6492 ; AVX512BW-NEXT: kshiftrw $10, %k4, %k7
6493 ; AVX512BW-NEXT: korw %k7, %k1, %k1
6494 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6495 ; AVX512BW-NEXT: kandw %k0, %k1, %k1
6496 ; AVX512BW-NEXT: kshiftrw $9, %k4, %k7
6497 ; AVX512BW-NEXT: korw %k7, %k1, %k1
6498 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6499 ; AVX512BW-NEXT: kandw %k0, %k1, %k1
6500 ; AVX512BW-NEXT: kshiftrw $8, %k4, %k7
6501 ; AVX512BW-NEXT: korw %k7, %k1, %k1
6502 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6503 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
6504 ; AVX512BW-NEXT: kshiftrw $7, %k4, %k7
6505 ; AVX512BW-NEXT: korw %k7, %k1, %k1
6506 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6507 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
6508 ; AVX512BW-NEXT: kshiftrw $6, %k4, %k4
6509 ; AVX512BW-NEXT: korw %k4, %k1, %k1
6510 ; AVX512BW-NEXT: kandw %k6, %k1, %k4
6511 ; AVX512BW-NEXT: kshiftrd $31, %k5, %k7
6512 ; AVX512BW-NEXT: kshiftlw $15, %k7, %k1
6513 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
6514 ; AVX512BW-NEXT: korw %k6, %k4, %k4
6515 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6516 ; AVX512BW-NEXT: kandw %k0, %k4, %k4
6517 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
6518 ; AVX512BW-NEXT: korw %k6, %k4, %k4
6519 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6520 ; AVX512BW-NEXT: kandw %k0, %k4, %k4
6521 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
6522 ; AVX512BW-NEXT: korw %k6, %k4, %k4
6523 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
6524 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
6525 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k6
6526 ; AVX512BW-NEXT: korw %k6, %k4, %k4
6527 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
6528 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
6529 ; AVX512BW-NEXT: kshiftlw $14, %k7, %k6
6530 ; AVX512BW-NEXT: korw %k6, %k4, %k4
6531 ; AVX512BW-NEXT: kshiftlw $1, %k4, %k4
6532 ; AVX512BW-NEXT: kshiftrw $1, %k4, %k4
6533 ; AVX512BW-NEXT: korw %k1, %k4, %k1
6534 ; AVX512BW-NEXT: vmovdqa32 704(%rsi), %zmm1 {%k1} {z}
6535 ; AVX512BW-NEXT: kshiftrd $26, %k5, %k4
6536 ; AVX512BW-NEXT: kmovd %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
6537 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6538 ; AVX512BW-NEXT: kandw %k1, %k4, %k6
6539 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k1
6540 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6541 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k7
6542 ; AVX512BW-NEXT: korw %k7, %k6, %k6
6543 ; AVX512BW-NEXT: kandw %k2, %k6, %k6
6544 ; AVX512BW-NEXT: kshiftrd $27, %k5, %k7
6545 ; AVX512BW-NEXT: kmovq %k5, %k2
6546 ; AVX512BW-NEXT: kmovd %k5, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
6547 ; AVX512BW-NEXT: kshiftlw $15, %k7, %k7
6548 ; AVX512BW-NEXT: kshiftrw $13, %k7, %k5
6549 ; AVX512BW-NEXT: korw %k5, %k6, %k5
6550 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6551 ; AVX512BW-NEXT: kandw %k4, %k5, %k5
6552 ; AVX512BW-NEXT: kshiftrw $12, %k7, %k6
6553 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6554 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6555 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6556 ; AVX512BW-NEXT: kshiftrw $11, %k7, %k6
6557 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6558 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6559 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6560 ; AVX512BW-NEXT: kshiftrw $10, %k7, %k6
6561 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6562 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6563 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6564 ; AVX512BW-NEXT: kshiftrw $9, %k7, %k6
6565 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6566 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6567 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6568 ; AVX512BW-NEXT: kshiftrw $8, %k7, %k6
6569 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6570 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6571 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6572 ; AVX512BW-NEXT: kshiftrd $28, %k2, %k6
6573 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
6574 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
6575 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6576 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
6577 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
6578 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6579 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6580 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6581 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
6582 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6583 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6584 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6585 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
6586 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6587 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
6588 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
6589 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6590 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6591 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6592 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
6593 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6594 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
6595 ; AVX512BW-NEXT: kandw %k7, %k5, %k5
6596 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 4-byte Reload
6597 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k3
6598 ; AVX512BW-NEXT: korw %k3, %k5, %k3
6599 ; AVX512BW-NEXT: kshiftlw $1, %k3, %k3
6600 ; AVX512BW-NEXT: kshiftrw $1, %k3, %k3
6601 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
6602 ; AVX512BW-NEXT: korw %k2, %k3, %k2
6603 ; AVX512BW-NEXT: vmovdqa32 640(%rsi), %zmm2 {%k2} {z}
6604 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 4-byte Reload
6605 ; AVX512BW-NEXT: kshiftrd $24, %k0, %k2
6606 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6607 ; AVX512BW-NEXT: kandw %k3, %k2, %k3
6608 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
6609 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k5
6610 ; AVX512BW-NEXT: korw %k5, %k3, %k3
6611 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
6612 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
6613 ; AVX512BW-NEXT: kshiftrw $13, %k2, %k5
6614 ; AVX512BW-NEXT: korw %k5, %k3, %k3
6615 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
6616 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k5
6617 ; AVX512BW-NEXT: korw %k5, %k3, %k3
6618 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
6619 ; AVX512BW-NEXT: kandw %k6, %k3, %k3
6620 ; AVX512BW-NEXT: kshiftrw $11, %k2, %k5
6621 ; AVX512BW-NEXT: korw %k5, %k3, %k3
6622 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6623 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
6624 ; AVX512BW-NEXT: kshiftrw $10, %k2, %k2
6625 ; AVX512BW-NEXT: korw %k2, %k3, %k2
6626 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6627 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
6628 ; AVX512BW-NEXT: kshiftrd $25, %k0, %k3
6629 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
6630 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k5
6631 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6632 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6633 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
6634 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k5
6635 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6636 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6637 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
6638 ; AVX512BW-NEXT: kshiftrw $7, %k3, %k5
6639 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6640 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6641 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
6642 ; AVX512BW-NEXT: kshiftrw $6, %k3, %k5
6643 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6644 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6645 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
6646 ; AVX512BW-NEXT: kshiftrw $5, %k3, %k5
6647 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6648 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6649 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
6650 ; AVX512BW-NEXT: kshiftrw $4, %k3, %k3
6651 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6652 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6653 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
6654 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6655 ; AVX512BW-NEXT: kshiftrw $3, %k4, %k3
6656 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6657 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
6658 ; AVX512BW-NEXT: kshiftrw $2, %k4, %k3
6659 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6660 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
6661 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 4-byte Reload
6662 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k3
6663 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6664 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
6665 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
6666 ; AVX512BW-NEXT: korw %k4, %k2, %k1
6667 ; AVX512BW-NEXT: vmovdqa32 576(%rsi), %zmm3 {%k1} {z}
6668 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
6669 ; AVX512BW-NEXT: kshiftrd $21, %k1, %k2
6670 ; AVX512BW-NEXT: kmovd %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
6671 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6672 ; AVX512BW-NEXT: kandw %k0, %k2, %k3
6673 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
6674 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6675 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k4
6676 ; AVX512BW-NEXT: korw %k4, %k3, %k3
6677 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6678 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
6679 ; AVX512BW-NEXT: kshiftrw $13, %k2, %k4
6680 ; AVX512BW-NEXT: korw %k4, %k3, %k3
6681 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6682 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
6683 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k4
6684 ; AVX512BW-NEXT: korw %k4, %k3, %k3
6685 ; AVX512BW-NEXT: kandw %k6, %k3, %k3
6686 ; AVX512BW-NEXT: kshiftrd $22, %k1, %k4
6687 ; AVX512BW-NEXT: kmovq %k1, %k7
6688 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
6689 ; AVX512BW-NEXT: kshiftrw $11, %k4, %k5
6690 ; AVX512BW-NEXT: korw %k5, %k3, %k3
6691 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6692 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
6693 ; AVX512BW-NEXT: kshiftrw $10, %k4, %k5
6694 ; AVX512BW-NEXT: korw %k5, %k3, %k3
6695 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6696 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
6697 ; AVX512BW-NEXT: kshiftrw $9, %k4, %k5
6698 ; AVX512BW-NEXT: korw %k5, %k3, %k3
6699 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6700 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
6701 ; AVX512BW-NEXT: kshiftrw $8, %k4, %k5
6702 ; AVX512BW-NEXT: korw %k5, %k3, %k3
6703 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
6704 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
6705 ; AVX512BW-NEXT: kshiftrw $7, %k4, %k5
6706 ; AVX512BW-NEXT: korw %k5, %k3, %k3
6707 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
6708 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
6709 ; AVX512BW-NEXT: kshiftrw $6, %k4, %k4
6710 ; AVX512BW-NEXT: korw %k4, %k3, %k3
6711 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6712 ; AVX512BW-NEXT: kandw %k4, %k3, %k4
6713 ; AVX512BW-NEXT: kshiftrd $23, %k7, %k5
6714 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k3
6715 ; AVX512BW-NEXT: kshiftrw $5, %k3, %k6
6716 ; AVX512BW-NEXT: korw %k6, %k4, %k4
6717 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
6718 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
6719 ; AVX512BW-NEXT: kshiftrw $4, %k3, %k6
6720 ; AVX512BW-NEXT: korw %k6, %k4, %k4
6721 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
6722 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
6723 ; AVX512BW-NEXT: kshiftrw $3, %k3, %k6
6724 ; AVX512BW-NEXT: korw %k6, %k4, %k4
6725 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
6726 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
6727 ; AVX512BW-NEXT: kshiftrw $2, %k3, %k6
6728 ; AVX512BW-NEXT: korw %k6, %k4, %k4
6729 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
6730 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
6731 ; AVX512BW-NEXT: kshiftlw $14, %k5, %k5
6732 ; AVX512BW-NEXT: korw %k5, %k4, %k4
6733 ; AVX512BW-NEXT: kshiftlw $1, %k4, %k4
6734 ; AVX512BW-NEXT: kshiftrw $1, %k4, %k4
6735 ; AVX512BW-NEXT: korw %k3, %k4, %k3
6736 ; AVX512BW-NEXT: vmovdqa32 512(%rsi), %zmm4 {%k3} {z}
6737 ; AVX512BW-NEXT: kmovq %k7, %k4
6738 ; AVX512BW-NEXT: kshiftrd $18, %k7, %k6
6739 ; AVX512BW-NEXT: kmovd %k6, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
6740 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6741 ; AVX512BW-NEXT: kandw %k3, %k6, %k5
6742 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k3
6743 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6744 ; AVX512BW-NEXT: kshiftrw $14, %k3, %k6
6745 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6746 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6747 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
6748 ; AVX512BW-NEXT: kshiftrd $19, %k7, %k6
6749 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
6750 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k7
6751 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6752 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6753 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
6754 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k7
6755 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6756 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6757 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
6758 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k7
6759 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6760 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6761 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
6762 ; AVX512BW-NEXT: kshiftrw $10, %k6, %k7
6763 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6764 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
6765 ; AVX512BW-NEXT: kshiftrw $9, %k6, %k7
6766 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6767 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6768 ; AVX512BW-NEXT: kshiftrw $8, %k6, %k6
6769 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6770 ; AVX512BW-NEXT: kandw %k2, %k5, %k5
6771 ; AVX512BW-NEXT: kshiftrd $20, %k4, %k6
6772 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
6773 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
6774 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6775 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6776 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
6777 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
6778 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6779 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6780 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
6781 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
6782 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6783 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6784 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
6785 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
6786 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6787 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6788 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6789 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
6790 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6791 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6792 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6793 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
6794 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6795 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6796 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6797 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
6798 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k2
6799 ; AVX512BW-NEXT: korw %k2, %k5, %k2
6800 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
6801 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
6802 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6803 ; AVX512BW-NEXT: korw %k0, %k2, %k1
6804 ; AVX512BW-NEXT: vmovdqa32 448(%rsi), %zmm5 {%k1} {z}
6805 ; AVX512BW-NEXT: kmovq %k4, %k0
6806 ; AVX512BW-NEXT: kshiftrd $16, %k4, %k1
6807 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
6808 ; AVX512BW-NEXT: kandw %k2, %k1, %k2
6809 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
6810 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k5
6811 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6812 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
6813 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
6814 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k5
6815 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6816 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
6817 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
6818 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k5
6819 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6820 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
6821 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
6822 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k5
6823 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6824 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6825 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
6826 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k1
6827 ; AVX512BW-NEXT: korw %k1, %k2, %k1
6828 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6829 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
6830 ; AVX512BW-NEXT: kshiftrd $17, %k0, %k2
6831 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
6832 ; AVX512BW-NEXT: kshiftrw $9, %k2, %k5
6833 ; AVX512BW-NEXT: korw %k5, %k1, %k1
6834 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
6835 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
6836 ; AVX512BW-NEXT: kshiftrw $8, %k2, %k5
6837 ; AVX512BW-NEXT: korw %k5, %k1, %k1
6838 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
6839 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
6840 ; AVX512BW-NEXT: kshiftrw $7, %k2, %k5
6841 ; AVX512BW-NEXT: korw %k5, %k1, %k1
6842 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
6843 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
6844 ; AVX512BW-NEXT: kshiftrw $6, %k2, %k5
6845 ; AVX512BW-NEXT: korw %k5, %k1, %k1
6846 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6847 ; AVX512BW-NEXT: kandw %k0, %k1, %k1
6848 ; AVX512BW-NEXT: kshiftrw $5, %k2, %k5
6849 ; AVX512BW-NEXT: korw %k5, %k1, %k1
6850 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
6851 ; AVX512BW-NEXT: kshiftrw $4, %k2, %k2
6852 ; AVX512BW-NEXT: korw %k2, %k1, %k1
6853 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6854 ; AVX512BW-NEXT: kandw %k0, %k1, %k1
6855 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6856 ; AVX512BW-NEXT: kshiftrw $3, %k3, %k2
6857 ; AVX512BW-NEXT: korw %k2, %k1, %k1
6858 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6859 ; AVX512BW-NEXT: kandw %k0, %k1, %k1
6860 ; AVX512BW-NEXT: kshiftrw $2, %k3, %k2
6861 ; AVX512BW-NEXT: korw %k2, %k1, %k1
6862 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6863 ; AVX512BW-NEXT: kandw %k0, %k1, %k1
6864 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 4-byte Reload
6865 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k2
6866 ; AVX512BW-NEXT: korw %k2, %k1, %k1
6867 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
6868 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
6869 ; AVX512BW-NEXT: korw %k3, %k1, %k1
6870 ; AVX512BW-NEXT: vmovdqa32 384(%rsi), %zmm6 {%k1} {z}
6871 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 4-byte Reload
6872 ; AVX512BW-NEXT: kshiftrd $13, %k0, %k3
6873 ; AVX512BW-NEXT: kmovd %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
6874 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6875 ; AVX512BW-NEXT: kandw %k1, %k3, %k2
6876 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k5
6877 ; AVX512BW-NEXT: kmovw %k5, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6878 ; AVX512BW-NEXT: kshiftrw $14, %k5, %k3
6879 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6880 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
6881 ; AVX512BW-NEXT: kshiftrw $13, %k5, %k3
6882 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6883 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
6884 ; AVX512BW-NEXT: kshiftrw $12, %k5, %k3
6885 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6886 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6887 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
6888 ; AVX512BW-NEXT: kshiftrd $14, %k0, %k3
6889 ; AVX512BW-NEXT: kmovq %k0, %k7
6890 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k3
6891 ; AVX512BW-NEXT: kshiftrw $11, %k3, %k5
6892 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6893 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6894 ; AVX512BW-NEXT: kandw %k0, %k2, %k2
6895 ; AVX512BW-NEXT: kshiftrw $10, %k3, %k5
6896 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6897 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
6898 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k5
6899 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6900 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6901 ; AVX512BW-NEXT: kandw %k0, %k2, %k2
6902 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k5
6903 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6904 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
6905 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
6906 ; AVX512BW-NEXT: kshiftrw $7, %k3, %k5
6907 ; AVX512BW-NEXT: korw %k5, %k2, %k2
6908 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6909 ; AVX512BW-NEXT: kandw %k0, %k2, %k2
6910 ; AVX512BW-NEXT: kshiftrw $6, %k3, %k3
6911 ; AVX512BW-NEXT: korw %k3, %k2, %k2
6912 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6913 ; AVX512BW-NEXT: kandw %k0, %k2, %k3
6914 ; AVX512BW-NEXT: kshiftrd $15, %k7, %k5
6915 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k2
6916 ; AVX512BW-NEXT: kshiftrw $5, %k2, %k6
6917 ; AVX512BW-NEXT: korw %k6, %k3, %k3
6918 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6919 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
6920 ; AVX512BW-NEXT: kshiftrw $4, %k2, %k6
6921 ; AVX512BW-NEXT: korw %k6, %k3, %k3
6922 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6923 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
6924 ; AVX512BW-NEXT: kshiftrw $3, %k2, %k6
6925 ; AVX512BW-NEXT: korw %k6, %k3, %k3
6926 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6927 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
6928 ; AVX512BW-NEXT: kshiftrw $2, %k2, %k6
6929 ; AVX512BW-NEXT: korw %k6, %k3, %k3
6930 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6931 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
6932 ; AVX512BW-NEXT: kshiftlw $14, %k5, %k5
6933 ; AVX512BW-NEXT: korw %k5, %k3, %k3
6934 ; AVX512BW-NEXT: kshiftlw $1, %k3, %k3
6935 ; AVX512BW-NEXT: kshiftrw $1, %k3, %k3
6936 ; AVX512BW-NEXT: korw %k2, %k3, %k2
6937 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm7 {%k2} {z}
6938 ; AVX512BW-NEXT: kmovq %k7, %k2
6939 ; AVX512BW-NEXT: kshiftrd $10, %k7, %k0
6940 ; AVX512BW-NEXT: kmovd %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
6941 ; AVX512BW-NEXT: kandw %k1, %k0, %k5
6942 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
6943 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
6944 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
6945 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6946 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6947 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
6948 ; AVX512BW-NEXT: kshiftrd $11, %k7, %k6
6949 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
6950 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k7
6951 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6952 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6953 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
6954 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k7
6955 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6956 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
6957 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
6958 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k7
6959 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6960 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6961 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6962 ; AVX512BW-NEXT: kshiftrw $10, %k6, %k7
6963 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6964 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6965 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6966 ; AVX512BW-NEXT: kshiftrw $9, %k6, %k7
6967 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6968 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6969 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6970 ; AVX512BW-NEXT: kshiftrw $8, %k6, %k6
6971 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6972 ; AVX512BW-NEXT: kandw %k4, %k5, %k5
6973 ; AVX512BW-NEXT: kshiftrd $12, %k2, %k6
6974 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k6
6975 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
6976 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6977 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6978 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6979 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
6980 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6981 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6982 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6983 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
6984 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6985 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6986 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6987 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
6988 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6989 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
6990 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
6991 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
6992 ; AVX512BW-NEXT: korw %k7, %k5, %k5
6993 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
6994 ; AVX512BW-NEXT: kandw %k7, %k5, %k5
6995 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
6996 ; AVX512BW-NEXT: korw %k6, %k5, %k5
6997 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
6998 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
6999 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
7000 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k4
7001 ; AVX512BW-NEXT: korw %k4, %k5, %k4
7002 ; AVX512BW-NEXT: kshiftlw $1, %k4, %k4
7003 ; AVX512BW-NEXT: kshiftrw $1, %k4, %k4
7004 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7005 ; AVX512BW-NEXT: korw %k1, %k4, %k1
7006 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm8 {%k1} {z}
7007 ; AVX512BW-NEXT: kshiftrd $8, %k2, %k1
7008 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7009 ; AVX512BW-NEXT: kandw %k6, %k1, %k4
7010 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7011 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k5
7012 ; AVX512BW-NEXT: korw %k5, %k4, %k4
7013 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7014 ; AVX512BW-NEXT: kandw %k5, %k4, %k4
7015 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k5
7016 ; AVX512BW-NEXT: korw %k5, %k4, %k4
7017 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7018 ; AVX512BW-NEXT: kandw %k5, %k4, %k4
7019 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k5
7020 ; AVX512BW-NEXT: korw %k5, %k4, %k4
7021 ; AVX512BW-NEXT: kandw %k0, %k4, %k4
7022 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k5
7023 ; AVX512BW-NEXT: korw %k5, %k4, %k4
7024 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
7025 ; AVX512BW-NEXT: kandw %k0, %k4, %k4
7026 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k1
7027 ; AVX512BW-NEXT: korw %k1, %k4, %k1
7028 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
7029 ; AVX512BW-NEXT: kandw %k0, %k1, %k1
7030 ; AVX512BW-NEXT: kshiftrd $9, %k2, %k4
7031 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
7032 ; AVX512BW-NEXT: kshiftrw $9, %k4, %k5
7033 ; AVX512BW-NEXT: korw %k5, %k1, %k1
7034 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
7035 ; AVX512BW-NEXT: kandw %k0, %k1, %k1
7036 ; AVX512BW-NEXT: kshiftrw $8, %k4, %k5
7037 ; AVX512BW-NEXT: korw %k5, %k1, %k1
7038 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
7039 ; AVX512BW-NEXT: kandw %k0, %k1, %k1
7040 ; AVX512BW-NEXT: kshiftrw $7, %k4, %k5
7041 ; AVX512BW-NEXT: korw %k5, %k1, %k1
7042 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7043 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
7044 ; AVX512BW-NEXT: kshiftrw $6, %k4, %k5
7045 ; AVX512BW-NEXT: korw %k5, %k1, %k1
7046 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7047 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
7048 ; AVX512BW-NEXT: kshiftrw $5, %k4, %k5
7049 ; AVX512BW-NEXT: korw %k5, %k1, %k1
7050 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7051 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
7052 ; AVX512BW-NEXT: kshiftrw $4, %k4, %k4
7053 ; AVX512BW-NEXT: korw %k4, %k1, %k1
7054 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7055 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
7056 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7057 ; AVX512BW-NEXT: kshiftrw $3, %k2, %k4
7058 ; AVX512BW-NEXT: korw %k4, %k1, %k1
7059 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
7060 ; AVX512BW-NEXT: kshiftrw $2, %k2, %k4
7061 ; AVX512BW-NEXT: kmovq %k2, %k5
7062 ; AVX512BW-NEXT: korw %k4, %k1, %k1
7063 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
7064 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 4-byte Reload
7065 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k2
7066 ; AVX512BW-NEXT: korw %k2, %k1, %k1
7067 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
7068 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
7069 ; AVX512BW-NEXT: korw %k5, %k1, %k1
7070 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm9 {%k1} {z}
7071 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
7072 ; AVX512BW-NEXT: kshiftrd $5, %k1, %k2
7073 ; AVX512BW-NEXT: kmovd %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
7074 ; AVX512BW-NEXT: kandw %k6, %k2, %k3
7075 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k7
7076 ; AVX512BW-NEXT: kshiftrw $14, %k7, %k4
7077 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7078 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7079 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
7080 ; AVX512BW-NEXT: kshiftrw $13, %k7, %k4
7081 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7082 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7083 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
7084 ; AVX512BW-NEXT: kshiftrw $12, %k7, %k4
7085 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7086 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7087 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
7088 ; AVX512BW-NEXT: kshiftrd $6, %k1, %k4
7089 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
7090 ; AVX512BW-NEXT: kshiftrw $11, %k4, %k5
7091 ; AVX512BW-NEXT: korw %k5, %k3, %k3
7092 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7093 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
7094 ; AVX512BW-NEXT: kshiftrw $10, %k4, %k5
7095 ; AVX512BW-NEXT: korw %k5, %k3, %k3
7096 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7097 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
7098 ; AVX512BW-NEXT: kshiftrw $9, %k4, %k5
7099 ; AVX512BW-NEXT: korw %k5, %k3, %k3
7100 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7101 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
7102 ; AVX512BW-NEXT: kshiftrw $8, %k4, %k5
7103 ; AVX512BW-NEXT: korw %k5, %k3, %k3
7104 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
7105 ; AVX512BW-NEXT: kshiftrw $7, %k4, %k5
7106 ; AVX512BW-NEXT: korw %k5, %k3, %k3
7107 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7108 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
7109 ; AVX512BW-NEXT: kshiftrw $6, %k4, %k4
7110 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7111 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
7112 ; AVX512BW-NEXT: kandw %k4, %k3, %k4
7113 ; AVX512BW-NEXT: kshiftrd $7, %k1, %k5
7114 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k3
7115 ; AVX512BW-NEXT: kshiftrw $5, %k3, %k6
7116 ; AVX512BW-NEXT: korw %k6, %k4, %k4
7117 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7118 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
7119 ; AVX512BW-NEXT: kshiftrw $4, %k3, %k6
7120 ; AVX512BW-NEXT: korw %k6, %k4, %k4
7121 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7122 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
7123 ; AVX512BW-NEXT: kshiftrw $3, %k3, %k6
7124 ; AVX512BW-NEXT: korw %k6, %k4, %k4
7125 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7126 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
7127 ; AVX512BW-NEXT: kshiftrw $2, %k3, %k6
7128 ; AVX512BW-NEXT: korw %k6, %k4, %k4
7129 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7130 ; AVX512BW-NEXT: kandw %k6, %k4, %k4
7131 ; AVX512BW-NEXT: kshiftlw $14, %k5, %k5
7132 ; AVX512BW-NEXT: korw %k5, %k4, %k4
7133 ; AVX512BW-NEXT: kshiftlw $1, %k4, %k4
7134 ; AVX512BW-NEXT: kshiftrw $1, %k4, %k4
7135 ; AVX512BW-NEXT: korw %k3, %k4, %k3
7136 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm10 {%k3} {z}
7137 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 4-byte Reload
7138 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
7139 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
7140 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
7141 ; AVX512BW-NEXT: kshiftrw $14, %k4, %k4
7142 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7143 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
7144 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
7145 ; AVX512BW-NEXT: kshiftrd $3, %k1, %k4
7146 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
7147 ; AVX512BW-NEXT: kshiftrw $13, %k4, %k5
7148 ; AVX512BW-NEXT: korw %k5, %k3, %k3
7149 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7150 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
7151 ; AVX512BW-NEXT: kshiftrw $12, %k4, %k5
7152 ; AVX512BW-NEXT: korw %k5, %k3, %k3
7153 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7154 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
7155 ; AVX512BW-NEXT: kshiftrw $11, %k4, %k5
7156 ; AVX512BW-NEXT: korw %k5, %k3, %k3
7157 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7158 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
7159 ; AVX512BW-NEXT: kshiftrw $10, %k4, %k5
7160 ; AVX512BW-NEXT: korw %k5, %k3, %k3
7161 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7162 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
7163 ; AVX512BW-NEXT: kshiftrw $9, %k4, %k5
7164 ; AVX512BW-NEXT: korw %k5, %k3, %k3
7165 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
7166 ; AVX512BW-NEXT: kshiftrw $8, %k4, %k4
7167 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7168 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
7169 ; AVX512BW-NEXT: kshiftrd $4, %k1, %k0
7170 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
7171 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k4
7172 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7173 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7174 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
7175 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k4
7176 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7177 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7178 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
7179 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k4
7180 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7181 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7182 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
7183 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k4
7184 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7185 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7186 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
7187 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k4
7188 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7189 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7190 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
7191 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k0
7192 ; AVX512BW-NEXT: korw %k0, %k3, %k0
7193 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7194 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
7195 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k2
7196 ; AVX512BW-NEXT: korw %k2, %k0, %k0
7197 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
7198 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
7199 ; AVX512BW-NEXT: korw %k7, %k0, %k1
7200 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm11 {%k1} {z}
7201 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 64(%rdx)
7202 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 128(%rdx)
7203 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 192(%rdx)
7204 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 256(%rdx)
7205 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 320(%rdx)
7206 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 384(%rdx)
7207 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 448(%rdx)
7208 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 512(%rdx)
7209 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 576(%rdx)
7210 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 640(%rdx)
7211 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 704(%rdx)
7212 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
7213 ; AVX512BW-NEXT: vzeroupper
7214 ; AVX512BW-NEXT: retq
7215 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
7216 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
7217 %tgt.mask = shufflevector <32 x i1> %src.mask, <32 x i1> poison, <192 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31>
7218 %data = call <192 x i32> @llvm.masked.load.v192i32.p0(ptr %in.vec, i32 64, <192 x i1> %tgt.mask, <192 x i32> poison)
7219 store <192 x i32> %data, ptr %out.vec, align 64
7220 ret void
7221 }
7223 define void @mask_replication_factor6_vf64(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
7224 ; AVX512F-ONLY-LABEL: mask_replication_factor6_vf64:
7225 ; AVX512F-ONLY: # %bb.0:
7226 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
7227 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
7228 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
7229 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm4, %zmm1
7230 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
7231 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
7232 ; AVX512F-ONLY-NEXT: movw $1, %ax
7233 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
7234 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
7235 ; AVX512F-ONLY-NEXT: kmovw 6(%rdi), %k1
7236 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm7, %zmm7, %zmm7 {%k1} {z}
7237 ; AVX512F-ONLY-NEXT: kmovw 4(%rdi), %k1
7238 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm8, %zmm8, %zmm8 {%k1} {z}
7239 ; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
7240 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm9, %zmm9, %zmm9 {%k1} {z}
7241 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
7242 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm10 = [13,13,13,13,14,14,14,14,14,14,15,15,15,15,15,15]
7243 ; AVX512F-ONLY-NEXT: vpermd %zmm7, %zmm10, %zmm1
7244 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm11 = [10,10,11,11,11,11,11,11,12,12,12,12,12,12,13,13]
7245 ; AVX512F-ONLY-NEXT: vpermd %zmm7, %zmm11, %zmm2
7246 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm12 = [8,8,8,8,8,8,9,9,9,9,9,9,10,10,10,10]
7247 ; AVX512F-ONLY-NEXT: vpermd %zmm7, %zmm12, %zmm3
7248 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm13 = [5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7]
7249 ; AVX512F-ONLY-NEXT: vpermd %zmm7, %zmm13, %zmm5
7250 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm14 = [2,2,3,3,3,3,3,3,4,4,4,4,4,4,5,5]
7251 ; AVX512F-ONLY-NEXT: vpermd %zmm7, %zmm14, %zmm6
7252 ; AVX512F-ONLY-NEXT: vpermd %zmm7, %zmm4, %zmm7
7253 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm10, %zmm15
7254 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm11, %zmm16
7255 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm12, %zmm17
7256 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm13, %zmm18
7257 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm14, %zmm19
7258 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm4, %zmm8
7259 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm10, %zmm20
7260 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm11, %zmm21
7261 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm12, %zmm22
7262 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm13, %zmm23
7263 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm4, %zmm24
7264 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm14, %zmm9
7265 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm10, %zmm10
7266 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm11, %zmm11
7267 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm12, %zmm12
7268 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm13, %zmm13
7269 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm14, %zmm4
7270 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
7271 ; AVX512F-ONLY-NEXT: vptestmd %zmm4, %zmm4, %k1
7272 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm4 {%k1} {z}
7273 ; AVX512F-ONLY-NEXT: vptestmd %zmm13, %zmm13, %k1
7274 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm13 {%k1} {z}
7275 ; AVX512F-ONLY-NEXT: vptestmd %zmm12, %zmm12, %k1
7276 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm12 {%k1} {z}
7277 ; AVX512F-ONLY-NEXT: vptestmd %zmm11, %zmm11, %k1
7278 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm11 {%k1} {z}
7279 ; AVX512F-ONLY-NEXT: vptestmd %zmm10, %zmm10, %k1
7280 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm10 {%k1} {z}
7281 ; AVX512F-ONLY-NEXT: vptestmd %zmm24, %zmm24, %k1
7282 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm14 {%k1} {z}
7283 ; AVX512F-ONLY-NEXT: vptestmd %zmm9, %zmm9, %k1
7284 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm9 {%k1} {z}
7285 ; AVX512F-ONLY-NEXT: vptestmd %zmm23, %zmm23, %k1
7286 ; AVX512F-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm23 {%k1} {z}
7287 ; AVX512F-ONLY-NEXT: vptestmd %zmm22, %zmm22, %k1
7288 ; AVX512F-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm22 {%k1} {z}
7289 ; AVX512F-ONLY-NEXT: vptestmd %zmm21, %zmm21, %k1
7290 ; AVX512F-ONLY-NEXT: vmovdqa32 640(%rsi), %zmm21 {%k1} {z}
7291 ; AVX512F-ONLY-NEXT: vptestmd %zmm20, %zmm20, %k1
7292 ; AVX512F-ONLY-NEXT: vmovdqa32 704(%rsi), %zmm20 {%k1} {z}
7293 ; AVX512F-ONLY-NEXT: vptestmd %zmm8, %zmm8, %k1
7294 ; AVX512F-ONLY-NEXT: vmovdqa32 768(%rsi), %zmm8 {%k1} {z}
7295 ; AVX512F-ONLY-NEXT: vptestmd %zmm19, %zmm19, %k1
7296 ; AVX512F-ONLY-NEXT: vmovdqa32 832(%rsi), %zmm19 {%k1} {z}
7297 ; AVX512F-ONLY-NEXT: vptestmd %zmm18, %zmm18, %k1
7298 ; AVX512F-ONLY-NEXT: vmovdqa32 896(%rsi), %zmm18 {%k1} {z}
7299 ; AVX512F-ONLY-NEXT: vptestmd %zmm17, %zmm17, %k1
7300 ; AVX512F-ONLY-NEXT: vmovdqa32 960(%rsi), %zmm17 {%k1} {z}
7301 ; AVX512F-ONLY-NEXT: vptestmd %zmm16, %zmm16, %k1
7302 ; AVX512F-ONLY-NEXT: vmovdqa32 1024(%rsi), %zmm16 {%k1} {z}
7303 ; AVX512F-ONLY-NEXT: vptestmd %zmm15, %zmm15, %k1
7304 ; AVX512F-ONLY-NEXT: vmovdqa32 1088(%rsi), %zmm15 {%k1} {z}
7305 ; AVX512F-ONLY-NEXT: vptestmd %zmm7, %zmm7, %k1
7306 ; AVX512F-ONLY-NEXT: vmovdqa32 1152(%rsi), %zmm7 {%k1} {z}
7307 ; AVX512F-ONLY-NEXT: vptestmd %zmm6, %zmm6, %k1
7308 ; AVX512F-ONLY-NEXT: vmovdqa32 1216(%rsi), %zmm6 {%k1} {z}
7309 ; AVX512F-ONLY-NEXT: vptestmd %zmm5, %zmm5, %k1
7310 ; AVX512F-ONLY-NEXT: vmovdqa32 1280(%rsi), %zmm5 {%k1} {z}
7311 ; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k1
7312 ; AVX512F-ONLY-NEXT: vmovdqa32 1344(%rsi), %zmm3 {%k1} {z}
7313 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
7314 ; AVX512F-ONLY-NEXT: vmovdqa32 1408(%rsi), %zmm2 {%k1} {z}
7315 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
7316 ; AVX512F-ONLY-NEXT: vmovdqa32 1472(%rsi), %zmm1 {%k1} {z}
7317 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 1472(%rdx)
7318 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 1408(%rdx)
7319 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 1344(%rdx)
7320 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 1280(%rdx)
7321 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 1216(%rdx)
7322 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 1152(%rdx)
7323 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm15, 1088(%rdx)
7324 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm16, 1024(%rdx)
7325 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm17, 960(%rdx)
7326 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm18, 896(%rdx)
7327 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm19, 832(%rdx)
7328 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm8, 768(%rdx)
7329 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm20, 704(%rdx)
7330 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm21, 640(%rdx)
7331 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm22, 576(%rdx)
7332 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm23, 512(%rdx)
7333 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm9, 448(%rdx)
7334 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm14, 384(%rdx)
7335 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm10, 320(%rdx)
7336 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm11, 256(%rdx)
7337 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm12, 192(%rdx)
7338 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm13, 128(%rdx)
7339 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 64(%rdx)
7340 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
7341 ; AVX512F-ONLY-NEXT: vzeroupper
7342 ; AVX512F-ONLY-NEXT: retq
7344 ; AVX512DQ-LABEL: mask_replication_factor6_vf64:
7345 ; AVX512DQ: # %bb.0:
7346 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
7347 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
7348 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,0,0,0,0,0,1,1,1,1,1,1,2,2,2,2]
7349 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm4, %zmm1
7350 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k0
7351 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm1
7352 ; AVX512DQ-NEXT: movw $1, %ax
7353 ; AVX512DQ-NEXT: kmovw %eax, %k1
7354 ; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
7355 ; AVX512DQ-NEXT: kmovw 6(%rdi), %k0
7356 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm7
7357 ; AVX512DQ-NEXT: kmovw 4(%rdi), %k0
7358 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm8
7359 ; AVX512DQ-NEXT: kmovw 2(%rdi), %k0
7360 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm9
7361 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
7362 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm10 = [13,13,13,13,14,14,14,14,14,14,15,15,15,15,15,15]
7363 ; AVX512DQ-NEXT: vpermd %zmm7, %zmm10, %zmm1
7364 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm11 = [10,10,11,11,11,11,11,11,12,12,12,12,12,12,13,13]
7365 ; AVX512DQ-NEXT: vpermd %zmm7, %zmm11, %zmm2
7366 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm12 = [8,8,8,8,8,8,9,9,9,9,9,9,10,10,10,10]
7367 ; AVX512DQ-NEXT: vpermd %zmm7, %zmm12, %zmm3
7368 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm13 = [5,5,5,5,6,6,6,6,6,6,7,7,7,7,7,7]
7369 ; AVX512DQ-NEXT: vpermd %zmm7, %zmm13, %zmm5
7370 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm14 = [2,2,3,3,3,3,3,3,4,4,4,4,4,4,5,5]
7371 ; AVX512DQ-NEXT: vpermd %zmm7, %zmm14, %zmm6
7372 ; AVX512DQ-NEXT: vpermd %zmm7, %zmm4, %zmm7
7373 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm10, %zmm15
7374 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm11, %zmm16
7375 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm12, %zmm17
7376 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm13, %zmm18
7377 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm14, %zmm19
7378 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm4, %zmm8
7379 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm10, %zmm20
7380 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm11, %zmm21
7381 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm12, %zmm22
7382 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm13, %zmm23
7383 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm4, %zmm24
7384 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm14, %zmm9
7385 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm10, %zmm10
7386 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm11, %zmm11
7387 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm12, %zmm12
7388 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm13, %zmm13
7389 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm14, %zmm4
7390 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
7391 ; AVX512DQ-NEXT: vpmovd2m %zmm4, %k1
7392 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm4 {%k1} {z}
7393 ; AVX512DQ-NEXT: vpmovd2m %zmm13, %k1
7394 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm13 {%k1} {z}
7395 ; AVX512DQ-NEXT: vpmovd2m %zmm12, %k1
7396 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm12 {%k1} {z}
7397 ; AVX512DQ-NEXT: vpmovd2m %zmm11, %k1
7398 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm11 {%k1} {z}
7399 ; AVX512DQ-NEXT: vpmovd2m %zmm10, %k1
7400 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm10 {%k1} {z}
7401 ; AVX512DQ-NEXT: vpmovd2m %zmm24, %k1
7402 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm14 {%k1} {z}
7403 ; AVX512DQ-NEXT: vpmovd2m %zmm9, %k1
7404 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm9 {%k1} {z}
7405 ; AVX512DQ-NEXT: vpmovd2m %zmm23, %k1
7406 ; AVX512DQ-NEXT: vmovdqa32 512(%rsi), %zmm23 {%k1} {z}
7407 ; AVX512DQ-NEXT: vpmovd2m %zmm22, %k1
7408 ; AVX512DQ-NEXT: vmovdqa32 576(%rsi), %zmm22 {%k1} {z}
7409 ; AVX512DQ-NEXT: vpmovd2m %zmm21, %k1
7410 ; AVX512DQ-NEXT: vmovdqa32 640(%rsi), %zmm21 {%k1} {z}
7411 ; AVX512DQ-NEXT: vpmovd2m %zmm20, %k1
7412 ; AVX512DQ-NEXT: vmovdqa32 704(%rsi), %zmm20 {%k1} {z}
7413 ; AVX512DQ-NEXT: vpmovd2m %zmm8, %k1
7414 ; AVX512DQ-NEXT: vmovdqa32 768(%rsi), %zmm8 {%k1} {z}
7415 ; AVX512DQ-NEXT: vpmovd2m %zmm19, %k1
7416 ; AVX512DQ-NEXT: vmovdqa32 832(%rsi), %zmm19 {%k1} {z}
7417 ; AVX512DQ-NEXT: vpmovd2m %zmm18, %k1
7418 ; AVX512DQ-NEXT: vmovdqa32 896(%rsi), %zmm18 {%k1} {z}
7419 ; AVX512DQ-NEXT: vpmovd2m %zmm17, %k1
7420 ; AVX512DQ-NEXT: vmovdqa32 960(%rsi), %zmm17 {%k1} {z}
7421 ; AVX512DQ-NEXT: vpmovd2m %zmm16, %k1
7422 ; AVX512DQ-NEXT: vmovdqa32 1024(%rsi), %zmm16 {%k1} {z}
7423 ; AVX512DQ-NEXT: vpmovd2m %zmm15, %k1
7424 ; AVX512DQ-NEXT: vmovdqa32 1088(%rsi), %zmm15 {%k1} {z}
7425 ; AVX512DQ-NEXT: vpmovd2m %zmm7, %k1
7426 ; AVX512DQ-NEXT: vmovdqa32 1152(%rsi), %zmm7 {%k1} {z}
7427 ; AVX512DQ-NEXT: vpmovd2m %zmm6, %k1
7428 ; AVX512DQ-NEXT: vmovdqa32 1216(%rsi), %zmm6 {%k1} {z}
7429 ; AVX512DQ-NEXT: vpmovd2m %zmm5, %k1
7430 ; AVX512DQ-NEXT: vmovdqa32 1280(%rsi), %zmm5 {%k1} {z}
7431 ; AVX512DQ-NEXT: vpmovd2m %zmm3, %k1
7432 ; AVX512DQ-NEXT: vmovdqa32 1344(%rsi), %zmm3 {%k1} {z}
7433 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
7434 ; AVX512DQ-NEXT: vmovdqa32 1408(%rsi), %zmm2 {%k1} {z}
7435 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
7436 ; AVX512DQ-NEXT: vmovdqa32 1472(%rsi), %zmm1 {%k1} {z}
7437 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 1472(%rdx)
7438 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 1408(%rdx)
7439 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 1344(%rdx)
7440 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 1280(%rdx)
7441 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 1216(%rdx)
7442 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 1152(%rdx)
7443 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 1088(%rdx)
7444 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 1024(%rdx)
7445 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 960(%rdx)
7446 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 896(%rdx)
7447 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, 832(%rdx)
7448 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 768(%rdx)
7449 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, 704(%rdx)
7450 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, 640(%rdx)
7451 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, 576(%rdx)
7452 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, 512(%rdx)
7453 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 448(%rdx)
7454 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 384(%rdx)
7455 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 320(%rdx)
7456 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 256(%rdx)
7457 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 192(%rdx)
7458 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 128(%rdx)
7459 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 64(%rdx)
7460 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
7461 ; AVX512DQ-NEXT: vzeroupper
7462 ; AVX512DQ-NEXT: retq
7464 ; AVX512BW-LABEL: mask_replication_factor6_vf64:
7465 ; AVX512BW: # %bb.0:
7466 ; AVX512BW-NEXT: kmovq (%rdi), %k5
7467 ; AVX512BW-NEXT: movw $-3, %ax
7468 ; AVX512BW-NEXT: kmovd %eax, %k1
7469 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7470 ; AVX512BW-NEXT: kmovw (%rdi), %k0
7471 ; AVX512BW-NEXT: kandw %k1, %k0, %k3
7472 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k1
7473 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k0
7474 ; AVX512BW-NEXT: korw %k0, %k3, %k0
7475 ; AVX512BW-NEXT: movw $-5, %ax
7476 ; AVX512BW-NEXT: kmovd %eax, %k2
7477 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7478 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7479 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k3
7480 ; AVX512BW-NEXT: korw %k3, %k0, %k0
7481 ; AVX512BW-NEXT: movw $-9, %ax
7482 ; AVX512BW-NEXT: kmovd %eax, %k2
7483 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7484 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7485 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k3
7486 ; AVX512BW-NEXT: korw %k3, %k0, %k0
7487 ; AVX512BW-NEXT: movw $-17, %ax
7488 ; AVX512BW-NEXT: kmovd %eax, %k2
7489 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7490 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7491 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k3
7492 ; AVX512BW-NEXT: korw %k3, %k0, %k0
7493 ; AVX512BW-NEXT: movw $-33, %ax
7494 ; AVX512BW-NEXT: kmovd %eax, %k2
7495 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7496 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7497 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k1
7498 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7499 ; AVX512BW-NEXT: movw $-65, %ax
7500 ; AVX512BW-NEXT: kmovd %eax, %k1
7501 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7502 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7503 ; AVX512BW-NEXT: kshiftrq $1, %k5, %k1
7504 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7505 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k3
7506 ; AVX512BW-NEXT: korw %k3, %k0, %k0
7507 ; AVX512BW-NEXT: movw $-129, %ax
7508 ; AVX512BW-NEXT: kmovd %eax, %k2
7509 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7510 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7511 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k3
7512 ; AVX512BW-NEXT: korw %k3, %k0, %k0
7513 ; AVX512BW-NEXT: movw $-257, %ax # imm = 0xFEFF
7514 ; AVX512BW-NEXT: kmovd %eax, %k2
7515 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7516 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7517 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k3
7518 ; AVX512BW-NEXT: korw %k3, %k0, %k0
7519 ; AVX512BW-NEXT: movw $-513, %ax # imm = 0xFDFF
7520 ; AVX512BW-NEXT: kmovd %eax, %k2
7521 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7522 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7523 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k3
7524 ; AVX512BW-NEXT: korw %k3, %k0, %k0
7525 ; AVX512BW-NEXT: movw $-1025, %ax # imm = 0xFBFF
7526 ; AVX512BW-NEXT: kmovd %eax, %k2
7527 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7528 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7529 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k3
7530 ; AVX512BW-NEXT: korw %k3, %k0, %k0
7531 ; AVX512BW-NEXT: movw $-2049, %ax # imm = 0xF7FF
7532 ; AVX512BW-NEXT: kmovd %eax, %k2
7533 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7534 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7535 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
7536 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7537 ; AVX512BW-NEXT: movw $-4097, %ax # imm = 0xEFFF
7538 ; AVX512BW-NEXT: kmovd %eax, %k1
7539 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7540 ; AVX512BW-NEXT: kandw %k1, %k0, %k3
7541 ; AVX512BW-NEXT: kshiftrq $2, %k5, %k1
7542 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k0
7543 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k4
7544 ; AVX512BW-NEXT: korw %k4, %k3, %k3
7545 ; AVX512BW-NEXT: movw $-8193, %ax # imm = 0xDFFF
7546 ; AVX512BW-NEXT: kmovd %eax, %k2
7547 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7548 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
7549 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
7550 ; AVX512BW-NEXT: korw %k7, %k3, %k7
7551 ; AVX512BW-NEXT: movw $-16385, %ax # imm = 0xBFFF
7552 ; AVX512BW-NEXT: kmovd %eax, %k2
7553 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
7554 ; AVX512BW-NEXT: kandw %k2, %k7, %k7
7555 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
7556 ; AVX512BW-NEXT: korw %k6, %k7, %k6
7557 ; AVX512BW-NEXT: kshiftlw $1, %k6, %k6
7558 ; AVX512BW-NEXT: kshiftrw $1, %k6, %k6
7559 ; AVX512BW-NEXT: korw %k0, %k6, %k6
7560 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k6} {z}
7561 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
7562 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
7563 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k0
7564 ; AVX512BW-NEXT: korw %k0, %k1, %k0
7565 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7566 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7567 ; AVX512BW-NEXT: kmovq %k5, %k3
7568 ; AVX512BW-NEXT: kshiftrq $3, %k5, %k1
7569 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7570 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
7571 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7572 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7573 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7574 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
7575 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7576 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7577 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7578 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
7579 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7580 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7581 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7582 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
7583 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7584 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7585 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7586 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
7587 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7588 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7589 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7590 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
7591 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7592 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7593 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7594 ; AVX512BW-NEXT: kshiftrq $4, %k3, %k1
7595 ; AVX512BW-NEXT: kmovq %k3, %k7
7596 ; AVX512BW-NEXT: kmovq %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
7597 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7598 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
7599 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7600 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
7601 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
7602 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
7603 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7604 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7605 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7606 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
7607 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7608 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7609 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7610 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
7611 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7612 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7613 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7614 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
7615 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7616 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7617 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7618 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
7619 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7620 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7621 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7622 ; AVX512BW-NEXT: kshiftrq $5, %k7, %k1
7623 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
7624 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7625 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
7626 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
7627 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
7628 ; AVX512BW-NEXT: korw %k6, %k0, %k7
7629 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k7} {z}
7630 ; AVX512BW-NEXT: kandw %k4, %k1, %k0
7631 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
7632 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7633 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
7634 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
7635 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
7636 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7637 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7638 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
7639 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7640 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7641 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7642 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
7643 ; AVX512BW-NEXT: kshiftrq $6, %k7, %k1
7644 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7645 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
7646 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7647 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7648 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7649 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
7650 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7651 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7652 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7653 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
7654 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7655 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7656 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7657 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
7658 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7659 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7660 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7661 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
7662 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7663 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
7664 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
7665 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7666 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7667 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
7668 ; AVX512BW-NEXT: kshiftrq $7, %k7, %k6
7669 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
7670 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k7
7671 ; AVX512BW-NEXT: korw %k7, %k1, %k1
7672 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
7673 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
7674 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k7
7675 ; AVX512BW-NEXT: korw %k7, %k1, %k1
7676 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
7677 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
7678 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
7679 ; AVX512BW-NEXT: korw %k7, %k1, %k1
7680 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
7681 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
7682 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
7683 ; AVX512BW-NEXT: korw %k7, %k1, %k1
7684 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
7685 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k6
7686 ; AVX512BW-NEXT: korw %k6, %k1, %k1
7687 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
7688 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
7689 ; AVX512BW-NEXT: korw %k0, %k1, %k1
7690 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k1} {z}
7691 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
7692 ; AVX512BW-NEXT: kshiftrq $8, %k7, %k0
7693 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7694 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
7695 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
7696 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
7697 ; AVX512BW-NEXT: korw %k6, %k1, %k1
7698 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
7699 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
7700 ; AVX512BW-NEXT: korw %k6, %k1, %k1
7701 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
7702 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
7703 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
7704 ; AVX512BW-NEXT: korw %k6, %k1, %k1
7705 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
7706 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
7707 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
7708 ; AVX512BW-NEXT: korw %k6, %k1, %k1
7709 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
7710 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k0
7711 ; AVX512BW-NEXT: korw %k0, %k1, %k0
7712 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7713 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7714 ; AVX512BW-NEXT: kshiftrq $9, %k7, %k1
7715 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7716 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
7717 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7718 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7719 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7720 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
7721 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7722 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7723 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7724 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
7725 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7726 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7727 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7728 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
7729 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7730 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
7731 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
7732 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
7733 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7734 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7735 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7736 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
7737 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7738 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7739 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7740 ; AVX512BW-NEXT: kshiftrq $10, %k7, %k1
7741 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
7742 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
7743 ; AVX512BW-NEXT: korw %k7, %k0, %k0
7744 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7745 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7746 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
7747 ; AVX512BW-NEXT: korw %k7, %k0, %k0
7748 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
7749 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
7750 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
7751 ; AVX512BW-NEXT: korw %k7, %k0, %k0
7752 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
7753 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
7754 ; AVX512BW-NEXT: korw %k6, %k0, %k7
7755 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k7} {z}
7756 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
7757 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
7758 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
7759 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7760 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7761 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7762 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
7763 ; AVX512BW-NEXT: kshiftrq $11, %k7, %k1
7764 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7765 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
7766 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7767 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7768 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7769 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
7770 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7771 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
7772 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
7773 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7774 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
7775 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
7776 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
7777 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7778 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7779 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
7780 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7781 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7782 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7783 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
7784 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7785 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
7786 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
7787 ; AVX512BW-NEXT: kshiftrq $12, %k7, %k1
7788 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7789 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
7790 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7791 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7792 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7793 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
7794 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7795 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
7796 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
7797 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7798 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
7799 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
7800 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
7801 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7802 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
7803 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
7804 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
7805 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7806 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7807 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
7808 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7809 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7810 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7811 ; AVX512BW-NEXT: kshiftrq $13, %k7, %k1
7812 ; AVX512BW-NEXT: kmovq %k7, %k2
7813 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
7814 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7815 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
7816 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
7817 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
7818 ; AVX512BW-NEXT: korw %k6, %k0, %k7
7819 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm4 {%k7} {z}
7820 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
7821 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
7822 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
7823 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7824 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7825 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7826 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
7827 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7828 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7829 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7830 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
7831 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7832 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7833 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7834 ; AVX512BW-NEXT: kmovq %k2, %k7
7835 ; AVX512BW-NEXT: kshiftrq $14, %k2, %k1
7836 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7837 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
7838 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7839 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7840 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7841 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
7842 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7843 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7844 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7845 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
7846 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7847 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7848 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
7849 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7850 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
7851 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
7852 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7853 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7854 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7855 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
7856 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7857 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7858 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
7859 ; AVX512BW-NEXT: kshiftrq $15, %k7, %k6
7860 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
7861 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k7
7862 ; AVX512BW-NEXT: korw %k7, %k1, %k1
7863 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
7864 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
7865 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k7
7866 ; AVX512BW-NEXT: korw %k7, %k1, %k1
7867 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
7868 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
7869 ; AVX512BW-NEXT: korw %k7, %k1, %k1
7870 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7871 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
7872 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
7873 ; AVX512BW-NEXT: korw %k7, %k1, %k1
7874 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7875 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
7876 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k6
7877 ; AVX512BW-NEXT: korw %k6, %k1, %k1
7878 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
7879 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
7880 ; AVX512BW-NEXT: korw %k0, %k1, %k1
7881 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm5 {%k1} {z}
7882 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 8-byte Reload
7883 ; AVX512BW-NEXT: kshiftrq $16, %k5, %k0
7884 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7885 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
7886 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
7887 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
7888 ; AVX512BW-NEXT: korw %k6, %k1, %k1
7889 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7890 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
7891 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
7892 ; AVX512BW-NEXT: korw %k6, %k1, %k1
7893 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7894 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
7895 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
7896 ; AVX512BW-NEXT: korw %k6, %k1, %k1
7897 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7898 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
7899 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
7900 ; AVX512BW-NEXT: korw %k6, %k1, %k1
7901 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7902 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
7903 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k0
7904 ; AVX512BW-NEXT: korw %k0, %k1, %k0
7905 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7906 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7907 ; AVX512BW-NEXT: kshiftrq $17, %k5, %k1
7908 ; AVX512BW-NEXT: kmovq %k5, %k7
7909 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7910 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
7911 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7912 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7913 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7914 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
7915 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7916 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7917 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7918 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
7919 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7920 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7921 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7922 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
7923 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7924 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7925 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
7926 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7927 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
7928 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
7929 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7930 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
7931 ; AVX512BW-NEXT: kmovq %k7, %k4
7932 ; AVX512BW-NEXT: kshiftrq $18, %k7, %k1
7933 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
7934 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
7935 ; AVX512BW-NEXT: korw %k7, %k0, %k0
7936 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
7937 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
7938 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
7939 ; AVX512BW-NEXT: korw %k7, %k0, %k0
7940 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
7941 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
7942 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
7943 ; AVX512BW-NEXT: korw %k7, %k0, %k0
7944 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
7945 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
7946 ; AVX512BW-NEXT: korw %k6, %k0, %k7
7947 ; AVX512BW-NEXT: vmovdqa32 384(%rsi), %zmm6 {%k7} {z}
7948 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
7949 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
7950 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
7951 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7952 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7953 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7954 ; AVX512BW-NEXT: kshiftrq $19, %k4, %k1
7955 ; AVX512BW-NEXT: kmovq %k4, %k7
7956 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7957 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
7958 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7959 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
7960 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
7961 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
7962 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7963 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
7964 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
7965 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
7966 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7967 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7968 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7969 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
7970 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7971 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7972 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7973 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
7974 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7975 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7976 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7977 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
7978 ; AVX512BW-NEXT: korw %k1, %k0, %k0
7979 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
7980 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
7981 ; AVX512BW-NEXT: kshiftrq $20, %k7, %k1
7982 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
7983 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
7984 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7985 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7986 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7987 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
7988 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7989 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7990 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7991 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
7992 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7993 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7994 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7995 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
7996 ; AVX512BW-NEXT: korw %k6, %k0, %k0
7997 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
7998 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
7999 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
8000 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8001 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8002 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
8003 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8004 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8005 ; AVX512BW-NEXT: kshiftrq $21, %k7, %k1
8006 ; AVX512BW-NEXT: kmovq %k7, %k3
8007 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
8008 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8009 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
8010 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
8011 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
8012 ; AVX512BW-NEXT: korw %k6, %k0, %k7
8013 ; AVX512BW-NEXT: vmovdqa32 448(%rsi), %zmm7 {%k7} {z}
8014 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8015 ; AVX512BW-NEXT: kandw %k2, %k1, %k0
8016 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
8017 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8018 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8019 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8020 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
8021 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8022 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8023 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
8024 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8025 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8026 ; AVX512BW-NEXT: kmovq %k3, %k5
8027 ; AVX512BW-NEXT: kshiftrq $22, %k3, %k1
8028 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8029 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
8030 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8031 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8032 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8033 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
8034 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8035 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8036 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8037 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8038 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8039 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8040 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8041 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
8042 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8043 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8044 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8045 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8046 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8047 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8048 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8049 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
8050 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8051 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8052 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
8053 ; AVX512BW-NEXT: kshiftrq $23, %k5, %k6
8054 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
8055 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k7
8056 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8057 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8058 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
8059 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k7
8060 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8061 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8062 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
8063 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
8064 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8065 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8066 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
8067 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
8068 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8069 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8070 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
8071 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k6
8072 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8073 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
8074 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
8075 ; AVX512BW-NEXT: korw %k0, %k1, %k1
8076 ; AVX512BW-NEXT: vmovdqa32 512(%rsi), %zmm8 {%k1} {z}
8077 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 8-byte Reload
8078 ; AVX512BW-NEXT: kshiftrq $24, %k5, %k0
8079 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
8080 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
8081 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
8082 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8083 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8084 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
8085 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
8086 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8087 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8088 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8089 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
8090 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8091 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8092 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8093 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
8094 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8095 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
8096 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k0
8097 ; AVX512BW-NEXT: korw %k0, %k1, %k0
8098 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8099 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8100 ; AVX512BW-NEXT: kshiftrq $25, %k5, %k1
8101 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8102 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8103 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8104 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8105 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8106 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
8107 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8108 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8109 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8110 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8111 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8112 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8113 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
8114 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8115 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8116 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8117 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
8118 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8119 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8120 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8121 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
8122 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8123 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8124 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8125 ; AVX512BW-NEXT: kshiftrq $26, %k5, %k1
8126 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
8127 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
8128 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8129 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8130 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8131 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
8132 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8133 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8134 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8135 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
8136 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8137 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
8138 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
8139 ; AVX512BW-NEXT: korw %k6, %k0, %k7
8140 ; AVX512BW-NEXT: vmovdqa32 576(%rsi), %zmm9 {%k7} {z}
8141 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
8142 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
8143 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
8144 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8145 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8146 ; AVX512BW-NEXT: kmovq %k5, %k7
8147 ; AVX512BW-NEXT: kshiftrq $27, %k5, %k1
8148 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8149 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
8150 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8151 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8152 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8153 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
8154 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8155 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8156 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8157 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
8158 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8159 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8160 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8161 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
8162 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8163 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8164 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8165 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8166 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8167 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8168 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8169 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
8170 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8171 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8172 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8173 ; AVX512BW-NEXT: kshiftrq $28, %k7, %k1
8174 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8175 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8176 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8177 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8178 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8179 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
8180 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8181 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8182 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8183 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
8184 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8185 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8186 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8187 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
8188 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8189 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8190 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8191 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
8192 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8193 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8194 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8195 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
8196 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8197 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8198 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8199 ; AVX512BW-NEXT: kshiftrq $29, %k7, %k1
8200 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
8201 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8202 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
8203 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
8204 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
8205 ; AVX512BW-NEXT: korw %k6, %k0, %k7
8206 ; AVX512BW-NEXT: vmovdqa32 640(%rsi), %zmm10 {%k7} {z}
8207 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
8208 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
8209 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
8210 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8211 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8212 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8213 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
8214 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8215 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8216 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8217 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
8218 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8219 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8220 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
8221 ; AVX512BW-NEXT: kshiftrq $30, %k7, %k1
8222 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8223 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
8224 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8225 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8226 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8227 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
8228 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8229 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8230 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8231 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8232 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8233 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
8234 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8235 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8236 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8237 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8238 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8239 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8240 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8241 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
8242 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8243 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
8244 ; AVX512BW-NEXT: kshiftrq $31, %k7, %k6
8245 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
8246 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k7
8247 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8248 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8249 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
8250 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k7
8251 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8252 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8253 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
8254 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
8255 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8256 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8257 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
8258 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
8259 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8260 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8261 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
8262 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k6
8263 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8264 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
8265 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
8266 ; AVX512BW-NEXT: korw %k0, %k1, %k1
8267 ; AVX512BW-NEXT: vmovdqa32 704(%rsi), %zmm11 {%k1} {z}
8268 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 8-byte Reload
8269 ; AVX512BW-NEXT: kshiftrq $32, %k2, %k0
8270 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8271 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
8272 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
8273 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
8274 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8275 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8276 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8277 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
8278 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8279 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8280 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8281 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
8282 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8283 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8284 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8285 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
8286 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8287 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8288 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8289 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k0
8290 ; AVX512BW-NEXT: korw %k0, %k1, %k0
8291 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8292 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8293 ; AVX512BW-NEXT: kshiftrq $33, %k2, %k1
8294 ; AVX512BW-NEXT: kmovq %k2, %k7
8295 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8296 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8297 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8298 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8299 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8300 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
8301 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8302 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8303 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8304 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8305 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8306 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8307 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
8308 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8309 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8310 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8311 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
8312 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8313 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8314 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
8315 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8316 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8317 ; AVX512BW-NEXT: kmovq %k7, %k5
8318 ; AVX512BW-NEXT: kshiftrq $34, %k7, %k1
8319 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
8320 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
8321 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8322 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8323 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8324 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
8325 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8326 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8327 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8328 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
8329 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8330 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
8331 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
8332 ; AVX512BW-NEXT: korw %k6, %k0, %k7
8333 ; AVX512BW-NEXT: vmovdqa32 768(%rsi), %zmm12 {%k7} {z}
8334 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
8335 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
8336 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
8337 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8338 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8339 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8340 ; AVX512BW-NEXT: kshiftrq $35, %k5, %k1
8341 ; AVX512BW-NEXT: kmovq %k5, %k7
8342 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8343 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
8344 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8345 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8346 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8347 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
8348 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8349 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8350 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8351 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
8352 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8353 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8354 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8355 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
8356 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8357 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8358 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8359 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8360 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8361 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8362 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8363 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
8364 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8365 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8366 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8367 ; AVX512BW-NEXT: kshiftrq $36, %k7, %k1
8368 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8369 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8370 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8371 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8372 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
8373 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8374 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8375 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8376 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
8377 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8378 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8379 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8380 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
8381 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8382 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8383 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8384 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
8385 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8386 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8387 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
8388 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8389 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8390 ; AVX512BW-NEXT: kshiftrq $37, %k7, %k1
8391 ; AVX512BW-NEXT: kmovq %k7, %k3
8392 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
8393 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8394 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
8395 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
8396 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
8397 ; AVX512BW-NEXT: korw %k6, %k0, %k7
8398 ; AVX512BW-NEXT: vmovdqa32 832(%rsi), %zmm13 {%k7} {z}
8399 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
8400 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
8401 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
8402 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8403 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8404 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8405 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
8406 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8407 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8408 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8409 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
8410 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8411 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8412 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8413 ; AVX512BW-NEXT: kmovq %k3, %k7
8414 ; AVX512BW-NEXT: kshiftrq $38, %k3, %k1
8415 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8416 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
8417 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8418 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8419 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8420 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
8421 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8422 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8423 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8424 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8425 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8426 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8427 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
8428 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8429 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8430 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8431 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8432 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8433 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8434 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8435 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
8436 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8437 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
8438 ; AVX512BW-NEXT: kshiftrq $39, %k7, %k6
8439 ; AVX512BW-NEXT: kmovq %k7, %k5
8440 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
8441 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k7
8442 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8443 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8444 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
8445 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k7
8446 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8447 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8448 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
8449 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
8450 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8451 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8452 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
8453 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
8454 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8455 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8456 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
8457 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k6
8458 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8459 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
8460 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
8461 ; AVX512BW-NEXT: korw %k0, %k1, %k1
8462 ; AVX512BW-NEXT: vmovdqa32 896(%rsi), %zmm14 {%k1} {z}
8463 ; AVX512BW-NEXT: kshiftrq $40, %k5, %k0
8464 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8465 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
8466 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
8467 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
8468 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8469 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8470 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
8471 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
8472 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8473 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
8474 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
8475 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8476 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8477 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8478 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
8479 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8480 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
8481 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k0
8482 ; AVX512BW-NEXT: korw %k0, %k1, %k0
8483 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8484 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8485 ; AVX512BW-NEXT: kshiftrq $41, %k5, %k1
8486 ; AVX512BW-NEXT: kmovq %k5, %k4
8487 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8488 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8489 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8490 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8491 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8492 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
8493 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8494 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8495 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8496 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8497 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8498 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8499 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8500 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
8501 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8502 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8503 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8504 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
8505 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8506 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8507 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8508 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
8509 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8510 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8511 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8512 ; AVX512BW-NEXT: kshiftrq $42, %k4, %k1
8513 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
8514 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
8515 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8516 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8517 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8518 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
8519 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8520 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8521 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8522 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
8523 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8524 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
8525 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
8526 ; AVX512BW-NEXT: korw %k6, %k0, %k7
8527 ; AVX512BW-NEXT: vmovdqa32 960(%rsi), %zmm15 {%k7} {z}
8528 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8529 ; AVX512BW-NEXT: kandw %k5, %k1, %k0
8530 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
8531 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8532 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8533 ; AVX512BW-NEXT: kmovq %k4, %k7
8534 ; AVX512BW-NEXT: kshiftrq $43, %k4, %k1
8535 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8536 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
8537 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8538 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8539 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8540 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
8541 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8542 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8543 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8544 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
8545 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8546 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8547 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8548 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
8549 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8550 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8551 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8552 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8553 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8554 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8555 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8556 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
8557 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8558 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8559 ; AVX512BW-NEXT: kshiftrq $44, %k7, %k1
8560 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8561 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8562 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8563 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8564 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8565 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
8566 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8567 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8568 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8569 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
8570 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8571 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8572 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8573 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
8574 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8575 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8576 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8577 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
8578 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8579 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8580 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8581 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
8582 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8583 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8584 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8585 ; AVX512BW-NEXT: kshiftrq $45, %k7, %k1
8586 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
8587 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8588 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
8589 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
8590 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
8591 ; AVX512BW-NEXT: korw %k6, %k0, %k7
8592 ; AVX512BW-NEXT: vmovdqa32 1024(%rsi), %zmm16 {%k7} {z}
8593 ; AVX512BW-NEXT: kandw %k5, %k1, %k0
8594 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
8595 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8596 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8597 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8598 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
8599 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8600 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8601 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8602 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
8603 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8604 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8605 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 8-byte Reload
8606 ; AVX512BW-NEXT: kshiftrq $46, %k5, %k1
8607 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8608 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
8609 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8610 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8611 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
8612 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8613 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8614 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8615 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8616 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8617 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8618 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8619 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
8620 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8621 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8622 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8623 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8624 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8625 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8626 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
8627 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8628 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8629 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
8630 ; AVX512BW-NEXT: kshiftrq $47, %k5, %k6
8631 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
8632 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k7
8633 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8634 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8635 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
8636 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k7
8637 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8638 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8639 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
8640 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
8641 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8642 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8643 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
8644 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
8645 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8646 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8647 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
8648 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k6
8649 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8650 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
8651 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
8652 ; AVX512BW-NEXT: korw %k0, %k1, %k1
8653 ; AVX512BW-NEXT: vmovdqa32 1088(%rsi), %zmm17 {%k1} {z}
8654 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 8-byte Reload
8655 ; AVX512BW-NEXT: kshiftrq $48, %k5, %k0
8656 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8657 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
8658 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
8659 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
8660 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8661 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8662 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8663 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
8664 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8665 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8666 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8667 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
8668 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8669 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8670 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8671 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
8672 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8673 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8674 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8675 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k0
8676 ; AVX512BW-NEXT: korw %k0, %k1, %k0
8677 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8678 ; AVX512BW-NEXT: kshiftrq $49, %k5, %k1
8679 ; AVX512BW-NEXT: kmovq %k5, %k7
8680 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8681 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8682 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8683 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8684 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8685 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
8686 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8687 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8688 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8689 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8690 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8691 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8692 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8693 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
8694 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8695 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8696 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8697 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
8698 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8699 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8700 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
8701 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8702 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8703 ; AVX512BW-NEXT: kmovq %k7, %k5
8704 ; AVX512BW-NEXT: kshiftrq $50, %k7, %k1
8705 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
8706 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
8707 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8708 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8709 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8710 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
8711 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8712 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8713 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8714 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
8715 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8716 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
8717 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
8718 ; AVX512BW-NEXT: korw %k6, %k0, %k7
8719 ; AVX512BW-NEXT: vmovdqa32 1152(%rsi), %zmm18 {%k7} {z}
8720 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
8721 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
8722 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
8723 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8724 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8725 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8726 ; AVX512BW-NEXT: kshiftrq $51, %k5, %k1
8727 ; AVX512BW-NEXT: kmovq %k5, %k7
8728 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8729 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
8730 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8731 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8732 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8733 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
8734 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8735 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8736 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8737 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
8738 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8739 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8740 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8741 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
8742 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8743 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8744 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8745 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8746 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8747 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8748 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
8749 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8750 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8751 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8752 ; AVX512BW-NEXT: kshiftrq $52, %k7, %k1
8753 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8754 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8755 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8756 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8757 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8758 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
8759 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8760 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8761 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8762 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
8763 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8764 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8765 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8766 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
8767 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8768 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8769 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8770 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
8771 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8772 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8773 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
8774 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8775 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8776 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8777 ; AVX512BW-NEXT: kshiftrq $53, %k7, %k1
8778 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
8779 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8780 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
8781 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
8782 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
8783 ; AVX512BW-NEXT: korw %k6, %k0, %k7
8784 ; AVX512BW-NEXT: vmovdqa32 1216(%rsi), %zmm19 {%k7} {z}
8785 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
8786 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
8787 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
8788 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8789 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8790 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8791 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
8792 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8793 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8794 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
8795 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8796 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8797 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8798 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
8799 ; AVX512BW-NEXT: kshiftrq $54, %k7, %k1
8800 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8801 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
8802 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8803 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8804 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8805 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
8806 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8807 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8808 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8809 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8810 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8811 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8812 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
8813 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
8814 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8815 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8816 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8817 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8818 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8819 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8820 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
8821 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8822 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8823 ; AVX512BW-NEXT: kandw %k3, %k0, %k1
8824 ; AVX512BW-NEXT: kshiftrq $55, %k7, %k6
8825 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
8826 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k7
8827 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8828 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8829 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
8830 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k7
8831 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8832 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8833 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
8834 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
8835 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8836 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
8837 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
8838 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
8839 ; AVX512BW-NEXT: korw %k7, %k1, %k1
8840 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
8841 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k6
8842 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8843 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
8844 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
8845 ; AVX512BW-NEXT: korw %k0, %k1, %k1
8846 ; AVX512BW-NEXT: vmovdqa32 1280(%rsi), %zmm20 {%k1} {z}
8847 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 8-byte Reload
8848 ; AVX512BW-NEXT: kshiftrq $56, %k5, %k0
8849 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8850 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
8851 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
8852 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
8853 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8854 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8855 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8856 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
8857 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8858 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8859 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
8860 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
8861 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8862 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
8863 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
8864 ; AVX512BW-NEXT: korw %k6, %k1, %k1
8865 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8866 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
8867 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k0
8868 ; AVX512BW-NEXT: korw %k0, %k1, %k0
8869 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8870 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8871 ; AVX512BW-NEXT: kshiftrq $57, %k5, %k1
8872 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8873 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8874 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8875 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8876 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8877 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
8878 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8879 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8880 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8881 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8882 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8883 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8884 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8885 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
8886 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8887 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8888 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
8889 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8890 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8891 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8892 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
8893 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8894 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8895 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8896 ; AVX512BW-NEXT: kshiftrq $58, %k5, %k1
8897 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
8898 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
8899 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8900 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
8901 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
8902 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k7
8903 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8904 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
8905 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
8906 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
8907 ; AVX512BW-NEXT: korw %k7, %k0, %k0
8908 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
8909 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
8910 ; AVX512BW-NEXT: korw %k6, %k0, %k7
8911 ; AVX512BW-NEXT: vmovdqa32 1344(%rsi), %zmm21 {%k7} {z}
8912 ; AVX512BW-NEXT: kandw %k2, %k1, %k0
8913 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
8914 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8915 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8916 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8917 ; AVX512BW-NEXT: kshiftrq $59, %k5, %k1
8918 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8919 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k6
8920 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8921 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8922 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8923 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k6
8924 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8925 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8926 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8927 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
8928 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8929 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8930 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8931 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
8932 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8933 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8934 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8935 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8936 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8937 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
8938 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
8939 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
8940 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8941 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8942 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8943 ; AVX512BW-NEXT: kshiftrq $60, %k5, %k1
8944 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8945 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
8946 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8947 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8948 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
8949 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8950 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8951 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8952 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
8953 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8954 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
8955 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
8956 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
8957 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8958 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8959 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
8960 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8961 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
8962 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
8963 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k1
8964 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8965 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8966 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8967 ; AVX512BW-NEXT: kshiftrq $61, %k5, %k1
8968 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
8969 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8970 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
8971 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
8972 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
8973 ; AVX512BW-NEXT: korw %k6, %k0, %k7
8974 ; AVX512BW-NEXT: vmovdqa32 1408(%rsi), %zmm22 {%k7} {z}
8975 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
8976 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
8977 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
8978 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8979 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8980 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8981 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
8982 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8983 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8984 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
8985 ; AVX512BW-NEXT: korw %k1, %k0, %k0
8986 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
8987 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
8988 ; AVX512BW-NEXT: kshiftrq $62, %k5, %k1
8989 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
8990 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k6
8991 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8992 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8993 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8994 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k6
8995 ; AVX512BW-NEXT: korw %k6, %k0, %k0
8996 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
8997 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
8998 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k6
8999 ; AVX512BW-NEXT: korw %k6, %k0, %k0
9000 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9001 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
9002 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k6
9003 ; AVX512BW-NEXT: korw %k6, %k0, %k0
9004 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9005 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
9006 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k6
9007 ; AVX512BW-NEXT: korw %k6, %k0, %k0
9008 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9009 ; AVX512BW-NEXT: kandw %k2, %k0, %k6
9010 ; AVX512BW-NEXT: kshiftrq $63, %k5, %k0
9011 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k1
9012 ; AVX512BW-NEXT: korw %k1, %k6, %k1
9013 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9014 ; AVX512BW-NEXT: kandw %k2, %k1, %k2
9015 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k1
9016 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
9017 ; AVX512BW-NEXT: korw %k6, %k2, %k2
9018 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
9019 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
9020 ; AVX512BW-NEXT: korw %k6, %k2, %k2
9021 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
9022 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
9023 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k5
9024 ; AVX512BW-NEXT: korw %k5, %k2, %k2
9025 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
9026 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k4
9027 ; AVX512BW-NEXT: korw %k4, %k2, %k2
9028 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
9029 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
9030 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k0
9031 ; AVX512BW-NEXT: korw %k0, %k2, %k0
9032 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
9033 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
9034 ; AVX512BW-NEXT: korw %k1, %k0, %k1
9035 ; AVX512BW-NEXT: vmovdqa32 1472(%rsi), %zmm23 {%k1} {z}
9036 ; AVX512BW-NEXT: vmovdqa64 %zmm23, 1472(%rdx)
9037 ; AVX512BW-NEXT: vmovdqa64 %zmm22, 1408(%rdx)
9038 ; AVX512BW-NEXT: vmovdqa64 %zmm21, 1344(%rdx)
9039 ; AVX512BW-NEXT: vmovdqa64 %zmm20, 1280(%rdx)
9040 ; AVX512BW-NEXT: vmovdqa64 %zmm19, 1216(%rdx)
9041 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 1152(%rdx)
9042 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 1088(%rdx)
9043 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 1024(%rdx)
9044 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 960(%rdx)
9045 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 896(%rdx)
9046 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 832(%rdx)
9047 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 768(%rdx)
9048 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 704(%rdx)
9049 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 640(%rdx)
9050 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 576(%rdx)
9051 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 512(%rdx)
9052 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 448(%rdx)
9053 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 384(%rdx)
9054 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 320(%rdx)
9055 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 256(%rdx)
9056 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 192(%rdx)
9057 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 128(%rdx)
9058 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%rdx)
9059 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
9060 ; AVX512BW-NEXT: vzeroupper
9061 ; AVX512BW-NEXT: retq
9062 %src.mask = load <64 x i1>, ptr %in.maskvec, align 64
9063 %tgt.mask = shufflevector <64 x i1> %src.mask, <64 x i1> poison, <384 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 33, i32 33, i32 33, i32 33, i32 33, i32 33, i32 34, i32 34, i32 34, i32 34, i32 34, i32 34, i32 35, i32 35, i32 35, i32 35, i32 35, i32 35, i32 36, i32 36, i32 36, i32 36, i32 36, i32 36, i32 37, i32 37, i32 37, i32 37, i32 37, i32 37, i32 38, i32 38, i32 38, i32 38, i32 38, i32 38, i32 39, i32 39, i32 39, i32 39, i32 39, i32 39, i32 40, i32 40, i32 40, i32 40, i32 40, i32 40, i32 41, i32 41, i32 41, i32 41, i32 41, i32 41, i32 42, i32 42, i32 42, i32 42, i32 42, i32 42, i32 43, i32 43, i32 43, i32 43, i32 43, i32 43, i32 44, i32 44, i32 44, i32 44, i32 44, i32 44, i32 45, i32 45, i32 45, i32 45, i32 45, i32 45, i32 46, i32 46, i32 46, i32 46, i32 46, i32 46, i32 47, i32 47, i32 47, i32 47, i32 47, i32 47, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 49, i32 49, i32 49, i32 49, i32 49, i32 49, i32 50, i32 50, i32 50, i32 50, i32 50, i32 50, i32 51, i32 51, i32 51, i32 51, i32 51, i32 51, i32 52, i32 52, i32 52, i32 52, i32 52, i32 52, i32 53, i32 53, i32 53, i32 53, i32 53, i32 53, i32 54, i32 54, i32 54, i32 54, i32 54, i32 54, i32 55, i32 55, i32 55, i32 55, i32 55, i32 55, i32 56, i32 56, i32 56, i32 56, i32 56, i32 56, i32 57, i32 57, i32 57, i32 57, i32 57, i32 57, i32 58, i32 58, i32 58, i32 58, i32 58, i32 58, i32 59, i32 59, i32 59, i32 59, i32 59, i32 59, i32 60, i32 60, i32 60, i32 60, i32 60, i32 60, i32 61, i32 61, i32 61, i32 61, i32 61, i32 61, i32 62, i32 62, i32 62, i32 62, i32 62, i32 62, i32 63, i32 63, i32 63, i32 63, i32 63, i32 63>
9064 %data = call <384 x i32> @llvm.masked.load.v384i32.p0(ptr %in.vec, i32 64, <384 x i1> %tgt.mask, <384 x i32> poison)
9065 store <384 x i32> %data, ptr %out.vec, align 64
9066 ret void
9067 }
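; Replication factor 7, vf2: the low 2 bits of the <64 x i1> mask are each repeated 7 times to form the <14 x i1> mask for the masked load.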
9069 define void @mask_replication_factor7_vf2(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
9070 ; AVX512F-ONLY-LABEL: mask_replication_factor7_vf2:
9071 ; AVX512F-ONLY: # %bb.0:
9072 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
9073 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
9074 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,0,0,1,1,1,1,1,1,1,u,u>
9075 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
9076 ; AVX512F-ONLY-NEXT: vpslld $31, %zmm0, %zmm0
9077 ; AVX512F-ONLY-NEXT: movw $16383, %ax # imm = 0x3FFF
9078 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
9079 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1 {%k1}
9080 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
9081 ; AVX512F-ONLY-NEXT: vextracti32x4 $2, %zmm0, 32(%rdx)
9082 ; AVX512F-ONLY-NEXT: vextracti32x4 $3, %zmm0, %xmm1
9083 ; AVX512F-ONLY-NEXT: vmovq %xmm1, 48(%rdx)
9084 ; AVX512F-ONLY-NEXT: vmovdqa %ymm0, (%rdx)
9085 ; AVX512F-ONLY-NEXT: vzeroupper
9086 ; AVX512F-ONLY-NEXT: retq
9088 ; AVX512DQ-LABEL: mask_replication_factor7_vf2:
9089 ; AVX512DQ: # %bb.0:
9090 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
9091 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
9092 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,0,0,1,1,1,1,1,1,1,u,u>
9093 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
9094 ; AVX512DQ-NEXT: vpxor %xmm1, %xmm1, %xmm1
9095 ; AVX512DQ-NEXT: movw $16383, %ax # imm = 0x3FFF
9096 ; AVX512DQ-NEXT: kmovw %eax, %k1
9097 ; AVX512DQ-NEXT: vpcmpgtd %zmm0, %zmm1, %k1 {%k1}
9098 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
9099 ; AVX512DQ-NEXT: vextracti32x4 $2, %zmm0, 32(%rdx)
9100 ; AVX512DQ-NEXT: vextracti32x4 $3, %zmm0, %xmm1
9101 ; AVX512DQ-NEXT: vmovq %xmm1, 48(%rdx)
9102 ; AVX512DQ-NEXT: vmovdqa %ymm0, (%rdx)
9103 ; AVX512DQ-NEXT: vzeroupper
9104 ; AVX512DQ-NEXT: retq
9106 ; AVX512BW-LABEL: mask_replication_factor7_vf2:
9107 ; AVX512BW: # %bb.0:
9108 ; AVX512BW-NEXT: kmovw (%rdi), %k1
9109 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
9110 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,0,0,1,1,1,1,1,1,1,u,u>
9111 ; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm0
9112 ; AVX512BW-NEXT: vpslld $31, %zmm0, %zmm0
9113 ; AVX512BW-NEXT: movw $16383, %ax # imm = 0x3FFF
9114 ; AVX512BW-NEXT: kmovd %eax, %k1
9115 ; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1 {%k1}
9116 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
9117 ; AVX512BW-NEXT: vextracti32x4 $2, %zmm0, 32(%rdx)
9118 ; AVX512BW-NEXT: vmovdqa %ymm0, (%rdx)
9119 ; AVX512BW-NEXT: vextracti32x4 $3, %zmm0, %xmm0
9120 ; AVX512BW-NEXT: vmovq %xmm0, 48(%rdx)
9121 ; AVX512BW-NEXT: vzeroupper
9122 ; AVX512BW-NEXT: retq
9123 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
9124 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <2 x i32> <i32 0, i32 1>
9125 %tgt.mask = shufflevector <2 x i1> %src.mask, <2 x i1> poison, <14 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1>
9126 %data = call <14 x i32> @llvm.masked.load.v14i32.p0(ptr %in.vec, i32 64, <14 x i1> %tgt.mask, <14 x i32> poison)
9127 %data.padded = shufflevector <14 x i32> %data, <14 x i32> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 undef, i32 undef>
9128 store <14 x i32> %data, ptr %out.vec, align 64
9129 ret void
9130 }
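; Replication factor 7, vf4: the low 4 mask bits are each repeated 7 times to form a <28 x i1> load mask.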
9132 define void @mask_replication_factor7_vf4(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
9133 ; AVX512F-ONLY-LABEL: mask_replication_factor7_vf4:
9134 ; AVX512F-ONLY: # %bb.0:
9135 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
9136 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
9137 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = <2,2,2,2,2,3,3,3,3,3,3,3,u,u,u,u>
9138 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
9139 ; AVX512F-ONLY-NEXT: vpslld $31, %zmm1, %zmm1
9140 ; AVX512F-ONLY-NEXT: movw $4095, %ax # imm = 0xFFF
9141 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
9142 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1 {%k1}
9143 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
9144 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
9145 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k2
9146 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
9147 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
9148 ; AVX512F-ONLY-NEXT: vextracti32x4 $2, %zmm0, 96(%rdx)
9149 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
9150 ; AVX512F-ONLY-NEXT: vmovdqa %ymm0, 64(%rdx)
9151 ; AVX512F-ONLY-NEXT: vzeroupper
9152 ; AVX512F-ONLY-NEXT: retq
9154 ; AVX512DQ-LABEL: mask_replication_factor7_vf4:
9155 ; AVX512DQ: # %bb.0:
9156 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
9157 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
9158 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = <2,2,2,2,2,3,3,3,3,3,3,3,u,u,u,u>
9159 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
9160 ; AVX512DQ-NEXT: vpxor %xmm2, %xmm2, %xmm2
9161 ; AVX512DQ-NEXT: movw $4095, %ax # imm = 0xFFF
9162 ; AVX512DQ-NEXT: kmovw %eax, %k1
9163 ; AVX512DQ-NEXT: vpcmpgtd %zmm1, %zmm2, %k1 {%k1}
9164 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
9165 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
9166 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k2
9167 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
9168 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
9169 ; AVX512DQ-NEXT: vextracti32x4 $2, %zmm0, 96(%rdx)
9170 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, (%rdx)
9171 ; AVX512DQ-NEXT: vmovdqa %ymm0, 64(%rdx)
9172 ; AVX512DQ-NEXT: vzeroupper
9173 ; AVX512DQ-NEXT: retq
9175 ; AVX512BW-LABEL: mask_replication_factor7_vf4:
9176 ; AVX512BW: # %bb.0:
9177 ; AVX512BW-NEXT: kmovd (%rdi), %k0
9178 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0
9179 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2,2,2,2,2,2,3,3,3,3,3,3,3,u,u,u,u>
9180 ; AVX512BW-NEXT: vpermw %zmm0, %zmm1, %zmm0
9181 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1
9182 ; AVX512BW-NEXT: movl $268435455, %eax # imm = 0xFFFFFFF
9183 ; AVX512BW-NEXT: kmovd %eax, %k1
9184 ; AVX512BW-NEXT: vpcmpgtw %zmm0, %zmm1, %k1 {%k1}
9185 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
9186 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k1
9187 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
9188 ; AVX512BW-NEXT: vextracti32x4 $2, %zmm1, 96(%rdx)
9189 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
9190 ; AVX512BW-NEXT: vmovdqa %ymm1, 64(%rdx)
9191 ; AVX512BW-NEXT: vzeroupper
9192 ; AVX512BW-NEXT: retq
9193 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
9194 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
9195 %tgt.mask = shufflevector <4 x i1> %src.mask, <4 x i1> poison, <28 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
9196 %data = call <28 x i32> @llvm.masked.load.v28i32.p0(ptr %in.vec, i32 64, <28 x i1> %tgt.mask, <28 x i32> poison)
9197 %data.padded = shufflevector <28 x i32> %data, <28 x i32> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 undef, i32 undef, i32 undef, i32 undef>
9198 store <28 x i32> %data, ptr %out.vec, align 64
9199 ret void
9200 }
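; Replication factor 7, vf8: the low 8 mask bits are each repeated 7 times to form a <56 x i1> load mask.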
9202 define void @mask_replication_factor7_vf8(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
9203 ; AVX512F-SLOW-LABEL: mask_replication_factor7_vf8:
9204 ; AVX512F-SLOW: # %bb.0:
9205 ; AVX512F-SLOW-NEXT: kmovw (%rdi), %k1
9206 ; AVX512F-SLOW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
9207 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
9208 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm1, %zmm1
9209 ; AVX512F-SLOW-NEXT: vptestmd %zmm1, %zmm1, %k2
9210 ; AVX512F-SLOW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k2} {z}
9211 ; AVX512F-SLOW-NEXT: movw $1, %ax
9212 ; AVX512F-SLOW-NEXT: kmovw %eax, %k2
9213 ; AVX512F-SLOW-NEXT: vmovdqa32 %zmm0, %zmm1 {%k2}
9214 ; AVX512F-SLOW-NEXT: vptestmd %zmm1, %zmm1, %k2
9215 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4]
9216 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm1, %zmm1
9217 ; AVX512F-SLOW-NEXT: vptestmd %zmm1, %zmm1, %k3
9218 ; AVX512F-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6]
9219 ; AVX512F-SLOW-NEXT: vpermd %zmm0, %zmm1, %zmm0
9220 ; AVX512F-SLOW-NEXT: vptestmd %zmm0, %zmm0, %k4
9221 ; AVX512F-SLOW-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
9222 ; AVX512F-SLOW-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
9223 ; AVX512F-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,3,3,3,6,7,7,7]
9224 ; AVX512F-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,3,3]
9225 ; AVX512F-SLOW-NEXT: vptestmd %ymm0, %ymm0, %k1
9226 ; AVX512F-SLOW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
9227 ; AVX512F-SLOW-NEXT: vmovdqa32 192(%rsi), %zmm1 {%k1} {z}
9228 ; AVX512F-SLOW-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k4} {z}
9229 ; AVX512F-SLOW-NEXT: vmovdqa32 64(%rsi), %zmm3 {%k3} {z}
9230 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm3, 64(%rdx)
9231 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm2, 128(%rdx)
9232 ; AVX512F-SLOW-NEXT: vmovdqa64 %zmm0, (%rdx)
9233 ; AVX512F-SLOW-NEXT: vmovdqa %ymm1, 192(%rdx)
9234 ; AVX512F-SLOW-NEXT: vzeroupper
9235 ; AVX512F-SLOW-NEXT: retq
9237 ; AVX512F-FAST-LABEL: mask_replication_factor7_vf8:
9238 ; AVX512F-FAST: # %bb.0:
9239 ; AVX512F-FAST-NEXT: kmovw (%rdi), %k1
9240 ; AVX512F-FAST-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
9241 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
9242 ; AVX512F-FAST-NEXT: vpermd %zmm0, %zmm1, %zmm1
9243 ; AVX512F-FAST-NEXT: vptestmd %zmm1, %zmm1, %k2
9244 ; AVX512F-FAST-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k2} {z}
9245 ; AVX512F-FAST-NEXT: movw $1, %ax
9246 ; AVX512F-FAST-NEXT: kmovw %eax, %k2
9247 ; AVX512F-FAST-NEXT: vmovdqa32 %zmm0, %zmm1 {%k2}
9248 ; AVX512F-FAST-NEXT: vptestmd %zmm1, %zmm1, %k2
9249 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4]
9250 ; AVX512F-FAST-NEXT: vpermd %zmm0, %zmm1, %zmm1
9251 ; AVX512F-FAST-NEXT: vptestmd %zmm1, %zmm1, %k3
9252 ; AVX512F-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6]
9253 ; AVX512F-FAST-NEXT: vpermd %zmm0, %zmm1, %zmm0
9254 ; AVX512F-FAST-NEXT: vptestmd %zmm0, %zmm0, %k4
9255 ; AVX512F-FAST-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
9256 ; AVX512F-FAST-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
9257 ; AVX512F-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [6,7,7,7,7,7,7,7]
9258 ; AVX512F-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
9259 ; AVX512F-FAST-NEXT: vptestmd %ymm0, %ymm0, %k1
9260 ; AVX512F-FAST-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
9261 ; AVX512F-FAST-NEXT: vmovdqa32 192(%rsi), %zmm1 {%k1} {z}
9262 ; AVX512F-FAST-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k4} {z}
9263 ; AVX512F-FAST-NEXT: vmovdqa32 64(%rsi), %zmm3 {%k3} {z}
9264 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm3, 64(%rdx)
9265 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm2, 128(%rdx)
9266 ; AVX512F-FAST-NEXT: vmovdqa %ymm1, 192(%rdx)
9267 ; AVX512F-FAST-NEXT: vmovdqa64 %zmm0, (%rdx)
9268 ; AVX512F-FAST-NEXT: vzeroupper
9269 ; AVX512F-FAST-NEXT: retq
9271 ; AVX512DQ-SLOW-LABEL: mask_replication_factor7_vf8:
9272 ; AVX512DQ-SLOW: # %bb.0:
9273 ; AVX512DQ-SLOW-NEXT: kmovb (%rdi), %k0
9274 ; AVX512DQ-SLOW-NEXT: vpmovm2d %k0, %zmm0
9275 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
9276 ; AVX512DQ-SLOW-NEXT: vpermd %zmm0, %zmm1, %zmm1
9277 ; AVX512DQ-SLOW-NEXT: vpmovd2m %zmm1, %k1
9278 ; AVX512DQ-SLOW-NEXT: vpmovm2d %k1, %zmm1
9279 ; AVX512DQ-SLOW-NEXT: movw $1, %ax
9280 ; AVX512DQ-SLOW-NEXT: kmovw %eax, %k1
9281 ; AVX512DQ-SLOW-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
9282 ; AVX512DQ-SLOW-NEXT: vpmovd2m %zmm1, %k1
9283 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4]
9284 ; AVX512DQ-SLOW-NEXT: vpermd %zmm0, %zmm1, %zmm1
9285 ; AVX512DQ-SLOW-NEXT: vpmovd2m %zmm1, %k2
9286 ; AVX512DQ-SLOW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6]
9287 ; AVX512DQ-SLOW-NEXT: vpermd %zmm0, %zmm1, %zmm0
9288 ; AVX512DQ-SLOW-NEXT: vpmovd2m %zmm0, %k3
9289 ; AVX512DQ-SLOW-NEXT: vpmovm2d %k0, %ymm0
9290 ; AVX512DQ-SLOW-NEXT: vpshufd {{.*#+}} ymm0 = ymm0[2,3,3,3,6,7,7,7]
9291 ; AVX512DQ-SLOW-NEXT: vpermq {{.*#+}} ymm0 = ymm0[2,3,3,3]
9292 ; AVX512DQ-SLOW-NEXT: vpmovd2m %ymm0, %k4
9293 ; AVX512DQ-SLOW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
9294 ; AVX512DQ-SLOW-NEXT: vmovdqa32 192(%rsi), %zmm1 {%k4} {z}
9295 ; AVX512DQ-SLOW-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k3} {z}
9296 ; AVX512DQ-SLOW-NEXT: vmovdqa32 64(%rsi), %zmm3 {%k2} {z}
9297 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm3, 64(%rdx)
9298 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm2, 128(%rdx)
9299 ; AVX512DQ-SLOW-NEXT: vmovdqa %ymm1, 192(%rdx)
9300 ; AVX512DQ-SLOW-NEXT: vmovdqa64 %zmm0, (%rdx)
9301 ; AVX512DQ-SLOW-NEXT: vzeroupper
9302 ; AVX512DQ-SLOW-NEXT: retq
9304 ; AVX512DQ-FAST-LABEL: mask_replication_factor7_vf8:
9305 ; AVX512DQ-FAST: # %bb.0:
9306 ; AVX512DQ-FAST-NEXT: kmovb (%rdi), %k0
9307 ; AVX512DQ-FAST-NEXT: vpmovm2d %k0, %zmm0
9308 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
9309 ; AVX512DQ-FAST-NEXT: vpermd %zmm0, %zmm1, %zmm1
9310 ; AVX512DQ-FAST-NEXT: vpmovd2m %zmm1, %k1
9311 ; AVX512DQ-FAST-NEXT: vpmovm2d %k1, %zmm1
9312 ; AVX512DQ-FAST-NEXT: movw $1, %ax
9313 ; AVX512DQ-FAST-NEXT: kmovw %eax, %k1
9314 ; AVX512DQ-FAST-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
9315 ; AVX512DQ-FAST-NEXT: vpmovd2m %zmm1, %k1
9316 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4]
9317 ; AVX512DQ-FAST-NEXT: vpermd %zmm0, %zmm1, %zmm1
9318 ; AVX512DQ-FAST-NEXT: vpmovd2m %zmm1, %k2
9319 ; AVX512DQ-FAST-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6]
9320 ; AVX512DQ-FAST-NEXT: vpermd %zmm0, %zmm1, %zmm0
9321 ; AVX512DQ-FAST-NEXT: vpmovd2m %zmm0, %k3
9322 ; AVX512DQ-FAST-NEXT: vpmovm2d %k0, %ymm0
9323 ; AVX512DQ-FAST-NEXT: vmovdqa {{.*#+}} ymm1 = [6,7,7,7,7,7,7,7]
9324 ; AVX512DQ-FAST-NEXT: vpermd %ymm0, %ymm1, %ymm0
9325 ; AVX512DQ-FAST-NEXT: vpmovd2m %ymm0, %k4
9326 ; AVX512DQ-FAST-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
9327 ; AVX512DQ-FAST-NEXT: vmovdqa32 192(%rsi), %zmm1 {%k4} {z}
9328 ; AVX512DQ-FAST-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k3} {z}
9329 ; AVX512DQ-FAST-NEXT: vmovdqa32 64(%rsi), %zmm3 {%k2} {z}
9330 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm3, 64(%rdx)
9331 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm2, 128(%rdx)
9332 ; AVX512DQ-FAST-NEXT: vmovdqa %ymm1, 192(%rdx)
9333 ; AVX512DQ-FAST-NEXT: vmovdqa64 %zmm0, (%rdx)
9334 ; AVX512DQ-FAST-NEXT: vzeroupper
9335 ; AVX512DQ-FAST-NEXT: retq
9337 ; AVX512BW-ONLY-LABEL: mask_replication_factor7_vf8:
9338 ; AVX512BW-ONLY: # %bb.0:
9339 ; AVX512BW-ONLY-NEXT: kmovw (%rdi), %k0
9340 ; AVX512BW-ONLY-NEXT: vpmovm2b %k0, %zmm0
9341 ; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
9342 ; AVX512BW-ONLY-NEXT: vpshufb {{.*#+}} zmm0 = zmm0[0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2,18,18,18,18,18,19,19,19,19,19,19,19,20,20,20,20,36,36,36,37,37,37,37,37,37,37,38,38,38,38,38,38,54,55,55,55,55,55,55,55,u,u,u,u,u,u,u,u]
9343 ; AVX512BW-ONLY-NEXT: vpxor %xmm1, %xmm1, %xmm1
9344 ; AVX512BW-ONLY-NEXT: movabsq $72057594037927935, %rax # imm = 0xFFFFFFFFFFFFFF
9345 ; AVX512BW-ONLY-NEXT: kmovq %rax, %k1
9346 ; AVX512BW-ONLY-NEXT: vpcmpgtb %zmm0, %zmm1, %k1 {%k1}
9347 ; AVX512BW-ONLY-NEXT: kshiftrq $32, %k1, %k2
9348 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k2, %k3
9349 ; AVX512BW-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm0 {%k3} {z}
9350 ; AVX512BW-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
9351 ; AVX512BW-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k2} {z}
9352 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k1
9353 ; AVX512BW-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm3 {%k1} {z}
9354 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm3, 64(%rdx)
9355 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm2, 128(%rdx)
9356 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
9357 ; AVX512BW-ONLY-NEXT: vmovdqa %ymm0, 192(%rdx)
9358 ; AVX512BW-ONLY-NEXT: vzeroupper
9359 ; AVX512BW-ONLY-NEXT: retq
9361 ; AVX512VBMI-ONLY-LABEL: mask_replication_factor7_vf8:
9362 ; AVX512VBMI-ONLY: # %bb.0:
9363 ; AVX512VBMI-ONLY-NEXT: kmovw (%rdi), %k0
9364 ; AVX512VBMI-ONLY-NEXT: vpmovm2b %k0, %zmm0
9365 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = <0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2,2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4,4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6,6,7,7,7,7,7,7,7,u,u,u,u,u,u,u,u>
9366 ; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm0
9367 ; AVX512VBMI-ONLY-NEXT: vpxor %xmm1, %xmm1, %xmm1
9368 ; AVX512VBMI-ONLY-NEXT: movabsq $72057594037927935, %rax # imm = 0xFFFFFFFFFFFFFF
9369 ; AVX512VBMI-ONLY-NEXT: kmovq %rax, %k1
9370 ; AVX512VBMI-ONLY-NEXT: vpcmpgtb %zmm0, %zmm1, %k1 {%k1}
9371 ; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k1, %k2
9372 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k2, %k3
9373 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm0 {%k3} {z}
9374 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
9375 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k2} {z}
9376 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k1
9377 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm3 {%k1} {z}
9378 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm3, 64(%rdx)
9379 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm2, 128(%rdx)
9380 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
9381 ; AVX512VBMI-ONLY-NEXT: vmovdqa %ymm0, 192(%rdx)
9382 ; AVX512VBMI-ONLY-NEXT: vzeroupper
9383 ; AVX512VBMI-ONLY-NEXT: retq
9384 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
9385 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
9386 %tgt.mask = shufflevector <8 x i1> %src.mask, <8 x i1> poison, <56 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7>
9387 %data = call <56 x i32> @llvm.masked.load.v56i32.p0(ptr %in.vec, i32 64, <56 x i1> %tgt.mask, <56 x i32> poison)
9388 %data.padded = shufflevector <56 x i32> %data, <56 x i32> poison, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef, i32 undef>
9389 store <56 x i32> %data, ptr %out.vec, align 64
9390 ret void
9391 }
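; Replication factor 7, vf16: the low 16 mask bits are each repeated 7 times to form a <112 x i1> load mask.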
9393 define void @mask_replication_factor7_vf16(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
9394 ; AVX512F-ONLY-LABEL: mask_replication_factor7_vf16:
9395 ; AVX512F-ONLY: # %bb.0:
9396 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
9397 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
9398 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
9399 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
9400 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
9401 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
9402 ; AVX512F-ONLY-NEXT: movw $1, %ax
9403 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
9404 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
9405 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
9406 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4]
9407 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
9408 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
9409 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6]
9410 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
9411 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k3
9412 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,9]
9413 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
9414 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k4
9415 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [9,9,9,9,9,9,10,10,10,10,10,10,10,11,11,11]
9416 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
9417 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k5
9418 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [11,11,11,11,12,12,12,12,12,12,12,13,13,13,13,13]
9419 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
9420 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k6
9421 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [13,13,14,14,14,14,14,14,14,15,15,15,15,15,15,15]
9422 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
9423 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k7
9424 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
9425 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm1 {%k7} {z}
9426 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm2 {%k6} {z}
9427 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm3 {%k5} {z}
9428 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm4 {%k4} {z}
9429 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm5 {%k3} {z}
9430 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm6 {%k1} {z}
9431 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 64(%rdx)
9432 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 128(%rdx)
9433 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 192(%rdx)
9434 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 256(%rdx)
9435 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 320(%rdx)
9436 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 384(%rdx)
9437 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
9438 ; AVX512F-ONLY-NEXT: vzeroupper
9439 ; AVX512F-ONLY-NEXT: retq
9441 ; AVX512DQ-LABEL: mask_replication_factor7_vf16:
9442 ; AVX512DQ: # %bb.0:
9443 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
9444 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
9445 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
9446 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
9447 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k0
9448 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm1
9449 ; AVX512DQ-NEXT: movw $1, %ax
9450 ; AVX512DQ-NEXT: kmovw %eax, %k1
9451 ; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm1 {%k1}
9452 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k2
9453 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4]
9454 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
9455 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
9456 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6]
9457 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
9458 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k3
9459 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,9]
9460 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
9461 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k4
9462 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [9,9,9,9,9,9,10,10,10,10,10,10,10,11,11,11]
9463 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
9464 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k5
9465 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [11,11,11,11,12,12,12,12,12,12,12,13,13,13,13,13]
9466 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
9467 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k6
9468 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [13,13,14,14,14,14,14,14,14,15,15,15,15,15,15,15]
9469 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
9470 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k7
9471 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
9472 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm1 {%k7} {z}
9473 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm2 {%k6} {z}
9474 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm3 {%k5} {z}
9475 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm4 {%k4} {z}
9476 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm5 {%k3} {z}
9477 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm6 {%k1} {z}
9478 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 64(%rdx)
9479 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 128(%rdx)
9480 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 192(%rdx)
9481 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 256(%rdx)
9482 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 320(%rdx)
9483 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 384(%rdx)
9484 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
9485 ; AVX512DQ-NEXT: vzeroupper
9486 ; AVX512DQ-NEXT: retq
9488 ; AVX512BW-LABEL: mask_replication_factor7_vf16:
9489 ; AVX512BW: # %bb.0:
9490 ; AVX512BW-NEXT: kmovw (%rdi), %k1
9491 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
9492 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
9493 ; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm1
9494 ; AVX512BW-NEXT: vptestmd %zmm1, %zmm1, %k1
9495 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
9496 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [13,13,14,14,14,14,14,14,14,15,15,15,15,15,15,15]
9497 ; AVX512BW-NEXT: vpermd %zmm0, %zmm2, %zmm2
9498 ; AVX512BW-NEXT: vptestmd %zmm2, %zmm2, %k1
9499 ; AVX512BW-NEXT: vmovdqa32 384(%rsi), %zmm2 {%k1} {z}
9500 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [11,11,11,11,12,12,12,12,12,12,12,13,13,13,13,13]
9501 ; AVX512BW-NEXT: vpermd %zmm0, %zmm3, %zmm3
9502 ; AVX512BW-NEXT: vptestmd %zmm3, %zmm3, %k1
9503 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm3 {%k1} {z}
9504 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [9,9,9,9,9,9,10,10,10,10,10,10,10,11,11,11]
9505 ; AVX512BW-NEXT: vpermd %zmm0, %zmm4, %zmm4
9506 ; AVX512BW-NEXT: vptestmd %zmm4, %zmm4, %k1
9507 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm4 {%k1} {z}
9508 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm5 = [6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,9]
9509 ; AVX512BW-NEXT: vpermd %zmm0, %zmm5, %zmm5
9510 ; AVX512BW-NEXT: vptestmd %zmm5, %zmm5, %k1
9511 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm5 {%k1} {z}
9512 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm6 = [4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6]
9513 ; AVX512BW-NEXT: vpermd %zmm0, %zmm6, %zmm6
9514 ; AVX512BW-NEXT: vptestmd %zmm6, %zmm6, %k1
9515 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm6 {%k1} {z}
9516 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm7 = [2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4]
9517 ; AVX512BW-NEXT: vpermd %zmm0, %zmm7, %zmm0
9518 ; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1
9519 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
9520 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 64(%rdx)
9521 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 128(%rdx)
9522 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 192(%rdx)
9523 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 256(%rdx)
9524 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 320(%rdx)
9525 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 384(%rdx)
9526 ; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rdx)
9527 ; AVX512BW-NEXT: vzeroupper
9528 ; AVX512BW-NEXT: retq
9529 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
9530 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
9531 %tgt.mask = shufflevector <16 x i1> %src.mask, <16 x i1> poison, <112 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15>
9532 %data = call <112 x i32> @llvm.masked.load.v112i32.p0(ptr %in.vec, i32 64, <112 x i1> %tgt.mask, <112 x i32> poison)
9533 store <112 x i32> %data, ptr %out.vec, align 64
9534 ret void
9535 }
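; Replication factor 7, vf32: the low 32 mask bits are each repeated 7 times to form a <224 x i1> load mask.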
9537 define void @mask_replication_factor7_vf32(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
9538 ; AVX512F-ONLY-LABEL: mask_replication_factor7_vf32:
9539 ; AVX512F-ONLY: # %bb.0:
9540 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
9541 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
9542 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
9543 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm2
9544 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
9545 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k1} {z}
9546 ; AVX512F-ONLY-NEXT: movw $1, %ax
9547 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
9548 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
9549 ; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
9550 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k1} {z}
9551 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
9552 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm2 = [2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4]
9553 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm2, %zmm4
9554 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm5 = [4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6]
9555 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm5, %zmm6
9556 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm7 = [6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,9]
9557 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm7, %zmm8
9558 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm9 = [9,9,9,9,9,9,10,10,10,10,10,10,10,11,11,11]
9559 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm9, %zmm10
9560 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm11 = [11,11,11,11,12,12,12,12,12,12,12,13,13,13,13,13]
9561 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm11, %zmm12
9562 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm13 = [13,13,14,14,14,14,14,14,14,15,15,15,15,15,15,15]
9563 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm13, %zmm0
9564 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm1, %zmm1
9565 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm2, %zmm2
9566 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm5, %zmm5
9567 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm7, %zmm7
9568 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm9, %zmm9
9569 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm11, %zmm11
9570 ; AVX512F-ONLY-NEXT: vpermd %zmm3, %zmm13, %zmm3
9571 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm13 {%k1} {z}
9572 ; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k1
9573 ; AVX512F-ONLY-NEXT: vmovdqa32 832(%rsi), %zmm3 {%k1} {z}
9574 ; AVX512F-ONLY-NEXT: vptestmd %zmm11, %zmm11, %k1
9575 ; AVX512F-ONLY-NEXT: vmovdqa32 768(%rsi), %zmm11 {%k1} {z}
9576 ; AVX512F-ONLY-NEXT: vptestmd %zmm9, %zmm9, %k1
9577 ; AVX512F-ONLY-NEXT: vmovdqa32 704(%rsi), %zmm9 {%k1} {z}
9578 ; AVX512F-ONLY-NEXT: vptestmd %zmm7, %zmm7, %k1
9579 ; AVX512F-ONLY-NEXT: vmovdqa32 640(%rsi), %zmm7 {%k1} {z}
9580 ; AVX512F-ONLY-NEXT: vptestmd %zmm5, %zmm5, %k1
9581 ; AVX512F-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm5 {%k1} {z}
9582 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
9583 ; AVX512F-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm2 {%k1} {z}
9584 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
9585 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm1 {%k1} {z}
9586 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
9587 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm0 {%k1} {z}
9588 ; AVX512F-ONLY-NEXT: vptestmd %zmm12, %zmm12, %k1
9589 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm12 {%k1} {z}
9590 ; AVX512F-ONLY-NEXT: vptestmd %zmm10, %zmm10, %k1
9591 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm10 {%k1} {z}
9592 ; AVX512F-ONLY-NEXT: vptestmd %zmm8, %zmm8, %k1
9593 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm8 {%k1} {z}
9594 ; AVX512F-ONLY-NEXT: vptestmd %zmm6, %zmm6, %k1
9595 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm6 {%k1} {z}
9596 ; AVX512F-ONLY-NEXT: vptestmd %zmm4, %zmm4, %k1
9597 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm4 {%k1} {z}
9598 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 64(%rdx)
9599 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 128(%rdx)
9600 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm8, 192(%rdx)
9601 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm10, 256(%rdx)
9602 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm12, 320(%rdx)
9603 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 384(%rdx)
9604 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 448(%rdx)
9605 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 512(%rdx)
9606 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 576(%rdx)
9607 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 640(%rdx)
9608 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm9, 704(%rdx)
9609 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm11, 768(%rdx)
9610 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 832(%rdx)
9611 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm13, (%rdx)
9612 ; AVX512F-ONLY-NEXT: vzeroupper
9613 ; AVX512F-ONLY-NEXT: retq
9615 ; AVX512DQ-LABEL: mask_replication_factor7_vf32:
9616 ; AVX512DQ: # %bb.0:
9617 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
9618 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
9619 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
9620 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm2
9621 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k0
9622 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm2
9623 ; AVX512DQ-NEXT: movw $1, %ax
9624 ; AVX512DQ-NEXT: kmovw %eax, %k1
9625 ; AVX512DQ-NEXT: vmovdqa32 %zmm0, %zmm2 {%k1}
9626 ; AVX512DQ-NEXT: kmovw 2(%rdi), %k0
9627 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm3
9628 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
9629 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm2 = [2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4]
9630 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm2, %zmm4
9631 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm5 = [4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6]
9632 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm5, %zmm6
9633 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm7 = [6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,9]
9634 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm7, %zmm8
9635 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm9 = [9,9,9,9,9,9,10,10,10,10,10,10,10,11,11,11]
9636 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm9, %zmm10
9637 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm11 = [11,11,11,11,12,12,12,12,12,12,12,13,13,13,13,13]
9638 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm11, %zmm12
9639 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm13 = [13,13,14,14,14,14,14,14,14,15,15,15,15,15,15,15]
9640 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm13, %zmm0
9641 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm1, %zmm1
9642 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm2, %zmm2
9643 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm5, %zmm5
9644 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm7, %zmm7
9645 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm9, %zmm9
9646 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm11, %zmm11
9647 ; AVX512DQ-NEXT: vpermd %zmm3, %zmm13, %zmm3
9648 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm13 {%k1} {z}
9649 ; AVX512DQ-NEXT: vpmovd2m %zmm3, %k1
9650 ; AVX512DQ-NEXT: vmovdqa32 832(%rsi), %zmm3 {%k1} {z}
9651 ; AVX512DQ-NEXT: vpmovd2m %zmm11, %k1
9652 ; AVX512DQ-NEXT: vmovdqa32 768(%rsi), %zmm11 {%k1} {z}
9653 ; AVX512DQ-NEXT: vpmovd2m %zmm9, %k1
9654 ; AVX512DQ-NEXT: vmovdqa32 704(%rsi), %zmm9 {%k1} {z}
9655 ; AVX512DQ-NEXT: vpmovd2m %zmm7, %k1
9656 ; AVX512DQ-NEXT: vmovdqa32 640(%rsi), %zmm7 {%k1} {z}
9657 ; AVX512DQ-NEXT: vpmovd2m %zmm5, %k1
9658 ; AVX512DQ-NEXT: vmovdqa32 576(%rsi), %zmm5 {%k1} {z}
9659 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
9660 ; AVX512DQ-NEXT: vmovdqa32 512(%rsi), %zmm2 {%k1} {z}
9661 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
9662 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm1 {%k1} {z}
9663 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
9664 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm0 {%k1} {z}
9665 ; AVX512DQ-NEXT: vpmovd2m %zmm12, %k1
9666 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm12 {%k1} {z}
9667 ; AVX512DQ-NEXT: vpmovd2m %zmm10, %k1
9668 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm10 {%k1} {z}
9669 ; AVX512DQ-NEXT: vpmovd2m %zmm8, %k1
9670 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm8 {%k1} {z}
9671 ; AVX512DQ-NEXT: vpmovd2m %zmm6, %k1
9672 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm6 {%k1} {z}
9673 ; AVX512DQ-NEXT: vpmovd2m %zmm4, %k1
9674 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm4 {%k1} {z}
9675 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 64(%rdx)
9676 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 128(%rdx)
9677 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 192(%rdx)
9678 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 256(%rdx)
9679 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 320(%rdx)
9680 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 384(%rdx)
9681 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 448(%rdx)
9682 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 512(%rdx)
9683 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 576(%rdx)
9684 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 640(%rdx)
9685 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 704(%rdx)
9686 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 768(%rdx)
9687 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 832(%rdx)
9688 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, (%rdx)
9689 ; AVX512DQ-NEXT: vzeroupper
; AVX512DQ-NEXT: retq
;
9692 ; AVX512BW-LABEL: mask_replication_factor7_vf32:
9693 ; AVX512BW: # %bb.0:
9694 ; AVX512BW-NEXT: movw $-3, %ax
9695 ; AVX512BW-NEXT: kmovd %eax, %k2
9696 ; AVX512BW-NEXT: kmovw (%rdi), %k0
9697 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
9698 ; AVX512BW-NEXT: kmovq %k2, %k3
9699 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9700 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
9701 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k2
9702 ; AVX512BW-NEXT: korw %k2, %k1, %k1
9703 ; AVX512BW-NEXT: movw $-5, %ax
9704 ; AVX512BW-NEXT: kmovd %eax, %k2
9705 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
9706 ; AVX512BW-NEXT: kmovq %k2, %k4
9707 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k2
9708 ; AVX512BW-NEXT: korw %k2, %k1, %k1
9709 ; AVX512BW-NEXT: movw $-9, %ax
9710 ; AVX512BW-NEXT: kmovd %eax, %k2
9711 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9712 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
9713 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k2
9714 ; AVX512BW-NEXT: korw %k2, %k1, %k1
9715 ; AVX512BW-NEXT: movw $-17, %ax
9716 ; AVX512BW-NEXT: kmovd %eax, %k2
9717 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9718 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
9719 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k2
9720 ; AVX512BW-NEXT: korw %k2, %k1, %k1
9721 ; AVX512BW-NEXT: movw $-33, %ax
9722 ; AVX512BW-NEXT: kmovd %eax, %k2
9723 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9724 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
9725 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k2
9726 ; AVX512BW-NEXT: korw %k2, %k1, %k1
9727 ; AVX512BW-NEXT: movw $-65, %ax
9728 ; AVX512BW-NEXT: kmovd %eax, %k2
9729 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9730 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
9731 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k0
9732 ; AVX512BW-NEXT: korw %k0, %k1, %k0
9733 ; AVX512BW-NEXT: movw $-129, %ax
9734 ; AVX512BW-NEXT: kmovd %eax, %k1
9735 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9736 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
9737 ; AVX512BW-NEXT: kmovd (%rdi), %k6
9738 ; AVX512BW-NEXT: kshiftrd $1, %k6, %k0
9739 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
9740 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k2
9741 ; AVX512BW-NEXT: korw %k2, %k1, %k1
9742 ; AVX512BW-NEXT: movw $-257, %ax # imm = 0xFEFF
9743 ; AVX512BW-NEXT: kmovd %eax, %k2
9744 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
9745 ; AVX512BW-NEXT: kmovq %k2, %k7
9746 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9747 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k2
9748 ; AVX512BW-NEXT: korw %k2, %k1, %k1
9749 ; AVX512BW-NEXT: movw $-513, %ax # imm = 0xFDFF
9750 ; AVX512BW-NEXT: kmovd %eax, %k5
9751 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
9752 ; AVX512BW-NEXT: kmovw %k5, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9753 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k2
9754 ; AVX512BW-NEXT: korw %k2, %k1, %k1
9755 ; AVX512BW-NEXT: movw $-1025, %ax # imm = 0xFBFF
9756 ; AVX512BW-NEXT: kmovd %eax, %k2
9757 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9758 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
9759 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k2
9760 ; AVX512BW-NEXT: korw %k2, %k1, %k1
9761 ; AVX512BW-NEXT: movw $-2049, %ax # imm = 0xF7FF
9762 ; AVX512BW-NEXT: kmovd %eax, %k2
9763 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9764 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
9765 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k2
9766 ; AVX512BW-NEXT: korw %k2, %k1, %k1
9767 ; AVX512BW-NEXT: movw $-4097, %ax # imm = 0xEFFF
9768 ; AVX512BW-NEXT: kmovd %eax, %k2
9769 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9770 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
9771 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k2
9772 ; AVX512BW-NEXT: korw %k2, %k1, %k1
9773 ; AVX512BW-NEXT: movw $-8193, %ax # imm = 0xDFFF
9774 ; AVX512BW-NEXT: kmovd %eax, %k2
9775 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9776 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
9777 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k0
9778 ; AVX512BW-NEXT: korw %k0, %k1, %k0
9779 ; AVX512BW-NEXT: movw $-16385, %ax # imm = 0xBFFF
9780 ; AVX512BW-NEXT: kmovd %eax, %k1
9781 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9782 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
9783 ; AVX512BW-NEXT: kshiftrd $2, %k6, %k2
9784 ; AVX512BW-NEXT: kmovd %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
9785 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k1
9786 ; AVX512BW-NEXT: korw %k1, %k0, %k0
9787 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
9788 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
9789 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k1
9790 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9791 ; AVX512BW-NEXT: korw %k1, %k0, %k1
9792 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
9793 ; AVX512BW-NEXT: kmovq %k6, %k2
9794 ; AVX512BW-NEXT: kshiftrd $29, %k6, %k1
9795 ; AVX512BW-NEXT: kmovd %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
9796 ; AVX512BW-NEXT: kandw %k3, %k1, %k0
9797 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
9798 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9799 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k1
9800 ; AVX512BW-NEXT: korw %k1, %k0, %k0
9801 ; AVX512BW-NEXT: kmovq %k4, %k6
9802 ; AVX512BW-NEXT: kmovw %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9803 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
9804 ; AVX512BW-NEXT: kshiftrd $30, %k2, %k1
9805 ; AVX512BW-NEXT: kmovq %k2, %k4
9806 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
9807 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k3
9808 ; AVX512BW-NEXT: korw %k3, %k0, %k0
9809 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9810 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
9811 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k3
9812 ; AVX512BW-NEXT: korw %k3, %k0, %k0
9813 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9814 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
9815 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k3
9816 ; AVX512BW-NEXT: korw %k3, %k0, %k0
9817 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9818 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
9819 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k3
9820 ; AVX512BW-NEXT: korw %k3, %k0, %k0
9821 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9822 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
9823 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k3
9824 ; AVX512BW-NEXT: korw %k3, %k0, %k0
9825 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9826 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
9827 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k3
9828 ; AVX512BW-NEXT: korw %k3, %k0, %k0
9829 ; AVX512BW-NEXT: kandw %k7, %k0, %k0
9830 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k1
9831 ; AVX512BW-NEXT: korw %k1, %k0, %k0
9832 ; AVX512BW-NEXT: kandw %k5, %k0, %k3
9833 ; AVX512BW-NEXT: kshiftrd $31, %k4, %k0
9834 ; AVX512BW-NEXT: kmovd %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
9835 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k1
9836 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k7
9837 ; AVX512BW-NEXT: korw %k7, %k3, %k3
9838 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
9839 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
9840 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k7
9841 ; AVX512BW-NEXT: korw %k7, %k3, %k3
9842 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9843 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
9844 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k7
9845 ; AVX512BW-NEXT: korw %k7, %k3, %k3
9846 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9847 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
9848 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k7
9849 ; AVX512BW-NEXT: korw %k7, %k3, %k3
9850 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
9851 ; AVX512BW-NEXT: kandw %k7, %k3, %k3
9852 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k7
9853 ; AVX512BW-NEXT: korw %k7, %k3, %k3
9854 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
9855 ; AVX512BW-NEXT: kandw %k7, %k3, %k3
9856 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k0
9857 ; AVX512BW-NEXT: korw %k0, %k3, %k0
9858 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
9859 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
9860 ; AVX512BW-NEXT: korw %k1, %k0, %k1
9861 ; AVX512BW-NEXT: vmovdqa32 832(%rsi), %zmm1 {%k1} {z}
9862 ; AVX512BW-NEXT: kshiftrd $27, %k4, %k1
9863 ; AVX512BW-NEXT: kmovd %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
9864 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
9865 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
9866 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k3
9867 ; AVX512BW-NEXT: kmovw %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
9868 ; AVX512BW-NEXT: kshiftrw $14, %k3, %k7
9869 ; AVX512BW-NEXT: korw %k7, %k0, %k0
9870 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
9871 ; AVX512BW-NEXT: kshiftrw $13, %k3, %k7
9872 ; AVX512BW-NEXT: korw %k7, %k0, %k0
9873 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
9874 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
9875 ; AVX512BW-NEXT: kshiftrw $12, %k3, %k7
9876 ; AVX512BW-NEXT: korw %k7, %k0, %k0
9877 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
9878 ; AVX512BW-NEXT: kandw %k3, %k0, %k7
9879 ; AVX512BW-NEXT: kshiftrd $28, %k4, %k0
9880 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
9881 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
9882 ; AVX512BW-NEXT: korw %k6, %k7, %k6
9883 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
9884 ; AVX512BW-NEXT: kandw %k1, %k6, %k6
9885 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k7
9886 ; AVX512BW-NEXT: korw %k7, %k6, %k6
9887 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
9888 ; AVX512BW-NEXT: kandw %k1, %k6, %k6
9889 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k7
9890 ; AVX512BW-NEXT: korw %k7, %k6, %k6
9891 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
9892 ; AVX512BW-NEXT: kandw %k1, %k6, %k6
9893 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k7
9894 ; AVX512BW-NEXT: korw %k7, %k6, %k6
9895 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
9896 ; AVX512BW-NEXT: kandw %k1, %k6, %k6
9897 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k7
9898 ; AVX512BW-NEXT: korw %k7, %k6, %k6
9899 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
9900 ; AVX512BW-NEXT: kandw %k1, %k6, %k6
9901 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k7
9902 ; AVX512BW-NEXT: korw %k7, %k6, %k6
9903 ; AVX512BW-NEXT: kandw %k5, %k6, %k6
9904 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k0
9905 ; AVX512BW-NEXT: korw %k0, %k6, %k0
9906 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
9907 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
9908 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
9909 ; AVX512BW-NEXT: kshiftrw $4, %k5, %k6
9910 ; AVX512BW-NEXT: korw %k6, %k0, %k0
9911 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
9912 ; AVX512BW-NEXT: kmovq %k2, %k4
9913 ; AVX512BW-NEXT: kshiftrw $3, %k5, %k6
9914 ; AVX512BW-NEXT: korw %k6, %k0, %k0
9915 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9916 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
9917 ; AVX512BW-NEXT: kshiftrw $2, %k5, %k6
9918 ; AVX512BW-NEXT: kmovq %k5, %k7
9919 ; AVX512BW-NEXT: korw %k6, %k0, %k0
9920 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9921 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
9922 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 4-byte Reload
9923 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k5
9924 ; AVX512BW-NEXT: korw %k5, %k0, %k0
9925 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
9926 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
9927 ; AVX512BW-NEXT: korw %k7, %k0, %k2
9928 ; AVX512BW-NEXT: vmovdqa32 768(%rsi), %zmm2 {%k2} {z}
9929 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 4-byte Reload
9930 ; AVX512BW-NEXT: kshiftrd $25, %k6, %k0
9931 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
9932 ; AVX512BW-NEXT: kandw %k2, %k0, %k2
9933 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
9934 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k5
9935 ; AVX512BW-NEXT: korw %k5, %k2, %k2
9936 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
9937 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
9938 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k5
9939 ; AVX512BW-NEXT: korw %k5, %k2, %k2
9940 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
9941 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
9942 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k5
9943 ; AVX512BW-NEXT: korw %k5, %k2, %k2
9944 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
9945 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k5
9946 ; AVX512BW-NEXT: korw %k5, %k2, %k2
9947 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
9948 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
9949 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k5
9950 ; AVX512BW-NEXT: korw %k5, %k2, %k2
9951 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
9952 ; AVX512BW-NEXT: kandw %k3, %k2, %k5
9953 ; AVX512BW-NEXT: kshiftrd $26, %k6, %k2
9954 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
9955 ; AVX512BW-NEXT: kshiftrw $9, %k2, %k6
9956 ; AVX512BW-NEXT: korw %k6, %k5, %k5
9957 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
9958 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
9959 ; AVX512BW-NEXT: kshiftrw $8, %k2, %k6
9960 ; AVX512BW-NEXT: korw %k6, %k5, %k5
9961 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
9962 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
9963 ; AVX512BW-NEXT: kshiftrw $7, %k2, %k6
9964 ; AVX512BW-NEXT: korw %k6, %k5, %k5
9965 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
9966 ; AVX512BW-NEXT: kshiftrw $6, %k2, %k6
9967 ; AVX512BW-NEXT: korw %k6, %k5, %k5
9968 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
9969 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
9970 ; AVX512BW-NEXT: kshiftrw $5, %k2, %k6
9971 ; AVX512BW-NEXT: korw %k6, %k5, %k5
9972 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
9973 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
9974 ; AVX512BW-NEXT: kshiftrw $4, %k2, %k6
9975 ; AVX512BW-NEXT: korw %k6, %k5, %k5
9976 ; AVX512BW-NEXT: kandw %k4, %k5, %k5
9977 ; AVX512BW-NEXT: kshiftrw $3, %k2, %k2
9978 ; AVX512BW-NEXT: korw %k2, %k5, %k2
9979 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
9980 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
9981 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
9982 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k5
9983 ; AVX512BW-NEXT: korw %k5, %k2, %k2
9984 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
9985 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
9986 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
9987 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k3
9988 ; AVX512BW-NEXT: korw %k3, %k2, %k2
9989 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
9990 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
9991 ; AVX512BW-NEXT: korw %k6, %k2, %k1
9992 ; AVX512BW-NEXT: vmovdqa32 704(%rsi), %zmm3 {%k1} {z}
9993 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 4-byte Reload
9994 ; AVX512BW-NEXT: kshiftrd $23, %k2, %k1
9995 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k3
9996 ; AVX512BW-NEXT: kshiftrd $22, %k2, %k5
9997 ; AVX512BW-NEXT: kmovd %k5, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
9998 ; AVX512BW-NEXT: kmovq %k2, %k6
9999 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10000 ; AVX512BW-NEXT: kandw %k1, %k5, %k2
10001 ; AVX512BW-NEXT: kshiftrw $14, %k3, %k5
10002 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10003 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10004 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10005 ; AVX512BW-NEXT: kshiftrw $13, %k3, %k5
10006 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10007 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10008 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10009 ; AVX512BW-NEXT: kshiftrw $12, %k3, %k5
10010 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10011 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
10012 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
10013 ; AVX512BW-NEXT: kshiftrw $11, %k3, %k5
10014 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10015 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
10016 ; AVX512BW-NEXT: kshiftrw $10, %k3, %k5
10017 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10018 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
10019 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
10020 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k5
10021 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10022 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
10023 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
10024 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k3
10025 ; AVX512BW-NEXT: korw %k3, %k2, %k2
10026 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10027 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
10028 ; AVX512BW-NEXT: kshiftrd $24, %k6, %k3
10029 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k5
10030 ; AVX512BW-NEXT: kshiftrw $7, %k5, %k6
10031 ; AVX512BW-NEXT: korw %k6, %k2, %k2
10032 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
10033 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
10034 ; AVX512BW-NEXT: kshiftrw $6, %k5, %k6
10035 ; AVX512BW-NEXT: korw %k6, %k2, %k2
10036 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
10037 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
10038 ; AVX512BW-NEXT: kshiftrw $5, %k5, %k6
10039 ; AVX512BW-NEXT: korw %k6, %k2, %k2
10040 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
10041 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
10042 ; AVX512BW-NEXT: kshiftrw $4, %k5, %k6
10043 ; AVX512BW-NEXT: korw %k6, %k2, %k2
10044 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
10045 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
10046 ; AVX512BW-NEXT: kshiftrw $3, %k5, %k6
10047 ; AVX512BW-NEXT: korw %k6, %k2, %k2
10048 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
10049 ; AVX512BW-NEXT: kshiftrw $2, %k5, %k5
10050 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10051 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10052 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
10053 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k3
10054 ; AVX512BW-NEXT: korw %k3, %k2, %k2
10055 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
10056 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
10057 ; AVX512BW-NEXT: korw %k0, %k2, %k2
10058 ; AVX512BW-NEXT: vmovdqa32 640(%rsi), %zmm4 {%k2} {z}
10059 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 4-byte Reload
10060 ; AVX512BW-NEXT: kshiftrd $20, %k3, %k5
10061 ; AVX512BW-NEXT: kmovd %k5, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
10062 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10063 ; AVX512BW-NEXT: kandw %k0, %k5, %k2
10064 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k6
10065 ; AVX512BW-NEXT: kmovw %k6, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
10066 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k5
10067 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10068 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10069 ; AVX512BW-NEXT: kandw %k0, %k2, %k2
10070 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k5
10071 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10072 ; AVX512BW-NEXT: kandw %k1, %k2, %k5
10073 ; AVX512BW-NEXT: kshiftrd $21, %k3, %k2
10074 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
10075 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k6
10076 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10077 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10078 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
10079 ; AVX512BW-NEXT: kshiftrw $11, %k2, %k6
10080 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10081 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10082 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
10083 ; AVX512BW-NEXT: kshiftrw $10, %k2, %k6
10084 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10085 ; AVX512BW-NEXT: kandw %k7, %k5, %k5
10086 ; AVX512BW-NEXT: kshiftrw $9, %k2, %k6
10087 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10088 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10089 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
10090 ; AVX512BW-NEXT: kshiftrw $8, %k2, %k6
10091 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10092 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10093 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
10094 ; AVX512BW-NEXT: kshiftrw $7, %k2, %k6
10095 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10096 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10097 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
10098 ; AVX512BW-NEXT: kshiftrw $6, %k2, %k2
10099 ; AVX512BW-NEXT: korw %k2, %k5, %k2
10100 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10101 ; AVX512BW-NEXT: kandw %k0, %k2, %k5
10102 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 4-byte Reload
10103 ; AVX512BW-NEXT: kshiftlw $15, %k7, %k2
10104 ; AVX512BW-NEXT: kshiftrw $5, %k2, %k6
10105 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10106 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10107 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
10108 ; AVX512BW-NEXT: kshiftrw $4, %k2, %k6
10109 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10110 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10111 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
10112 ; AVX512BW-NEXT: kshiftrw $3, %k2, %k6
10113 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10114 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10115 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
10116 ; AVX512BW-NEXT: kshiftrw $2, %k2, %k6
10117 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10118 ; AVX512BW-NEXT: kandw %k4, %k5, %k5
10119 ; AVX512BW-NEXT: kshiftlw $14, %k7, %k1
10120 ; AVX512BW-NEXT: korw %k1, %k5, %k1
10121 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
10122 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
10123 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10124 ; AVX512BW-NEXT: vmovdqa32 576(%rsi), %zmm5 {%k1} {z}
10125 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 4-byte Reload
10126 ; AVX512BW-NEXT: kshiftrd $18, %k2, %k4
10127 ; AVX512BW-NEXT: kmovd %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
10128 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10129 ; AVX512BW-NEXT: kandw %k1, %k4, %k5
10130 ; AVX512BW-NEXT: kshiftlw $15, %k4, %k4
10131 ; AVX512BW-NEXT: kmovw %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
10132 ; AVX512BW-NEXT: kshiftrw $14, %k4, %k6
10133 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10134 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10135 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
10136 ; AVX512BW-NEXT: kshiftrw $13, %k4, %k6
10137 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10138 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10139 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
10140 ; AVX512BW-NEXT: kshiftrw $12, %k4, %k6
10141 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10142 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10143 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
10144 ; AVX512BW-NEXT: kshiftrw $11, %k4, %k6
10145 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10146 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10147 ; AVX512BW-NEXT: kandw %k1, %k5, %k6
10148 ; AVX512BW-NEXT: kshiftrd $19, %k2, %k5
10149 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k5
10150 ; AVX512BW-NEXT: kshiftrw $10, %k5, %k7
10151 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10152 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10153 ; AVX512BW-NEXT: kandw %k1, %k6, %k6
10154 ; AVX512BW-NEXT: kshiftrw $9, %k5, %k7
10155 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10156 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10157 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
10158 ; AVX512BW-NEXT: kshiftrw $8, %k5, %k7
10159 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10160 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10161 ; AVX512BW-NEXT: kandw %k2, %k6, %k6
10162 ; AVX512BW-NEXT: kshiftrw $7, %k5, %k7
10163 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10164 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10165 ; AVX512BW-NEXT: kandw %k2, %k6, %k6
10166 ; AVX512BW-NEXT: kshiftrw $6, %k5, %k7
10167 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10168 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10169 ; AVX512BW-NEXT: kandw %k2, %k6, %k6
10170 ; AVX512BW-NEXT: kshiftrw $5, %k5, %k7
10171 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10172 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
10173 ; AVX512BW-NEXT: kshiftrw $4, %k5, %k5
10174 ; AVX512BW-NEXT: korw %k5, %k6, %k5
10175 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
10176 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10177 ; AVX512BW-NEXT: kshiftrw $3, %k3, %k6
10178 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10179 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10180 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
10181 ; AVX512BW-NEXT: kshiftrw $2, %k3, %k6
10182 ; AVX512BW-NEXT: kmovq %k3, %k7
10183 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10184 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10185 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
10186 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 4-byte Reload
10187 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k3
10188 ; AVX512BW-NEXT: korw %k3, %k5, %k3
10189 ; AVX512BW-NEXT: kshiftlw $1, %k3, %k3
10190 ; AVX512BW-NEXT: kshiftrw $1, %k3, %k3
10191 ; AVX512BW-NEXT: korw %k7, %k3, %k3
10192 ; AVX512BW-NEXT: vmovdqa32 512(%rsi), %zmm6 {%k3} {z}
10193 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
10194 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k0
10195 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
10196 ; AVX512BW-NEXT: kandw %k6, %k0, %k3
10197 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
10198 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k5
10199 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10200 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
10201 ; AVX512BW-NEXT: kandw %k7, %k3, %k3
10202 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k5
10203 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10204 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
10205 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
10206 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k5
10207 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10208 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
10209 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
10210 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k5
10211 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10212 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
10213 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
10214 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k5
10215 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10216 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
10217 ; AVX512BW-NEXT: kandw %k5, %k3, %k3
10218 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k0
10219 ; AVX512BW-NEXT: korw %k0, %k3, %k0
10220 ; AVX512BW-NEXT: kandw %k4, %k0, %k3
10221 ; AVX512BW-NEXT: kshiftrd $17, %k1, %k0
10222 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
10223 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k5
10224 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10225 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10226 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
10227 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k5
10228 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10229 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10230 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
10231 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k5
10232 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10233 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
10234 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k5
10235 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10236 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10237 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
10238 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k5
10239 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10240 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10241 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
10242 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k5
10243 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10244 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10245 ; AVX512BW-NEXT: kandw %k1, %k3, %k3
10246 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k0
10247 ; AVX512BW-NEXT: korw %k0, %k3, %k0
10248 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10249 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
10250 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
10251 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k2
10252 ; AVX512BW-NEXT: korw %k2, %k0, %k0
10253 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
10254 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
10255 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10256 ; AVX512BW-NEXT: korw %k1, %k0, %k1
10257 ; AVX512BW-NEXT: vmovdqa32 448(%rsi), %zmm7 {%k1} {z}
10258 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 4-byte Reload
10259 ; AVX512BW-NEXT: kshiftrd $13, %k0, %k2
10260 ; AVX512BW-NEXT: kmovd %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
10261 ; AVX512BW-NEXT: kandw %k6, %k2, %k1
10262 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
10263 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
10264 ; AVX512BW-NEXT: kshiftrw $14, %k2, %k3
10265 ; AVX512BW-NEXT: korw %k3, %k1, %k1
10266 ; AVX512BW-NEXT: kandw %k7, %k1, %k3
10267 ; AVX512BW-NEXT: kshiftrd $14, %k0, %k1
10268 ; AVX512BW-NEXT: kmovq %k0, %k6
10269 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
10270 ; AVX512BW-NEXT: kshiftrw $13, %k1, %k5
10271 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10272 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
10273 ; AVX512BW-NEXT: kandw %k7, %k3, %k3
10274 ; AVX512BW-NEXT: kshiftrw $12, %k1, %k5
10275 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10276 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10277 ; AVX512BW-NEXT: kandw %k0, %k3, %k3
10278 ; AVX512BW-NEXT: kshiftrw $11, %k1, %k5
10279 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10280 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10281 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
10282 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k5
10283 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10284 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10285 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
10286 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k5
10287 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10288 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10289 ; AVX512BW-NEXT: kandw %k2, %k3, %k3
10290 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k5
10291 ; AVX512BW-NEXT: korw %k5, %k3, %k3
10292 ; AVX512BW-NEXT: kandw %k4, %k3, %k3
10293 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k1
10294 ; AVX512BW-NEXT: korw %k1, %k3, %k1
10295 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10296 ; AVX512BW-NEXT: kandw %k4, %k1, %k5
10297 ; AVX512BW-NEXT: kshiftrd $15, %k6, %k3
10298 ; AVX512BW-NEXT: kmovq %k6, %k0
10299 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k1
10300 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k6
10301 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10302 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10303 ; AVX512BW-NEXT: kandw %k2, %k5, %k5
10304 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k6
10305 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10306 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10307 ; AVX512BW-NEXT: kandw %k2, %k5, %k5
10308 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k6
10309 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10310 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10311 ; AVX512BW-NEXT: kandw %k2, %k5, %k5
10312 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k6
10313 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10314 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10315 ; AVX512BW-NEXT: kandw %k2, %k5, %k5
10316 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k6
10317 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10318 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
10319 ; AVX512BW-NEXT: kandw %k6, %k5, %k5
10320 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k3
10321 ; AVX512BW-NEXT: korw %k3, %k5, %k3
10322 ; AVX512BW-NEXT: kshiftlw $1, %k3, %k3
10323 ; AVX512BW-NEXT: kshiftrw $1, %k3, %k3
10324 ; AVX512BW-NEXT: korw %k1, %k3, %k1
10325 ; AVX512BW-NEXT: vmovdqa32 384(%rsi), %zmm8 {%k1} {z}
10326 ; AVX512BW-NEXT: kmovq %k0, %k3
10327 ; AVX512BW-NEXT: kshiftrd $11, %k0, %k0
10328 ; AVX512BW-NEXT: kmovd %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
10329 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10330 ; AVX512BW-NEXT: kandw %k1, %k0, %k5
10331 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
10332 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
10333 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10334 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10335 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
10336 ; AVX512BW-NEXT: kmovw %k0, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
10337 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
10338 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10339 ; AVX512BW-NEXT: kandw %k7, %k5, %k5
10340 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
10341 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10342 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10343 ; AVX512BW-NEXT: kandw %k0, %k5, %k6
10344 ; AVX512BW-NEXT: kshiftrd $12, %k3, %k5
10345 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k5
10346 ; AVX512BW-NEXT: kshiftrw $11, %k5, %k7
10347 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10348 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10349 ; AVX512BW-NEXT: kandw %k1, %k6, %k6
10350 ; AVX512BW-NEXT: kshiftrw $10, %k5, %k7
10351 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10352 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10353 ; AVX512BW-NEXT: kandw %k0, %k6, %k6
10354 ; AVX512BW-NEXT: kshiftrw $9, %k5, %k7
10355 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10356 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10357 ; AVX512BW-NEXT: kandw %k0, %k6, %k6
10358 ; AVX512BW-NEXT: kshiftrw $8, %k5, %k7
10359 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10360 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10361 ; AVX512BW-NEXT: kandw %k0, %k6, %k6
10362 ; AVX512BW-NEXT: kshiftrw $7, %k5, %k7
10363 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10364 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
10365 ; AVX512BW-NEXT: kshiftrw $6, %k5, %k7
10366 ; AVX512BW-NEXT: korw %k7, %k6, %k6
10367 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10368 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
10369 ; AVX512BW-NEXT: kshiftrw $5, %k5, %k5
10370 ; AVX512BW-NEXT: korw %k5, %k6, %k5
10371 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10372 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
10373 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10374 ; AVX512BW-NEXT: kshiftrw $4, %k3, %k6
10375 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10376 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10377 ; AVX512BW-NEXT: kandw %k0, %k5, %k5
10378 ; AVX512BW-NEXT: kshiftrw $3, %k3, %k6
10379 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10380 ; AVX512BW-NEXT: kandw %k2, %k5, %k5
10381 ; AVX512BW-NEXT: kshiftrw $2, %k3, %k6
10382 ; AVX512BW-NEXT: kmovq %k3, %k0
10383 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10384 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10385 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
10386 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 4-byte Reload
10387 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k2
10388 ; AVX512BW-NEXT: korw %k2, %k5, %k2
10389 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
10390 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
10391 ; AVX512BW-NEXT: korw %k0, %k2, %k2
10392 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm9 {%k2} {z}
10393 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 4-byte Reload
10394 ; AVX512BW-NEXT: kshiftrd $9, %k6, %k0
10395 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10396 ; AVX512BW-NEXT: kandw %k2, %k0, %k2
10397 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
10398 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k5
10399 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10400 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
10401 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
10402 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k5
10403 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10404 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
10405 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
10406 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k5
10407 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10408 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
10409 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
10410 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k5
10411 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10412 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10413 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k5
10414 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10415 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10416 ; AVX512BW-NEXT: kandw %k1, %k2, %k5
10417 ; AVX512BW-NEXT: kshiftrd $10, %k6, %k2
10418 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
10419 ; AVX512BW-NEXT: kshiftrw $9, %k2, %k6
10420 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10421 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10422 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
10423 ; AVX512BW-NEXT: kshiftrw $8, %k2, %k6
10424 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10425 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10426 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
10427 ; AVX512BW-NEXT: kshiftrw $7, %k2, %k6
10428 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10429 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10430 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
10431 ; AVX512BW-NEXT: kshiftrw $6, %k2, %k6
10432 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10433 ; AVX512BW-NEXT: kandw %k4, %k5, %k5
10434 ; AVX512BW-NEXT: kshiftrw $5, %k2, %k6
10435 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10436 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10437 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
10438 ; AVX512BW-NEXT: kshiftrw $4, %k2, %k6
10439 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10440 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10441 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
10442 ; AVX512BW-NEXT: kshiftrw $3, %k2, %k2
10443 ; AVX512BW-NEXT: korw %k2, %k5, %k2
10444 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10445 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10446 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10447 ; AVX512BW-NEXT: kshiftrw $2, %k4, %k5
10448 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10449 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
10450 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
10451 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
10452 ; AVX512BW-NEXT: korw %k1, %k2, %k1
10453 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
10454 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
10455 ; AVX512BW-NEXT: korw %k4, %k1, %k1
10456 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm10 {%k1} {z}
10457 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 4-byte Reload
10458 ; AVX512BW-NEXT: kshiftrd $7, %k4, %k1
10459 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k3
10460 ; AVX512BW-NEXT: kshiftrd $6, %k4, %k2
10461 ; AVX512BW-NEXT: kmovd %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
10462 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10463 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10464 ; AVX512BW-NEXT: kshiftrw $14, %k3, %k5
10465 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10466 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10467 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10468 ; AVX512BW-NEXT: kshiftrw $13, %k3, %k5
10469 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10470 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10471 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10472 ; AVX512BW-NEXT: kshiftrw $12, %k3, %k5
10473 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10474 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
10475 ; AVX512BW-NEXT: kshiftrw $11, %k3, %k5
10476 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10477 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10478 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10479 ; AVX512BW-NEXT: kshiftrw $10, %k3, %k5
10480 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10481 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10482 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10483 ; AVX512BW-NEXT: kshiftrw $9, %k3, %k5
10484 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10485 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10486 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10487 ; AVX512BW-NEXT: kshiftrw $8, %k3, %k3
10488 ; AVX512BW-NEXT: korw %k3, %k2, %k2
10489 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10490 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10491 ; AVX512BW-NEXT: kshiftrd $8, %k4, %k3
10492 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k5
10493 ; AVX512BW-NEXT: kshiftrw $7, %k5, %k6
10494 ; AVX512BW-NEXT: korw %k6, %k2, %k2
10495 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
10496 ; AVX512BW-NEXT: kandw %k1, %k2, %k2
10497 ; AVX512BW-NEXT: kshiftrw $6, %k5, %k6
10498 ; AVX512BW-NEXT: korw %k6, %k2, %k2
10499 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10500 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
10501 ; AVX512BW-NEXT: kshiftrw $5, %k5, %k6
10502 ; AVX512BW-NEXT: korw %k6, %k2, %k2
10503 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
10504 ; AVX512BW-NEXT: kandw %k7, %k2, %k2
10505 ; AVX512BW-NEXT: kshiftrw $4, %k5, %k6
10506 ; AVX512BW-NEXT: korw %k6, %k2, %k2
10507 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
10508 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
10509 ; AVX512BW-NEXT: kshiftrw $3, %k5, %k6
10510 ; AVX512BW-NEXT: korw %k6, %k2, %k2
10511 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
10512 ; AVX512BW-NEXT: kandw %k6, %k2, %k2
10513 ; AVX512BW-NEXT: kshiftrw $2, %k5, %k5
10514 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10515 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
10516 ; AVX512BW-NEXT: kandw %k5, %k2, %k2
10517 ; AVX512BW-NEXT: kshiftlw $14, %k3, %k3
10518 ; AVX512BW-NEXT: korw %k3, %k2, %k2
10519 ; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
10520 ; AVX512BW-NEXT: kshiftrw $1, %k2, %k2
10521 ; AVX512BW-NEXT: korw %k0, %k2, %k2
10522 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm11 {%k2} {z}
10523 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 4-byte Reload
10524 ; AVX512BW-NEXT: kshiftrd $4, %k6, %k3
10525 ; AVX512BW-NEXT: kmovd %k3, {{[-0-9]+}}(%r{{[sb]}}p) # 4-byte Spill
10526 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
10527 ; AVX512BW-NEXT: kandw %k0, %k3, %k2
10528 ; AVX512BW-NEXT: kshiftlw $15, %k3, %k0
10529 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k5
10530 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10531 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10532 ; AVX512BW-NEXT: kandw %k3, %k2, %k2
10533 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k5
10534 ; AVX512BW-NEXT: korw %k5, %k2, %k2
10535 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10536 ; AVX512BW-NEXT: kandw %k3, %k2, %k5
10537 ; AVX512BW-NEXT: kshiftrd $5, %k6, %k2
10538 ; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
10539 ; AVX512BW-NEXT: kshiftrw $12, %k2, %k6
10540 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10541 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10542 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
10543 ; AVX512BW-NEXT: kshiftrw $11, %k2, %k6
10544 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10545 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10546 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
10547 ; AVX512BW-NEXT: kshiftrw $10, %k2, %k6
10548 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10549 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10550 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
10551 ; AVX512BW-NEXT: kshiftrw $9, %k2, %k6
10552 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10553 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10554 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
10555 ; AVX512BW-NEXT: kshiftrw $8, %k2, %k6
10556 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10557 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10558 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
10559 ; AVX512BW-NEXT: kshiftrw $7, %k2, %k6
10560 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10561 ; AVX512BW-NEXT: kandw %k1, %k5, %k5
10562 ; AVX512BW-NEXT: kshiftrw $6, %k2, %k2
10563 ; AVX512BW-NEXT: korw %k2, %k5, %k2
10564 ; AVX512BW-NEXT: kandw %k4, %k2, %k5
10565 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
10566 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k2
10567 ; AVX512BW-NEXT: kshiftrw $5, %k2, %k6
10568 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10569 ; AVX512BW-NEXT: kandw %k7, %k5, %k5
10570 ; AVX512BW-NEXT: kshiftrw $4, %k2, %k6
10571 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10572 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
10573 ; AVX512BW-NEXT: kandw %k3, %k5, %k5
10574 ; AVX512BW-NEXT: kshiftrw $3, %k2, %k6
10575 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10576 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
10577 ; AVX512BW-NEXT: kandw %k7, %k5, %k5
10578 ; AVX512BW-NEXT: kshiftrw $2, %k2, %k6
10579 ; AVX512BW-NEXT: korw %k6, %k5, %k5
10580 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
10581 ; AVX512BW-NEXT: kandw %k6, %k5, %k5
10582 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
10583 ; AVX512BW-NEXT: korw %k1, %k5, %k1
10584 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
10585 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
10586 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10587 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm12 {%k1} {z}
10588 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
10589 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10590 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
10591 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10592 ; AVX512BW-NEXT: kshiftrw $14, %k4, %k2
10593 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10594 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10595 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
10596 ; AVX512BW-NEXT: kshiftrw $13, %k4, %k2
10597 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10598 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10599 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
10600 ; AVX512BW-NEXT: kshiftrw $12, %k4, %k2
10601 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10602 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10603 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
10604 ; AVX512BW-NEXT: kshiftrw $11, %k4, %k2
10605 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10606 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
10607 ; AVX512BW-NEXT: kandw %k2, %k1, %k2
10608 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 4-byte Reload
10609 ; AVX512BW-NEXT: kshiftrd $3, %k1, %k1
10610 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
10611 ; AVX512BW-NEXT: kshiftrw $10, %k1, %k4
10612 ; AVX512BW-NEXT: korw %k4, %k2, %k2
10613 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10614 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
10615 ; AVX512BW-NEXT: kshiftrw $9, %k1, %k4
10616 ; AVX512BW-NEXT: korw %k4, %k2, %k2
10617 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10618 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
10619 ; AVX512BW-NEXT: kshiftrw $8, %k1, %k4
10620 ; AVX512BW-NEXT: korw %k4, %k2, %k2
10621 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10622 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
10623 ; AVX512BW-NEXT: kshiftrw $7, %k1, %k4
10624 ; AVX512BW-NEXT: korw %k4, %k2, %k2
10625 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10626 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
10627 ; AVX512BW-NEXT: kshiftrw $6, %k1, %k4
10628 ; AVX512BW-NEXT: korw %k4, %k2, %k2
10629 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10630 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
10631 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k4
10632 ; AVX512BW-NEXT: korw %k4, %k2, %k2
10633 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
10634 ; AVX512BW-NEXT: kandw %k4, %k2, %k2
10635 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k1
10636 ; AVX512BW-NEXT: korw %k1, %k2, %k1
10637 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
10638 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k2
10639 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10640 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
10641 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k2
10642 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10643 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
10644 ; AVX512BW-NEXT: kmovd {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 4-byte Reload
10645 ; AVX512BW-NEXT: kshiftlw $14, %k2, %k2
10646 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10647 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
10648 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
10649 ; AVX512BW-NEXT: korw %k0, %k1, %k1
10650 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm13 {%k1} {z}
10651 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 64(%rdx)
10652 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 128(%rdx)
10653 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 192(%rdx)
10654 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 256(%rdx)
10655 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 320(%rdx)
10656 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 384(%rdx)
10657 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 448(%rdx)
10658 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 512(%rdx)
10659 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 576(%rdx)
10660 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 640(%rdx)
10661 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 704(%rdx)
10662 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 768(%rdx)
10663 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 832(%rdx)
10664 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
10665 ; AVX512BW-NEXT: vzeroupper
10666 ; AVX512BW-NEXT: retq
10667 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
10668 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
10669 %tgt.mask = shufflevector <32 x i1> %src.mask, <32 x i1> poison, <224 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31>
10670 %data = call <224 x i32> @llvm.masked.load.v224i32.p0(ptr %in.vec, i32 64, <224 x i1> %tgt.mask, <224 x i32> poison)
store <224 x i32> %data, ptr %out.vec, align 64
ret void
}
10675 define void @mask_replication_factor7_vf64(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
10676 ; AVX512F-ONLY-LABEL: mask_replication_factor7_vf64:
10677 ; AVX512F-ONLY: # %bb.0:
10678 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
10679 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
10680 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
10681 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm5, %zmm0
10682 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
10683 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
10684 ; AVX512F-ONLY-NEXT: movw $1, %ax
10685 ; AVX512F-ONLY-NEXT: kmovw %eax, %k1
10686 ; AVX512F-ONLY-NEXT: vmovdqa32 %zmm1, %zmm0 {%k1}
10687 ; AVX512F-ONLY-NEXT: kmovw 6(%rdi), %k1
10688 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm8, %zmm8, %zmm8 {%k1} {z}
10689 ; AVX512F-ONLY-NEXT: kmovw 4(%rdi), %k1
10690 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm9, %zmm9, %zmm9 {%k1} {z}
10691 ; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
10692 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm11, %zmm11, %zmm11 {%k1} {z}
10693 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
10694 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm13 = [13,13,14,14,14,14,14,14,14,15,15,15,15,15,15,15]
10695 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm13, %zmm0
10696 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm15 = [11,11,11,11,12,12,12,12,12,12,12,13,13,13,13,13]
10697 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm15, %zmm2
10698 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm16 = [9,9,9,9,9,9,10,10,10,10,10,10,10,11,11,11]
10699 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm16, %zmm3
10700 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm17 = [6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,9]
10701 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm17, %zmm4
10702 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm18 = [4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6]
10703 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm18, %zmm6
10704 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm19 = [2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4]
10705 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm19, %zmm7
10706 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm5, %zmm8
10707 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm13, %zmm10
10708 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm15, %zmm12
10709 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm16, %zmm14
10710 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm17, %zmm20
10711 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm18, %zmm21
10712 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm19, %zmm22
10713 ; AVX512F-ONLY-NEXT: vpermd %zmm9, %zmm5, %zmm23
10714 ; AVX512F-ONLY-NEXT: vpermd %zmm11, %zmm13, %zmm24
10715 ; AVX512F-ONLY-NEXT: vpermd %zmm11, %zmm15, %zmm25
10716 ; AVX512F-ONLY-NEXT: vpermd %zmm11, %zmm16, %zmm26
10717 ; AVX512F-ONLY-NEXT: vpermd %zmm11, %zmm17, %zmm27
10718 ; AVX512F-ONLY-NEXT: vpermd %zmm11, %zmm18, %zmm28
10719 ; AVX512F-ONLY-NEXT: vpermd %zmm11, %zmm5, %zmm29
10720 ; AVX512F-ONLY-NEXT: vpermd %zmm11, %zmm19, %zmm30
10721 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm13, %zmm31
10722 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm15, %zmm15
10723 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm16, %zmm13
10724 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm17, %zmm11
10725 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm18, %zmm9
10726 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm19, %zmm5
10727 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
10728 ; AVX512F-ONLY-NEXT: vptestmd %zmm5, %zmm5, %k1
10729 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm5 {%k1} {z}
10730 ; AVX512F-ONLY-NEXT: vptestmd %zmm9, %zmm9, %k1
10731 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm9 {%k1} {z}
10732 ; AVX512F-ONLY-NEXT: vptestmd %zmm11, %zmm11, %k1
10733 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm11 {%k1} {z}
10734 ; AVX512F-ONLY-NEXT: vptestmd %zmm13, %zmm13, %k1
10735 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm13 {%k1} {z}
10736 ; AVX512F-ONLY-NEXT: vptestmd %zmm15, %zmm15, %k1
10737 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm15 {%k1} {z}
10738 ; AVX512F-ONLY-NEXT: vptestmd %zmm31, %zmm31, %k1
10739 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm16 {%k1} {z}
10740 ; AVX512F-ONLY-NEXT: vptestmd %zmm29, %zmm29, %k1
10741 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm17 {%k1} {z}
10742 ; AVX512F-ONLY-NEXT: vptestmd %zmm30, %zmm30, %k1
10743 ; AVX512F-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm18 {%k1} {z}
10744 ; AVX512F-ONLY-NEXT: vptestmd %zmm28, %zmm28, %k1
10745 ; AVX512F-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm19 {%k1} {z}
10746 ; AVX512F-ONLY-NEXT: vptestmd %zmm27, %zmm27, %k1
10747 ; AVX512F-ONLY-NEXT: vmovdqa32 640(%rsi), %zmm27 {%k1} {z}
10748 ; AVX512F-ONLY-NEXT: vptestmd %zmm26, %zmm26, %k1
10749 ; AVX512F-ONLY-NEXT: vmovdqa32 704(%rsi), %zmm26 {%k1} {z}
10750 ; AVX512F-ONLY-NEXT: vptestmd %zmm25, %zmm25, %k1
10751 ; AVX512F-ONLY-NEXT: vmovdqa32 768(%rsi), %zmm25 {%k1} {z}
10752 ; AVX512F-ONLY-NEXT: vptestmd %zmm24, %zmm24, %k1
10753 ; AVX512F-ONLY-NEXT: vmovdqa32 832(%rsi), %zmm24 {%k1} {z}
10754 ; AVX512F-ONLY-NEXT: vptestmd %zmm23, %zmm23, %k1
10755 ; AVX512F-ONLY-NEXT: vmovdqa32 896(%rsi), %zmm23 {%k1} {z}
10756 ; AVX512F-ONLY-NEXT: vptestmd %zmm22, %zmm22, %k1
10757 ; AVX512F-ONLY-NEXT: vmovdqa32 960(%rsi), %zmm22 {%k1} {z}
10758 ; AVX512F-ONLY-NEXT: vptestmd %zmm21, %zmm21, %k1
10759 ; AVX512F-ONLY-NEXT: vmovdqa32 1024(%rsi), %zmm21 {%k1} {z}
10760 ; AVX512F-ONLY-NEXT: vptestmd %zmm20, %zmm20, %k1
10761 ; AVX512F-ONLY-NEXT: vmovdqa32 1088(%rsi), %zmm20 {%k1} {z}
10762 ; AVX512F-ONLY-NEXT: vptestmd %zmm14, %zmm14, %k1
10763 ; AVX512F-ONLY-NEXT: vmovdqa32 1152(%rsi), %zmm14 {%k1} {z}
10764 ; AVX512F-ONLY-NEXT: vptestmd %zmm12, %zmm12, %k1
10765 ; AVX512F-ONLY-NEXT: vmovdqa32 1216(%rsi), %zmm12 {%k1} {z}
10766 ; AVX512F-ONLY-NEXT: vptestmd %zmm10, %zmm10, %k1
10767 ; AVX512F-ONLY-NEXT: vmovdqa32 1280(%rsi), %zmm10 {%k1} {z}
10768 ; AVX512F-ONLY-NEXT: vptestmd %zmm8, %zmm8, %k1
10769 ; AVX512F-ONLY-NEXT: vmovdqa32 1344(%rsi), %zmm8 {%k1} {z}
10770 ; AVX512F-ONLY-NEXT: vptestmd %zmm7, %zmm7, %k1
10771 ; AVX512F-ONLY-NEXT: vmovdqa32 1408(%rsi), %zmm7 {%k1} {z}
10772 ; AVX512F-ONLY-NEXT: vptestmd %zmm6, %zmm6, %k1
10773 ; AVX512F-ONLY-NEXT: vmovdqa32 1472(%rsi), %zmm6 {%k1} {z}
10774 ; AVX512F-ONLY-NEXT: vptestmd %zmm4, %zmm4, %k1
10775 ; AVX512F-ONLY-NEXT: vmovdqa32 1536(%rsi), %zmm4 {%k1} {z}
10776 ; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k1
10777 ; AVX512F-ONLY-NEXT: vmovdqa32 1600(%rsi), %zmm3 {%k1} {z}
10778 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
10779 ; AVX512F-ONLY-NEXT: vmovdqa32 1664(%rsi), %zmm2 {%k1} {z}
10780 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
10781 ; AVX512F-ONLY-NEXT: vmovdqa32 1728(%rsi), %zmm0 {%k1} {z}
10782 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 1728(%rdx)
10783 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 1664(%rdx)
10784 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 1600(%rdx)
10785 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 1536(%rdx)
10786 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 1472(%rdx)
10787 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 1408(%rdx)
10788 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm8, 1344(%rdx)
10789 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm10, 1280(%rdx)
10790 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm12, 1216(%rdx)
10791 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm14, 1152(%rdx)
10792 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm20, 1088(%rdx)
10793 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm21, 1024(%rdx)
10794 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm22, 960(%rdx)
10795 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm23, 896(%rdx)
10796 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm24, 832(%rdx)
10797 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm25, 768(%rdx)
10798 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm26, 704(%rdx)
10799 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm27, 640(%rdx)
10800 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm19, 576(%rdx)
10801 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm18, 512(%rdx)
10802 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm17, 448(%rdx)
10803 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm16, 384(%rdx)
10804 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm15, 320(%rdx)
10805 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm13, 256(%rdx)
10806 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm11, 192(%rdx)
10807 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm9, 128(%rdx)
10808 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 64(%rdx)
10809 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
10810 ; AVX512F-ONLY-NEXT: vzeroupper
10811 ; AVX512F-ONLY-NEXT: retq
10813 ; AVX512DQ-LABEL: mask_replication_factor7_vf64:
10814 ; AVX512DQ: # %bb.0:
10815 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
10816 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm1
10817 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm5 = [0,0,0,0,0,0,0,1,1,1,1,1,1,1,2,2]
10818 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm5, %zmm0
10819 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k0
10820 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
10821 ; AVX512DQ-NEXT: movw $1, %ax
10822 ; AVX512DQ-NEXT: kmovw %eax, %k1
10823 ; AVX512DQ-NEXT: vmovdqa32 %zmm1, %zmm0 {%k1}
10824 ; AVX512DQ-NEXT: kmovw 6(%rdi), %k0
10825 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm8
10826 ; AVX512DQ-NEXT: kmovw 4(%rdi), %k0
10827 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm9
10828 ; AVX512DQ-NEXT: kmovw 2(%rdi), %k0
10829 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm11
10830 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
10831 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm13 = [13,13,14,14,14,14,14,14,14,15,15,15,15,15,15,15]
10832 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm13, %zmm0
10833 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm15 = [11,11,11,11,12,12,12,12,12,12,12,13,13,13,13,13]
10834 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm15, %zmm2
10835 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm16 = [9,9,9,9,9,9,10,10,10,10,10,10,10,11,11,11]
10836 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm16, %zmm3
10837 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm17 = [6,7,7,7,7,7,7,7,8,8,8,8,8,8,8,9]
10838 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm17, %zmm4
10839 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm18 = [4,4,4,5,5,5,5,5,5,5,6,6,6,6,6,6]
10840 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm18, %zmm6
10841 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm19 = [2,2,2,2,2,3,3,3,3,3,3,3,4,4,4,4]
10842 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm19, %zmm7
10843 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm5, %zmm8
10844 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm13, %zmm10
10845 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm15, %zmm12
10846 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm16, %zmm14
10847 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm17, %zmm20
10848 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm18, %zmm21
10849 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm19, %zmm22
10850 ; AVX512DQ-NEXT: vpermd %zmm9, %zmm5, %zmm23
10851 ; AVX512DQ-NEXT: vpermd %zmm11, %zmm13, %zmm24
10852 ; AVX512DQ-NEXT: vpermd %zmm11, %zmm15, %zmm25
10853 ; AVX512DQ-NEXT: vpermd %zmm11, %zmm16, %zmm26
10854 ; AVX512DQ-NEXT: vpermd %zmm11, %zmm17, %zmm27
10855 ; AVX512DQ-NEXT: vpermd %zmm11, %zmm18, %zmm28
10856 ; AVX512DQ-NEXT: vpermd %zmm11, %zmm5, %zmm29
10857 ; AVX512DQ-NEXT: vpermd %zmm11, %zmm19, %zmm30
10858 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm13, %zmm31
10859 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm15, %zmm15
10860 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm16, %zmm13
10861 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm17, %zmm11
10862 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm18, %zmm9
10863 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm19, %zmm5
10864 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
10865 ; AVX512DQ-NEXT: vpmovd2m %zmm5, %k1
10866 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm5 {%k1} {z}
10867 ; AVX512DQ-NEXT: vpmovd2m %zmm9, %k1
10868 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm9 {%k1} {z}
10869 ; AVX512DQ-NEXT: vpmovd2m %zmm11, %k1
10870 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm11 {%k1} {z}
10871 ; AVX512DQ-NEXT: vpmovd2m %zmm13, %k1
10872 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm13 {%k1} {z}
10873 ; AVX512DQ-NEXT: vpmovd2m %zmm15, %k1
10874 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm15 {%k1} {z}
10875 ; AVX512DQ-NEXT: vpmovd2m %zmm31, %k1
10876 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm16 {%k1} {z}
10877 ; AVX512DQ-NEXT: vpmovd2m %zmm29, %k1
10878 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm17 {%k1} {z}
10879 ; AVX512DQ-NEXT: vpmovd2m %zmm30, %k1
10880 ; AVX512DQ-NEXT: vmovdqa32 512(%rsi), %zmm18 {%k1} {z}
10881 ; AVX512DQ-NEXT: vpmovd2m %zmm28, %k1
10882 ; AVX512DQ-NEXT: vmovdqa32 576(%rsi), %zmm19 {%k1} {z}
10883 ; AVX512DQ-NEXT: vpmovd2m %zmm27, %k1
10884 ; AVX512DQ-NEXT: vmovdqa32 640(%rsi), %zmm27 {%k1} {z}
10885 ; AVX512DQ-NEXT: vpmovd2m %zmm26, %k1
10886 ; AVX512DQ-NEXT: vmovdqa32 704(%rsi), %zmm26 {%k1} {z}
10887 ; AVX512DQ-NEXT: vpmovd2m %zmm25, %k1
10888 ; AVX512DQ-NEXT: vmovdqa32 768(%rsi), %zmm25 {%k1} {z}
10889 ; AVX512DQ-NEXT: vpmovd2m %zmm24, %k1
10890 ; AVX512DQ-NEXT: vmovdqa32 832(%rsi), %zmm24 {%k1} {z}
10891 ; AVX512DQ-NEXT: vpmovd2m %zmm23, %k1
10892 ; AVX512DQ-NEXT: vmovdqa32 896(%rsi), %zmm23 {%k1} {z}
10893 ; AVX512DQ-NEXT: vpmovd2m %zmm22, %k1
10894 ; AVX512DQ-NEXT: vmovdqa32 960(%rsi), %zmm22 {%k1} {z}
10895 ; AVX512DQ-NEXT: vpmovd2m %zmm21, %k1
10896 ; AVX512DQ-NEXT: vmovdqa32 1024(%rsi), %zmm21 {%k1} {z}
10897 ; AVX512DQ-NEXT: vpmovd2m %zmm20, %k1
10898 ; AVX512DQ-NEXT: vmovdqa32 1088(%rsi), %zmm20 {%k1} {z}
10899 ; AVX512DQ-NEXT: vpmovd2m %zmm14, %k1
10900 ; AVX512DQ-NEXT: vmovdqa32 1152(%rsi), %zmm14 {%k1} {z}
10901 ; AVX512DQ-NEXT: vpmovd2m %zmm12, %k1
10902 ; AVX512DQ-NEXT: vmovdqa32 1216(%rsi), %zmm12 {%k1} {z}
10903 ; AVX512DQ-NEXT: vpmovd2m %zmm10, %k1
10904 ; AVX512DQ-NEXT: vmovdqa32 1280(%rsi), %zmm10 {%k1} {z}
10905 ; AVX512DQ-NEXT: vpmovd2m %zmm8, %k1
10906 ; AVX512DQ-NEXT: vmovdqa32 1344(%rsi), %zmm8 {%k1} {z}
10907 ; AVX512DQ-NEXT: vpmovd2m %zmm7, %k1
10908 ; AVX512DQ-NEXT: vmovdqa32 1408(%rsi), %zmm7 {%k1} {z}
10909 ; AVX512DQ-NEXT: vpmovd2m %zmm6, %k1
10910 ; AVX512DQ-NEXT: vmovdqa32 1472(%rsi), %zmm6 {%k1} {z}
10911 ; AVX512DQ-NEXT: vpmovd2m %zmm4, %k1
10912 ; AVX512DQ-NEXT: vmovdqa32 1536(%rsi), %zmm4 {%k1} {z}
10913 ; AVX512DQ-NEXT: vpmovd2m %zmm3, %k1
10914 ; AVX512DQ-NEXT: vmovdqa32 1600(%rsi), %zmm3 {%k1} {z}
10915 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
10916 ; AVX512DQ-NEXT: vmovdqa32 1664(%rsi), %zmm2 {%k1} {z}
10917 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
10918 ; AVX512DQ-NEXT: vmovdqa32 1728(%rsi), %zmm0 {%k1} {z}
10919 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 1728(%rdx)
10920 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 1664(%rdx)
10921 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 1600(%rdx)
10922 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 1536(%rdx)
10923 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 1472(%rdx)
10924 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 1408(%rdx)
10925 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 1344(%rdx)
10926 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 1280(%rdx)
10927 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 1216(%rdx)
10928 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 1152(%rdx)
10929 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, 1088(%rdx)
10930 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, 1024(%rdx)
10931 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, 960(%rdx)
10932 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, 896(%rdx)
10933 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, 832(%rdx)
10934 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, 768(%rdx)
10935 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, 704(%rdx)
10936 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, 640(%rdx)
10937 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, 576(%rdx)
10938 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 512(%rdx)
10939 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 448(%rdx)
10940 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 384(%rdx)
10941 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 320(%rdx)
10942 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 256(%rdx)
10943 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 192(%rdx)
10944 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 128(%rdx)
10945 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 64(%rdx)
10946 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, (%rdx)
10947 ; AVX512DQ-NEXT: vzeroupper
10948 ; AVX512DQ-NEXT: retq
10950 ; AVX512BW-LABEL: mask_replication_factor7_vf64:
10951 ; AVX512BW: # %bb.0:
10952 ; AVX512BW-NEXT: movw $-3, %ax
10953 ; AVX512BW-NEXT: kmovd %eax, %k1
10954 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
10955 ; AVX512BW-NEXT: kmovw (%rdi), %k0
10956 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
10957 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
10958 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k2
10959 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10960 ; AVX512BW-NEXT: movw $-5, %ax
10961 ; AVX512BW-NEXT: kmovd %eax, %k2
10962 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
10963 ; AVX512BW-NEXT: kmovq %k2, %k3
10964 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
10965 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k2
10966 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10967 ; AVX512BW-NEXT: movw $-9, %ax
10968 ; AVX512BW-NEXT: kmovd %eax, %k2
10969 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
10970 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
10971 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k2
10972 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10973 ; AVX512BW-NEXT: movw $-17, %ax
10974 ; AVX512BW-NEXT: kmovd %eax, %k2
10975 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
10976 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
10977 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k2
10978 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10979 ; AVX512BW-NEXT: movw $-33, %ax
10980 ; AVX512BW-NEXT: kmovd %eax, %k2
10981 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
10982 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
10983 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k2
10984 ; AVX512BW-NEXT: korw %k2, %k1, %k1
10985 ; AVX512BW-NEXT: movw $-65, %ax
10986 ; AVX512BW-NEXT: kmovd %eax, %k2
10987 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
10988 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
10989 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k0
10990 ; AVX512BW-NEXT: korw %k0, %k1, %k0
10991 ; AVX512BW-NEXT: movw $-129, %ax
10992 ; AVX512BW-NEXT: kmovd %eax, %k1
10993 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
10994 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
10995 ; AVX512BW-NEXT: kmovq (%rdi), %k4
10996 ; AVX512BW-NEXT: kshiftrq $1, %k4, %k0
10997 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
10998 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k2
10999 ; AVX512BW-NEXT: korw %k2, %k1, %k1
11000 ; AVX512BW-NEXT: movw $-257, %ax # imm = 0xFEFF
11001 ; AVX512BW-NEXT: kmovd %eax, %k2
11002 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
11003 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11004 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k2
11005 ; AVX512BW-NEXT: korw %k2, %k1, %k1
11006 ; AVX512BW-NEXT: movw $-513, %ax # imm = 0xFDFF
11007 ; AVX512BW-NEXT: kmovd %eax, %k2
11008 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
11009 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11010 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k2
11011 ; AVX512BW-NEXT: korw %k2, %k1, %k1
11012 ; AVX512BW-NEXT: movw $-1025, %ax # imm = 0xFBFF
11013 ; AVX512BW-NEXT: kmovd %eax, %k5
11014 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11015 ; AVX512BW-NEXT: kmovw %k5, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
11016 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k2
11017 ; AVX512BW-NEXT: korw %k2, %k1, %k1
11018 ; AVX512BW-NEXT: movw $-2049, %ax # imm = 0xF7FF
11019 ; AVX512BW-NEXT: kmovd %eax, %k2
11020 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
11021 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11022 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k2
11023 ; AVX512BW-NEXT: korw %k2, %k1, %k1
11024 ; AVX512BW-NEXT: movw $-4097, %ax # imm = 0xEFFF
11025 ; AVX512BW-NEXT: kmovd %eax, %k2
11026 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
11027 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11028 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k2
11029 ; AVX512BW-NEXT: korw %k2, %k1, %k1
11030 ; AVX512BW-NEXT: movw $-8193, %ax # imm = 0xDFFF
11031 ; AVX512BW-NEXT: kmovd %eax, %k2
11032 ; AVX512BW-NEXT: kmovw %k2, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
11033 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11034 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k0
11035 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11036 ; AVX512BW-NEXT: movw $-16385, %ax # imm = 0xBFFF
11037 ; AVX512BW-NEXT: kmovd %eax, %k1
11038 ; AVX512BW-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
11039 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
11040 ; AVX512BW-NEXT: kshiftrq $2, %k4, %k1
11041 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k7
11042 ; AVX512BW-NEXT: korw %k7, %k0, %k0
11043 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
11044 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
11045 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k7
11046 ; AVX512BW-NEXT: korw %k7, %k0, %k6
11047 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k6} {z}
11048 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11049 ; AVX512BW-NEXT: kandw %k2, %k1, %k0
11050 ; AVX512BW-NEXT: kshiftrw $14, %k7, %k1
11051 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11052 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
11053 ; AVX512BW-NEXT: kshiftrw $13, %k7, %k1
11054 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11055 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11056 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
11057 ; AVX512BW-NEXT: kshiftrw $12, %k7, %k1
11058 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11059 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11060 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
11061 ; AVX512BW-NEXT: kshiftrw $11, %k7, %k1
11062 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11063 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11064 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11065 ; AVX512BW-NEXT: kmovq %k4, %k7
11066 ; AVX512BW-NEXT: kmovq %k4, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
11067 ; AVX512BW-NEXT: kshiftrq $3, %k4, %k0
11068 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11069 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11070 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11071 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11072 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11073 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11074 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11075 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11076 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11077 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11078 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11079 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11080 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11081 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
11082 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11083 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11084 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11085 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
11086 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11087 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11088 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
11089 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11090 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11091 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11092 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k0
11093 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11094 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11095 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11096 ; AVX512BW-NEXT: kshiftrq $4, %k7, %k6
11097 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
11098 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
11099 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11100 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
11101 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
11102 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
11103 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11104 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
11105 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
11106 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k7
11107 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11108 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
11109 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
11110 ; AVX512BW-NEXT: korw %k0, %k1, %k1
11111 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
11112 ; AVX512BW-NEXT: kandw %k2, %k6, %k1
11113 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
11114 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11115 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11116 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11117 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k0
11118 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11119 ; AVX512BW-NEXT: kandw %k3, %k0, %k1
11120 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
11121 ; AVX512BW-NEXT: kshiftrq $5, %k7, %k0
11122 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11123 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
11124 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11125 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11126 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11127 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
11128 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11129 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11130 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11131 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11132 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11133 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11134 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11135 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11136 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11137 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11138 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11139 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11140 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11141 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11142 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11143 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
11144 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11145 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11146 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k0
11147 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11148 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11149 ; AVX512BW-NEXT: kandw %k1, %k0, %k6
11150 ; AVX512BW-NEXT: kshiftrq $6, %k7, %k0
11151 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k1
11152 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k7
11153 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11154 ; AVX512BW-NEXT: kandw %k5, %k6, %k6
11155 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k7
11156 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11157 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11158 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
11159 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k7
11160 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11161 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11162 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
11163 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k7
11164 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11165 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11166 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
11167 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k7
11168 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11169 ; AVX512BW-NEXT: kshiftlw $1, %k6, %k6
11170 ; AVX512BW-NEXT: kshiftrw $1, %k6, %k6
11171 ; AVX512BW-NEXT: korw %k1, %k6, %k1
11172 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k1} {z}
11173 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11174 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11175 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 8-byte Reload
11176 ; AVX512BW-NEXT: kshiftrq $7, %k4, %k0
11177 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11178 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
11179 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11180 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11181 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
11182 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11183 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11184 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11185 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
11186 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11187 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11188 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11189 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
11190 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11191 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11192 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11193 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11194 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11195 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11196 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11197 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11198 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11199 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11200 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k0
11201 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11202 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11203 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11204 ; AVX512BW-NEXT: kshiftrq $8, %k4, %k0
11205 ; AVX512BW-NEXT: kmovq %k4, %k5
11206 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k6
11207 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
11208 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11209 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11210 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11211 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
11212 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11213 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11214 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11215 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
11216 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11217 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11218 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11219 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
11220 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11221 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11222 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11223 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
11224 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11225 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11226 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11227 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
11228 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11229 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11230 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11231 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k0
11232 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11233 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
11234 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
11235 ; AVX512BW-NEXT: kshiftrq $9, %k5, %k1
11236 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
11237 ; AVX512BW-NEXT: korw %k6, %k0, %k7
11238 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k7} {z}
11239 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
11240 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
11241 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
11242 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11243 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11244 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
11245 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
11246 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11247 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
11248 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
11249 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11250 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11251 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
11252 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k1
11253 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11254 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11255 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
11256 ; AVX512BW-NEXT: kshiftrw $10, %k6, %k1
11257 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11258 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11259 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11260 ; AVX512BW-NEXT: kshiftrq $10, %k5, %k0
11261 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11262 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11263 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11264 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11265 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11266 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11267 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11268 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11269 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11270 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
11271 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11272 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11273 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11274 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
11275 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11276 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11277 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
11278 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11279 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11280 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11281 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k6
11282 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11283 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11284 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11285 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k0
11286 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11287 ; AVX512BW-NEXT: kandw %k3, %k0, %k1
11288 ; AVX512BW-NEXT: kshiftrq $11, %k5, %k6
11289 ; AVX512BW-NEXT: kmovq %k5, %k4
11290 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
11291 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
11292 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11293 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11294 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11295 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k7
11296 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11297 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
11298 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
11299 ; AVX512BW-NEXT: korw %k0, %k1, %k1
11300 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm4 {%k1} {z}
11301 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11302 ; AVX512BW-NEXT: kandw %k1, %k6, %k1
11303 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
11304 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11305 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11306 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11307 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
11308 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11309 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11310 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11311 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k0
11312 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11313 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
11314 ; AVX512BW-NEXT: kmovq %k4, %k7
11315 ; AVX512BW-NEXT: kshiftrq $12, %k4, %k0
11316 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11317 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
11318 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11319 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11320 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11321 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11322 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11323 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11324 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11325 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11326 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11327 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11328 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11329 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11330 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11331 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11332 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11333 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
11334 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11335 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11336 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11337 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
11338 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11339 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11340 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11341 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k0
11342 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11343 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11344 ; AVX512BW-NEXT: kandw %k1, %k0, %k6
11345 ; AVX512BW-NEXT: kshiftrq $13, %k7, %k0
11346 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k1
11347 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k7
11348 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11349 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
11350 ; AVX512BW-NEXT: kandw %k7, %k6, %k6
11351 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k7
11352 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11353 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
11354 ; AVX512BW-NEXT: kandw %k7, %k6, %k6
11355 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k7
11356 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11357 ; AVX512BW-NEXT: kandw %k5, %k6, %k6
11358 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k7
11359 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11360 ; AVX512BW-NEXT: kshiftlw $1, %k6, %k6
11361 ; AVX512BW-NEXT: kshiftrw $1, %k6, %k6
11362 ; AVX512BW-NEXT: korw %k1, %k6, %k6
11363 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm5 {%k6} {z}
11364 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11365 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
11366 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k1
11367 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11368 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11369 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11370 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 8-byte Reload
11371 ; AVX512BW-NEXT: kshiftrq $14, %k5, %k0
11372 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11373 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
11374 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11375 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11376 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11377 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
11378 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11379 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11380 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11381 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
11382 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11383 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11384 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11385 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11386 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11387 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11388 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11389 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11390 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11391 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11392 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11393 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11394 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11395 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k0
11396 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11397 ; AVX512BW-NEXT: kandw %k3, %k0, %k6
11398 ; AVX512BW-NEXT: kshiftrq $15, %k5, %k1
11399 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k0
11400 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k7
11401 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11402 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11403 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
11404 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k7
11405 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11406 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11407 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
11408 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k7
11409 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11410 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11411 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
11412 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
11413 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11414 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11415 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
11416 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
11417 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11418 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11419 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
11420 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
11421 ; AVX512BW-NEXT: korw %k1, %k6, %k1
11422 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
11423 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
11424 ; AVX512BW-NEXT: korw %k0, %k1, %k1
11425 ; AVX512BW-NEXT: vmovdqa32 384(%rsi), %zmm6 {%k1} {z}
11426 ; AVX512BW-NEXT: kshiftrq $16, %k5, %k0
11427 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11428 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11429 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11430 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
11431 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11432 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11433 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11434 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
11435 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11436 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11437 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11438 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
11439 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11440 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11441 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11442 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
11443 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11444 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11445 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11446 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11447 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11448 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11449 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k0
11450 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11451 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11452 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
11453 ; AVX512BW-NEXT: kshiftrq $17, %k5, %k0
11454 ; AVX512BW-NEXT: kmovq %k5, %k7
11455 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11456 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11457 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11458 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11459 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11460 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
11461 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11462 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11463 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11464 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
11465 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11466 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11467 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11468 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
11469 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11470 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11471 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k6
11472 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11473 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11474 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11475 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k6
11476 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11477 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11478 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11479 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k0
11480 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11481 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
11482 ; AVX512BW-NEXT: kshiftrq $18, %k7, %k1
11483 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
11484 ; AVX512BW-NEXT: korw %k6, %k0, %k0
11485 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
11486 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
11487 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
11488 ; AVX512BW-NEXT: korw %k6, %k0, %k7
11489 ; AVX512BW-NEXT: vmovdqa32 448(%rsi), %zmm7 {%k7} {z}
11490 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11491 ; AVX512BW-NEXT: kandw %k5, %k1, %k0
11492 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
11493 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11494 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11495 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
11496 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
11497 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11498 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11499 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
11500 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
11501 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11502 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11503 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
11504 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k1
11505 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11506 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11507 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11508 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
11509 ; AVX512BW-NEXT: kshiftrq $19, %k7, %k0
11510 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11511 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11512 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11513 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11514 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11515 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11516 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11517 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11518 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11519 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11520 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11521 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11522 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
11523 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11524 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11525 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11526 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
11527 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11528 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11529 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11530 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
11531 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11532 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11533 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11534 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k0
11535 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11536 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11537 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11538 ; AVX512BW-NEXT: kshiftrq $20, %k7, %k6
11539 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
11540 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
11541 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11542 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
11543 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
11544 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
11545 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11546 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
11547 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
11548 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k7
11549 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11550 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
11551 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
11552 ; AVX512BW-NEXT: korw %k0, %k1, %k1
11553 ; AVX512BW-NEXT: vmovdqa32 512(%rsi), %zmm8 {%k1} {z}
11554 ; AVX512BW-NEXT: kandw %k5, %k6, %k1
11555 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
11556 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11557 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11558 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k0
11559 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11560 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11561 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11562 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
11563 ; AVX512BW-NEXT: kshiftrq $21, %k7, %k0
11564 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11565 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
11566 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11567 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11568 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
11569 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11570 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11571 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11572 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11573 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11574 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11575 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11576 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11577 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11578 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11579 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11580 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11581 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11582 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11583 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
11584 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11585 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11586 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11587 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k0
11588 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11589 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11590 ; AVX512BW-NEXT: kandw %k1, %k0, %k6
11591 ; AVX512BW-NEXT: kshiftrq $22, %k7, %k0
11592 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k1
11593 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k7
11594 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11595 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11596 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
11597 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k7
11598 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11599 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11600 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
11601 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k7
11602 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11603 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11604 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
11605 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k7
11606 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11607 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11608 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
11609 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k7
11610 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11611 ; AVX512BW-NEXT: kshiftlw $1, %k6, %k6
11612 ; AVX512BW-NEXT: kshiftrw $1, %k6, %k6
11613 ; AVX512BW-NEXT: korw %k1, %k6, %k1
11614 ; AVX512BW-NEXT: vmovdqa32 576(%rsi), %zmm9 {%k1} {z}
11615 ; AVX512BW-NEXT: kandw %k5, %k0, %k1
11616 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
11617 ; AVX512BW-NEXT: kshiftrq $23, %k7, %k0
11618 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11619 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
11620 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11621 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11622 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11623 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
11624 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11625 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11626 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11627 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
11628 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11629 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11630 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11631 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
11632 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11633 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11634 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11635 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11636 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11637 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11638 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11639 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11640 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11641 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11642 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k0
11643 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11644 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11645 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11646 ; AVX512BW-NEXT: kshiftrq $24, %k7, %k0
11647 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k6
11648 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
11649 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11650 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11651 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
11652 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11653 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11654 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11655 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
11656 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11657 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11658 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11659 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
11660 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11661 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11662 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11663 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
11664 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11665 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11666 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11667 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
11668 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11669 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11670 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11671 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k0
11672 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11673 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
11674 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
11675 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 8-byte Reload
11676 ; AVX512BW-NEXT: kshiftrq $25, %k2, %k1
11677 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
11678 ; AVX512BW-NEXT: korw %k6, %k0, %k7
11679 ; AVX512BW-NEXT: vmovdqa32 640(%rsi), %zmm10 {%k7} {z}
11680 ; AVX512BW-NEXT: kandw %k5, %k1, %k0
11681 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
11682 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11683 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11684 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
11685 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
11686 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11687 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
11688 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
11689 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11690 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11691 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
11692 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k1
11693 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11694 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11695 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
11696 ; AVX512BW-NEXT: kshiftrw $10, %k6, %k1
11697 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11698 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11699 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11700 ; AVX512BW-NEXT: kmovq %k2, %k7
11701 ; AVX512BW-NEXT: kshiftrq $26, %k2, %k0
11702 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11703 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11704 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11705 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11706 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11707 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11708 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11709 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11710 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11711 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
11712 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11713 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11714 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11715 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
11716 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11717 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11718 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11719 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
11720 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11721 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11722 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11723 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k6
11724 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11725 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11726 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11727 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k0
11728 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11729 ; AVX512BW-NEXT: kandw %k4, %k0, %k1
11730 ; AVX512BW-NEXT: kshiftrq $27, %k7, %k6
11731 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
11732 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
11733 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11734 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11735 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11736 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k7
11737 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11738 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
11739 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
11740 ; AVX512BW-NEXT: korw %k0, %k1, %k1
11741 ; AVX512BW-NEXT: vmovdqa32 704(%rsi), %zmm11 {%k1} {z}
11742 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11743 ; AVX512BW-NEXT: kandw %k1, %k6, %k1
11744 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
11745 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11746 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11747 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11748 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
11749 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11750 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11751 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11752 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k0
11753 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11754 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11755 ; AVX512BW-NEXT: kandw %k4, %k0, %k1
11756 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
11757 ; AVX512BW-NEXT: kshiftrq $28, %k7, %k0
11758 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11759 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
11760 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11761 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11762 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11763 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11764 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11765 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11766 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11767 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11768 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11769 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11770 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11771 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11772 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11773 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11774 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
11775 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11776 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11777 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11778 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
11779 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11780 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11781 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11782 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k0
11783 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11784 ; AVX512BW-NEXT: kandw %k2, %k0, %k6
11785 ; AVX512BW-NEXT: kshiftrq $29, %k7, %k0
11786 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k1
11787 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k7
11788 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11789 ; AVX512BW-NEXT: kandw %k5, %k6, %k6
11790 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k7
11791 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11792 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11793 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
11794 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k7
11795 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11796 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11797 ; AVX512BW-NEXT: kandw %k2, %k6, %k6
11798 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k7
11799 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11800 ; AVX512BW-NEXT: kshiftlw $1, %k6, %k6
11801 ; AVX512BW-NEXT: kshiftrw $1, %k6, %k6
11802 ; AVX512BW-NEXT: korw %k1, %k6, %k6
11803 ; AVX512BW-NEXT: vmovdqa32 768(%rsi), %zmm12 {%k6} {z}
11804 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11805 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
11806 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k1
11807 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11808 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11809 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11810 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 8-byte Reload
11811 ; AVX512BW-NEXT: kshiftrq $30, %k5, %k0
11812 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11813 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
11814 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11815 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11816 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11817 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
11818 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11819 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11820 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
11821 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11822 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11823 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11824 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11825 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11826 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11827 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11828 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11829 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11830 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11831 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11832 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11833 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11834 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11835 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11836 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k0
11837 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11838 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11839 ; AVX512BW-NEXT: kandw %k1, %k0, %k6
11840 ; AVX512BW-NEXT: kshiftrq $31, %k5, %k1
11841 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k0
11842 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k7
11843 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11844 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
11845 ; AVX512BW-NEXT: kandw %k7, %k6, %k6
11846 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k7
11847 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11848 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
11849 ; AVX512BW-NEXT: kandw %k7, %k6, %k6
11850 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k7
11851 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11852 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
11853 ; AVX512BW-NEXT: kandw %k7, %k6, %k6
11854 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
11855 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11856 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
11857 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
11858 ; AVX512BW-NEXT: korw %k7, %k6, %k6
11859 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11860 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
11861 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
11862 ; AVX512BW-NEXT: korw %k1, %k6, %k1
11863 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
11864 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
11865 ; AVX512BW-NEXT: korw %k0, %k1, %k1
11866 ; AVX512BW-NEXT: vmovdqa32 832(%rsi), %zmm13 {%k1} {z}
11867 ; AVX512BW-NEXT: kshiftrq $32, %k5, %k0
11868 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
11869 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11870 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
11871 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11872 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11873 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
11874 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
11875 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11876 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11877 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11878 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
11879 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11880 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11881 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11882 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
11883 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11884 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11885 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11886 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11887 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11888 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11889 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k0
11890 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11891 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11892 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11893 ; AVX512BW-NEXT: kshiftrq $33, %k5, %k0
11894 ; AVX512BW-NEXT: kmovq %k5, %k7
11895 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11896 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11897 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11898 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11899 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11900 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
11901 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11902 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11903 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11904 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
11905 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11906 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11907 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11908 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
11909 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11910 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11911 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11912 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k6
11913 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11914 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11915 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11916 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k6
11917 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11918 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11919 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11920 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k0
11921 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11922 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
11923 ; AVX512BW-NEXT: kmovq %k7, %k3
11924 ; AVX512BW-NEXT: kshiftrq $34, %k7, %k1
11925 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
11926 ; AVX512BW-NEXT: korw %k6, %k0, %k0
11927 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
11928 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
11929 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
11930 ; AVX512BW-NEXT: korw %k6, %k0, %k7
11931 ; AVX512BW-NEXT: vmovdqa32 896(%rsi), %zmm14 {%k7} {z}
11932 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
11933 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
11934 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
11935 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11936 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
11937 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
11938 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11939 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
11940 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
11941 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
11942 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11943 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11944 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
11945 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k1
11946 ; AVX512BW-NEXT: korw %k1, %k0, %k0
11947 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11948 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11949 ; AVX512BW-NEXT: kshiftrq $35, %k3, %k0
11950 ; AVX512BW-NEXT: kmovq %k3, %k7
11951 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
11952 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
11953 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11954 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11955 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11956 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
11957 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11958 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
11959 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
11960 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
11961 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11962 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
11963 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
11964 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
11965 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11966 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11967 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
11968 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11969 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
11970 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
11971 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
11972 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11973 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
11974 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
11975 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k0
11976 ; AVX512BW-NEXT: korw %k0, %k1, %k0
11977 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11978 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
11979 ; AVX512BW-NEXT: kshiftrq $36, %k7, %k6
11980 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
11981 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
11982 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11983 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
11984 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
11985 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
11986 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11987 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
11988 ; AVX512BW-NEXT: kandw %k7, %k1, %k1
11989 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k7
11990 ; AVX512BW-NEXT: korw %k7, %k1, %k1
11991 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
11992 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
11993 ; AVX512BW-NEXT: korw %k0, %k1, %k1
11994 ; AVX512BW-NEXT: vmovdqa32 960(%rsi), %zmm15 {%k1} {z}
11995 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
11996 ; AVX512BW-NEXT: kandw %k1, %k6, %k1
11997 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
11998 ; AVX512BW-NEXT: korw %k6, %k1, %k1
11999 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12000 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12001 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k0
12002 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12003 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
12004 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
12005 ; AVX512BW-NEXT: kshiftrq $37, %k7, %k0
12006 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12007 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
12008 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12009 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12010 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12011 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
12012 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12013 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12014 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12015 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
12016 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12017 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12018 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12019 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
12020 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12021 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12022 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
12023 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12024 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12025 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
12026 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12027 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12028 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12029 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k0
12030 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12031 ; AVX512BW-NEXT: kandw %k5, %k0, %k6
12032 ; AVX512BW-NEXT: kshiftrq $38, %k7, %k0
12033 ; AVX512BW-NEXT: kmovq %k7, %k5
12034 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k1
12035 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k7
12036 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12037 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12038 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
12039 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k7
12040 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12041 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12042 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
12043 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k7
12044 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12045 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12046 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
12047 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k7
12048 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12049 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12050 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
12051 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k7
12052 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12053 ; AVX512BW-NEXT: kshiftlw $1, %k6, %k6
12054 ; AVX512BW-NEXT: kshiftrw $1, %k6, %k6
12055 ; AVX512BW-NEXT: korw %k1, %k6, %k1
12056 ; AVX512BW-NEXT: vmovdqa32 1024(%rsi), %zmm16 {%k1} {z}
12057 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12058 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12059 ; AVX512BW-NEXT: kmovq %k5, %k7
12060 ; AVX512BW-NEXT: kshiftrq $39, %k5, %k0
12061 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12062 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
12063 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12064 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12065 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12066 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
12067 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12068 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12069 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12070 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
12071 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12072 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12073 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
12074 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12075 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12076 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12077 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
12078 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12079 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12080 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12081 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
12082 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12083 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12084 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12085 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k0
12086 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12087 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12088 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12089 ; AVX512BW-NEXT: kshiftrq $40, %k7, %k0
12090 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k6
12091 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
12092 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12093 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12094 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12095 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
12096 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12097 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12098 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12099 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
12100 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12101 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12102 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12103 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
12104 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12105 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12106 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
12107 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12108 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12109 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12110 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
12111 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12112 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12113 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12114 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k0
12115 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12116 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
12117 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
12118 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 8-byte Reload
12119 ; AVX512BW-NEXT: kshiftrq $41, %k4, %k1
12120 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
12121 ; AVX512BW-NEXT: korw %k6, %k0, %k7
12122 ; AVX512BW-NEXT: vmovdqa32 1088(%rsi), %zmm17 {%k7} {z}
12123 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
12124 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
12125 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
12126 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12127 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12128 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
12129 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
12130 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12131 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12132 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
12133 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
12134 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12135 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12136 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
12137 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k1
12138 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12139 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
12140 ; AVX512BW-NEXT: kshiftrw $10, %k6, %k1
12141 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12142 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12143 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12144 ; AVX512BW-NEXT: kshiftrq $42, %k4, %k0
12145 ; AVX512BW-NEXT: kmovq %k4, %k3
12146 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12147 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
12148 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12149 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12150 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
12151 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12152 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12153 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12154 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
12155 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12156 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12157 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12158 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
12159 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12160 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12161 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12162 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
12163 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12164 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12165 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12166 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k6
12167 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12168 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12169 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12170 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k0
12171 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12172 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
12173 ; AVX512BW-NEXT: kshiftrq $43, %k3, %k6
12174 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
12175 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
12176 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12177 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12178 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12179 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k7
12180 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12181 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
12182 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
12183 ; AVX512BW-NEXT: korw %k0, %k1, %k1
12184 ; AVX512BW-NEXT: vmovdqa32 1152(%rsi), %zmm18 {%k1} {z}
12185 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12186 ; AVX512BW-NEXT: kandw %k5, %k6, %k1
12187 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
12188 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12189 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12190 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12191 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
12192 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12193 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12194 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12195 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k0
12196 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12197 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12198 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12199 ; AVX512BW-NEXT: kmovq %k3, %k7
12200 ; AVX512BW-NEXT: kshiftrq $44, %k3, %k0
12201 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12202 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
12203 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12204 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12205 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12206 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
12207 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12208 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12209 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12210 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
12211 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12212 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12213 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12214 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
12215 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12216 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12217 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12218 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
12219 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12220 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12221 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12222 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
12223 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12224 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12225 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12226 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k0
12227 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12228 ; AVX512BW-NEXT: kandw %k4, %k0, %k6
12229 ; AVX512BW-NEXT: kshiftrq $45, %k7, %k0
12230 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k1
12231 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k7
12232 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12233 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12234 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
12235 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k7
12236 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12237 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
12238 ; AVX512BW-NEXT: kandw %k7, %k6, %k6
12239 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k7
12240 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12241 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
12242 ; AVX512BW-NEXT: kandw %k7, %k6, %k6
12243 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k7
12244 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12245 ; AVX512BW-NEXT: kshiftlw $1, %k6, %k6
12246 ; AVX512BW-NEXT: kshiftrw $1, %k6, %k6
12247 ; AVX512BW-NEXT: korw %k1, %k6, %k6
12248 ; AVX512BW-NEXT: vmovdqa32 1216(%rsi), %zmm19 {%k6} {z}
12249 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
12250 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k1
12251 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12252 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12253 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12254 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 8-byte Reload
12255 ; AVX512BW-NEXT: kshiftrq $46, %k5, %k0
12256 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12257 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
12258 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12259 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12260 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12261 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
12262 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12263 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12264 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12265 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
12266 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12267 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12268 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12269 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
12270 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12271 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12272 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
12273 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12274 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12275 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12276 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
12277 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12278 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12279 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12280 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k0
12281 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12282 ; AVX512BW-NEXT: kandw %k2, %k0, %k6
12283 ; AVX512BW-NEXT: kshiftrq $47, %k5, %k1
12284 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k0
12285 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k7
12286 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12287 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12288 ; AVX512BW-NEXT: kandw %k3, %k6, %k6
12289 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k7
12290 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12291 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12292 ; AVX512BW-NEXT: kandw %k2, %k6, %k6
12293 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k7
12294 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12295 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
12296 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
12297 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12298 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12299 ; AVX512BW-NEXT: kandw %k2, %k6, %k6
12300 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
12301 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12302 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12303 ; AVX512BW-NEXT: kandw %k2, %k6, %k6
12304 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k1
12305 ; AVX512BW-NEXT: korw %k1, %k6, %k1
12306 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
12307 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
12308 ; AVX512BW-NEXT: korw %k0, %k1, %k1
12309 ; AVX512BW-NEXT: vmovdqa32 1280(%rsi), %zmm20 {%k1} {z}
12310 ; AVX512BW-NEXT: kshiftrq $48, %k5, %k0
12311 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12312 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12313 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12314 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
12315 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12316 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12317 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12318 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
12319 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12320 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12321 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12322 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
12323 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12324 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12325 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12326 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
12327 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12328 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12329 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12330 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
12331 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12332 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12333 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12334 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k0
12335 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12336 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12337 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12338 ; AVX512BW-NEXT: kshiftrq $49, %k5, %k0
12339 ; AVX512BW-NEXT: kmovq %k5, %k7
12340 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12341 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
12342 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12343 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12344 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12345 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
12346 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12347 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12348 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12349 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
12350 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12351 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12352 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
12353 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12354 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12355 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12356 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k6
12357 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12358 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12359 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12360 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k6
12361 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12362 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12363 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12364 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k0
12365 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12366 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
12367 ; AVX512BW-NEXT: kshiftrq $50, %k7, %k1
12368 ; AVX512BW-NEXT: kshiftlw $14, %k1, %k6
12369 ; AVX512BW-NEXT: korw %k6, %k0, %k0
12370 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
12371 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
12372 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
12373 ; AVX512BW-NEXT: korw %k6, %k0, %k7
12374 ; AVX512BW-NEXT: vmovdqa32 1344(%rsi), %zmm21 {%k7} {z}
12375 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
12376 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
12377 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
12378 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12379 ; AVX512BW-NEXT: kandw %k4, %k0, %k0
12380 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
12381 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12382 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12383 ; AVX512BW-NEXT: kandw %k2, %k0, %k0
12384 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
12385 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12386 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12387 ; AVX512BW-NEXT: kandw %k5, %k0, %k0
12388 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k1
12389 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12390 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12391 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12392 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
12393 ; AVX512BW-NEXT: kshiftrq $51, %k7, %k0
12394 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12395 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
12396 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12397 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12398 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12399 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
12400 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12401 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12402 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12403 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
12404 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12405 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12406 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12407 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
12408 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12409 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12410 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12411 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
12412 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12413 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12414 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12415 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
12416 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12417 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12418 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k0
12419 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12420 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12421 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12422 ; AVX512BW-NEXT: kshiftrq $52, %k7, %k6
12423 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
12424 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k7
12425 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12426 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12427 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12428 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
12429 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12430 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12431 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12432 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k7
12433 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12434 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
12435 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
12436 ; AVX512BW-NEXT: korw %k0, %k1, %k1
12437 ; AVX512BW-NEXT: vmovdqa32 1408(%rsi), %zmm22 {%k1} {z}
12438 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12439 ; AVX512BW-NEXT: kandw %k3, %k6, %k1
12440 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
12441 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12442 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12443 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12444 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k0
12445 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12446 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
12447 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
12448 ; AVX512BW-NEXT: kshiftrq $53, %k7, %k0
12449 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12450 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
12451 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12452 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12453 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
12454 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12455 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12456 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12457 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
12458 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12459 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12460 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12461 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
12462 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12463 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12464 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12465 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
12466 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12467 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12468 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
12469 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12470 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12471 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12472 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k0
12473 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12474 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12475 ; AVX512BW-NEXT: kandw %k1, %k0, %k6
12476 ; AVX512BW-NEXT: kshiftrq $54, %k7, %k0
12477 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k1
12478 ; AVX512BW-NEXT: kshiftrw $5, %k1, %k7
12479 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12480 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12481 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
12482 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k7
12483 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12484 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12485 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
12486 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k7
12487 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12488 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12489 ; AVX512BW-NEXT: kandw %k4, %k6, %k6
12490 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k7
12491 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12492 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
12493 ; AVX512BW-NEXT: kandw %k7, %k6, %k6
12494 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k7
12495 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12496 ; AVX512BW-NEXT: kshiftlw $1, %k6, %k6
12497 ; AVX512BW-NEXT: kshiftrw $1, %k6, %k6
12498 ; AVX512BW-NEXT: korw %k1, %k6, %k1
12499 ; AVX512BW-NEXT: vmovdqa32 1472(%rsi), %zmm23 {%k1} {z}
12500 ; AVX512BW-NEXT: kandw %k3, %k0, %k1
12501 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 8-byte Reload
12502 ; AVX512BW-NEXT: kshiftrq $55, %k7, %k0
12503 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12504 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
12505 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12506 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12507 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12508 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
12509 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12510 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12511 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12512 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
12513 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12514 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12515 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12516 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
12517 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12518 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12519 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
12520 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12521 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12522 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12523 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
12524 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12525 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12526 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k0
12527 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12528 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12529 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12530 ; AVX512BW-NEXT: kshiftrq $56, %k7, %k0
12531 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k6
12532 ; AVX512BW-NEXT: kshiftrw $7, %k6, %k7
12533 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12534 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12535 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12536 ; AVX512BW-NEXT: kshiftrw $6, %k6, %k7
12537 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12538 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12539 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12540 ; AVX512BW-NEXT: kshiftrw $5, %k6, %k7
12541 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12542 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12543 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12544 ; AVX512BW-NEXT: kshiftrw $4, %k6, %k7
12545 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12546 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12547 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12548 ; AVX512BW-NEXT: kshiftrw $3, %k6, %k7
12549 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12550 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12551 ; AVX512BW-NEXT: kshiftrw $2, %k6, %k6
12552 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12553 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12554 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12555 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k0
12556 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12557 ; AVX512BW-NEXT: kshiftlw $1, %k0, %k0
12558 ; AVX512BW-NEXT: kshiftrw $1, %k0, %k0
12559 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 8-byte Reload
12560 ; AVX512BW-NEXT: kshiftrq $57, %k4, %k1
12561 ; AVX512BW-NEXT: kshiftlw $15, %k1, %k6
12562 ; AVX512BW-NEXT: korw %k6, %k0, %k7
12563 ; AVX512BW-NEXT: vmovdqa32 1536(%rsi), %zmm24 {%k7} {z}
12564 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k0 # 2-byte Reload
12565 ; AVX512BW-NEXT: kandw %k0, %k1, %k0
12566 ; AVX512BW-NEXT: kshiftrw $14, %k6, %k1
12567 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12568 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12569 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
12570 ; AVX512BW-NEXT: kshiftrw $13, %k6, %k1
12571 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12572 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12573 ; AVX512BW-NEXT: kandw %k1, %k0, %k0
12574 ; AVX512BW-NEXT: kshiftrw $12, %k6, %k1
12575 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12576 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
12577 ; AVX512BW-NEXT: kshiftrw $11, %k6, %k1
12578 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12579 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12580 ; AVX512BW-NEXT: kandw %k3, %k0, %k0
12581 ; AVX512BW-NEXT: kshiftrw $10, %k6, %k1
12582 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12583 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12584 ; AVX512BW-NEXT: kandw %k2, %k0, %k1
12585 ; AVX512BW-NEXT: kmovq %k4, %k7
12586 ; AVX512BW-NEXT: kshiftrq $58, %k4, %k0
12587 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12588 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
12589 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12590 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12591 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12592 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
12593 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12594 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12595 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12596 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
12597 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12598 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12599 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12600 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
12601 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12602 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12603 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12604 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
12605 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12606 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12607 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12608 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k6
12609 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12610 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12611 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12612 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k0
12613 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12614 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12615 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12616 ; AVX512BW-NEXT: kshiftrq $59, %k7, %k6
12617 ; AVX512BW-NEXT: kshiftlw $15, %k6, %k0
12618 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k7
12619 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12620 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12621 ; AVX512BW-NEXT: kshiftlw $14, %k6, %k7
12622 ; AVX512BW-NEXT: korw %k7, %k1, %k1
12623 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
12624 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
12625 ; AVX512BW-NEXT: korw %k0, %k1, %k1
12626 ; AVX512BW-NEXT: vmovdqa32 1600(%rsi), %zmm25 {%k1} {z}
12627 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12628 ; AVX512BW-NEXT: kandw %k1, %k6, %k1
12629 ; AVX512BW-NEXT: kshiftrw $14, %k0, %k6
12630 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12631 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12632 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12633 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
12634 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12635 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 2-byte Reload
12636 ; AVX512BW-NEXT: kandw %k5, %k1, %k1
12637 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k0
12638 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12639 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12640 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12641 ; AVX512BW-NEXT: kmovq {{[-0-9]+}}(%r{{[sb]}}p), %k5 # 8-byte Reload
12642 ; AVX512BW-NEXT: kshiftrq $60, %k5, %k0
12643 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12644 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
12645 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12646 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12647 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
12648 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12649 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12650 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
12651 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12652 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12653 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12654 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
12655 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12656 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12657 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12658 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k6
12659 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12660 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12661 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
12662 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12663 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k3 # 2-byte Reload
12664 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12665 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k0
12666 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12667 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k4 # 2-byte Reload
12668 ; AVX512BW-NEXT: kandw %k4, %k0, %k6
12669 ; AVX512BW-NEXT: kshiftrq $61, %k5, %k0
12670 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k1
12671 ; AVX512BW-NEXT: kshiftrw $4, %k1, %k7
12672 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12673 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12674 ; AVX512BW-NEXT: kandw %k2, %k6, %k6
12675 ; AVX512BW-NEXT: kshiftrw $3, %k1, %k7
12676 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12677 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
12678 ; AVX512BW-NEXT: kandw %k7, %k6, %k6
12679 ; AVX512BW-NEXT: kshiftrw $2, %k1, %k7
12680 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12681 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k7 # 2-byte Reload
12682 ; AVX512BW-NEXT: kandw %k7, %k6, %k6
12683 ; AVX512BW-NEXT: kshiftlw $14, %k0, %k7
12684 ; AVX512BW-NEXT: korw %k7, %k6, %k6
12685 ; AVX512BW-NEXT: kshiftlw $1, %k6, %k6
12686 ; AVX512BW-NEXT: kshiftrw $1, %k6, %k6
12687 ; AVX512BW-NEXT: korw %k1, %k6, %k6
12688 ; AVX512BW-NEXT: vmovdqa32 1664(%rsi), %zmm26 {%k6} {z}
12689 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12690 ; AVX512BW-NEXT: kandw %k6, %k0, %k0
12691 ; AVX512BW-NEXT: kshiftrw $14, %k1, %k1
12692 ; AVX512BW-NEXT: korw %k1, %k0, %k0
12693 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12694 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12695 ; AVX512BW-NEXT: kshiftrq $62, %k5, %k0
12696 ; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
12697 ; AVX512BW-NEXT: kshiftrw $13, %k0, %k6
12698 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12699 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12700 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12701 ; AVX512BW-NEXT: kshiftrw $12, %k0, %k6
12702 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12703 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12704 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12705 ; AVX512BW-NEXT: kshiftrw $11, %k0, %k6
12706 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12707 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12708 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12709 ; AVX512BW-NEXT: kshiftrw $10, %k0, %k6
12710 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12711 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12712 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12713 ; AVX512BW-NEXT: kshiftrw $9, %k0, %k6
12714 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12715 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12716 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12717 ; AVX512BW-NEXT: kshiftrw $8, %k0, %k6
12718 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12719 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k6 # 2-byte Reload
12720 ; AVX512BW-NEXT: kandw %k6, %k1, %k1
12721 ; AVX512BW-NEXT: kshiftrw $7, %k0, %k0
12722 ; AVX512BW-NEXT: korw %k0, %k1, %k0
12723 ; AVX512BW-NEXT: kshiftrq $63, %k5, %k5
12724 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
12725 ; AVX512BW-NEXT: kandw %k1, %k0, %k1
12726 ; AVX512BW-NEXT: kshiftlw $15, %k5, %k0
12727 ; AVX512BW-NEXT: kshiftrw $6, %k0, %k6
12728 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12729 ; AVX512BW-NEXT: kandw %k3, %k1, %k1
12730 ; AVX512BW-NEXT: kshiftrw $5, %k0, %k6
12731 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12732 ; AVX512BW-NEXT: kandw %k4, %k1, %k1
12733 ; AVX512BW-NEXT: kshiftrw $4, %k0, %k6
12734 ; AVX512BW-NEXT: korw %k6, %k1, %k1
12735 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12736 ; AVX512BW-NEXT: kshiftrw $3, %k0, %k4
12737 ; AVX512BW-NEXT: korw %k4, %k1, %k1
12738 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12739 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12740 ; AVX512BW-NEXT: kshiftrw $2, %k0, %k3
12741 ; AVX512BW-NEXT: korw %k3, %k1, %k1
12742 ; AVX512BW-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k2 # 2-byte Reload
12743 ; AVX512BW-NEXT: kandw %k2, %k1, %k1
12744 ; AVX512BW-NEXT: kshiftlw $14, %k5, %k2
12745 ; AVX512BW-NEXT: korw %k2, %k1, %k1
12746 ; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
12747 ; AVX512BW-NEXT: kshiftrw $1, %k1, %k1
12748 ; AVX512BW-NEXT: korw %k0, %k1, %k1
12749 ; AVX512BW-NEXT: vmovdqa32 1728(%rsi), %zmm27 {%k1} {z}
12750 ; AVX512BW-NEXT: vmovdqa64 %zmm27, 1728(%rdx)
12751 ; AVX512BW-NEXT: vmovdqa64 %zmm26, 1664(%rdx)
12752 ; AVX512BW-NEXT: vmovdqa64 %zmm25, 1600(%rdx)
12753 ; AVX512BW-NEXT: vmovdqa64 %zmm24, 1536(%rdx)
12754 ; AVX512BW-NEXT: vmovdqa64 %zmm23, 1472(%rdx)
12755 ; AVX512BW-NEXT: vmovdqa64 %zmm22, 1408(%rdx)
12756 ; AVX512BW-NEXT: vmovdqa64 %zmm21, 1344(%rdx)
12757 ; AVX512BW-NEXT: vmovdqa64 %zmm20, 1280(%rdx)
12758 ; AVX512BW-NEXT: vmovdqa64 %zmm19, 1216(%rdx)
12759 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 1152(%rdx)
12760 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 1088(%rdx)
12761 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 1024(%rdx)
12762 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 960(%rdx)
12763 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 896(%rdx)
12764 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 832(%rdx)
12765 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 768(%rdx)
12766 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 704(%rdx)
12767 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 640(%rdx)
12768 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 576(%rdx)
12769 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 512(%rdx)
12770 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 448(%rdx)
12771 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 384(%rdx)
12772 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 320(%rdx)
12773 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 256(%rdx)
12774 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 192(%rdx)
12775 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 128(%rdx)
12776 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%rdx)
12777 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
12778 ; AVX512BW-NEXT: vzeroupper
12779 ; AVX512BW-NEXT: retq
12780 %src.mask = load <64 x i1>, ptr %in.maskvec, align 64
12781 %tgt.mask = shufflevector <64 x i1> %src.mask, <64 x i1> poison, <448 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 33, i32 33, i32 33, i32 33, i32 33, i32 33, i32 33, i32 34, i32 34, i32 34, i32 34, i32 34, i32 34, i32 34, i32 35, i32 35, i32 35, i32 35, i32 35, i32 35, i32 35, i32 36, i32 36, i32 36, i32 36, i32 36, i32 36, i32 36, i32 37, i32 37, i32 37, i32 37, i32 37, i32 37, i32 37, i32 38, i32 38, i32 38, i32 38, i32 38, i32 38, i32 38, i32 39, i32 39, i32 39, i32 39, i32 39, i32 39, i32 39, i32 40, i32 40, i32 40, i32 40, i32 40, i32 40, i32 40, i32 41, i32 41, i32 41, i32 41, i32 41, i32 41, i32 41, i32 42, i32 42, i32 42, i32 42, i32 42, i32 42, i32 42, i32 43, i32 43, i32 43, i32 43, i32 43, i32 43, i32 43, i32 44, i32 44, i32 44, i32 44, i32 44, i32 44, i32 44, i32 45, i32 45, i32 45, i32 45, i32 45, i32 45, i32 45, i32 46, i32 46, i32 46, i32 46, i32 46, i32 46, i32 46, i32 47, i32 47, i32 47, i32 47, i32 47, i32 47, i32 47, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 49, i32 49, i32 49, i32 49, i32 49, i32 49, i32 49, i32 50, i32 50, i32 50, i32 50, i32 50, i32 50, i32 50, i32 51, i32 51, i32 51, i32 51, i32 51, i32 51, i32 51, i32 52, i32 52, i32 52, i32 52, i32 52, i32 52, i32 52, i32 53, i32 53, i32 53, i32 53, i32 53, i32 53, i32 53, i32 54, i32 54, i32 54, i32 54, i32 54, i32 54, i32 54, i32 55, i32 55, i32 55, i32 55, i32 55, i32 55, i32 55, i32 56, i32 56, i32 56, i32 56, i32 56, i32 56, i32 56, i32 57, i32 57, i32 57, i32 57, i32 57, i32 57, i32 57, i32 58, i32 58, i32 58, i32 58, i32 58, i32 58, i32 58, i32 59, i32 59, i32 59, i32 59, i32 59, i32 59, i32 59, i32 60, i32 60, i32 60, i32 60, i32 60, i32 60, i32 60, i32 61, i32 61, i32 61, i32 61, i32 61, i32 61, i32 61, i32 62, i32 62, i32 62, i32 62, i32 62, i32 62, i32 62, i32 63, i32 63, i32 63, i32 63, i32 63, i32 63, i32 63>
12782 %data = call <448 x i32> @llvm.masked.load.v448i32.p0(ptr %in.vec, i32 64, <448 x i1> %tgt.mask, <448 x i32> poison)
12783 store <448 x i32> %data, ptr %out.vec, align 64
12784 ret void
12785 }
12787 define void @mask_replication_factor8_vf2(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
12788 ; AVX512F-ONLY-LABEL: mask_replication_factor8_vf2:
12789 ; AVX512F-ONLY: # %bb.0:
12790 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
12791 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
12792 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
12793 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
12794 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
12795 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
12796 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
12797 ; AVX512F-ONLY-NEXT: vzeroupper
12798 ; AVX512F-ONLY-NEXT: retq
12800 ; AVX512DQ-LABEL: mask_replication_factor8_vf2:
12801 ; AVX512DQ: # %bb.0:
12802 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
12803 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
12804 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
12805 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
12806 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
12807 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
12808 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
12809 ; AVX512DQ-NEXT: vzeroupper
12810 ; AVX512DQ-NEXT: retq
12812 ; AVX512BW-LABEL: mask_replication_factor8_vf2:
12813 ; AVX512BW: # %bb.0:
12814 ; AVX512BW-NEXT: kmovw (%rdi), %k1
12815 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
12816 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
12817 ; AVX512BW-NEXT: vpermd %zmm0, %zmm1, %zmm0
12818 ; AVX512BW-NEXT: vptestmd %zmm0, %zmm0, %k1
12819 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
12820 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
12821 ; AVX512BW-NEXT: vzeroupper
12822 ; AVX512BW-NEXT: retq
12823 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
12824 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <2 x i32> <i32 0, i32 1>
12825 %tgt.mask = shufflevector <2 x i1> %src.mask, <2 x i1> poison, <16 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1>
12826 %data = call <16 x i32> @llvm.masked.load.v16i32.p0(ptr %in.vec, i32 64, <16 x i1> %tgt.mask, <16 x i32> poison)
12827 store <16 x i32> %data, ptr %out.vec, align 64
12828 ret void
12829 }
12831 define void @mask_replication_factor8_vf4(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
12832 ; AVX512F-ONLY-LABEL: mask_replication_factor8_vf4:
12833 ; AVX512F-ONLY: # %bb.0:
12834 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
12835 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
12836 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3]
12837 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
12838 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
12839 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
12840 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
12841 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k2
12842 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
12843 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
12844 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 64(%rdx)
12845 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, (%rdx)
12846 ; AVX512F-ONLY-NEXT: vzeroupper
12847 ; AVX512F-ONLY-NEXT: retq
12849 ; AVX512DQ-LABEL: mask_replication_factor8_vf4:
12850 ; AVX512DQ: # %bb.0:
12851 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
12852 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
12853 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3]
12854 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
12855 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
12856 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
12857 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
12858 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k2
12859 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm0 {%k2} {z}
12860 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
12861 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 64(%rdx)
12862 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, (%rdx)
12863 ; AVX512DQ-NEXT: vzeroupper
12864 ; AVX512DQ-NEXT: retq
12866 ; AVX512BW-LABEL: mask_replication_factor8_vf4:
12867 ; AVX512BW: # %bb.0:
12868 ; AVX512BW-NEXT: kmovd (%rdi), %k0
12869 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0
12870 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3]
12871 ; AVX512BW-NEXT: vpermw %zmm0, %zmm1, %zmm0
12872 ; AVX512BW-NEXT: vpmovw2m %zmm0, %k1
12873 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm0 {%k1} {z}
12874 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k1
12875 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm1 {%k1} {z}
12876 ; AVX512BW-NEXT: vmovdqa64 %zmm1, 64(%rdx)
12877 ; AVX512BW-NEXT: vmovdqa64 %zmm0, (%rdx)
12878 ; AVX512BW-NEXT: vzeroupper
12879 ; AVX512BW-NEXT: retq
12880 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
12881 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
12882 %tgt.mask = shufflevector <4 x i1> %src.mask, <4 x i1> poison, <32 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
12883 %data = call <32 x i32> @llvm.masked.load.v32i32.p0(ptr %in.vec, i32 64, <32 x i1> %tgt.mask, <32 x i32> poison)
12884 store <32 x i32> %data, ptr %out.vec, align 64
12885 ret void
12886 }
12888 define void @mask_replication_factor8_vf8(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
12889 ; AVX512F-ONLY-LABEL: mask_replication_factor8_vf8:
12890 ; AVX512F-ONLY: # %bb.0:
12891 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
12892 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
12893 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5]
12894 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
12895 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
12896 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7]
12897 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
12898 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
12899 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
12900 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
12901 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k3
12902 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3]
12903 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
12904 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k4
12905 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k4} {z}
12906 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k3} {z}
12907 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k2} {z}
12908 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k1} {z}
12909 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
12910 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
12911 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
12912 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
12913 ; AVX512F-ONLY-NEXT: vzeroupper
12914 ; AVX512F-ONLY-NEXT: retq
12916 ; AVX512DQ-LABEL: mask_replication_factor8_vf8:
12917 ; AVX512DQ: # %bb.0:
12918 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
12919 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
12920 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5]
12921 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
12922 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
12923 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7]
12924 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
12925 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k2
12926 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
12927 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
12928 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k3
12929 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3]
12930 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
12931 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k4
12932 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k4} {z}
12933 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm1 {%k3} {z}
12934 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k2} {z}
12935 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k1} {z}
12936 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 128(%rdx)
12937 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 192(%rdx)
12938 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, (%rdx)
12939 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 64(%rdx)
12940 ; AVX512DQ-NEXT: vzeroupper
12941 ; AVX512DQ-NEXT: retq
12943 ; AVX512BW-ONLY-LABEL: mask_replication_factor8_vf8:
12944 ; AVX512BW-ONLY: # %bb.0:
12945 ; AVX512BW-ONLY-NEXT: kmovq (%rdi), %k0
12946 ; AVX512BW-ONLY-NEXT: vpmovm2b %k0, %zmm0
12947 ; AVX512BW-ONLY-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
12948 ; AVX512BW-ONLY-NEXT: vpshufb {{.*#+}} zmm0 = zmm0[0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,18,18,18,18,18,18,18,18,19,19,19,19,19,19,19,19,36,36,36,36,36,36,36,36,37,37,37,37,37,37,37,37,54,54,54,54,54,54,54,54,55,55,55,55,55,55,55,55]
12949 ; AVX512BW-ONLY-NEXT: vpmovb2m %zmm0, %k1
12950 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
12951 ; AVX512BW-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k2} {z}
12952 ; AVX512BW-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
12953 ; AVX512BW-ONLY-NEXT: kshiftrq $32, %k1, %k1
12954 ; AVX512BW-ONLY-NEXT: kshiftrd $16, %k1, %k2
12955 ; AVX512BW-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k2} {z}
12956 ; AVX512BW-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k1} {z}
12957 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
12958 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
12959 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
12960 ; AVX512BW-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
12961 ; AVX512BW-ONLY-NEXT: vzeroupper
12962 ; AVX512BW-ONLY-NEXT: retq
12964 ; AVX512VBMI-ONLY-LABEL: mask_replication_factor8_vf8:
12965 ; AVX512VBMI-ONLY: # %bb.0:
12966 ; AVX512VBMI-ONLY-NEXT: kmovq (%rdi), %k0
12967 ; AVX512VBMI-ONLY-NEXT: vpmovm2b %k0, %zmm0
12968 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7]
12969 ; AVX512VBMI-ONLY-NEXT: vpermb %zmm0, %zmm1, %zmm0
12970 ; AVX512VBMI-ONLY-NEXT: vpmovb2m %zmm0, %k1
12971 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
12972 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k2} {z}
12973 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k1} {z}
12974 ; AVX512VBMI-ONLY-NEXT: kshiftrq $32, %k1, %k1
12975 ; AVX512VBMI-ONLY-NEXT: kshiftrd $16, %k1, %k2
12976 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k2} {z}
12977 ; AVX512VBMI-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k1} {z}
12978 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
12979 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
12980 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
12981 ; AVX512VBMI-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
12982 ; AVX512VBMI-ONLY-NEXT: vzeroupper
12983 ; AVX512VBMI-ONLY-NEXT: retq
12984 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
12985 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
12986 %tgt.mask = shufflevector <8 x i1> %src.mask, <8 x i1> poison, <64 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7>
12987 %data = call <64 x i32> @llvm.masked.load.v64i32.p0(ptr %in.vec, i32 64, <64 x i1> %tgt.mask, <64 x i32> poison)
12988 store <64 x i32> %data, ptr %out.vec, align 64
ret void
}
12992 define void @mask_replication_factor8_vf16(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
12993 ; AVX512F-ONLY-LABEL: mask_replication_factor8_vf16:
12994 ; AVX512F-ONLY: # %bb.0:
12995 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
12996 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
12997 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13]
12998 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
12999 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
13000 ; AVX512F-ONLY-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
13001 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15]
13002 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
13003 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k2
13004 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9]
13005 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
13006 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k3
13007 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [10,10,10,10,10,10,10,10,11,11,11,11,11,11,11,11]
13008 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
13009 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k4
13010 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5]
13011 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
13012 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k5
13013 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7]
13014 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
13015 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k6
13016 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
13017 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm1
13018 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k7
13019 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3]
13020 ; AVX512F-ONLY-NEXT: vpermd %zmm0, %zmm1, %zmm0
13021 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
13022 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
13023 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm1 {%k7} {z}
13024 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k6} {z}
13025 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k5} {z}
13026 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k4} {z}
13027 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k3} {z}
13028 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k2} {z}
13029 ; AVX512F-ONLY-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
13030 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k1} {z}
13031 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 384(%rdx)
13032 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 448(%rdx)
13033 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 256(%rdx)
13034 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 320(%rdx)
13035 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 128(%rdx)
13036 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 192(%rdx)
13037 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, (%rdx)
13038 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 64(%rdx)
13039 ; AVX512F-ONLY-NEXT: vzeroupper
13040 ; AVX512F-ONLY-NEXT: retq
13042 ; AVX512DQ-LABEL: mask_replication_factor8_vf16:
13043 ; AVX512DQ: # %bb.0:
13044 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
13045 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
13046 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13]
13047 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
13048 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
13049 ; AVX512DQ-NEXT: kmovw %k1, {{[-0-9]+}}(%r{{[sb]}}p) # 2-byte Spill
13050 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15]
13051 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
13052 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k2
13053 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9]
13054 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
13055 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k3
13056 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [10,10,10,10,10,10,10,10,11,11,11,11,11,11,11,11]
13057 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
13058 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k4
13059 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5]
13060 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
13061 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k5
13062 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7]
13063 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
13064 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k6
13065 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
13066 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm1
13067 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k7
13068 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm1 = [2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3]
13069 ; AVX512DQ-NEXT: vpermd %zmm0, %zmm1, %zmm0
13070 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
13071 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
13072 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm1 {%k7} {z}
13073 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k6} {z}
13074 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k5} {z}
13075 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k4} {z}
13076 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k3} {z}
13077 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k2} {z}
13078 ; AVX512DQ-NEXT: kmovw {{[-0-9]+}}(%r{{[sb]}}p), %k1 # 2-byte Reload
13079 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k1} {z}
13080 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 384(%rdx)
13081 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 448(%rdx)
13082 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 256(%rdx)
13083 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 320(%rdx)
13084 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 128(%rdx)
13085 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 192(%rdx)
13086 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, (%rdx)
13087 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 64(%rdx)
13088 ; AVX512DQ-NEXT: vzeroupper
13089 ; AVX512DQ-NEXT: retq
13091 ; AVX512BW-LABEL: mask_replication_factor8_vf16:
13092 ; AVX512BW: # %bb.0:
13093 ; AVX512BW-NEXT: kmovw (%rdi), %k0
13094 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0
13095 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
13096 ; AVX512BW-NEXT: vpshufb {{.*#+}} zmm1 = zmm0[8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9,26,26,26,26,26,26,26,26,27,27,27,27,27,27,27,27,44,44,44,44,44,44,44,44,45,45,45,45,45,45,45,45,62,62,62,62,62,62,62,62,63,63,63,63,63,63,63,63]
13097 ; AVX512BW-NEXT: vpmovb2m %zmm1, %k1
13098 ; AVX512BW-NEXT: vpshufb {{.*#+}} zmm0 = zmm0[0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,18,18,18,18,18,18,18,18,19,19,19,19,19,19,19,19,36,36,36,36,36,36,36,36,37,37,37,37,37,37,37,37,54,54,54,54,54,54,54,54,55,55,55,55,55,55,55,55]
13099 ; AVX512BW-NEXT: vpmovb2m %zmm0, %k2
13100 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k3
13101 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k3} {z}
13102 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
13103 ; AVX512BW-NEXT: kshiftrq $32, %k2, %k2
13104 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k3
13105 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k3} {z}
13106 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k2} {z}
13107 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k2
13108 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k2} {z}
13109 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k1} {z}
13110 ; AVX512BW-NEXT: kshiftrq $32, %k1, %k1
13111 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k2
13112 ; AVX512BW-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k2} {z}
13113 ; AVX512BW-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k1} {z}
13114 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 384(%rdx)
13115 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 448(%rdx)
13116 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 256(%rdx)
13117 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 320(%rdx)
13118 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 128(%rdx)
13119 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 192(%rdx)
13120 ; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rdx)
13121 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 64(%rdx)
13122 ; AVX512BW-NEXT: vzeroupper
13123 ; AVX512BW-NEXT: retq
13124 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
13125 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
13126 %tgt.mask = shufflevector <16 x i1> %src.mask, <16 x i1> poison, <128 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15>
13127 %data = call <128 x i32> @llvm.masked.load.v128i32.p0(ptr %in.vec, i32 64, <128 x i1> %tgt.mask, <128 x i32> poison)
13128 store <128 x i32> %data, ptr %out.vec, align 64
ret void
}
13132 define void @mask_replication_factor8_vf32(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
13133 ; AVX512F-ONLY-LABEL: mask_replication_factor8_vf32:
13134 ; AVX512F-ONLY: # %bb.0:
13135 ; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
13136 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
13137 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
13138 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm2 = [14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15]
13139 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm2, %zmm0
13140 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm3 = [12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13]
13141 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm3, %zmm4
13142 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm5 = [10,10,10,10,10,10,10,10,11,11,11,11,11,11,11,11]
13143 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm5, %zmm6
13144 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm7 = [8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9]
13145 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm7, %zmm8
13146 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm9 = [6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7]
13147 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm9, %zmm10
13148 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm11 = [4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5]
13149 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm11, %zmm12
13150 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm13 = [2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3]
13151 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm13, %zmm14
13152 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm15 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
13153 ; AVX512F-ONLY-NEXT: vpermd %zmm1, %zmm15, %zmm1
13154 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm16, %zmm16, %zmm16 {%k1} {z}
13155 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm2, %zmm2
13156 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm3, %zmm3
13157 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm5, %zmm5
13158 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm7, %zmm7
13159 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm9, %zmm9
13160 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm11, %zmm11
13161 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm13, %zmm13
13162 ; AVX512F-ONLY-NEXT: vpermd %zmm16, %zmm15, %zmm15
13163 ; AVX512F-ONLY-NEXT: vptestmd %zmm15, %zmm15, %k1
13164 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm15 {%k1} {z}
13165 ; AVX512F-ONLY-NEXT: vptestmd %zmm13, %zmm13, %k1
13166 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm13 {%k1} {z}
13167 ; AVX512F-ONLY-NEXT: vptestmd %zmm11, %zmm11, %k1
13168 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm11 {%k1} {z}
13169 ; AVX512F-ONLY-NEXT: vptestmd %zmm9, %zmm9, %k1
13170 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm9 {%k1} {z}
13171 ; AVX512F-ONLY-NEXT: vptestmd %zmm7, %zmm7, %k1
13172 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm7 {%k1} {z}
13173 ; AVX512F-ONLY-NEXT: vptestmd %zmm5, %zmm5, %k1
13174 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm5 {%k1} {z}
13175 ; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k1
13176 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm3 {%k1} {z}
13177 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
13178 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm2 {%k1} {z}
13179 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
13180 ; AVX512F-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm1 {%k1} {z}
13181 ; AVX512F-ONLY-NEXT: vptestmd %zmm14, %zmm14, %k1
13182 ; AVX512F-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm14 {%k1} {z}
13183 ; AVX512F-ONLY-NEXT: vptestmd %zmm12, %zmm12, %k1
13184 ; AVX512F-ONLY-NEXT: vmovdqa32 640(%rsi), %zmm12 {%k1} {z}
13185 ; AVX512F-ONLY-NEXT: vptestmd %zmm10, %zmm10, %k1
13186 ; AVX512F-ONLY-NEXT: vmovdqa32 704(%rsi), %zmm10 {%k1} {z}
13187 ; AVX512F-ONLY-NEXT: vptestmd %zmm8, %zmm8, %k1
13188 ; AVX512F-ONLY-NEXT: vmovdqa32 768(%rsi), %zmm8 {%k1} {z}
13189 ; AVX512F-ONLY-NEXT: vptestmd %zmm6, %zmm6, %k1
13190 ; AVX512F-ONLY-NEXT: vmovdqa32 832(%rsi), %zmm6 {%k1} {z}
13191 ; AVX512F-ONLY-NEXT: vptestmd %zmm4, %zmm4, %k1
13192 ; AVX512F-ONLY-NEXT: vmovdqa32 896(%rsi), %zmm4 {%k1} {z}
13193 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
13194 ; AVX512F-ONLY-NEXT: vmovdqa32 960(%rsi), %zmm0 {%k1} {z}
13195 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 960(%rdx)
13196 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 896(%rdx)
13197 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 832(%rdx)
13198 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm8, 768(%rdx)
13199 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm10, 704(%rdx)
13200 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm12, 640(%rdx)
13201 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm14, 576(%rdx)
13202 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 512(%rdx)
13203 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 448(%rdx)
13204 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 384(%rdx)
13205 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 320(%rdx)
13206 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 256(%rdx)
13207 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm9, 192(%rdx)
13208 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm11, 128(%rdx)
13209 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm13, 64(%rdx)
13210 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm15, (%rdx)
13211 ; AVX512F-ONLY-NEXT: vzeroupper
13212 ; AVX512F-ONLY-NEXT: retq
13214 ; AVX512DQ-LABEL: mask_replication_factor8_vf32:
13215 ; AVX512DQ: # %bb.0:
13216 ; AVX512DQ-NEXT: kmovw 2(%rdi), %k0
13217 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm1
13218 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
13219 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm2 = [14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15]
13220 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm2, %zmm0
13221 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm3 = [12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13]
13222 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm3, %zmm4
13223 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm5 = [10,10,10,10,10,10,10,10,11,11,11,11,11,11,11,11]
13224 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm5, %zmm6
13225 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm7 = [8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9]
13226 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm7, %zmm8
13227 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm9 = [6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7]
13228 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm9, %zmm10
13229 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm11 = [4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5]
13230 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm11, %zmm12
13231 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm13 = [2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3]
13232 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm13, %zmm14
13233 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm15 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
13234 ; AVX512DQ-NEXT: vpermd %zmm1, %zmm15, %zmm1
13235 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm16
13236 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm2, %zmm2
13237 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm3, %zmm3
13238 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm5, %zmm5
13239 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm7, %zmm7
13240 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm9, %zmm9
13241 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm11, %zmm11
13242 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm13, %zmm13
13243 ; AVX512DQ-NEXT: vpermd %zmm16, %zmm15, %zmm15
13244 ; AVX512DQ-NEXT: vpmovd2m %zmm15, %k1
13245 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm15 {%k1} {z}
13246 ; AVX512DQ-NEXT: vpmovd2m %zmm13, %k1
13247 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm13 {%k1} {z}
13248 ; AVX512DQ-NEXT: vpmovd2m %zmm11, %k1
13249 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm11 {%k1} {z}
13250 ; AVX512DQ-NEXT: vpmovd2m %zmm9, %k1
13251 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm9 {%k1} {z}
13252 ; AVX512DQ-NEXT: vpmovd2m %zmm7, %k1
13253 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm7 {%k1} {z}
13254 ; AVX512DQ-NEXT: vpmovd2m %zmm5, %k1
13255 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm5 {%k1} {z}
13256 ; AVX512DQ-NEXT: vpmovd2m %zmm3, %k1
13257 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm3 {%k1} {z}
13258 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
13259 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm2 {%k1} {z}
13260 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
13261 ; AVX512DQ-NEXT: vmovdqa32 512(%rsi), %zmm1 {%k1} {z}
13262 ; AVX512DQ-NEXT: vpmovd2m %zmm14, %k1
13263 ; AVX512DQ-NEXT: vmovdqa32 576(%rsi), %zmm14 {%k1} {z}
13264 ; AVX512DQ-NEXT: vpmovd2m %zmm12, %k1
13265 ; AVX512DQ-NEXT: vmovdqa32 640(%rsi), %zmm12 {%k1} {z}
13266 ; AVX512DQ-NEXT: vpmovd2m %zmm10, %k1
13267 ; AVX512DQ-NEXT: vmovdqa32 704(%rsi), %zmm10 {%k1} {z}
13268 ; AVX512DQ-NEXT: vpmovd2m %zmm8, %k1
13269 ; AVX512DQ-NEXT: vmovdqa32 768(%rsi), %zmm8 {%k1} {z}
13270 ; AVX512DQ-NEXT: vpmovd2m %zmm6, %k1
13271 ; AVX512DQ-NEXT: vmovdqa32 832(%rsi), %zmm6 {%k1} {z}
13272 ; AVX512DQ-NEXT: vpmovd2m %zmm4, %k1
13273 ; AVX512DQ-NEXT: vmovdqa32 896(%rsi), %zmm4 {%k1} {z}
13274 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
13275 ; AVX512DQ-NEXT: vmovdqa32 960(%rsi), %zmm0 {%k1} {z}
13276 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 960(%rdx)
13277 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 896(%rdx)
13278 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 832(%rdx)
13279 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 768(%rdx)
13280 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 704(%rdx)
13281 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 640(%rdx)
13282 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 576(%rdx)
13283 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 512(%rdx)
13284 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 448(%rdx)
13285 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 384(%rdx)
13286 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 320(%rdx)
13287 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 256(%rdx)
13288 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 192(%rdx)
13289 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 128(%rdx)
13290 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 64(%rdx)
13291 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, (%rdx)
13292 ; AVX512DQ-NEXT: vzeroupper
13293 ; AVX512DQ-NEXT: retq
13295 ; AVX512BW-LABEL: mask_replication_factor8_vf32:
13296 ; AVX512BW: # %bb.0:
13297 ; AVX512BW-NEXT: kmovd (%rdi), %k0
13298 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0
13299 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm0[2,3,2,3,2,3,2,3]
13300 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9,10,10,10,10,10,10,10,10,11,11,11,11,11,11,11,11,12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13,14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15]
13301 ; AVX512BW-NEXT: vpshufb %zmm2, %zmm1, %zmm3
13302 ; AVX512BW-NEXT: vpmovb2m %zmm3, %k1
13303 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7]
13304 ; AVX512BW-NEXT: vpshufb %zmm3, %zmm1, %zmm1
13305 ; AVX512BW-NEXT: vpmovb2m %zmm1, %k2
13306 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
13307 ; AVX512BW-NEXT: vpshufb %zmm2, %zmm0, %zmm1
13308 ; AVX512BW-NEXT: vpmovb2m %zmm1, %k3
13309 ; AVX512BW-NEXT: vpshufb %zmm3, %zmm0, %zmm0
13310 ; AVX512BW-NEXT: vpmovb2m %zmm0, %k4
13311 ; AVX512BW-NEXT: kshiftrd $16, %k4, %k5
13312 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k5} {z}
13313 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k4} {z}
13314 ; AVX512BW-NEXT: kshiftrq $32, %k4, %k4
13315 ; AVX512BW-NEXT: kshiftrd $16, %k4, %k5
13316 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm2 {%k5} {z}
13317 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm3 {%k4} {z}
13318 ; AVX512BW-NEXT: kshiftrd $16, %k3, %k4
13319 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k4} {z}
13320 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k3} {z}
13321 ; AVX512BW-NEXT: kshiftrq $32, %k3, %k3
13322 ; AVX512BW-NEXT: kshiftrd $16, %k3, %k4
13323 ; AVX512BW-NEXT: vmovdqa32 448(%rsi), %zmm6 {%k4} {z}
13324 ; AVX512BW-NEXT: vmovdqa32 384(%rsi), %zmm7 {%k3} {z}
13325 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k3
13326 ; AVX512BW-NEXT: vmovdqa32 576(%rsi), %zmm8 {%k3} {z}
13327 ; AVX512BW-NEXT: vmovdqa32 512(%rsi), %zmm9 {%k2} {z}
13328 ; AVX512BW-NEXT: kshiftrq $32, %k2, %k2
13329 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k3
13330 ; AVX512BW-NEXT: vmovdqa32 704(%rsi), %zmm10 {%k3} {z}
13331 ; AVX512BW-NEXT: vmovdqa32 640(%rsi), %zmm11 {%k2} {z}
13332 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k2
13333 ; AVX512BW-NEXT: vmovdqa32 832(%rsi), %zmm12 {%k2} {z}
13334 ; AVX512BW-NEXT: vmovdqa32 768(%rsi), %zmm13 {%k1} {z}
13335 ; AVX512BW-NEXT: kshiftrq $32, %k1, %k1
13336 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k2
13337 ; AVX512BW-NEXT: vmovdqa32 960(%rsi), %zmm14 {%k2} {z}
13338 ; AVX512BW-NEXT: vmovdqa32 896(%rsi), %zmm15 {%k1} {z}
13339 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 896(%rdx)
13340 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 960(%rdx)
13341 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 768(%rdx)
13342 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 832(%rdx)
13343 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 640(%rdx)
13344 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 704(%rdx)
13345 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 512(%rdx)
13346 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 576(%rdx)
13347 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 384(%rdx)
13348 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 448(%rdx)
13349 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 256(%rdx)
13350 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 320(%rdx)
13351 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 128(%rdx)
13352 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 192(%rdx)
13353 ; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rdx)
13354 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 64(%rdx)
13355 ; AVX512BW-NEXT: vzeroupper
13356 ; AVX512BW-NEXT: retq
13357 %src.mask.padded = load <64 x i1>, ptr %in.maskvec, align 64
13358 %src.mask = shufflevector <64 x i1> %src.mask.padded, <64 x i1> poison, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
13359 %tgt.mask = shufflevector <32 x i1> %src.mask, <32 x i1> poison, <256 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31>
13360 %data = call <256 x i32> @llvm.masked.load.v256i32.p0(ptr %in.vec, i32 64, <256 x i1> %tgt.mask, <256 x i32> poison)
13361 store <256 x i32> %data, ptr %out.vec, align 64
ret void
}
13365 define void @mask_replication_factor8_vf64(ptr %in.maskvec, ptr %in.vec, ptr %out.vec) nounwind {
13366 ; AVX512F-ONLY-LABEL: mask_replication_factor8_vf64:
13367 ; AVX512F-ONLY: # %bb.0:
13368 ; AVX512F-ONLY-NEXT: subq $136, %rsp
13369 ; AVX512F-ONLY-NEXT: kmovw 6(%rdi), %k1
13370 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm6, %zmm6, %zmm6 {%k1} {z}
13371 ; AVX512F-ONLY-NEXT: kmovw 4(%rdi), %k1
13372 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm8, %zmm8, %zmm8 {%k1} {z}
13373 ; AVX512F-ONLY-NEXT: kmovw 2(%rdi), %k1
13374 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm10, %zmm10, %zmm10 {%k1} {z}
13375 ; AVX512F-ONLY-NEXT: kmovw (%rdi), %k1
13376 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm12 = [14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15]
13377 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm12, %zmm0
13378 ; AVX512F-ONLY-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13379 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm14 = [12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13]
13380 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm14, %zmm0
13381 ; AVX512F-ONLY-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
13382 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm16 = [10,10,10,10,10,10,10,10,11,11,11,11,11,11,11,11]
13383 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm16, %zmm0
13384 ; AVX512F-ONLY-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13385 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm18 = [8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9]
13386 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm18, %zmm0
13387 ; AVX512F-ONLY-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13388 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm20 = [6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7]
13389 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm20, %zmm4
13390 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm22 = [4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5]
13391 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm22, %zmm5
13392 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm24 = [2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3]
13393 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm24, %zmm7
13394 ; AVX512F-ONLY-NEXT: vmovdqa64 {{.*#+}} zmm26 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
13395 ; AVX512F-ONLY-NEXT: vpermd %zmm6, %zmm26, %zmm9
13396 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm12, %zmm11
13397 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm14, %zmm13
13398 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm16, %zmm15
13399 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm18, %zmm17
13400 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm20, %zmm19
13401 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm22, %zmm21
13402 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm24, %zmm23
13403 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm26, %zmm25
13404 ; AVX512F-ONLY-NEXT: vpermd %zmm10, %zmm12, %zmm27
13405 ; AVX512F-ONLY-NEXT: vpermd %zmm10, %zmm14, %zmm28
13406 ; AVX512F-ONLY-NEXT: vpermd %zmm10, %zmm16, %zmm29
13407 ; AVX512F-ONLY-NEXT: vpermd %zmm10, %zmm18, %zmm30
13408 ; AVX512F-ONLY-NEXT: vpermd %zmm10, %zmm20, %zmm31
13409 ; AVX512F-ONLY-NEXT: vpermd %zmm10, %zmm22, %zmm3
13410 ; AVX512F-ONLY-NEXT: vpermd %zmm10, %zmm24, %zmm6
13411 ; AVX512F-ONLY-NEXT: vpermd %zmm10, %zmm26, %zmm2
13412 ; AVX512F-ONLY-NEXT: vpternlogd $255, %zmm8, %zmm8, %zmm8 {%k1} {z}
13413 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm12, %zmm1
13414 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm14, %zmm0
13415 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm16, %zmm16
13416 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm18, %zmm14
13417 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm20, %zmm12
13418 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm22, %zmm10
13419 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm24, %zmm18
13420 ; AVX512F-ONLY-NEXT: vpermd %zmm8, %zmm26, %zmm8
13421 ; AVX512F-ONLY-NEXT: vptestmd %zmm8, %zmm8, %k1
13422 ; AVX512F-ONLY-NEXT: vmovdqa32 (%rsi), %zmm26 {%k1} {z}
13423 ; AVX512F-ONLY-NEXT: vptestmd %zmm18, %zmm18, %k1
13424 ; AVX512F-ONLY-NEXT: vmovdqa32 64(%rsi), %zmm8 {%k1} {z}
13425 ; AVX512F-ONLY-NEXT: vptestmd %zmm10, %zmm10, %k1
13426 ; AVX512F-ONLY-NEXT: vmovdqa32 128(%rsi), %zmm10 {%k1} {z}
13427 ; AVX512F-ONLY-NEXT: vptestmd %zmm12, %zmm12, %k1
13428 ; AVX512F-ONLY-NEXT: vmovdqa32 192(%rsi), %zmm12 {%k1} {z}
13429 ; AVX512F-ONLY-NEXT: vptestmd %zmm14, %zmm14, %k1
13430 ; AVX512F-ONLY-NEXT: vmovdqa32 256(%rsi), %zmm14 {%k1} {z}
13431 ; AVX512F-ONLY-NEXT: vptestmd %zmm16, %zmm16, %k1
13432 ; AVX512F-ONLY-NEXT: vmovdqa32 320(%rsi), %zmm16 {%k1} {z}
13433 ; AVX512F-ONLY-NEXT: vptestmd %zmm0, %zmm0, %k1
13434 ; AVX512F-ONLY-NEXT: vmovdqa32 384(%rsi), %zmm18 {%k1} {z}
13435 ; AVX512F-ONLY-NEXT: vptestmd %zmm1, %zmm1, %k1
13436 ; AVX512F-ONLY-NEXT: vmovdqa32 448(%rsi), %zmm20 {%k1} {z}
13437 ; AVX512F-ONLY-NEXT: vptestmd %zmm2, %zmm2, %k1
13438 ; AVX512F-ONLY-NEXT: vmovdqa32 512(%rsi), %zmm22 {%k1} {z}
13439 ; AVX512F-ONLY-NEXT: vptestmd %zmm6, %zmm6, %k1
13440 ; AVX512F-ONLY-NEXT: vmovdqa32 576(%rsi), %zmm24 {%k1} {z}
13441 ; AVX512F-ONLY-NEXT: vptestmd %zmm3, %zmm3, %k1
13442 ; AVX512F-ONLY-NEXT: vmovdqa32 640(%rsi), %zmm0 {%k1} {z}
13443 ; AVX512F-ONLY-NEXT: vptestmd %zmm31, %zmm31, %k1
13444 ; AVX512F-ONLY-NEXT: vmovdqa32 704(%rsi), %zmm1 {%k1} {z}
13445 ; AVX512F-ONLY-NEXT: vptestmd %zmm30, %zmm30, %k1
13446 ; AVX512F-ONLY-NEXT: vmovdqa32 768(%rsi), %zmm2 {%k1} {z}
13447 ; AVX512F-ONLY-NEXT: vptestmd %zmm29, %zmm29, %k1
13448 ; AVX512F-ONLY-NEXT: vmovdqa32 832(%rsi), %zmm3 {%k1} {z}
13449 ; AVX512F-ONLY-NEXT: vptestmd %zmm28, %zmm28, %k1
13450 ; AVX512F-ONLY-NEXT: vmovdqa32 896(%rsi), %zmm6 {%k1} {z}
13451 ; AVX512F-ONLY-NEXT: vptestmd %zmm27, %zmm27, %k1
13452 ; AVX512F-ONLY-NEXT: vmovdqa32 960(%rsi), %zmm27 {%k1} {z}
13453 ; AVX512F-ONLY-NEXT: vptestmd %zmm25, %zmm25, %k1
13454 ; AVX512F-ONLY-NEXT: vmovdqa32 1024(%rsi), %zmm25 {%k1} {z}
13455 ; AVX512F-ONLY-NEXT: vptestmd %zmm23, %zmm23, %k1
13456 ; AVX512F-ONLY-NEXT: vmovdqa32 1088(%rsi), %zmm23 {%k1} {z}
13457 ; AVX512F-ONLY-NEXT: vptestmd %zmm21, %zmm21, %k1
13458 ; AVX512F-ONLY-NEXT: vmovdqa32 1152(%rsi), %zmm21 {%k1} {z}
13459 ; AVX512F-ONLY-NEXT: vptestmd %zmm19, %zmm19, %k1
13460 ; AVX512F-ONLY-NEXT: vmovdqa32 1216(%rsi), %zmm19 {%k1} {z}
13461 ; AVX512F-ONLY-NEXT: vptestmd %zmm17, %zmm17, %k1
13462 ; AVX512F-ONLY-NEXT: vmovdqa32 1280(%rsi), %zmm17 {%k1} {z}
13463 ; AVX512F-ONLY-NEXT: vptestmd %zmm15, %zmm15, %k1
13464 ; AVX512F-ONLY-NEXT: vmovdqa32 1344(%rsi), %zmm15 {%k1} {z}
13465 ; AVX512F-ONLY-NEXT: vptestmd %zmm13, %zmm13, %k1
13466 ; AVX512F-ONLY-NEXT: vmovdqa32 1408(%rsi), %zmm13 {%k1} {z}
13467 ; AVX512F-ONLY-NEXT: vptestmd %zmm11, %zmm11, %k1
13468 ; AVX512F-ONLY-NEXT: vmovdqa32 1472(%rsi), %zmm11 {%k1} {z}
13469 ; AVX512F-ONLY-NEXT: vptestmd %zmm9, %zmm9, %k1
13470 ; AVX512F-ONLY-NEXT: vmovdqa32 1536(%rsi), %zmm9 {%k1} {z}
13471 ; AVX512F-ONLY-NEXT: vptestmd %zmm7, %zmm7, %k1
13472 ; AVX512F-ONLY-NEXT: vmovdqa32 1600(%rsi), %zmm7 {%k1} {z}
13473 ; AVX512F-ONLY-NEXT: vptestmd %zmm5, %zmm5, %k1
13474 ; AVX512F-ONLY-NEXT: vmovdqa32 1664(%rsi), %zmm5 {%k1} {z}
13475 ; AVX512F-ONLY-NEXT: vptestmd %zmm4, %zmm4, %k1
13476 ; AVX512F-ONLY-NEXT: vmovdqa32 1728(%rsi), %zmm4 {%k1} {z}
13477 ; AVX512F-ONLY-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
13478 ; AVX512F-ONLY-NEXT: vptestmd %zmm28, %zmm28, %k1
13479 ; AVX512F-ONLY-NEXT: vmovdqa32 1792(%rsi), %zmm28 {%k1} {z}
13480 ; AVX512F-ONLY-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
13481 ; AVX512F-ONLY-NEXT: vptestmd %zmm29, %zmm29, %k1
13482 ; AVX512F-ONLY-NEXT: vmovdqa32 1856(%rsi), %zmm29 {%k1} {z}
13483 ; AVX512F-ONLY-NEXT: vmovdqu64 (%rsp), %zmm30 # 64-byte Reload
13484 ; AVX512F-ONLY-NEXT: vptestmd %zmm30, %zmm30, %k1
13485 ; AVX512F-ONLY-NEXT: vmovdqa32 1920(%rsi), %zmm30 {%k1} {z}
13486 ; AVX512F-ONLY-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
13487 ; AVX512F-ONLY-NEXT: vptestmd %zmm31, %zmm31, %k1
13488 ; AVX512F-ONLY-NEXT: vmovdqa32 1984(%rsi), %zmm31 {%k1} {z}
13489 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm31, 1984(%rdx)
13490 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm30, 1920(%rdx)
13491 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm29, 1856(%rdx)
13492 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm28, 1792(%rdx)
13493 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm4, 1728(%rdx)
13494 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm5, 1664(%rdx)
13495 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm7, 1600(%rdx)
13496 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm9, 1536(%rdx)
13497 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm11, 1472(%rdx)
13498 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm13, 1408(%rdx)
13499 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm15, 1344(%rdx)
13500 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm17, 1280(%rdx)
13501 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm19, 1216(%rdx)
13502 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm21, 1152(%rdx)
13503 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm23, 1088(%rdx)
13504 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm25, 1024(%rdx)
13505 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm27, 960(%rdx)
13506 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm6, 896(%rdx)
13507 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm3, 832(%rdx)
13508 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm2, 768(%rdx)
13509 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm1, 704(%rdx)
13510 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm0, 640(%rdx)
13511 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm24, 576(%rdx)
13512 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm22, 512(%rdx)
13513 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm20, 448(%rdx)
13514 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm18, 384(%rdx)
13515 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm16, 320(%rdx)
13516 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm14, 256(%rdx)
13517 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm12, 192(%rdx)
13518 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm10, 128(%rdx)
13519 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm8, 64(%rdx)
13520 ; AVX512F-ONLY-NEXT: vmovdqa64 %zmm26, (%rdx)
13521 ; AVX512F-ONLY-NEXT: addq $136, %rsp
13522 ; AVX512F-ONLY-NEXT: vzeroupper
13523 ; AVX512F-ONLY-NEXT: retq
13525 ; AVX512DQ-LABEL: mask_replication_factor8_vf64:
13526 ; AVX512DQ: # %bb.0:
13527 ; AVX512DQ-NEXT: subq $136, %rsp
13528 ; AVX512DQ-NEXT: kmovw 6(%rdi), %k0
13529 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm6
13530 ; AVX512DQ-NEXT: kmovw 4(%rdi), %k0
13531 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm8
13532 ; AVX512DQ-NEXT: kmovw 2(%rdi), %k0
13533 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm10
13534 ; AVX512DQ-NEXT: kmovw (%rdi), %k0
13535 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm12 = [14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15]
13536 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm12, %zmm0
13537 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13538 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm14 = [12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13]
13539 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm14, %zmm0
13540 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
13541 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm16 = [10,10,10,10,10,10,10,10,11,11,11,11,11,11,11,11]
13542 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm16, %zmm0
13543 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13544 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm18 = [8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9]
13545 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm18, %zmm0
13546 ; AVX512DQ-NEXT: vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
13547 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm20 = [6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7]
13548 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm20, %zmm4
13549 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm22 = [4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5]
13550 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm22, %zmm5
13551 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm24 = [2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3]
13552 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm24, %zmm7
13553 ; AVX512DQ-NEXT: vmovdqa64 {{.*#+}} zmm26 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1]
13554 ; AVX512DQ-NEXT: vpermd %zmm6, %zmm26, %zmm9
13555 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm12, %zmm11
13556 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm14, %zmm13
13557 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm16, %zmm15
13558 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm18, %zmm17
13559 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm20, %zmm19
13560 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm22, %zmm21
13561 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm24, %zmm23
13562 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm26, %zmm25
13563 ; AVX512DQ-NEXT: vpermd %zmm10, %zmm12, %zmm27
13564 ; AVX512DQ-NEXT: vpermd %zmm10, %zmm14, %zmm28
13565 ; AVX512DQ-NEXT: vpermd %zmm10, %zmm16, %zmm29
13566 ; AVX512DQ-NEXT: vpermd %zmm10, %zmm18, %zmm30
13567 ; AVX512DQ-NEXT: vpermd %zmm10, %zmm20, %zmm31
13568 ; AVX512DQ-NEXT: vpermd %zmm10, %zmm22, %zmm3
13569 ; AVX512DQ-NEXT: vpermd %zmm10, %zmm24, %zmm6
13570 ; AVX512DQ-NEXT: vpermd %zmm10, %zmm26, %zmm2
13571 ; AVX512DQ-NEXT: vpmovm2d %k0, %zmm8
13572 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm12, %zmm1
13573 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm14, %zmm0
13574 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm16, %zmm16
13575 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm18, %zmm14
13576 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm20, %zmm12
13577 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm22, %zmm10
13578 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm24, %zmm18
13579 ; AVX512DQ-NEXT: vpermd %zmm8, %zmm26, %zmm8
13580 ; AVX512DQ-NEXT: vpmovd2m %zmm8, %k1
13581 ; AVX512DQ-NEXT: vmovdqa32 (%rsi), %zmm26 {%k1} {z}
13582 ; AVX512DQ-NEXT: vpmovd2m %zmm18, %k1
13583 ; AVX512DQ-NEXT: vmovdqa32 64(%rsi), %zmm8 {%k1} {z}
13584 ; AVX512DQ-NEXT: vpmovd2m %zmm10, %k1
13585 ; AVX512DQ-NEXT: vmovdqa32 128(%rsi), %zmm10 {%k1} {z}
13586 ; AVX512DQ-NEXT: vpmovd2m %zmm12, %k1
13587 ; AVX512DQ-NEXT: vmovdqa32 192(%rsi), %zmm12 {%k1} {z}
13588 ; AVX512DQ-NEXT: vpmovd2m %zmm14, %k1
13589 ; AVX512DQ-NEXT: vmovdqa32 256(%rsi), %zmm14 {%k1} {z}
13590 ; AVX512DQ-NEXT: vpmovd2m %zmm16, %k1
13591 ; AVX512DQ-NEXT: vmovdqa32 320(%rsi), %zmm16 {%k1} {z}
13592 ; AVX512DQ-NEXT: vpmovd2m %zmm0, %k1
13593 ; AVX512DQ-NEXT: vmovdqa32 384(%rsi), %zmm18 {%k1} {z}
13594 ; AVX512DQ-NEXT: vpmovd2m %zmm1, %k1
13595 ; AVX512DQ-NEXT: vmovdqa32 448(%rsi), %zmm20 {%k1} {z}
13596 ; AVX512DQ-NEXT: vpmovd2m %zmm2, %k1
13597 ; AVX512DQ-NEXT: vmovdqa32 512(%rsi), %zmm22 {%k1} {z}
13598 ; AVX512DQ-NEXT: vpmovd2m %zmm6, %k1
13599 ; AVX512DQ-NEXT: vmovdqa32 576(%rsi), %zmm24 {%k1} {z}
13600 ; AVX512DQ-NEXT: vpmovd2m %zmm3, %k1
13601 ; AVX512DQ-NEXT: vmovdqa32 640(%rsi), %zmm0 {%k1} {z}
13602 ; AVX512DQ-NEXT: vpmovd2m %zmm31, %k1
13603 ; AVX512DQ-NEXT: vmovdqa32 704(%rsi), %zmm1 {%k1} {z}
13604 ; AVX512DQ-NEXT: vpmovd2m %zmm30, %k1
13605 ; AVX512DQ-NEXT: vmovdqa32 768(%rsi), %zmm2 {%k1} {z}
13606 ; AVX512DQ-NEXT: vpmovd2m %zmm29, %k1
13607 ; AVX512DQ-NEXT: vmovdqa32 832(%rsi), %zmm3 {%k1} {z}
13608 ; AVX512DQ-NEXT: vpmovd2m %zmm28, %k1
13609 ; AVX512DQ-NEXT: vmovdqa32 896(%rsi), %zmm6 {%k1} {z}
13610 ; AVX512DQ-NEXT: vpmovd2m %zmm27, %k1
13611 ; AVX512DQ-NEXT: vmovdqa32 960(%rsi), %zmm27 {%k1} {z}
13612 ; AVX512DQ-NEXT: vpmovd2m %zmm25, %k1
13613 ; AVX512DQ-NEXT: vmovdqa32 1024(%rsi), %zmm25 {%k1} {z}
13614 ; AVX512DQ-NEXT: vpmovd2m %zmm23, %k1
13615 ; AVX512DQ-NEXT: vmovdqa32 1088(%rsi), %zmm23 {%k1} {z}
13616 ; AVX512DQ-NEXT: vpmovd2m %zmm21, %k1
13617 ; AVX512DQ-NEXT: vmovdqa32 1152(%rsi), %zmm21 {%k1} {z}
13618 ; AVX512DQ-NEXT: vpmovd2m %zmm19, %k1
13619 ; AVX512DQ-NEXT: vmovdqa32 1216(%rsi), %zmm19 {%k1} {z}
13620 ; AVX512DQ-NEXT: vpmovd2m %zmm17, %k1
13621 ; AVX512DQ-NEXT: vmovdqa32 1280(%rsi), %zmm17 {%k1} {z}
13622 ; AVX512DQ-NEXT: vpmovd2m %zmm15, %k1
13623 ; AVX512DQ-NEXT: vmovdqa32 1344(%rsi), %zmm15 {%k1} {z}
13624 ; AVX512DQ-NEXT: vpmovd2m %zmm13, %k1
13625 ; AVX512DQ-NEXT: vmovdqa32 1408(%rsi), %zmm13 {%k1} {z}
13626 ; AVX512DQ-NEXT: vpmovd2m %zmm11, %k1
13627 ; AVX512DQ-NEXT: vmovdqa32 1472(%rsi), %zmm11 {%k1} {z}
13628 ; AVX512DQ-NEXT: vpmovd2m %zmm9, %k1
13629 ; AVX512DQ-NEXT: vmovdqa32 1536(%rsi), %zmm9 {%k1} {z}
13630 ; AVX512DQ-NEXT: vpmovd2m %zmm7, %k1
13631 ; AVX512DQ-NEXT: vmovdqa32 1600(%rsi), %zmm7 {%k1} {z}
13632 ; AVX512DQ-NEXT: vpmovd2m %zmm5, %k1
13633 ; AVX512DQ-NEXT: vmovdqa32 1664(%rsi), %zmm5 {%k1} {z}
13634 ; AVX512DQ-NEXT: vpmovd2m %zmm4, %k1
13635 ; AVX512DQ-NEXT: vmovdqa32 1728(%rsi), %zmm4 {%k1} {z}
13636 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
13637 ; AVX512DQ-NEXT: vpmovd2m %zmm28, %k1
13638 ; AVX512DQ-NEXT: vmovdqa32 1792(%rsi), %zmm28 {%k1} {z}
13639 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
13640 ; AVX512DQ-NEXT: vpmovd2m %zmm29, %k1
13641 ; AVX512DQ-NEXT: vmovdqa32 1856(%rsi), %zmm29 {%k1} {z}
13642 ; AVX512DQ-NEXT: vmovdqu64 (%rsp), %zmm30 # 64-byte Reload
13643 ; AVX512DQ-NEXT: vpmovd2m %zmm30, %k1
13644 ; AVX512DQ-NEXT: vmovdqa32 1920(%rsi), %zmm30 {%k1} {z}
13645 ; AVX512DQ-NEXT: vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
13646 ; AVX512DQ-NEXT: vpmovd2m %zmm31, %k1
13647 ; AVX512DQ-NEXT: vmovdqa32 1984(%rsi), %zmm31 {%k1} {z}
13648 ; AVX512DQ-NEXT: vmovdqa64 %zmm31, 1984(%rdx)
13649 ; AVX512DQ-NEXT: vmovdqa64 %zmm30, 1920(%rdx)
13650 ; AVX512DQ-NEXT: vmovdqa64 %zmm29, 1856(%rdx)
13651 ; AVX512DQ-NEXT: vmovdqa64 %zmm28, 1792(%rdx)
13652 ; AVX512DQ-NEXT: vmovdqa64 %zmm4, 1728(%rdx)
13653 ; AVX512DQ-NEXT: vmovdqa64 %zmm5, 1664(%rdx)
13654 ; AVX512DQ-NEXT: vmovdqa64 %zmm7, 1600(%rdx)
13655 ; AVX512DQ-NEXT: vmovdqa64 %zmm9, 1536(%rdx)
13656 ; AVX512DQ-NEXT: vmovdqa64 %zmm11, 1472(%rdx)
13657 ; AVX512DQ-NEXT: vmovdqa64 %zmm13, 1408(%rdx)
13658 ; AVX512DQ-NEXT: vmovdqa64 %zmm15, 1344(%rdx)
13659 ; AVX512DQ-NEXT: vmovdqa64 %zmm17, 1280(%rdx)
13660 ; AVX512DQ-NEXT: vmovdqa64 %zmm19, 1216(%rdx)
13661 ; AVX512DQ-NEXT: vmovdqa64 %zmm21, 1152(%rdx)
13662 ; AVX512DQ-NEXT: vmovdqa64 %zmm23, 1088(%rdx)
13663 ; AVX512DQ-NEXT: vmovdqa64 %zmm25, 1024(%rdx)
13664 ; AVX512DQ-NEXT: vmovdqa64 %zmm27, 960(%rdx)
13665 ; AVX512DQ-NEXT: vmovdqa64 %zmm6, 896(%rdx)
13666 ; AVX512DQ-NEXT: vmovdqa64 %zmm3, 832(%rdx)
13667 ; AVX512DQ-NEXT: vmovdqa64 %zmm2, 768(%rdx)
13668 ; AVX512DQ-NEXT: vmovdqa64 %zmm1, 704(%rdx)
13669 ; AVX512DQ-NEXT: vmovdqa64 %zmm0, 640(%rdx)
13670 ; AVX512DQ-NEXT: vmovdqa64 %zmm24, 576(%rdx)
13671 ; AVX512DQ-NEXT: vmovdqa64 %zmm22, 512(%rdx)
13672 ; AVX512DQ-NEXT: vmovdqa64 %zmm20, 448(%rdx)
13673 ; AVX512DQ-NEXT: vmovdqa64 %zmm18, 384(%rdx)
13674 ; AVX512DQ-NEXT: vmovdqa64 %zmm16, 320(%rdx)
13675 ; AVX512DQ-NEXT: vmovdqa64 %zmm14, 256(%rdx)
13676 ; AVX512DQ-NEXT: vmovdqa64 %zmm12, 192(%rdx)
13677 ; AVX512DQ-NEXT: vmovdqa64 %zmm10, 128(%rdx)
13678 ; AVX512DQ-NEXT: vmovdqa64 %zmm8, 64(%rdx)
13679 ; AVX512DQ-NEXT: vmovdqa64 %zmm26, (%rdx)
13680 ; AVX512DQ-NEXT: addq $136, %rsp
13681 ; AVX512DQ-NEXT: vzeroupper
13682 ; AVX512DQ-NEXT: retq
13684 ; AVX512BW-LABEL: mask_replication_factor8_vf64:
13685 ; AVX512BW: # %bb.0:
13686 ; AVX512BW-NEXT: kmovq (%rdi), %k0
13687 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0
13688 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm0[6,7,6,7,6,7,6,7]
13689 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9,10,10,10,10,10,10,10,10,11,11,11,11,11,11,11,11,12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13,14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15]
13690 ; AVX512BW-NEXT: vpshufb %zmm2, %zmm1, %zmm7
13691 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7]
13692 ; AVX512BW-NEXT: vpshufb %zmm3, %zmm1, %zmm12
13693 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm0[4,5,4,5,4,5,4,5]
13694 ; AVX512BW-NEXT: vpshufb %zmm2, %zmm1, %zmm16
13695 ; AVX512BW-NEXT: vpshufb %zmm3, %zmm1, %zmm15
13696 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm1 = zmm0[2,3,2,3,2,3,2,3]
13697 ; AVX512BW-NEXT: vpshufb %zmm2, %zmm1, %zmm10
13698 ; AVX512BW-NEXT: vpshufb %zmm3, %zmm1, %zmm5
13699 ; AVX512BW-NEXT: vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1,0,1,0,1]
13700 ; AVX512BW-NEXT: vpshufb %zmm2, %zmm0, %zmm1
13701 ; AVX512BW-NEXT: vpshufb %zmm3, %zmm0, %zmm0
13702 ; AVX512BW-NEXT: vpmovb2m %zmm0, %k2
13703 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k1
13704 ; AVX512BW-NEXT: vmovdqa32 64(%rsi), %zmm0 {%k1} {z}
13705 ; AVX512BW-NEXT: vpmovb2m %zmm1, %k1
13706 ; AVX512BW-NEXT: vmovdqa32 (%rsi), %zmm1 {%k2} {z}
13707 ; AVX512BW-NEXT: kshiftrq $32, %k2, %k2
13708 ; AVX512BW-NEXT: vmovdqa32 128(%rsi), %zmm2 {%k2} {z}
13709 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k2
13710 ; AVX512BW-NEXT: vmovdqa32 192(%rsi), %zmm3 {%k2} {z}
13711 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k2
13712 ; AVX512BW-NEXT: vmovdqa32 320(%rsi), %zmm4 {%k2} {z}
13713 ; AVX512BW-NEXT: vpmovb2m %zmm5, %k2
13714 ; AVX512BW-NEXT: vmovdqa32 256(%rsi), %zmm5 {%k1} {z}
13715 ; AVX512BW-NEXT: kshiftrq $32, %k1, %k1
13716 ; AVX512BW-NEXT: vmovdqa32 384(%rsi), %zmm6 {%k1} {z}
13717 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k1
13718 ; AVX512BW-NEXT: vmovdqa32 448(%rsi), %zmm8 {%k1} {z}
13719 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k1
13720 ; AVX512BW-NEXT: vmovdqa32 576(%rsi), %zmm9 {%k1} {z}
13721 ; AVX512BW-NEXT: vpmovb2m %zmm10, %k1
13722 ; AVX512BW-NEXT: vmovdqa32 512(%rsi), %zmm10 {%k2} {z}
13723 ; AVX512BW-NEXT: kshiftrq $32, %k2, %k2
13724 ; AVX512BW-NEXT: vmovdqa32 640(%rsi), %zmm11 {%k2} {z}
13725 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k2
13726 ; AVX512BW-NEXT: vmovdqa32 704(%rsi), %zmm13 {%k2} {z}
13727 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k2
13728 ; AVX512BW-NEXT: vmovdqa32 832(%rsi), %zmm14 {%k2} {z}
13729 ; AVX512BW-NEXT: vpmovb2m %zmm15, %k2
13730 ; AVX512BW-NEXT: vmovdqa32 768(%rsi), %zmm15 {%k1} {z}
13731 ; AVX512BW-NEXT: kshiftrq $32, %k1, %k1
13732 ; AVX512BW-NEXT: vmovdqa32 896(%rsi), %zmm17 {%k1} {z}
13733 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k1
13734 ; AVX512BW-NEXT: vmovdqa32 960(%rsi), %zmm18 {%k1} {z}
13735 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k1
13736 ; AVX512BW-NEXT: vmovdqa32 1088(%rsi), %zmm19 {%k1} {z}
13737 ; AVX512BW-NEXT: vpmovb2m %zmm16, %k1
13738 ; AVX512BW-NEXT: vmovdqa32 1024(%rsi), %zmm16 {%k2} {z}
13739 ; AVX512BW-NEXT: kshiftrq $32, %k2, %k2
13740 ; AVX512BW-NEXT: vmovdqa32 1152(%rsi), %zmm20 {%k2} {z}
13741 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k2
13742 ; AVX512BW-NEXT: vmovdqa32 1216(%rsi), %zmm21 {%k2} {z}
13743 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k2
13744 ; AVX512BW-NEXT: vmovdqa32 1344(%rsi), %zmm22 {%k2} {z}
13745 ; AVX512BW-NEXT: vpmovb2m %zmm12, %k2
13746 ; AVX512BW-NEXT: vmovdqa32 1280(%rsi), %zmm12 {%k1} {z}
13747 ; AVX512BW-NEXT: kshiftrq $32, %k1, %k1
13748 ; AVX512BW-NEXT: vmovdqa32 1408(%rsi), %zmm23 {%k1} {z}
13749 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k1
13750 ; AVX512BW-NEXT: vmovdqa32 1472(%rsi), %zmm24 {%k1} {z}
13751 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k1
13752 ; AVX512BW-NEXT: vmovdqa32 1600(%rsi), %zmm25 {%k1} {z}
13753 ; AVX512BW-NEXT: vpmovb2m %zmm7, %k1
13754 ; AVX512BW-NEXT: vmovdqa32 1536(%rsi), %zmm7 {%k2} {z}
13755 ; AVX512BW-NEXT: kshiftrq $32, %k2, %k2
13756 ; AVX512BW-NEXT: vmovdqa32 1664(%rsi), %zmm26 {%k2} {z}
13757 ; AVX512BW-NEXT: kshiftrd $16, %k2, %k2
13758 ; AVX512BW-NEXT: vmovdqa32 1728(%rsi), %zmm27 {%k2} {z}
13759 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k2
13760 ; AVX512BW-NEXT: vmovdqa32 1856(%rsi), %zmm28 {%k2} {z}
13761 ; AVX512BW-NEXT: vmovdqa32 1792(%rsi), %zmm29 {%k1} {z}
13762 ; AVX512BW-NEXT: kshiftrq $32, %k1, %k1
13763 ; AVX512BW-NEXT: vmovdqa32 1920(%rsi), %zmm30 {%k1} {z}
13764 ; AVX512BW-NEXT: kshiftrd $16, %k1, %k1
13765 ; AVX512BW-NEXT: vmovdqa32 1984(%rsi), %zmm31 {%k1} {z}
13766 ; AVX512BW-NEXT: vmovdqa64 %zmm31, 1984(%rdx)
13767 ; AVX512BW-NEXT: vmovdqa64 %zmm30, 1920(%rdx)
13768 ; AVX512BW-NEXT: vmovdqa64 %zmm28, 1856(%rdx)
13769 ; AVX512BW-NEXT: vmovdqa64 %zmm29, 1792(%rdx)
13770 ; AVX512BW-NEXT: vmovdqa64 %zmm27, 1728(%rdx)
13771 ; AVX512BW-NEXT: vmovdqa64 %zmm26, 1664(%rdx)
13772 ; AVX512BW-NEXT: vmovdqa64 %zmm25, 1600(%rdx)
13773 ; AVX512BW-NEXT: vmovdqa64 %zmm7, 1536(%rdx)
13774 ; AVX512BW-NEXT: vmovdqa64 %zmm24, 1472(%rdx)
13775 ; AVX512BW-NEXT: vmovdqa64 %zmm23, 1408(%rdx)
13776 ; AVX512BW-NEXT: vmovdqa64 %zmm22, 1344(%rdx)
13777 ; AVX512BW-NEXT: vmovdqa64 %zmm12, 1280(%rdx)
13778 ; AVX512BW-NEXT: vmovdqa64 %zmm21, 1216(%rdx)
13779 ; AVX512BW-NEXT: vmovdqa64 %zmm20, 1152(%rdx)
13780 ; AVX512BW-NEXT: vmovdqa64 %zmm19, 1088(%rdx)
13781 ; AVX512BW-NEXT: vmovdqa64 %zmm16, 1024(%rdx)
13782 ; AVX512BW-NEXT: vmovdqa64 %zmm18, 960(%rdx)
13783 ; AVX512BW-NEXT: vmovdqa64 %zmm17, 896(%rdx)
13784 ; AVX512BW-NEXT: vmovdqa64 %zmm14, 832(%rdx)
13785 ; AVX512BW-NEXT: vmovdqa64 %zmm15, 768(%rdx)
13786 ; AVX512BW-NEXT: vmovdqa64 %zmm13, 704(%rdx)
13787 ; AVX512BW-NEXT: vmovdqa64 %zmm11, 640(%rdx)
13788 ; AVX512BW-NEXT: vmovdqa64 %zmm9, 576(%rdx)
13789 ; AVX512BW-NEXT: vmovdqa64 %zmm10, 512(%rdx)
13790 ; AVX512BW-NEXT: vmovdqa64 %zmm8, 448(%rdx)
13791 ; AVX512BW-NEXT: vmovdqa64 %zmm6, 384(%rdx)
13792 ; AVX512BW-NEXT: vmovdqa64 %zmm4, 320(%rdx)
13793 ; AVX512BW-NEXT: vmovdqa64 %zmm5, 256(%rdx)
13794 ; AVX512BW-NEXT: vmovdqa64 %zmm3, 192(%rdx)
13795 ; AVX512BW-NEXT: vmovdqa64 %zmm2, 128(%rdx)
13796 ; AVX512BW-NEXT: vmovdqa64 %zmm0, 64(%rdx)
13797 ; AVX512BW-NEXT: vmovdqa64 %zmm1, (%rdx)
13798 ; AVX512BW-NEXT: vzeroupper
13799 ; AVX512BW-NEXT: retq
13800 %src.mask = load <64 x i1>, ptr %in.maskvec, align 64
13801 %tgt.mask = shufflevector <64 x i1> %src.mask, <64 x i1> poison, <512 x i32> <i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 0, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 33, i32 33, i32 33, i32 33, i32 33, i32 33, i32 33, i32 33, i32 34, i32 34, i32 34, i32 34, i32 34, i32 34, i32 34, i32 34, i32 35, i32 35, i32 35, i32 35, i32 35, i32 35, i32 35, i32 35, i32 36, i32 36, i32 36, i32 36, i32 36, i32 36, i32 36, i32 36, i32 37, i32 37, i32 37, i32 37, i32 37, i32 37, i32 37, i32 37, i32 38, i32 38, i32 38, i32 38, i32 38, i32 38, i32 38, i32 38, i32 39, i32 39, i32 39, i32 39, i32 39, i32 39, i32 39, i32 39, i32 40, i32 40, i32 40, i32 40, i32 40, i32 40, i32 40, i32 40, i32 41, i32 41, i32 41, i32 41, i32 41, i32 41, i32 41, i32 41, i32 42, i32 42, i32 42, i32 42, i32 42, i32 42, i32 42, i32 42, i32 43, i32 43, i32 43, i32 43, i32 43, i32 43, i32 43, i32 43, i32 44, i32 44, i32 44, i32 44, i32 44, i32 44, i32 44, i32 44, i32 45, i32 45, i32 45, i32 45, i32 45, i32 45, i32 45, i32 45, i32 46, i32 46, i32 46, i32 46, i32 46, i32 46, i32 46, i32 46, i32 47, i32 47, i32 47, i32 47, i32 47, i32 47, i32 47, i32 47, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 49, i32 49, i32 49, i32 49, i32 49, i32 49, i32 49, i32 49, i32 50, i32 50, i32 50, i32 50, i32 50, i32 50, i32 50, i32 50, i32 51, i32 51, i32 51, i32 51, i32 51, i32 51, i32 51, i32 51, i32 52, i32 52, i32 52, i32 52, i32 52, i32 52, i32 52, i32 52, i32 53, i32 53, i32 53, i32 53, i32 53, i32 53, i32 53, i32 53, i32 54, i32 54, i32 54, i32 54, i32 54, i32 54, i32 54, i32 54, i32 55, i32 55, i32 55, i32 55, i32 55, i32 55, i32 55, i32 55, i32 56, i32 56, i32 56, i32 56, i32 56, i32 56, i32 56, i32 56, i32 57, i32 57, i32 57, i32 57, i32 57, i32 57, i32 57, i32 57, i32 58, i32 58, i32 58, i32 58, i32 58, i32 58, i32 58, i32 58, i32 59, i32 59, i32 59, i32 59, i32 59, i32 59, i32 59, i32 59, i32 60, i32 60, i32 60, i32 60, i32 60, i32 60, i32 60, i32 60, i32 61, i32 61, i32 61, i32 61, i32 61, i32 61, i32 61, i32 61, i32 62, i32 62, i32 62, i32 62, i32 62, i32 62, i32 62, i32 62, i32 63, i32 63, i32 63, i32 63, i32 63, i32 63, i32 63, i32 63>
13802 %data = call <512 x i32> @llvm.masked.load.v512i32.p0(ptr %in.vec, i32 64, <512 x i1> %tgt.mask, <512 x i32> poison)
13803 store <512 x i32> %data, ptr %out.vec, align 64
ret void
}
13807 declare <4 x i32> @llvm.masked.load.v4i32.p0(ptr, i32, <4 x i1>, <4 x i32>)
13808 declare <6 x i32> @llvm.masked.load.v6i32.p0(ptr, i32, <6 x i1>, <6 x i32>)
13809 declare <8 x i32> @llvm.masked.load.v8i32.p0(ptr, i32, <8 x i1>, <8 x i32>)
13810 declare <10 x i32> @llvm.masked.load.v10i32.p0(ptr, i32, <10 x i1>, <10 x i32>)
13811 declare <12 x i32> @llvm.masked.load.v12i32.p0(ptr, i32, <12 x i1>, <12 x i32>)
13812 declare <14 x i32> @llvm.masked.load.v14i32.p0(ptr, i32, <14 x i1>, <14 x i32>)
13813 declare <16 x i32> @llvm.masked.load.v16i32.p0(ptr, i32, <16 x i1>, <16 x i32>)
13814 declare <20 x i32> @llvm.masked.load.v20i32.p0(ptr, i32, <20 x i1>, <20 x i32>)
13815 declare <24 x i32> @llvm.masked.load.v24i32.p0(ptr, i32, <24 x i1>, <24 x i32>)
13816 declare <28 x i32> @llvm.masked.load.v28i32.p0(ptr, i32, <28 x i1>, <28 x i32>)
13817 declare <32 x i32> @llvm.masked.load.v32i32.p0(ptr, i32, <32 x i1>, <32 x i32>)
13818 declare <40 x i32> @llvm.masked.load.v40i32.p0(ptr, i32, <40 x i1>, <40 x i32>)
13819 declare <48 x i32> @llvm.masked.load.v48i32.p0(ptr, i32, <48 x i1>, <48 x i32>)
13820 declare <56 x i32> @llvm.masked.load.v56i32.p0(ptr, i32, <56 x i1>, <56 x i32>)
13821 declare <64 x i32> @llvm.masked.load.v64i32.p0(ptr, i32, <64 x i1>, <64 x i32>)
13822 declare <80 x i32> @llvm.masked.load.v80i32.p0(ptr, i32, <80 x i1>, <80 x i32>)
13823 declare <96 x i32> @llvm.masked.load.v96i32.p0(ptr, i32, <96 x i1>, <96 x i32>)
13824 declare <112 x i32> @llvm.masked.load.v112i32.p0(ptr, i32, <112 x i1>, <112 x i32>)
13825 declare <128 x i32> @llvm.masked.load.v128i32.p0(ptr, i32, <128 x i1>, <128 x i32>)
13826 declare <160 x i32> @llvm.masked.load.v160i32.p0(ptr, i32, <160 x i1>, <160 x i32>)
13827 declare <192 x i32> @llvm.masked.load.v192i32.p0(ptr, i32, <192 x i1>, <192 x i32>)
13828 declare <224 x i32> @llvm.masked.load.v224i32.p0(ptr, i32, <224 x i1>, <224 x i32>)
13829 declare <256 x i32> @llvm.masked.load.v256i32.p0(ptr, i32, <256 x i1>, <256 x i32>)
13830 declare <320 x i32> @llvm.masked.load.v320i32.p0(ptr, i32, <320 x i1>, <320 x i32>)
13831 declare <384 x i32> @llvm.masked.load.v384i32.p0(ptr, i32, <384 x i1>, <384 x i32>)
13832 declare <448 x i32> @llvm.masked.load.v448i32.p0(ptr, i32, <448 x i1>, <448 x i32>)
13833 declare <512 x i32> @llvm.masked.load.v512i32.p0(ptr, i32, <512 x i1>, <512 x i32>)
13834 ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
13836 ; FALLBACK0: {{.*}}
13837 ; FALLBACK1: {{.*}}
13838 ; FALLBACK2: {{.*}}
13839 ; FALLBACK3: {{.*}}
13840 ; FALLBACK4: {{.*}}
13841 ; FALLBACK5: {{.*}}
13842 ; FALLBACK6: {{.*}}
13843 ; FALLBACK7: {{.*}}