1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mcpu=skylake -mtriple=i386-unknown-linux-gnu -mattr=+avx2 | FileCheck --check-prefix=X86 %s
3 ; RUN: llc < %s -mcpu=skylake -mtriple=x86_64-unknown-linux-gnu -mattr=+avx2 | FileCheck --check-prefix=X64 %s
4 ; RUN: llc < %s -mcpu=skx -mtriple=x86_64-unknown-linux-gnu -mattr=+avx2,-avx512f | FileCheck --check-prefix=X64 %s
5 ; RUN: llc < %s -mcpu=skylake -mtriple=x86_64-unknown-linux-gnu -mattr=-avx2 | FileCheck --check-prefix=NOGATHER %s
7 declare <2 x i32> @llvm.masked.gather.v2i32(<2 x i32*> %ptrs, i32 %align, <2 x i1> %masks, <2 x i32> %passthro)
9 define <2 x i32> @masked_gather_v2i32(<2 x i32*>* %ptr, <2 x i1> %masks, <2 x i32> %passthro) {
10 ; X86-LABEL: masked_gather_v2i32:
11 ; X86: # %bb.0: # %entry
12 ; X86-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0,2],zero,zero
13 ; X86-NEXT: vpslld $31, %xmm0, %xmm0
14 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
15 ; X86-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
16 ; X86-NEXT: vpgatherdd %xmm0, (,%xmm2), %xmm1
17 ; X86-NEXT: vmovdqa %xmm1, %xmm0
20 ; X64-LABEL: masked_gather_v2i32:
21 ; X64: # %bb.0: # %entry
22 ; X64-NEXT: vmovdqa (%rdi), %xmm2
23 ; X64-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
24 ; X64-NEXT: vpslld $31, %xmm0, %xmm0
25 ; X64-NEXT: vpgatherqd %xmm0, (,%xmm2), %xmm1
26 ; X64-NEXT: vmovdqa %xmm1, %xmm0
29 ; NOGATHER-LABEL: masked_gather_v2i32:
30 ; NOGATHER: # %bb.0: # %entry
31 ; NOGATHER-NEXT: vmovdqa (%rdi), %xmm2
32 ; NOGATHER-NEXT: vpsllq $63, %xmm0, %xmm0
33 ; NOGATHER-NEXT: vmovmskpd %xmm0, %eax
34 ; NOGATHER-NEXT: testb $1, %al
35 ; NOGATHER-NEXT: jne .LBB0_1
36 ; NOGATHER-NEXT: # %bb.2: # %else
37 ; NOGATHER-NEXT: testb $2, %al
38 ; NOGATHER-NEXT: jne .LBB0_3
39 ; NOGATHER-NEXT: .LBB0_4: # %else2
40 ; NOGATHER-NEXT: vmovdqa %xmm1, %xmm0
42 ; NOGATHER-NEXT: .LBB0_1: # %cond.load
43 ; NOGATHER-NEXT: vmovq %xmm2, %rcx
44 ; NOGATHER-NEXT: vpinsrd $0, (%rcx), %xmm1, %xmm1
45 ; NOGATHER-NEXT: testb $2, %al
46 ; NOGATHER-NEXT: je .LBB0_4
47 ; NOGATHER-NEXT: .LBB0_3: # %cond.load1
48 ; NOGATHER-NEXT: vpextrq $1, %xmm2, %rax
49 ; NOGATHER-NEXT: vpinsrd $1, (%rax), %xmm1, %xmm1
50 ; NOGATHER-NEXT: vmovdqa %xmm1, %xmm0
; Load the two element pointers from memory, then gather 2 x i32 with the
; given mask and passthrough. With AVX2 this becomes a vpgatherdd/vpgatherqd;
; without it the gather is scalarized into per-lane branchy loads (see above).
53 %ld = load <2 x i32*>, <2 x i32*>* %ptr
54 %res = call <2 x i32> @llvm.masked.gather.v2i32(<2 x i32*> %ld, i32 0, <2 x i1> %masks, <2 x i32> %passthro)
58 define <4 x i32> @masked_gather_v2i32_concat(<2 x i32*>* %ptr, <2 x i1> %masks, <2 x i32> %passthro) {
59 ; X86-LABEL: masked_gather_v2i32_concat:
60 ; X86: # %bb.0: # %entry
61 ; X86-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0,2],zero,zero
62 ; X86-NEXT: vpslld $31, %xmm0, %xmm0
63 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
64 ; X86-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
65 ; X86-NEXT: vpgatherdd %xmm0, (,%xmm2), %xmm1
66 ; X86-NEXT: vmovdqa %xmm1, %xmm0
69 ; X64-LABEL: masked_gather_v2i32_concat:
70 ; X64: # %bb.0: # %entry
71 ; X64-NEXT: vmovdqa (%rdi), %xmm2
72 ; X64-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
73 ; X64-NEXT: vpslld $31, %xmm0, %xmm0
74 ; X64-NEXT: vpgatherqd %xmm0, (,%xmm2), %xmm1
75 ; X64-NEXT: vmovdqa %xmm1, %xmm0
78 ; NOGATHER-LABEL: masked_gather_v2i32_concat:
79 ; NOGATHER: # %bb.0: # %entry
80 ; NOGATHER-NEXT: vmovdqa (%rdi), %xmm2
81 ; NOGATHER-NEXT: vpsllq $63, %xmm0, %xmm0
82 ; NOGATHER-NEXT: vmovmskpd %xmm0, %eax
83 ; NOGATHER-NEXT: testb $1, %al
84 ; NOGATHER-NEXT: jne .LBB1_1
85 ; NOGATHER-NEXT: # %bb.2: # %else
86 ; NOGATHER-NEXT: testb $2, %al
87 ; NOGATHER-NEXT: jne .LBB1_3
88 ; NOGATHER-NEXT: .LBB1_4: # %else2
89 ; NOGATHER-NEXT: vmovdqa %xmm1, %xmm0
91 ; NOGATHER-NEXT: .LBB1_1: # %cond.load
92 ; NOGATHER-NEXT: vmovq %xmm2, %rcx
93 ; NOGATHER-NEXT: vpinsrd $0, (%rcx), %xmm1, %xmm1
94 ; NOGATHER-NEXT: testb $2, %al
95 ; NOGATHER-NEXT: je .LBB1_4
96 ; NOGATHER-NEXT: .LBB1_3: # %cond.load1
97 ; NOGATHER-NEXT: vpextrq $1, %xmm2, %rax
98 ; NOGATHER-NEXT: vpinsrd $1, (%rax), %xmm1, %xmm1
99 ; NOGATHER-NEXT: vmovdqa %xmm1, %xmm0
100 ; NOGATHER-NEXT: retq
; Same v2i32 gather as above, but the result is widened to <4 x i32> via a
; shufflevector; the expected code is identical (no extra instructions for
; the concat), checking the widening folds away.
102 %ld = load <2 x i32*>, <2 x i32*>* %ptr
103 %res = call <2 x i32> @llvm.masked.gather.v2i32(<2 x i32*> %ld, i32 0, <2 x i1> %masks, <2 x i32> %passthro)
104 %res2 = shufflevector <2 x i32> %res, <2 x i32> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
108 declare <2 x float> @llvm.masked.gather.v2float(<2 x float*> %ptrs, i32 %align, <2 x i1> %masks, <2 x float> %passthro)
110 define <2 x float> @masked_gather_v2float(<2 x float*>* %ptr, <2 x i1> %masks, <2 x float> %passthro) {
111 ; X86-LABEL: masked_gather_v2float:
112 ; X86: # %bb.0: # %entry
113 ; X86-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0,2],zero,zero
114 ; X86-NEXT: vpslld $31, %xmm0, %xmm0
115 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
116 ; X86-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
117 ; X86-NEXT: vgatherdps %xmm0, (,%xmm2), %xmm1
118 ; X86-NEXT: vmovaps %xmm1, %xmm0
121 ; X64-LABEL: masked_gather_v2float:
122 ; X64: # %bb.0: # %entry
123 ; X64-NEXT: vmovaps (%rdi), %xmm2
124 ; X64-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
125 ; X64-NEXT: vpslld $31, %xmm0, %xmm0
126 ; X64-NEXT: vgatherqps %xmm0, (,%xmm2), %xmm1
127 ; X64-NEXT: vmovaps %xmm1, %xmm0
130 ; NOGATHER-LABEL: masked_gather_v2float:
131 ; NOGATHER: # %bb.0: # %entry
132 ; NOGATHER-NEXT: vmovdqa (%rdi), %xmm2
133 ; NOGATHER-NEXT: vpsllq $63, %xmm0, %xmm0
134 ; NOGATHER-NEXT: vmovmskpd %xmm0, %eax
135 ; NOGATHER-NEXT: testb $1, %al
136 ; NOGATHER-NEXT: jne .LBB2_1
137 ; NOGATHER-NEXT: # %bb.2: # %else
138 ; NOGATHER-NEXT: testb $2, %al
139 ; NOGATHER-NEXT: jne .LBB2_3
140 ; NOGATHER-NEXT: .LBB2_4: # %else2
141 ; NOGATHER-NEXT: vmovaps %xmm1, %xmm0
142 ; NOGATHER-NEXT: retq
143 ; NOGATHER-NEXT: .LBB2_1: # %cond.load
144 ; NOGATHER-NEXT: vmovq %xmm2, %rcx
145 ; NOGATHER-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
146 ; NOGATHER-NEXT: vblendps {{.*#+}} xmm1 = xmm0[0],xmm1[1,2,3]
147 ; NOGATHER-NEXT: testb $2, %al
148 ; NOGATHER-NEXT: je .LBB2_4
149 ; NOGATHER-NEXT: .LBB2_3: # %cond.load1
150 ; NOGATHER-NEXT: vpextrq $1, %xmm2, %rax
151 ; NOGATHER-NEXT: vinsertps {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[2,3]
152 ; NOGATHER-NEXT: vmovaps %xmm1, %xmm0
153 ; NOGATHER-NEXT: retq
; Floating-point variant of the 2-element gather: with AVX2 this lowers to
; vgatherdps/vgatherqps; the scalar fallback uses vmovss/vinsertps to merge
; loaded lanes into the passthrough value.
155 %ld = load <2 x float*>, <2 x float*>* %ptr
156 %res = call <2 x float> @llvm.masked.gather.v2float(<2 x float*> %ld, i32 0, <2 x i1> %masks, <2 x float> %passthro)
160 define <4 x float> @masked_gather_v2float_concat(<2 x float*>* %ptr, <2 x i1> %masks, <2 x float> %passthro) {
161 ; X86-LABEL: masked_gather_v2float_concat:
162 ; X86: # %bb.0: # %entry
163 ; X86-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0,2],zero,zero
164 ; X86-NEXT: vpslld $31, %xmm0, %xmm0
165 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
166 ; X86-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
167 ; X86-NEXT: vgatherdps %xmm0, (,%xmm2), %xmm1
168 ; X86-NEXT: vmovaps %xmm1, %xmm0
171 ; X64-LABEL: masked_gather_v2float_concat:
172 ; X64: # %bb.0: # %entry
173 ; X64-NEXT: vmovaps (%rdi), %xmm2
174 ; X64-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
175 ; X64-NEXT: vpslld $31, %xmm0, %xmm0
176 ; X64-NEXT: vgatherqps %xmm0, (,%xmm2), %xmm1
177 ; X64-NEXT: vmovaps %xmm1, %xmm0
180 ; NOGATHER-LABEL: masked_gather_v2float_concat:
181 ; NOGATHER: # %bb.0: # %entry
182 ; NOGATHER-NEXT: vmovdqa (%rdi), %xmm2
183 ; NOGATHER-NEXT: vpsllq $63, %xmm0, %xmm0
184 ; NOGATHER-NEXT: vmovmskpd %xmm0, %eax
185 ; NOGATHER-NEXT: testb $1, %al
186 ; NOGATHER-NEXT: jne .LBB3_1
187 ; NOGATHER-NEXT: # %bb.2: # %else
188 ; NOGATHER-NEXT: testb $2, %al
189 ; NOGATHER-NEXT: jne .LBB3_3
190 ; NOGATHER-NEXT: .LBB3_4: # %else2
191 ; NOGATHER-NEXT: vmovaps %xmm1, %xmm0
192 ; NOGATHER-NEXT: retq
193 ; NOGATHER-NEXT: .LBB3_1: # %cond.load
194 ; NOGATHER-NEXT: vmovq %xmm2, %rcx
195 ; NOGATHER-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
196 ; NOGATHER-NEXT: vblendps {{.*#+}} xmm1 = xmm0[0],xmm1[1,2,3]
197 ; NOGATHER-NEXT: testb $2, %al
198 ; NOGATHER-NEXT: je .LBB3_4
199 ; NOGATHER-NEXT: .LBB3_3: # %cond.load1
200 ; NOGATHER-NEXT: vpextrq $1, %xmm2, %rax
201 ; NOGATHER-NEXT: vinsertps {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[2,3]
202 ; NOGATHER-NEXT: vmovaps %xmm1, %xmm0
203 ; NOGATHER-NEXT: retq
; v2float gather widened to <4 x float> with a shufflevector; the checks are
; identical to the non-concat version, confirming the widening is free.
205 %ld = load <2 x float*>, <2 x float*>* %ptr
206 %res = call <2 x float> @llvm.masked.gather.v2float(<2 x float*> %ld, i32 0, <2 x i1> %masks, <2 x float> %passthro)
207 %res2 = shufflevector <2 x float> %res, <2 x float> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
208 ret <4 x float> %res2
212 declare <4 x i32> @llvm.masked.gather.v4i32(<4 x i32*> %ptrs, i32 %align, <4 x i1> %masks, <4 x i32> %passthro)
214 define <4 x i32> @masked_gather_v4i32(<4 x i32*> %ptrs, <4 x i1> %masks, <4 x i32> %passthro) {
215 ; X86-LABEL: masked_gather_v4i32:
216 ; X86: # %bb.0: # %entry
217 ; X86-NEXT: vpslld $31, %xmm1, %xmm1
218 ; X86-NEXT: vpgatherdd %xmm1, (,%xmm0), %xmm2
219 ; X86-NEXT: vmovdqa %xmm2, %xmm0
222 ; X64-LABEL: masked_gather_v4i32:
223 ; X64: # %bb.0: # %entry
224 ; X64-NEXT: vpslld $31, %xmm1, %xmm1
225 ; X64-NEXT: vpgatherqd %xmm1, (,%ymm0), %xmm2
226 ; X64-NEXT: vmovdqa %xmm2, %xmm0
227 ; X64-NEXT: vzeroupper
230 ; NOGATHER-LABEL: masked_gather_v4i32:
231 ; NOGATHER: # %bb.0: # %entry
232 ; NOGATHER-NEXT: vpslld $31, %xmm1, %xmm1
233 ; NOGATHER-NEXT: vmovmskps %xmm1, %eax
234 ; NOGATHER-NEXT: testb $1, %al
235 ; NOGATHER-NEXT: je .LBB4_2
236 ; NOGATHER-NEXT: # %bb.1: # %cond.load
237 ; NOGATHER-NEXT: vmovq %xmm0, %rcx
238 ; NOGATHER-NEXT: vpinsrd $0, (%rcx), %xmm2, %xmm2
239 ; NOGATHER-NEXT: .LBB4_2: # %else
240 ; NOGATHER-NEXT: testb $2, %al
241 ; NOGATHER-NEXT: je .LBB4_4
242 ; NOGATHER-NEXT: # %bb.3: # %cond.load1
243 ; NOGATHER-NEXT: vpextrq $1, %xmm0, %rcx
244 ; NOGATHER-NEXT: vpinsrd $1, (%rcx), %xmm2, %xmm2
245 ; NOGATHER-NEXT: .LBB4_4: # %else2
246 ; NOGATHER-NEXT: vextractf128 $1, %ymm0, %xmm0
247 ; NOGATHER-NEXT: testb $4, %al
248 ; NOGATHER-NEXT: jne .LBB4_5
249 ; NOGATHER-NEXT: # %bb.6: # %else5
250 ; NOGATHER-NEXT: testb $8, %al
251 ; NOGATHER-NEXT: jne .LBB4_7
252 ; NOGATHER-NEXT: .LBB4_8: # %else8
253 ; NOGATHER-NEXT: vmovdqa %xmm2, %xmm0
254 ; NOGATHER-NEXT: vzeroupper
255 ; NOGATHER-NEXT: retq
256 ; NOGATHER-NEXT: .LBB4_5: # %cond.load4
257 ; NOGATHER-NEXT: vmovq %xmm0, %rcx
258 ; NOGATHER-NEXT: vpinsrd $2, (%rcx), %xmm2, %xmm2
259 ; NOGATHER-NEXT: testb $8, %al
260 ; NOGATHER-NEXT: je .LBB4_8
261 ; NOGATHER-NEXT: .LBB4_7: # %cond.load7
262 ; NOGATHER-NEXT: vpextrq $1, %xmm0, %rax
263 ; NOGATHER-NEXT: vpinsrd $3, (%rax), %xmm2, %xmm2
264 ; NOGATHER-NEXT: vzeroupper
265 ; NOGATHER-NEXT: retq
; 4-lane i32 gather where the pointer vector is passed directly in registers
; (no load); on 64-bit targets the 4 pointers occupy a ymm, so the fallback
; must extract the upper 128-bit half for lanes 2 and 3.
268 %res = call <4 x i32> @llvm.masked.gather.v4i32(<4 x i32*> %ptrs, i32 0, <4 x i1> %masks, <4 x i32> %passthro)
272 declare <4 x float> @llvm.masked.gather.v4float(<4 x float*> %ptrs, i32 %align, <4 x i1> %masks, <4 x float> %passthro)
274 define <4 x float> @masked_gather_v4float(<4 x float*> %ptrs, <4 x i1> %masks, <4 x float> %passthro) {
275 ; X86-LABEL: masked_gather_v4float:
276 ; X86: # %bb.0: # %entry
277 ; X86-NEXT: vpslld $31, %xmm1, %xmm1
278 ; X86-NEXT: vgatherdps %xmm1, (,%xmm0), %xmm2
279 ; X86-NEXT: vmovaps %xmm2, %xmm0
282 ; X64-LABEL: masked_gather_v4float:
283 ; X64: # %bb.0: # %entry
284 ; X64-NEXT: vpslld $31, %xmm1, %xmm1
285 ; X64-NEXT: vgatherqps %xmm1, (,%ymm0), %xmm2
286 ; X64-NEXT: vmovaps %xmm2, %xmm0
287 ; X64-NEXT: vzeroupper
290 ; NOGATHER-LABEL: masked_gather_v4float:
291 ; NOGATHER: # %bb.0: # %entry
292 ; NOGATHER-NEXT: vpslld $31, %xmm1, %xmm1
293 ; NOGATHER-NEXT: vmovmskps %xmm1, %eax
294 ; NOGATHER-NEXT: testb $1, %al
295 ; NOGATHER-NEXT: je .LBB5_2
296 ; NOGATHER-NEXT: # %bb.1: # %cond.load
297 ; NOGATHER-NEXT: vmovq %xmm0, %rcx
298 ; NOGATHER-NEXT: vmovss {{.*#+}} xmm1 = mem[0],zero,zero,zero
299 ; NOGATHER-NEXT: vblendps {{.*#+}} xmm2 = xmm1[0],xmm2[1,2,3]
300 ; NOGATHER-NEXT: .LBB5_2: # %else
301 ; NOGATHER-NEXT: testb $2, %al
302 ; NOGATHER-NEXT: je .LBB5_4
303 ; NOGATHER-NEXT: # %bb.3: # %cond.load1
304 ; NOGATHER-NEXT: vpextrq $1, %xmm0, %rcx
305 ; NOGATHER-NEXT: vinsertps {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[2,3]
306 ; NOGATHER-NEXT: .LBB5_4: # %else2
307 ; NOGATHER-NEXT: vextractf128 $1, %ymm0, %xmm0
308 ; NOGATHER-NEXT: testb $4, %al
309 ; NOGATHER-NEXT: jne .LBB5_5
310 ; NOGATHER-NEXT: # %bb.6: # %else5
311 ; NOGATHER-NEXT: testb $8, %al
312 ; NOGATHER-NEXT: jne .LBB5_7
313 ; NOGATHER-NEXT: .LBB5_8: # %else8
314 ; NOGATHER-NEXT: vmovaps %xmm2, %xmm0
315 ; NOGATHER-NEXT: vzeroupper
316 ; NOGATHER-NEXT: retq
317 ; NOGATHER-NEXT: .LBB5_5: # %cond.load4
318 ; NOGATHER-NEXT: vmovq %xmm0, %rcx
319 ; NOGATHER-NEXT: vinsertps {{.*#+}} xmm2 = xmm2[0,1],mem[0],xmm2[3]
320 ; NOGATHER-NEXT: testb $8, %al
321 ; NOGATHER-NEXT: je .LBB5_8
322 ; NOGATHER-NEXT: .LBB5_7: # %cond.load7
323 ; NOGATHER-NEXT: vpextrq $1, %xmm0, %rax
324 ; NOGATHER-NEXT: vinsertps {{.*#+}} xmm2 = xmm2[0,1,2],mem[0]
325 ; NOGATHER-NEXT: vmovaps %xmm2, %xmm0
326 ; NOGATHER-NEXT: vzeroupper
327 ; NOGATHER-NEXT: retq
; 4-lane float gather with the pointer vector in registers; AVX2 targets use
; a single vgatherdps/vgatherqps, the fallback scalarizes with vinsertps.
329 %res = call <4 x float> @llvm.masked.gather.v4float(<4 x float*> %ptrs, i32 0, <4 x i1> %masks, <4 x float> %passthro)
333 declare <8 x i32> @llvm.masked.gather.v8i32(<8 x i32*> %ptrs, i32 %align, <8 x i1> %masks, <8 x i32> %passthro)
335 define <8 x i32> @masked_gather_v8i32(<8 x i32*>* %ptr, <8 x i1> %masks, <8 x i32> %passthro) {
336 ; X86-LABEL: masked_gather_v8i32:
337 ; X86: # %bb.0: # %entry
338 ; X86-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
339 ; X86-NEXT: vpslld $31, %ymm0, %ymm0
340 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
341 ; X86-NEXT: vmovdqa (%eax), %ymm2
342 ; X86-NEXT: vpgatherdd %ymm0, (,%ymm2), %ymm1
343 ; X86-NEXT: vmovdqa %ymm1, %ymm0
346 ; X64-LABEL: masked_gather_v8i32:
347 ; X64: # %bb.0: # %entry
348 ; X64-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
349 ; X64-NEXT: vpslld $31, %ymm0, %ymm0
350 ; X64-NEXT: vpsrad $31, %ymm0, %ymm0
351 ; X64-NEXT: vmovdqa (%rdi), %ymm2
352 ; X64-NEXT: vmovdqa 32(%rdi), %ymm3
353 ; X64-NEXT: vextracti128 $1, %ymm1, %xmm4
354 ; X64-NEXT: vextracti128 $1, %ymm0, %xmm5
355 ; X64-NEXT: vpgatherqd %xmm5, (,%ymm3), %xmm4
356 ; X64-NEXT: vpgatherqd %xmm0, (,%ymm2), %xmm1
357 ; X64-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm0
360 ; NOGATHER-LABEL: masked_gather_v8i32:
361 ; NOGATHER: # %bb.0: # %entry
362 ; NOGATHER-NEXT: vmovdqa (%rdi), %ymm3
363 ; NOGATHER-NEXT: vmovdqa 32(%rdi), %ymm2
364 ; NOGATHER-NEXT: vpsllw $15, %xmm0, %xmm0
365 ; NOGATHER-NEXT: vpacksswb %xmm0, %xmm0, %xmm0
366 ; NOGATHER-NEXT: vpmovmskb %xmm0, %eax
367 ; NOGATHER-NEXT: testb $1, %al
368 ; NOGATHER-NEXT: je .LBB6_2
369 ; NOGATHER-NEXT: # %bb.1: # %cond.load
370 ; NOGATHER-NEXT: vmovq %xmm3, %rcx
371 ; NOGATHER-NEXT: vpinsrd $0, (%rcx), %xmm1, %xmm0
372 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
373 ; NOGATHER-NEXT: .LBB6_2: # %else
374 ; NOGATHER-NEXT: testb $2, %al
375 ; NOGATHER-NEXT: je .LBB6_4
376 ; NOGATHER-NEXT: # %bb.3: # %cond.load1
377 ; NOGATHER-NEXT: vpextrq $1, %xmm3, %rcx
378 ; NOGATHER-NEXT: vpinsrd $1, (%rcx), %xmm1, %xmm0
379 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
380 ; NOGATHER-NEXT: .LBB6_4: # %else2
381 ; NOGATHER-NEXT: vextractf128 $1, %ymm3, %xmm0
382 ; NOGATHER-NEXT: testb $4, %al
383 ; NOGATHER-NEXT: jne .LBB6_5
384 ; NOGATHER-NEXT: # %bb.6: # %else5
385 ; NOGATHER-NEXT: testb $8, %al
386 ; NOGATHER-NEXT: jne .LBB6_7
387 ; NOGATHER-NEXT: .LBB6_8: # %else8
388 ; NOGATHER-NEXT: testb $16, %al
389 ; NOGATHER-NEXT: jne .LBB6_9
390 ; NOGATHER-NEXT: .LBB6_10: # %else11
391 ; NOGATHER-NEXT: testb $32, %al
392 ; NOGATHER-NEXT: je .LBB6_12
393 ; NOGATHER-NEXT: .LBB6_11: # %cond.load13
394 ; NOGATHER-NEXT: vpextrq $1, %xmm2, %rcx
395 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm0
396 ; NOGATHER-NEXT: vpinsrd $1, (%rcx), %xmm0, %xmm0
397 ; NOGATHER-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm1
398 ; NOGATHER-NEXT: .LBB6_12: # %else14
399 ; NOGATHER-NEXT: vextractf128 $1, %ymm2, %xmm0
400 ; NOGATHER-NEXT: testb $64, %al
401 ; NOGATHER-NEXT: jne .LBB6_13
402 ; NOGATHER-NEXT: # %bb.14: # %else17
403 ; NOGATHER-NEXT: testb $-128, %al
404 ; NOGATHER-NEXT: jne .LBB6_15
405 ; NOGATHER-NEXT: .LBB6_16: # %else20
406 ; NOGATHER-NEXT: vmovaps %ymm1, %ymm0
407 ; NOGATHER-NEXT: retq
408 ; NOGATHER-NEXT: .LBB6_5: # %cond.load4
409 ; NOGATHER-NEXT: vmovq %xmm0, %rcx
410 ; NOGATHER-NEXT: vpinsrd $2, (%rcx), %xmm1, %xmm3
411 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm1[4,5,6,7]
412 ; NOGATHER-NEXT: testb $8, %al
413 ; NOGATHER-NEXT: je .LBB6_8
414 ; NOGATHER-NEXT: .LBB6_7: # %cond.load7
415 ; NOGATHER-NEXT: vpextrq $1, %xmm0, %rcx
416 ; NOGATHER-NEXT: vpinsrd $3, (%rcx), %xmm1, %xmm0
417 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
418 ; NOGATHER-NEXT: testb $16, %al
419 ; NOGATHER-NEXT: je .LBB6_10
420 ; NOGATHER-NEXT: .LBB6_9: # %cond.load10
421 ; NOGATHER-NEXT: vmovq %xmm2, %rcx
422 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm0
423 ; NOGATHER-NEXT: vpinsrd $0, (%rcx), %xmm0, %xmm0
424 ; NOGATHER-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm1
425 ; NOGATHER-NEXT: testb $32, %al
426 ; NOGATHER-NEXT: jne .LBB6_11
427 ; NOGATHER-NEXT: jmp .LBB6_12
428 ; NOGATHER-NEXT: .LBB6_13: # %cond.load16
429 ; NOGATHER-NEXT: vmovq %xmm0, %rcx
430 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm2
431 ; NOGATHER-NEXT: vpinsrd $2, (%rcx), %xmm2, %xmm2
432 ; NOGATHER-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
433 ; NOGATHER-NEXT: testb $-128, %al
434 ; NOGATHER-NEXT: je .LBB6_16
435 ; NOGATHER-NEXT: .LBB6_15: # %cond.load19
436 ; NOGATHER-NEXT: vpextrq $1, %xmm0, %rax
437 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm0
438 ; NOGATHER-NEXT: vpinsrd $3, (%rax), %xmm0, %xmm0
439 ; NOGATHER-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm1
440 ; NOGATHER-NEXT: vmovaps %ymm1, %ymm0
441 ; NOGATHER-NEXT: retq
; 8-lane i32 gather: pointers come from two 32-byte loads. 32-bit targets use
; one ymm vpgatherdd; 64-bit targets need two vpgatherqd halves re-inserted,
; and the scalar fallback emits eight mask-bit-tested conditional loads.
443 %ld = load <8 x i32*>, <8 x i32*>* %ptr
444 %res = call <8 x i32> @llvm.masked.gather.v8i32(<8 x i32*> %ld, i32 0, <8 x i1> %masks, <8 x i32> %passthro)
448 declare <8 x float> @llvm.masked.gather.v8float(<8 x float*> %ptrs, i32 %align, <8 x i1> %masks, <8 x float> %passthro)
450 define <8 x float> @masked_gather_v8float(<8 x float*>* %ptr, <8 x i1> %masks, <8 x float> %passthro) {
451 ; X86-LABEL: masked_gather_v8float:
452 ; X86: # %bb.0: # %entry
453 ; X86-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
454 ; X86-NEXT: vpslld $31, %ymm0, %ymm0
455 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
456 ; X86-NEXT: vmovaps (%eax), %ymm2
457 ; X86-NEXT: vgatherdps %ymm0, (,%ymm2), %ymm1
458 ; X86-NEXT: vmovaps %ymm1, %ymm0
461 ; X64-LABEL: masked_gather_v8float:
462 ; X64: # %bb.0: # %entry
463 ; X64-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
464 ; X64-NEXT: vpslld $31, %ymm0, %ymm0
465 ; X64-NEXT: vpsrad $31, %ymm0, %ymm0
466 ; X64-NEXT: vmovaps (%rdi), %ymm2
467 ; X64-NEXT: vmovaps 32(%rdi), %ymm3
468 ; X64-NEXT: vextractf128 $1, %ymm1, %xmm4
469 ; X64-NEXT: vextracti128 $1, %ymm0, %xmm5
470 ; X64-NEXT: vgatherqps %xmm5, (,%ymm3), %xmm4
471 ; X64-NEXT: vgatherqps %xmm0, (,%ymm2), %xmm1
472 ; X64-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm0
475 ; NOGATHER-LABEL: masked_gather_v8float:
476 ; NOGATHER: # %bb.0: # %entry
477 ; NOGATHER-NEXT: vmovdqa (%rdi), %ymm3
478 ; NOGATHER-NEXT: vmovdqa 32(%rdi), %ymm2
479 ; NOGATHER-NEXT: vpsllw $15, %xmm0, %xmm0
480 ; NOGATHER-NEXT: vpacksswb %xmm0, %xmm0, %xmm0
481 ; NOGATHER-NEXT: vpmovmskb %xmm0, %eax
482 ; NOGATHER-NEXT: testb $1, %al
483 ; NOGATHER-NEXT: je .LBB7_2
484 ; NOGATHER-NEXT: # %bb.1: # %cond.load
485 ; NOGATHER-NEXT: vmovq %xmm3, %rcx
486 ; NOGATHER-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
487 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0],ymm1[1,2,3,4,5,6,7]
488 ; NOGATHER-NEXT: .LBB7_2: # %else
489 ; NOGATHER-NEXT: testb $2, %al
490 ; NOGATHER-NEXT: je .LBB7_4
491 ; NOGATHER-NEXT: # %bb.3: # %cond.load1
492 ; NOGATHER-NEXT: vpextrq $1, %xmm3, %rcx
493 ; NOGATHER-NEXT: vinsertps {{.*#+}} xmm0 = xmm1[0],mem[0],xmm1[2,3]
494 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
495 ; NOGATHER-NEXT: .LBB7_4: # %else2
496 ; NOGATHER-NEXT: vextractf128 $1, %ymm3, %xmm0
497 ; NOGATHER-NEXT: testb $4, %al
498 ; NOGATHER-NEXT: jne .LBB7_5
499 ; NOGATHER-NEXT: # %bb.6: # %else5
500 ; NOGATHER-NEXT: testb $8, %al
501 ; NOGATHER-NEXT: jne .LBB7_7
502 ; NOGATHER-NEXT: .LBB7_8: # %else8
503 ; NOGATHER-NEXT: testb $16, %al
504 ; NOGATHER-NEXT: jne .LBB7_9
505 ; NOGATHER-NEXT: .LBB7_10: # %else11
506 ; NOGATHER-NEXT: testb $32, %al
507 ; NOGATHER-NEXT: je .LBB7_12
508 ; NOGATHER-NEXT: .LBB7_11: # %cond.load13
509 ; NOGATHER-NEXT: vpextrq $1, %xmm2, %rcx
510 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm0
511 ; NOGATHER-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[2,3]
512 ; NOGATHER-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm1
513 ; NOGATHER-NEXT: .LBB7_12: # %else14
514 ; NOGATHER-NEXT: vextractf128 $1, %ymm2, %xmm0
515 ; NOGATHER-NEXT: testb $64, %al
516 ; NOGATHER-NEXT: jne .LBB7_13
517 ; NOGATHER-NEXT: # %bb.14: # %else17
518 ; NOGATHER-NEXT: testb $-128, %al
519 ; NOGATHER-NEXT: jne .LBB7_15
520 ; NOGATHER-NEXT: .LBB7_16: # %else20
521 ; NOGATHER-NEXT: vmovaps %ymm1, %ymm0
522 ; NOGATHER-NEXT: retq
523 ; NOGATHER-NEXT: .LBB7_5: # %cond.load4
524 ; NOGATHER-NEXT: vmovq %xmm0, %rcx
525 ; NOGATHER-NEXT: vinsertps {{.*#+}} xmm3 = xmm1[0,1],mem[0],xmm1[3]
526 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm1[4,5,6,7]
527 ; NOGATHER-NEXT: testb $8, %al
528 ; NOGATHER-NEXT: je .LBB7_8
529 ; NOGATHER-NEXT: .LBB7_7: # %cond.load7
530 ; NOGATHER-NEXT: vpextrq $1, %xmm0, %rcx
531 ; NOGATHER-NEXT: vinsertps {{.*#+}} xmm0 = xmm1[0,1,2],mem[0]
532 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
533 ; NOGATHER-NEXT: testb $16, %al
534 ; NOGATHER-NEXT: je .LBB7_10
535 ; NOGATHER-NEXT: .LBB7_9: # %cond.load10
536 ; NOGATHER-NEXT: vmovq %xmm2, %rcx
537 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm0
538 ; NOGATHER-NEXT: vmovd {{.*#+}} xmm3 = mem[0],zero,zero,zero
539 ; NOGATHER-NEXT: vpblendw {{.*#+}} xmm0 = xmm3[0,1],xmm0[2,3,4,5,6,7]
540 ; NOGATHER-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm1
541 ; NOGATHER-NEXT: testb $32, %al
542 ; NOGATHER-NEXT: jne .LBB7_11
543 ; NOGATHER-NEXT: jmp .LBB7_12
544 ; NOGATHER-NEXT: .LBB7_13: # %cond.load16
545 ; NOGATHER-NEXT: vmovq %xmm0, %rcx
546 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm2
547 ; NOGATHER-NEXT: vinsertps {{.*#+}} xmm2 = xmm2[0,1],mem[0],xmm2[3]
548 ; NOGATHER-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
549 ; NOGATHER-NEXT: testb $-128, %al
550 ; NOGATHER-NEXT: je .LBB7_16
551 ; NOGATHER-NEXT: .LBB7_15: # %cond.load19
552 ; NOGATHER-NEXT: vpextrq $1, %xmm0, %rax
553 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm0
554 ; NOGATHER-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0,1,2],mem[0]
555 ; NOGATHER-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm1
556 ; NOGATHER-NEXT: vmovaps %ymm1, %ymm0
557 ; NOGATHER-NEXT: retq
; 8-lane float gather mirroring the v8i32 case: single vgatherdps on 32-bit,
; two vgatherqps halves on 64-bit, and an eight-way scalarized fallback.
559 %ld = load <8 x float*>, <8 x float*>* %ptr
560 %res = call <8 x float> @llvm.masked.gather.v8float(<8 x float*> %ld, i32 0, <8 x i1> %masks, <8 x float> %passthro)
564 declare <4 x i64> @llvm.masked.gather.v4i64(<4 x i64*> %ptrs, i32 %align, <4 x i1> %masks, <4 x i64> %passthro)
566 define <4 x i64> @masked_gather_v4i64(<4 x i64*>* %ptr, <4 x i1> %masks, <4 x i64> %passthro) {
567 ; X86-LABEL: masked_gather_v4i64:
568 ; X86: # %bb.0: # %entry
569 ; X86-NEXT: vpslld $31, %xmm0, %xmm0
570 ; X86-NEXT: vpmovsxdq %xmm0, %ymm0
571 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
572 ; X86-NEXT: vmovdqa (%eax), %xmm2
573 ; X86-NEXT: vpgatherdq %ymm0, (,%xmm2), %ymm1
574 ; X86-NEXT: vmovdqa %ymm1, %ymm0
577 ; X64-LABEL: masked_gather_v4i64:
578 ; X64: # %bb.0: # %entry
579 ; X64-NEXT: vpslld $31, %xmm0, %xmm0
580 ; X64-NEXT: vpmovsxdq %xmm0, %ymm0
581 ; X64-NEXT: vmovdqa (%rdi), %ymm2
582 ; X64-NEXT: vpgatherqq %ymm0, (,%ymm2), %ymm1
583 ; X64-NEXT: vmovdqa %ymm1, %ymm0
586 ; NOGATHER-LABEL: masked_gather_v4i64:
587 ; NOGATHER: # %bb.0: # %entry
588 ; NOGATHER-NEXT: vmovdqa (%rdi), %ymm2
589 ; NOGATHER-NEXT: vpslld $31, %xmm0, %xmm0
590 ; NOGATHER-NEXT: vmovmskps %xmm0, %eax
591 ; NOGATHER-NEXT: testb $1, %al
592 ; NOGATHER-NEXT: je .LBB8_2
593 ; NOGATHER-NEXT: # %bb.1: # %cond.load
594 ; NOGATHER-NEXT: vmovq %xmm2, %rcx
595 ; NOGATHER-NEXT: vpinsrq $0, (%rcx), %xmm1, %xmm0
596 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
597 ; NOGATHER-NEXT: .LBB8_2: # %else
598 ; NOGATHER-NEXT: testb $2, %al
599 ; NOGATHER-NEXT: je .LBB8_4
600 ; NOGATHER-NEXT: # %bb.3: # %cond.load1
601 ; NOGATHER-NEXT: vpextrq $1, %xmm2, %rcx
602 ; NOGATHER-NEXT: vpinsrq $1, (%rcx), %xmm1, %xmm0
603 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
604 ; NOGATHER-NEXT: .LBB8_4: # %else2
605 ; NOGATHER-NEXT: vextractf128 $1, %ymm2, %xmm0
606 ; NOGATHER-NEXT: testb $4, %al
607 ; NOGATHER-NEXT: jne .LBB8_5
608 ; NOGATHER-NEXT: # %bb.6: # %else5
609 ; NOGATHER-NEXT: testb $8, %al
610 ; NOGATHER-NEXT: jne .LBB8_7
611 ; NOGATHER-NEXT: .LBB8_8: # %else8
612 ; NOGATHER-NEXT: vmovaps %ymm1, %ymm0
613 ; NOGATHER-NEXT: retq
614 ; NOGATHER-NEXT: .LBB8_5: # %cond.load4
615 ; NOGATHER-NEXT: vmovq %xmm0, %rcx
616 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm2
617 ; NOGATHER-NEXT: vpinsrq $0, (%rcx), %xmm2, %xmm2
618 ; NOGATHER-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
619 ; NOGATHER-NEXT: testb $8, %al
620 ; NOGATHER-NEXT: je .LBB8_8
621 ; NOGATHER-NEXT: .LBB8_7: # %cond.load7
622 ; NOGATHER-NEXT: vpextrq $1, %xmm0, %rax
623 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm0
624 ; NOGATHER-NEXT: vpinsrq $1, (%rax), %xmm0, %xmm0
625 ; NOGATHER-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm1
626 ; NOGATHER-NEXT: vmovaps %ymm1, %ymm0
627 ; NOGATHER-NEXT: retq
; 4-lane i64 gather: the mask is sign-extended to 64-bit lanes for
; vpgatherdq/vpgatherqq; the fallback uses vpinsrq per active lane.
629 %ld = load <4 x i64*>, <4 x i64*>* %ptr
630 %res = call <4 x i64> @llvm.masked.gather.v4i64(<4 x i64*> %ld, i32 0, <4 x i1> %masks, <4 x i64> %passthro)
634 declare <4 x double> @llvm.masked.gather.v4double(<4 x double*> %ptrs, i32 %align, <4 x i1> %masks, <4 x double> %passthro)
636 define <4 x double> @masked_gather_v4double(<4 x double*>* %ptr, <4 x i1> %masks, <4 x double> %passthro) {
637 ; X86-LABEL: masked_gather_v4double:
638 ; X86: # %bb.0: # %entry
639 ; X86-NEXT: vpslld $31, %xmm0, %xmm0
640 ; X86-NEXT: vpmovsxdq %xmm0, %ymm0
641 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
642 ; X86-NEXT: vmovapd (%eax), %xmm2
643 ; X86-NEXT: vgatherdpd %ymm0, (,%xmm2), %ymm1
644 ; X86-NEXT: vmovapd %ymm1, %ymm0
647 ; X64-LABEL: masked_gather_v4double:
648 ; X64: # %bb.0: # %entry
649 ; X64-NEXT: vpslld $31, %xmm0, %xmm0
650 ; X64-NEXT: vpmovsxdq %xmm0, %ymm0
651 ; X64-NEXT: vmovapd (%rdi), %ymm2
652 ; X64-NEXT: vgatherqpd %ymm0, (,%ymm2), %ymm1
653 ; X64-NEXT: vmovapd %ymm1, %ymm0
656 ; NOGATHER-LABEL: masked_gather_v4double:
657 ; NOGATHER: # %bb.0: # %entry
658 ; NOGATHER-NEXT: vmovdqa (%rdi), %ymm2
659 ; NOGATHER-NEXT: vpslld $31, %xmm0, %xmm0
660 ; NOGATHER-NEXT: vmovmskps %xmm0, %eax
661 ; NOGATHER-NEXT: testb $1, %al
662 ; NOGATHER-NEXT: je .LBB9_2
663 ; NOGATHER-NEXT: # %bb.1: # %cond.load
664 ; NOGATHER-NEXT: vmovq %xmm2, %rcx
665 ; NOGATHER-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
666 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1],ymm1[2,3,4,5,6,7]
667 ; NOGATHER-NEXT: .LBB9_2: # %else
668 ; NOGATHER-NEXT: testb $2, %al
669 ; NOGATHER-NEXT: je .LBB9_4
670 ; NOGATHER-NEXT: # %bb.3: # %cond.load1
671 ; NOGATHER-NEXT: vpextrq $1, %xmm2, %rcx
672 ; NOGATHER-NEXT: vmovhps {{.*#+}} xmm0 = xmm1[0,1],mem[0,1]
673 ; NOGATHER-NEXT: vblendps {{.*#+}} ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
674 ; NOGATHER-NEXT: .LBB9_4: # %else2
675 ; NOGATHER-NEXT: vextractf128 $1, %ymm2, %xmm0
676 ; NOGATHER-NEXT: testb $4, %al
677 ; NOGATHER-NEXT: jne .LBB9_5
678 ; NOGATHER-NEXT: # %bb.6: # %else5
679 ; NOGATHER-NEXT: testb $8, %al
680 ; NOGATHER-NEXT: jne .LBB9_7
681 ; NOGATHER-NEXT: .LBB9_8: # %else8
682 ; NOGATHER-NEXT: vmovaps %ymm1, %ymm0
683 ; NOGATHER-NEXT: retq
684 ; NOGATHER-NEXT: .LBB9_5: # %cond.load4
685 ; NOGATHER-NEXT: vmovq %xmm0, %rcx
686 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm2
687 ; NOGATHER-NEXT: vmovlps {{.*#+}} xmm2 = mem[0,1],xmm2[2,3]
688 ; NOGATHER-NEXT: vinsertf128 $1, %xmm2, %ymm1, %ymm1
689 ; NOGATHER-NEXT: testb $8, %al
690 ; NOGATHER-NEXT: je .LBB9_8
691 ; NOGATHER-NEXT: .LBB9_7: # %cond.load7
692 ; NOGATHER-NEXT: vpextrq $1, %xmm0, %rax
693 ; NOGATHER-NEXT: vextractf128 $1, %ymm1, %xmm0
694 ; NOGATHER-NEXT: vmovhps {{.*#+}} xmm0 = xmm0[0,1],mem[0,1]
695 ; NOGATHER-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm1
696 ; NOGATHER-NEXT: vmovaps %ymm1, %ymm0
697 ; NOGATHER-NEXT: retq
; 4-lane double gather: vgatherdpd/vgatherqpd with a sign-extended mask;
; the fallback merges lanes with vmovsd/vmovlps/vmovhps.
699 %ld = load <4 x double*>, <4 x double*>* %ptr
700 %res = call <4 x double> @llvm.masked.gather.v4double(<4 x double*> %ld, i32 0, <4 x i1> %masks, <4 x double> %passthro)
701 ret <4 x double> %res
704 declare <2 x i64> @llvm.masked.gather.v2i64(<2 x i64*> %ptrs, i32 %align, <2 x i1> %masks, <2 x i64> %passthro)
706 define <2 x i64> @masked_gather_v2i64(<2 x i64*>* %ptr, <2 x i1> %masks, <2 x i64> %passthro) {
707 ; X86-LABEL: masked_gather_v2i64:
708 ; X86: # %bb.0: # %entry
709 ; X86-NEXT: vpsllq $63, %xmm0, %xmm0
710 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
711 ; X86-NEXT: vmovq {{.*#+}} xmm2 = mem[0],zero
712 ; X86-NEXT: vpgatherdq %xmm0, (,%xmm2), %xmm1
713 ; X86-NEXT: vmovdqa %xmm1, %xmm0
716 ; X64-LABEL: masked_gather_v2i64:
717 ; X64: # %bb.0: # %entry
718 ; X64-NEXT: vpsllq $63, %xmm0, %xmm0
719 ; X64-NEXT: vmovdqa (%rdi), %xmm2
720 ; X64-NEXT: vpgatherqq %xmm0, (,%xmm2), %xmm1
721 ; X64-NEXT: vmovdqa %xmm1, %xmm0
724 ; NOGATHER-LABEL: masked_gather_v2i64:
725 ; NOGATHER: # %bb.0: # %entry
726 ; NOGATHER-NEXT: vmovdqa (%rdi), %xmm2
727 ; NOGATHER-NEXT: vpsllq $63, %xmm0, %xmm0
728 ; NOGATHER-NEXT: vmovmskpd %xmm0, %eax
729 ; NOGATHER-NEXT: testb $1, %al
730 ; NOGATHER-NEXT: jne .LBB10_1
731 ; NOGATHER-NEXT: # %bb.2: # %else
732 ; NOGATHER-NEXT: testb $2, %al
733 ; NOGATHER-NEXT: jne .LBB10_3
734 ; NOGATHER-NEXT: .LBB10_4: # %else2
735 ; NOGATHER-NEXT: vmovdqa %xmm1, %xmm0
736 ; NOGATHER-NEXT: retq
737 ; NOGATHER-NEXT: .LBB10_1: # %cond.load
738 ; NOGATHER-NEXT: vmovq %xmm2, %rcx
739 ; NOGATHER-NEXT: vpinsrq $0, (%rcx), %xmm1, %xmm1
740 ; NOGATHER-NEXT: testb $2, %al
741 ; NOGATHER-NEXT: je .LBB10_4
742 ; NOGATHER-NEXT: .LBB10_3: # %cond.load1
743 ; NOGATHER-NEXT: vpextrq $1, %xmm2, %rax
744 ; NOGATHER-NEXT: vpinsrq $1, (%rax), %xmm1, %xmm1
745 ; NOGATHER-NEXT: vmovdqa %xmm1, %xmm0
746 ; NOGATHER-NEXT: retq
; 2-lane i64 gather: mask bits are moved into lane sign bits with vpsllq;
; AVX2 uses vpgatherdq/vpgatherqq, the fallback does two vpinsrq loads.
748 %ld = load <2 x i64*>, <2 x i64*>* %ptr
749 %res = call <2 x i64> @llvm.masked.gather.v2i64(<2 x i64*> %ld, i32 0, <2 x i1> %masks, <2 x i64> %passthro)
753 declare <2 x double> @llvm.masked.gather.v2double(<2 x double*> %ptrs, i32 %align, <2 x i1> %masks, <2 x double> %passthro)
755 define <2 x double> @masked_gather_v2double(<2 x double*>* %ptr, <2 x i1> %masks, <2 x double> %passthro) {
756 ; X86-LABEL: masked_gather_v2double:
757 ; X86: # %bb.0: # %entry
758 ; X86-NEXT: vpsllq $63, %xmm0, %xmm0
759 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
760 ; X86-NEXT: vmovsd {{.*#+}} xmm2 = mem[0],zero
761 ; X86-NEXT: vgatherdpd %xmm0, (,%xmm2), %xmm1
762 ; X86-NEXT: vmovapd %xmm1, %xmm0
765 ; X64-LABEL: masked_gather_v2double:
766 ; X64: # %bb.0: # %entry
767 ; X64-NEXT: vpsllq $63, %xmm0, %xmm0
768 ; X64-NEXT: vmovapd (%rdi), %xmm2
769 ; X64-NEXT: vgatherqpd %xmm0, (,%xmm2), %xmm1
770 ; X64-NEXT: vmovapd %xmm1, %xmm0
773 ; NOGATHER-LABEL: masked_gather_v2double:
774 ; NOGATHER: # %bb.0: # %entry
775 ; NOGATHER-NEXT: vmovdqa (%rdi), %xmm2
776 ; NOGATHER-NEXT: vpsllq $63, %xmm0, %xmm0
777 ; NOGATHER-NEXT: vmovmskpd %xmm0, %eax
778 ; NOGATHER-NEXT: testb $1, %al
779 ; NOGATHER-NEXT: jne .LBB11_1
780 ; NOGATHER-NEXT: # %bb.2: # %else
781 ; NOGATHER-NEXT: testb $2, %al
782 ; NOGATHER-NEXT: jne .LBB11_3
783 ; NOGATHER-NEXT: .LBB11_4: # %else2
784 ; NOGATHER-NEXT: vmovaps %xmm1, %xmm0
785 ; NOGATHER-NEXT: retq
786 ; NOGATHER-NEXT: .LBB11_1: # %cond.load
787 ; NOGATHER-NEXT: vmovq %xmm2, %rcx
788 ; NOGATHER-NEXT: vmovlps {{.*#+}} xmm1 = mem[0,1],xmm1[2,3]
789 ; NOGATHER-NEXT: testb $2, %al
790 ; NOGATHER-NEXT: je .LBB11_4
791 ; NOGATHER-NEXT: .LBB11_3: # %cond.load1
792 ; NOGATHER-NEXT: vpextrq $1, %xmm2, %rax
793 ; NOGATHER-NEXT: vmovhps {{.*#+}} xmm1 = xmm1[0,1],mem[0,1]
794 ; NOGATHER-NEXT: vmovaps %xmm1, %xmm0
795 ; NOGATHER-NEXT: retq
; 2-lane double gather: vgatherdpd/vgatherqpd under AVX2; the fallback
; merges each active lane with vmovlps/vmovhps.
797 %ld = load <2 x double*>, <2 x double*>* %ptr
798 %res = call <2 x double> @llvm.masked.gather.v2double(<2 x double*> %ld, i32 0, <2 x i1> %masks, <2 x double> %passthro)
799 ret <2 x double> %res
803 define <2 x double> @masked_gather_zeromask(<2 x double*>* %ptr, <2 x double> %dummy, <2 x double> %passthru) {
804 ; X86-LABEL: masked_gather_zeromask:
805 ; X86: # %bb.0: # %entry
806 ; X86-NEXT: vmovaps %xmm1, %xmm0
809 ; X64-LABEL: masked_gather_zeromask:
810 ; X64: # %bb.0: # %entry
811 ; X64-NEXT: vmovaps %xmm1, %xmm0
814 ; NOGATHER-LABEL: masked_gather_zeromask:
815 ; NOGATHER: # %bb.0: # %entry
816 ; NOGATHER-NEXT: vmovaps %xmm1, %xmm0
817 ; NOGATHER-NEXT: retq
; All-zero mask: the gather must fold away entirely and just return the
; passthrough (a single register move on every configuration).
819 %ld = load <2 x double*>, <2 x double*>* %ptr
820 %res = call <2 x double> @llvm.masked.gather.v2double(<2 x double*> %ld, i32 0, <2 x i1> zeroinitializer, <2 x double> %passthru)
821 ret <2 x double> %res