/*===------------- avx512vlvbmi2intrin.h - VBMI2 intrinsics -----------------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */
#ifndef __IMMINTRIN_H
#error "Never use <avx512vlvbmi2intrin.h> directly; include <immintrin.h> instead."
#endif

#ifndef __AVX512VLVBMI2INTRIN_H
#define __AVX512VLVBMI2INTRIN_H

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS128                                                  \
  __attribute__((__always_inline__, __nodebug__,                              \
                 __target__("avx512vl,avx512vbmi2,no-evex512"),               \
                 __min_vector_width__(128)))
#define __DEFAULT_FN_ATTRS256                                                  \
  __attribute__((__always_inline__, __nodebug__,                              \
                 __target__("avx512vl,avx512vbmi2,no-evex512"),               \
                 __min_vector_width__(256)))

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_compress_epi16(__m128i __S, __mmask8 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_compresshi128_mask ((__v8hi) __D,
                                                      (__v8hi) __S,
                                                      __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_compress_epi16(__mmask8 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_compresshi128_mask ((__v8hi) __D,
                                                      (__v8hi) _mm_setzero_si128(),
                                                      __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_compress_epi8(__m128i __S, __mmask16 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_compressqi128_mask ((__v16qi) __D,
                                                      (__v16qi) __S,
                                                      __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_compress_epi8(__mmask16 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_compressqi128_mask ((__v16qi) __D,
                                                      (__v16qi) _mm_setzero_si128(),
                                                      __U);
}

static __inline__ void __DEFAULT_FN_ATTRS128
_mm_mask_compressstoreu_epi16(void *__P, __mmask8 __U, __m128i __D)
{
  __builtin_ia32_compressstorehi128_mask ((__v8hi *) __P, (__v8hi) __D,
                                          __U);
}

static __inline__ void __DEFAULT_FN_ATTRS128
_mm_mask_compressstoreu_epi8(void *__P, __mmask16 __U, __m128i __D)
{
  __builtin_ia32_compressstoreqi128_mask ((__v16qi *) __P, (__v16qi) __D,
                                          __U);
}
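
/* Usage sketch (illustrative, not mandated by this header): the masked
 * compress intrinsics pack the elements of __D selected by __U into the low
 * lanes of the result; unselected result lanes come from __S (mask form) or
 * are zeroed (maskz form), and the compressstoreu forms write only the
 * selected, packed elements to memory.
 *
 *   __m128i d = _mm_setr_epi16(10, 11, 12, 13, 14, 15, 16, 17);
 *   __m128i s = _mm_set1_epi16(-1);
 *   __m128i r = _mm_mask_compress_epi16(s, 0x0A, d);
 *   // r = {11, 13, -1, -1, -1, -1, -1, -1}: elements 1 and 3 of d packed low,
 *   // remaining lanes passed through from s.
 */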
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_expand_epi16(__m128i __S, __mmask8 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_expandhi128_mask ((__v8hi) __D,
                                                    (__v8hi) __S,
                                                    __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_expand_epi16(__mmask8 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_expandhi128_mask ((__v8hi) __D,
                                                    (__v8hi) _mm_setzero_si128(),
                                                    __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_expand_epi8(__m128i __S, __mmask16 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_expandqi128_mask ((__v16qi) __D,
                                                    (__v16qi) __S,
                                                    __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_expand_epi8(__mmask16 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_expandqi128_mask ((__v16qi) __D,
                                                    (__v16qi) _mm_setzero_si128(),
                                                    __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_expandloadu_epi16(__m128i __S, __mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_expandloadhi128_mask ((const __v8hi *)__P,
                                                        (__v8hi) __S,
                                                        __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_expandloadu_epi16(__mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_expandloadhi128_mask ((const __v8hi *)__P,
                                                        (__v8hi) _mm_setzero_si128(),
                                                        __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_expandloadu_epi8(__m128i __S, __mmask16 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_expandloadqi128_mask ((const __v16qi *)__P,
                                                        (__v16qi) __S,
                                                        __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_expandloadu_epi8(__mmask16 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_expandloadqi128_mask ((const __v16qi *)__P,
                                                        (__v16qi) _mm_setzero_si128(),
                                                        __U);
}
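
/* Usage sketch (illustrative): expand is the inverse of compress --
 * consecutive source elements (from a register or, for expandloadu, from
 * memory) are scattered into the result lanes whose mask bit is set, and the
 * expandloadu forms read only as many elements as there are set mask bits.
 *
 *   short mem[2] = {100, 200};
 *   __m128i r = _mm_maskz_expandloadu_epi16(0x05, mem);
 *   // r = {100, 0, 200, 0, 0, 0, 0, 0}: mem[0] lands in lane 0, mem[1] in lane 2.
 */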
static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_compress_epi16(__m256i __S, __mmask16 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_compresshi256_mask ((__v16hi) __D,
                                                      (__v16hi) __S,
                                                      __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_compress_epi16(__mmask16 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_compresshi256_mask ((__v16hi) __D,
                                                      (__v16hi) _mm256_setzero_si256(),
                                                      __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_compress_epi8(__m256i __S, __mmask32 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_compressqi256_mask ((__v32qi) __D,
                                                      (__v32qi) __S,
                                                      __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_compress_epi8(__mmask32 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_compressqi256_mask ((__v32qi) __D,
                                                      (__v32qi) _mm256_setzero_si256(),
                                                      __U);
}

static __inline__ void __DEFAULT_FN_ATTRS256
_mm256_mask_compressstoreu_epi16(void *__P, __mmask16 __U, __m256i __D)
{
  __builtin_ia32_compressstorehi256_mask ((__v16hi *) __P, (__v16hi) __D,
                                          __U);
}

static __inline__ void __DEFAULT_FN_ATTRS256
_mm256_mask_compressstoreu_epi8(void *__P, __mmask32 __U, __m256i __D)
{
  __builtin_ia32_compressstoreqi256_mask ((__v32qi *) __P, (__v32qi) __D,
                                          __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_expand_epi16(__m256i __S, __mmask16 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_expandhi256_mask ((__v16hi) __D,
                                                    (__v16hi) __S,
                                                    __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_expand_epi16(__mmask16 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_expandhi256_mask ((__v16hi) __D,
                                                    (__v16hi) _mm256_setzero_si256(),
                                                    __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_expand_epi8(__m256i __S, __mmask32 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_expandqi256_mask ((__v32qi) __D,
                                                    (__v32qi) __S,
                                                    __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_expand_epi8(__mmask32 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_expandqi256_mask ((__v32qi) __D,
                                                    (__v32qi) _mm256_setzero_si256(),
                                                    __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_expandloadu_epi16(__m256i __S, __mmask16 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_expandloadhi256_mask ((const __v16hi *)__P,
                                                        (__v16hi) __S,
                                                        __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_expandloadu_epi16(__mmask16 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_expandloadhi256_mask ((const __v16hi *)__P,
                                                        (__v16hi) _mm256_setzero_si256(),
                                                        __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_expandloadu_epi8(__m256i __S, __mmask32 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_expandloadqi256_mask ((const __v32qi *)__P,
                                                        (__v32qi) __S,
                                                        __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_expandloadu_epi8(__mmask32 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_expandloadqi256_mask ((const __v32qi *)__P,
                                                        (__v32qi) _mm256_setzero_si256(),
                                                        __U);
}
#define _mm256_shldi_epi64(A, B, I) \
  ((__m256i)__builtin_ia32_vpshldq256((__v4di)(__m256i)(A), \
                                      (__v4di)(__m256i)(B), (int)(I)))

#define _mm256_mask_shldi_epi64(S, U, A, B, I) \
  ((__m256i)__builtin_ia32_selectq_256((__mmask8)(U), \
                                       (__v4di)_mm256_shldi_epi64((A), (B), (I)), \
                                       (__v4di)(__m256i)(S)))

#define _mm256_maskz_shldi_epi64(U, A, B, I) \
  ((__m256i)__builtin_ia32_selectq_256((__mmask8)(U), \
                                       (__v4di)_mm256_shldi_epi64((A), (B), (I)), \
                                       (__v4di)_mm256_setzero_si256()))

#define _mm_shldi_epi64(A, B, I) \
  ((__m128i)__builtin_ia32_vpshldq128((__v2di)(__m128i)(A), \
                                      (__v2di)(__m128i)(B), (int)(I)))

#define _mm_mask_shldi_epi64(S, U, A, B, I) \
  ((__m128i)__builtin_ia32_selectq_128((__mmask8)(U), \
                                       (__v2di)_mm_shldi_epi64((A), (B), (I)), \
                                       (__v2di)(__m128i)(S)))

#define _mm_maskz_shldi_epi64(U, A, B, I) \
  ((__m128i)__builtin_ia32_selectq_128((__mmask8)(U), \
                                       (__v2di)_mm_shldi_epi64((A), (B), (I)), \
                                       (__v2di)_mm_setzero_si128()))

#define _mm256_shldi_epi32(A, B, I) \
  ((__m256i)__builtin_ia32_vpshldd256((__v8si)(__m256i)(A), \
                                      (__v8si)(__m256i)(B), (int)(I)))

#define _mm256_mask_shldi_epi32(S, U, A, B, I) \
  ((__m256i)__builtin_ia32_selectd_256((__mmask8)(U), \
                                       (__v8si)_mm256_shldi_epi32((A), (B), (I)), \
                                       (__v8si)(__m256i)(S)))

#define _mm256_maskz_shldi_epi32(U, A, B, I) \
  ((__m256i)__builtin_ia32_selectd_256((__mmask8)(U), \
                                       (__v8si)_mm256_shldi_epi32((A), (B), (I)), \
                                       (__v8si)_mm256_setzero_si256()))

#define _mm_shldi_epi32(A, B, I) \
  ((__m128i)__builtin_ia32_vpshldd128((__v4si)(__m128i)(A), \
                                      (__v4si)(__m128i)(B), (int)(I)))

#define _mm_mask_shldi_epi32(S, U, A, B, I) \
  ((__m128i)__builtin_ia32_selectd_128((__mmask8)(U), \
                                       (__v4si)_mm_shldi_epi32((A), (B), (I)), \
                                       (__v4si)(__m128i)(S)))

#define _mm_maskz_shldi_epi32(U, A, B, I) \
  ((__m128i)__builtin_ia32_selectd_128((__mmask8)(U), \
                                       (__v4si)_mm_shldi_epi32((A), (B), (I)), \
                                       (__v4si)_mm_setzero_si128()))

#define _mm256_shldi_epi16(A, B, I) \
  ((__m256i)__builtin_ia32_vpshldw256((__v16hi)(__m256i)(A), \
                                      (__v16hi)(__m256i)(B), (int)(I)))

#define _mm256_mask_shldi_epi16(S, U, A, B, I) \
  ((__m256i)__builtin_ia32_selectw_256((__mmask16)(U), \
                                       (__v16hi)_mm256_shldi_epi16((A), (B), (I)), \
                                       (__v16hi)(__m256i)(S)))

#define _mm256_maskz_shldi_epi16(U, A, B, I) \
  ((__m256i)__builtin_ia32_selectw_256((__mmask16)(U), \
                                       (__v16hi)_mm256_shldi_epi16((A), (B), (I)), \
                                       (__v16hi)_mm256_setzero_si256()))

#define _mm_shldi_epi16(A, B, I) \
  ((__m128i)__builtin_ia32_vpshldw128((__v8hi)(__m128i)(A), \
                                      (__v8hi)(__m128i)(B), (int)(I)))

#define _mm_mask_shldi_epi16(S, U, A, B, I) \
  ((__m128i)__builtin_ia32_selectw_128((__mmask8)(U), \
                                       (__v8hi)_mm_shldi_epi16((A), (B), (I)), \
                                       (__v8hi)(__m128i)(S)))

#define _mm_maskz_shldi_epi16(U, A, B, I) \
  ((__m128i)__builtin_ia32_selectw_128((__mmask8)(U), \
                                       (__v8hi)_mm_shldi_epi16((A), (B), (I)), \
                                       (__v8hi)_mm_setzero_si128()))

#define _mm256_shrdi_epi64(A, B, I) \
  ((__m256i)__builtin_ia32_vpshrdq256((__v4di)(__m256i)(A), \
                                      (__v4di)(__m256i)(B), (int)(I)))

#define _mm256_mask_shrdi_epi64(S, U, A, B, I) \
  ((__m256i)__builtin_ia32_selectq_256((__mmask8)(U), \
                                       (__v4di)_mm256_shrdi_epi64((A), (B), (I)), \
                                       (__v4di)(__m256i)(S)))

#define _mm256_maskz_shrdi_epi64(U, A, B, I) \
  ((__m256i)__builtin_ia32_selectq_256((__mmask8)(U), \
                                       (__v4di)_mm256_shrdi_epi64((A), (B), (I)), \
                                       (__v4di)_mm256_setzero_si256()))

#define _mm_shrdi_epi64(A, B, I) \
  ((__m128i)__builtin_ia32_vpshrdq128((__v2di)(__m128i)(A), \
                                      (__v2di)(__m128i)(B), (int)(I)))

#define _mm_mask_shrdi_epi64(S, U, A, B, I) \
  ((__m128i)__builtin_ia32_selectq_128((__mmask8)(U), \
                                       (__v2di)_mm_shrdi_epi64((A), (B), (I)), \
                                       (__v2di)(__m128i)(S)))

#define _mm_maskz_shrdi_epi64(U, A, B, I) \
  ((__m128i)__builtin_ia32_selectq_128((__mmask8)(U), \
                                       (__v2di)_mm_shrdi_epi64((A), (B), (I)), \
                                       (__v2di)_mm_setzero_si128()))

#define _mm256_shrdi_epi32(A, B, I) \
  ((__m256i)__builtin_ia32_vpshrdd256((__v8si)(__m256i)(A), \
                                      (__v8si)(__m256i)(B), (int)(I)))

#define _mm256_mask_shrdi_epi32(S, U, A, B, I) \
  ((__m256i)__builtin_ia32_selectd_256((__mmask8)(U), \
                                       (__v8si)_mm256_shrdi_epi32((A), (B), (I)), \
                                       (__v8si)(__m256i)(S)))

#define _mm256_maskz_shrdi_epi32(U, A, B, I) \
  ((__m256i)__builtin_ia32_selectd_256((__mmask8)(U), \
                                       (__v8si)_mm256_shrdi_epi32((A), (B), (I)), \
                                       (__v8si)_mm256_setzero_si256()))

#define _mm_shrdi_epi32(A, B, I) \
  ((__m128i)__builtin_ia32_vpshrdd128((__v4si)(__m128i)(A), \
                                      (__v4si)(__m128i)(B), (int)(I)))

#define _mm_mask_shrdi_epi32(S, U, A, B, I) \
  ((__m128i)__builtin_ia32_selectd_128((__mmask8)(U), \
                                       (__v4si)_mm_shrdi_epi32((A), (B), (I)), \
                                       (__v4si)(__m128i)(S)))

#define _mm_maskz_shrdi_epi32(U, A, B, I) \
  ((__m128i)__builtin_ia32_selectd_128((__mmask8)(U), \
                                       (__v4si)_mm_shrdi_epi32((A), (B), (I)), \
                                       (__v4si)_mm_setzero_si128()))

#define _mm256_shrdi_epi16(A, B, I) \
  ((__m256i)__builtin_ia32_vpshrdw256((__v16hi)(__m256i)(A), \
                                      (__v16hi)(__m256i)(B), (int)(I)))

#define _mm256_mask_shrdi_epi16(S, U, A, B, I) \
  ((__m256i)__builtin_ia32_selectw_256((__mmask16)(U), \
                                       (__v16hi)_mm256_shrdi_epi16((A), (B), (I)), \
                                       (__v16hi)(__m256i)(S)))

#define _mm256_maskz_shrdi_epi16(U, A, B, I) \
  ((__m256i)__builtin_ia32_selectw_256((__mmask16)(U), \
                                       (__v16hi)_mm256_shrdi_epi16((A), (B), (I)), \
                                       (__v16hi)_mm256_setzero_si256()))

#define _mm_shrdi_epi16(A, B, I) \
  ((__m128i)__builtin_ia32_vpshrdw128((__v8hi)(__m128i)(A), \
                                      (__v8hi)(__m128i)(B), (int)(I)))

#define _mm_mask_shrdi_epi16(S, U, A, B, I) \
  ((__m128i)__builtin_ia32_selectw_128((__mmask8)(U), \
                                       (__v8hi)_mm_shrdi_epi16((A), (B), (I)), \
                                       (__v8hi)(__m128i)(S)))

#define _mm_maskz_shrdi_epi16(U, A, B, I) \
  ((__m128i)__builtin_ia32_selectw_128((__mmask8)(U), \
                                       (__v8hi)_mm_shrdi_epi16((A), (B), (I)), \
                                       (__v8hi)_mm_setzero_si128()))
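
/* Usage sketch (illustrative): the shldi/shrdi macros perform per-lane
 * double-width ("concatenate and shift") shifts by an immediate.  For 64-bit
 * lanes and a count strictly between 0 and the lane width,
 * _mm_shldi_epi64(A, B, I) yields (A << I) | (B >> (64 - I)) in each lane, and
 * _mm_shrdi_epi64(A, B, I) yields (A >> I) | (B << (64 - I)); the shift count
 * is taken modulo the lane width.
 *
 *   __m128i a = _mm_set1_epi64x(1);
 *   __m128i b = _mm_set1_epi64x(-1);
 *   __m128i r = _mm_shldi_epi64(a, b, 4);
 *   // each lane of r == 0x1F: (1 << 4) combined with the top 4 bits of b.
 */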
static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_shldv_epi64(__m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_vpshldvq256((__v4di)__A, (__v4di)__B,
                                             (__v4di)__C);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_shldv_epi64(__m256i __A, __mmask8 __U, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectq_256(__U,
                                             (__v4di)_mm256_shldv_epi64(__A, __B, __C),
                                             (__v4di)__A);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_shldv_epi64(__mmask8 __U, __m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectq_256(__U,
                                             (__v4di)_mm256_shldv_epi64(__A, __B, __C),
                                             (__v4di)_mm256_setzero_si256());
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_shldv_epi64(__m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_vpshldvq128((__v2di)__A, (__v2di)__B,
                                             (__v2di)__C);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_shldv_epi64(__m128i __A, __mmask8 __U, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectq_128(__U,
                                             (__v2di)_mm_shldv_epi64(__A, __B, __C),
                                             (__v2di)__A);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_shldv_epi64(__mmask8 __U, __m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectq_128(__U,
                                             (__v2di)_mm_shldv_epi64(__A, __B, __C),
                                             (__v2di)_mm_setzero_si128());
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_shldv_epi32(__m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_vpshldvd256((__v8si)__A, (__v8si)__B,
                                             (__v8si)__C);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_shldv_epi32(__m256i __A, __mmask8 __U, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectd_256(__U,
                                             (__v8si)_mm256_shldv_epi32(__A, __B, __C),
                                             (__v8si)__A);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_shldv_epi32(__mmask8 __U, __m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectd_256(__U,
                                             (__v8si)_mm256_shldv_epi32(__A, __B, __C),
                                             (__v8si)_mm256_setzero_si256());
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_shldv_epi32(__m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_vpshldvd128((__v4si)__A, (__v4si)__B,
                                             (__v4si)__C);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_shldv_epi32(__m128i __A, __mmask8 __U, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectd_128(__U,
                                             (__v4si)_mm_shldv_epi32(__A, __B, __C),
                                             (__v4si)__A);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_shldv_epi32(__mmask8 __U, __m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectd_128(__U,
                                             (__v4si)_mm_shldv_epi32(__A, __B, __C),
                                             (__v4si)_mm_setzero_si128());
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_shldv_epi16(__m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_vpshldvw256((__v16hi)__A, (__v16hi)__B,
                                             (__v16hi)__C);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_shldv_epi16(__m256i __A, __mmask16 __U, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectw_256(__U,
                                             (__v16hi)_mm256_shldv_epi16(__A, __B, __C),
                                             (__v16hi)__A);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_shldv_epi16(__mmask16 __U, __m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectw_256(__U,
                                             (__v16hi)_mm256_shldv_epi16(__A, __B, __C),
                                             (__v16hi)_mm256_setzero_si256());
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_shldv_epi16(__m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_vpshldvw128((__v8hi)__A, (__v8hi)__B,
                                             (__v8hi)__C);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_shldv_epi16(__m128i __A, __mmask8 __U, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectw_128(__U,
                                             (__v8hi)_mm_shldv_epi16(__A, __B, __C),
                                             (__v8hi)__A);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_shldv_epi16(__mmask8 __U, __m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectw_128(__U,
                                             (__v8hi)_mm_shldv_epi16(__A, __B, __C),
                                             (__v8hi)_mm_setzero_si128());
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_shrdv_epi64(__m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_vpshrdvq256((__v4di)__A, (__v4di)__B,
                                             (__v4di)__C);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_shrdv_epi64(__m256i __A, __mmask8 __U, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectq_256(__U,
                                             (__v4di)_mm256_shrdv_epi64(__A, __B, __C),
                                             (__v4di)__A);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_shrdv_epi64(__mmask8 __U, __m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectq_256(__U,
                                             (__v4di)_mm256_shrdv_epi64(__A, __B, __C),
                                             (__v4di)_mm256_setzero_si256());
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_shrdv_epi64(__m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_vpshrdvq128((__v2di)__A, (__v2di)__B,
                                             (__v2di)__C);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_shrdv_epi64(__m128i __A, __mmask8 __U, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectq_128(__U,
                                             (__v2di)_mm_shrdv_epi64(__A, __B, __C),
                                             (__v2di)__A);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_shrdv_epi64(__mmask8 __U, __m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectq_128(__U,
                                             (__v2di)_mm_shrdv_epi64(__A, __B, __C),
                                             (__v2di)_mm_setzero_si128());
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_shrdv_epi32(__m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_vpshrdvd256((__v8si)__A, (__v8si)__B,
                                             (__v8si)__C);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_shrdv_epi32(__m256i __A, __mmask8 __U, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectd_256(__U,
                                             (__v8si)_mm256_shrdv_epi32(__A, __B, __C),
                                             (__v8si)__A);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_shrdv_epi32(__mmask8 __U, __m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectd_256(__U,
                                             (__v8si)_mm256_shrdv_epi32(__A, __B, __C),
                                             (__v8si)_mm256_setzero_si256());
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_shrdv_epi32(__m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_vpshrdvd128((__v4si)__A, (__v4si)__B,
                                             (__v4si)__C);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_shrdv_epi32(__m128i __A, __mmask8 __U, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectd_128(__U,
                                             (__v4si)_mm_shrdv_epi32(__A, __B, __C),
                                             (__v4si)__A);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_shrdv_epi32(__mmask8 __U, __m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectd_128(__U,
                                             (__v4si)_mm_shrdv_epi32(__A, __B, __C),
                                             (__v4si)_mm_setzero_si128());
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_shrdv_epi16(__m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_vpshrdvw256((__v16hi)__A, (__v16hi)__B,
                                             (__v16hi)__C);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_mask_shrdv_epi16(__m256i __A, __mmask16 __U, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectw_256(__U,
                                             (__v16hi)_mm256_shrdv_epi16(__A, __B, __C),
                                             (__v16hi)__A);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_shrdv_epi16(__mmask16 __U, __m256i __A, __m256i __B, __m256i __C)
{
  return (__m256i)__builtin_ia32_selectw_256(__U,
                                             (__v16hi)_mm256_shrdv_epi16(__A, __B, __C),
                                             (__v16hi)_mm256_setzero_si256());
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_shrdv_epi16(__m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_vpshrdvw128((__v8hi)__A, (__v8hi)__B,
                                             (__v8hi)__C);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_shrdv_epi16(__m128i __A, __mmask8 __U, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectw_128(__U,
                                             (__v8hi)_mm_shrdv_epi16(__A, __B, __C),
                                             (__v8hi)__A);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_shrdv_epi16(__mmask8 __U, __m128i __A, __m128i __B, __m128i __C)
{
  return (__m128i)__builtin_ia32_selectw_128(__U,
                                             (__v8hi)_mm_shrdv_epi16(__A, __B, __C),
                                             (__v8hi)_mm_setzero_si128());
}
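
/* Usage sketch (illustrative): the shldv/shrdv forms take per-lane shift
 * counts from the third vector operand instead of an immediate.
 *
 *   __m128i a = _mm_set1_epi32(1);
 *   __m128i b = _mm_setzero_si128();
 *   __m128i c = _mm_setr_epi32(0, 1, 2, 3);
 *   __m128i r = _mm_shldv_epi32(a, b, c);
 *   // r = {1, 2, 4, 8}: each lane of a is shifted left by its count in c
 *   // (b supplies the bits shifted in from below, all zero here).
 */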
#undef __DEFAULT_FN_ATTRS128
#undef __DEFAULT_FN_ATTRS256

#endif