/*===-------------- avx10_2bf16intrin.h - AVX10-BF16 intrinsics ------------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */
11 "Never use <avx10_2bf16intrin.h> directly; include <immintrin.h> instead."
16 #ifndef __AVX10_2BF16INTRIN_H
17 #define __AVX10_2BF16INTRIN_H
19 typedef __bf16 __m128bh_u
__attribute__((__vector_size__(16), __aligned__(1)));
20 typedef __bf16 __m256bh_u
__attribute__((__vector_size__(32), __aligned__(1)));
22 /* Define the default attributes for the functions in this file. */
23 #define __DEFAULT_FN_ATTRS256 \
24 __attribute__((__always_inline__, __nodebug__, __target__("avx10.2-256"), \
25 __min_vector_width__(256)))
26 #define __DEFAULT_FN_ATTRS128 \
27 __attribute__((__always_inline__, __nodebug__, __target__("avx10.2-256"), \
28 __min_vector_width__(128)))
30 static __inline __m256bh __DEFAULT_FN_ATTRS256
_mm256_setzero_pbh(void) {
31 return __builtin_bit_cast(__m256bh
, _mm256_setzero_ps());
34 static __inline __m128bh __DEFAULT_FN_ATTRS128
_mm_setzero_pbh(void) {
35 return __builtin_bit_cast(__m128bh
, _mm_setzero_ps());
38 static __inline__ __m128 __DEFAULT_FN_ATTRS128
_mm_castpbf16_ps(__m128bh __a
) {
42 static __inline__ __m256 __DEFAULT_FN_ATTRS256
43 _mm256_castpbf16_ps(__m256bh __a
) {
47 static __inline__ __m256d __DEFAULT_FN_ATTRS256
48 _mm256_castpbf16_pd(__m256bh __a
) {
52 static __inline__ __m128d __DEFAULT_FN_ATTRS128
_mm_castpbf16_pd(__m128bh __a
) {
56 static __inline__ __m128i __DEFAULT_FN_ATTRS128
57 _mm_castpbf16_si128(__m128bh __a
) {
61 static __inline__ __m256i __DEFAULT_FN_ATTRS256
62 _mm256_castpbf16_si256(__m256bh __a
) {
66 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_castps_pbh(__m128 __a
) {
70 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_castps_pbh(__m256 __a
) {
74 static __inline__ __bf16 __DEFAULT_FN_ATTRS128
_mm_cvtsbh_bf16(__m128bh __a
) {
78 static __inline__ __bf16 __DEFAULT_FN_ATTRS256
79 _mm256_cvtsbh_bf16(__m256bh __a
) {
83 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_castpd_pbh(__m128d __a
) {
87 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
88 _mm256_castpd_pbh(__m256d __a
) {
92 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
93 _mm_castsi128_pbh(__m128i __a
) {
97 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
98 _mm256_castsi256_pbh(__m256i __a
) {
102 static __inline__ __m128bh __DEFAULT_FN_ATTRS256
103 _mm256_castpbf16256_pbh128(__m256bh __a
) {
104 return __builtin_shufflevector(__a
, __a
, 0, 1, 2, 3, 4, 5, 6, 7);
107 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
108 _mm256_castpbf16128_pbh256(__m128bh __a
) {
109 return __builtin_shufflevector(__a
, __a
, 0, 1, 2, 3, 4, 5, 6, 7, -1, -1, -1,
113 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
114 _mm256_zextpbf16128_pbh256(__m128bh __a
) {
115 return __builtin_shufflevector(__a
, (__v8bf
)_mm_setzero_pbh(), 0, 1, 2, 3, 4,
116 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
119 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_undefined_pbh(void) {
120 return (__m256bh
)__builtin_ia32_undef256();
123 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
124 _mm_load_sbh(void const *__dp
) {
125 __m128bh src
= (__v8bf
)_mm_setzero_pbh();
126 return (__m128bh
)__builtin_ia32_loadsbf16128_mask((const __v8bf
*)__dp
, src
,
130 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
131 _mm_mask_load_sbh(__m128bh __W
, __mmask8 __U
, const void *__A
) {
132 __m128bh src
= (__v8bf
)__builtin_shufflevector(
133 (__v8bf
)__W
, (__v8bf
)_mm_setzero_pbh(), 0, 8, 8, 8, 8, 8, 8, 8);
135 return (__m128bh
)__builtin_ia32_loadsbf16128_mask((const __v8bf
*)__A
, src
,
139 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
140 _mm_maskz_load_sbh(__mmask8 __U
, const void *__A
) {
141 return (__m128bh
)__builtin_ia32_loadsbf16128_mask(
142 (const __v8bf
*)__A
, (__v8bf
)_mm_setzero_pbh(), __U
& 1);
145 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
146 _mm256_load_pbh(void const *__p
) {
147 return *(const __m256bh
*)__p
;
150 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_load_pbh(void const *__p
) {
151 return *(const __m128bh
*)__p
;
154 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
155 _mm256_loadu_pbh(void const *__p
) {
158 } __attribute__((__packed__
, __may_alias__
));
159 return ((const struct __loadu_pbh
*)__p
)->__v
;
162 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
163 _mm_loadu_pbh(void const *__p
) {
166 } __attribute__((__packed__
, __may_alias__
));
167 return ((const struct __loadu_pbh
*)__p
)->__v
;
170 static __inline__
void __DEFAULT_FN_ATTRS128
_mm_store_sbh(void *__dp
,
172 struct __mm_store_sbh_struct
{
174 } __attribute__((__packed__
, __may_alias__
));
175 ((struct __mm_store_sbh_struct
*)__dp
)->__u
= __a
[0];
178 static __inline__
void __DEFAULT_FN_ATTRS128
_mm_mask_store_sbh(void *__W
,
181 __builtin_ia32_storesbf16128_mask((__v8bf
*)__W
, __A
, __U
& 1);
184 static __inline__
void __DEFAULT_FN_ATTRS256
_mm256_store_pbh(void *__P
,
186 *(__m256bh
*)__P
= __A
;
189 static __inline__
void __DEFAULT_FN_ATTRS128
_mm_store_pbh(void *__P
,
191 *(__m128bh
*)__P
= __A
;
194 static __inline__
void __DEFAULT_FN_ATTRS256
_mm256_storeu_pbh(void *__P
,
196 struct __storeu_pbh
{
198 } __attribute__((__packed__
, __may_alias__
));
199 ((struct __storeu_pbh
*)__P
)->__v
= __A
;
202 static __inline__
void __DEFAULT_FN_ATTRS128
_mm_storeu_pbh(void *__P
,
204 struct __storeu_pbh
{
206 } __attribute__((__packed__
, __may_alias__
));
207 ((struct __storeu_pbh
*)__P
)->__v
= __A
;
210 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_move_sbh(__m128bh __a
,
216 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
217 _mm_mask_move_sbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
, __m128bh __B
) {
218 return __builtin_ia32_selectsbf_128(__U
, _mm_move_sbh(__A
, __B
), __W
);
221 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
222 _mm_maskz_move_sbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
) {
223 return __builtin_ia32_selectsbf_128(__U
, _mm_move_sbh(__A
, __B
),
227 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_undefined_pbh(void) {
228 return (__m128bh
)__builtin_ia32_undef128();
231 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_set_sbh(__bf16 bf
) {
232 return (__v8bf
)__builtin_shufflevector(
233 (__v8bf
){bf
, bf
, bf
, bf
, bf
, bf
, bf
, bf
}, (__v8bf
)_mm_setzero_pbh(), 0, 8,
237 static __inline __m128bh __DEFAULT_FN_ATTRS128
_mm_set1_pbh(__bf16 bf
) {
238 return (__m128bh
)(__v8bf
){bf
, bf
, bf
, bf
, bf
, bf
, bf
, bf
};
241 static __inline __m256bh __DEFAULT_FN_ATTRS256
_mm256_set1_pbh(__bf16 bf
) {
242 return (__m256bh
)(__v16bf
){bf
, bf
, bf
, bf
, bf
, bf
, bf
, bf
,
243 bf
, bf
, bf
, bf
, bf
, bf
, bf
, bf
};
246 static __inline __m128bh __DEFAULT_FN_ATTRS128
247 _mm_set_pbh(__bf16 bf1
, __bf16 bf2
, __bf16 bf3
, __bf16 bf4
, __bf16 bf5
,
248 __bf16 bf6
, __bf16 bf7
, __bf16 bf8
) {
249 return (__m128bh
)(__v8bf
){bf1
, bf2
, bf3
, bf4
, bf5
, bf6
, bf7
, bf8
};
252 static __inline __m256bh __DEFAULT_FN_ATTRS256
_mm256_set_pbh(
253 __bf16 bf1
, __bf16 bf2
, __bf16 bf3
, __bf16 bf4
, __bf16 bf5
, __bf16 bf6
,
254 __bf16 bf7
, __bf16 bf8
, __bf16 bf9
, __bf16 bf10
, __bf16 bf11
, __bf16 bf12
,
255 __bf16 bf13
, __bf16 bf14
, __bf16 bf15
, __bf16 bf16
) {
256 return (__m256bh
)(__v16bf
){bf1
, bf2
, bf3
, bf4
, bf5
, bf6
, bf7
, bf8
,
257 bf9
, bf10
, bf11
, bf12
, bf13
, bf14
, bf15
, bf16
};
/* Reverse-order variants of the set intrinsics; the trailing (bf1) argument
 * of the 256-bit macro was dropped in extraction and is restored here. */
#define _mm_setr_pbh(bf1, bf2, bf3, bf4, bf5, bf6, bf7, bf8)                   \
  _mm_set_pbh((bf8), (bf7), (bf6), (bf5), (bf4), (bf3), (bf2), (bf1))

#define _mm256_setr_pbh(bf1, bf2, bf3, bf4, bf5, bf6, bf7, bf8, bf9, bf10,     \
                        bf11, bf12, bf13, bf14, bf15, bf16)                    \
  _mm256_set_pbh((bf16), (bf15), (bf14), (bf13), (bf12), (bf11), (bf10),       \
                 (bf9), (bf8), (bf7), (bf6), (bf5), (bf4), (bf3), (bf2),       \
                 (bf1))
269 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_abs_pbh(__m256bh __A
) {
270 return (__m256bh
)_mm256_and_epi32(_mm256_set1_epi32(0x7FFF7FFF),
274 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_abs_pbh(__m128bh __A
) {
275 return (__m128bh
)_mm_and_epi32(_mm_set1_epi32(0x7FFF7FFF), (__m128i
)__A
);
278 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
279 _mm_mask_blend_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __W
) {
280 return (__m128bh
)__builtin_ia32_selectpbf_128((__mmask8
)__U
, (__v8bf
)__W
,
284 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
285 _mm256_mask_blend_pbh(__mmask16 __U
, __m256bh __A
, __m256bh __W
) {
286 return (__m256bh
)__builtin_ia32_selectpbf_256((__mmask16
)__U
, (__v16bf
)__W
,
290 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
291 _mm_permutex2var_pbh(__m128bh __A
, __m128i __I
, __m128bh __B
) {
292 return (__m128bh
)__builtin_ia32_vpermi2varhi128((__v8hi
)__A
, (__v8hi
)__I
,
296 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
297 _mm256_permutex2var_pbh(__m256bh __A
, __m256i __I
, __m256bh __B
) {
298 return (__m256bh
)__builtin_ia32_vpermi2varhi256((__v16hi
)__A
, (__v16hi
)__I
,
302 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
303 _mm_permutexvar_pbh(__m128i __A
, __m128bh __B
) {
304 return (__m128bh
)__builtin_ia32_permvarhi128((__v8hi
)__B
, (__v8hi
)__A
);
307 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
308 _mm256_permutexvar_pbh(__m256i __A
, __m256bh __B
) {
309 return (__m256bh
)__builtin_ia32_permvarhi256((__v16hi
)__B
, (__v16hi
)__A
);
312 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
313 _mm256_addne_pbh(__m256bh __A
, __m256bh __B
) {
314 return (__m256bh
)((__v16bf
)__A
+ (__v16bf
)__B
);
317 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
318 _mm256_mask_addne_pbh(__m256bh __W
, __mmask16 __U
, __m256bh __A
, __m256bh __B
) {
319 return (__m256bh
)__builtin_ia32_selectpbf_256(
320 (__mmask16
)__U
, (__v16bf
)_mm256_addne_pbh(__A
, __B
), (__v16bf
)__W
);
323 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
324 _mm256_maskz_addne_pbh(__mmask16 __U
, __m256bh __A
, __m256bh __B
) {
325 return (__m256bh
)__builtin_ia32_selectpbf_256(
326 (__mmask16
)__U
, (__v16bf
)_mm256_addne_pbh(__A
, __B
),
327 (__v16bf
)_mm256_setzero_pbh());
330 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_addne_pbh(__m128bh __A
,
332 return (__m128bh
)((__v8bf
)__A
+ (__v8bf
)__B
);
335 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
336 _mm_mask_addne_pbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
, __m128bh __B
) {
337 return (__m128bh
)__builtin_ia32_selectpbf_128(
338 (__mmask8
)__U
, (__v8bf
)_mm_addne_pbh(__A
, __B
), (__v8bf
)__W
);
341 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
342 _mm_maskz_addne_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
) {
343 return (__m128bh
)__builtin_ia32_selectpbf_128((__mmask8
)__U
,
344 (__v8bf
)_mm_addne_pbh(__A
, __B
),
345 (__v8bf
)_mm_setzero_pbh());
348 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
349 _mm256_subne_pbh(__m256bh __A
, __m256bh __B
) {
350 return (__m256bh
)((__v16bf
)__A
- (__v16bf
)__B
);
353 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
354 _mm256_mask_subne_pbh(__m256bh __W
, __mmask16 __U
, __m256bh __A
, __m256bh __B
) {
355 return (__m256bh
)__builtin_ia32_selectpbf_256(
356 (__mmask16
)__U
, (__v16bf
)_mm256_subne_pbh(__A
, __B
), (__v16bf
)__W
);
359 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
360 _mm256_maskz_subne_pbh(__mmask16 __U
, __m256bh __A
, __m256bh __B
) {
361 return (__m256bh
)__builtin_ia32_selectpbf_256(
362 (__mmask16
)__U
, (__v16bf
)_mm256_subne_pbh(__A
, __B
),
363 (__v16bf
)_mm256_setzero_pbh());
366 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_subne_pbh(__m128bh __A
,
368 return (__m128bh
)((__v8bf
)__A
- (__v8bf
)__B
);
371 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
372 _mm_mask_subne_pbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
, __m128bh __B
) {
373 return (__m128bh
)__builtin_ia32_selectpbf_128(
374 (__mmask8
)__U
, (__v8bf
)_mm_subne_pbh(__A
, __B
), (__v8bf
)__W
);
377 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
378 _mm_maskz_subne_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
) {
379 return (__m128bh
)__builtin_ia32_selectpbf_128((__mmask8
)__U
,
380 (__v8bf
)_mm_subne_pbh(__A
, __B
),
381 (__v8bf
)_mm_setzero_pbh());
384 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
385 _mm256_mulne_pbh(__m256bh __A
, __m256bh __B
) {
386 return (__m256bh
)((__v16bf
)__A
* (__v16bf
)__B
);
389 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
390 _mm256_mask_mulne_pbh(__m256bh __W
, __mmask16 __U
, __m256bh __A
, __m256bh __B
) {
391 return (__m256bh
)__builtin_ia32_selectpbf_256(
392 (__mmask16
)__U
, (__v16bf
)_mm256_mulne_pbh(__A
, __B
), (__v16bf
)__W
);
395 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
396 _mm256_maskz_mulne_pbh(__mmask16 __U
, __m256bh __A
, __m256bh __B
) {
397 return (__m256bh
)__builtin_ia32_selectpbf_256(
398 (__mmask16
)__U
, (__v16bf
)_mm256_mulne_pbh(__A
, __B
),
399 (__v16bf
)_mm256_setzero_pbh());
402 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_mulne_pbh(__m128bh __A
,
404 return (__m128bh
)((__v8bf
)__A
* (__v8bf
)__B
);
407 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
408 _mm_mask_mulne_pbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
, __m128bh __B
) {
409 return (__m128bh
)__builtin_ia32_selectpbf_128(
410 (__mmask8
)__U
, (__v8bf
)_mm_mulne_pbh(__A
, __B
), (__v8bf
)__W
);
413 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
414 _mm_maskz_mulne_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
) {
415 return (__m128bh
)__builtin_ia32_selectpbf_128((__mmask8
)__U
,
416 (__v8bf
)_mm_mulne_pbh(__A
, __B
),
417 (__v8bf
)_mm_setzero_pbh());
420 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
421 _mm256_divne_pbh(__m256bh __A
, __m256bh __B
) {
422 return (__m256bh
)((__v16bf
)__A
/ (__v16bf
)__B
);
425 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
426 _mm256_mask_divne_pbh(__m256bh __W
, __mmask16 __U
, __m256bh __A
, __m256bh __B
) {
427 return (__m256bh
)__builtin_ia32_selectpbf_256(
428 (__mmask16
)__U
, (__v16bf
)_mm256_divne_pbh(__A
, __B
), (__v16bf
)__W
);
431 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
432 _mm256_maskz_divne_pbh(__mmask16 __U
, __m256bh __A
, __m256bh __B
) {
433 return (__m256bh
)__builtin_ia32_selectpbf_256(
434 (__mmask16
)__U
, (__v16bf
)_mm256_divne_pbh(__A
, __B
),
435 (__v16bf
)_mm256_setzero_pbh());
438 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_divne_pbh(__m128bh __A
,
440 return (__m128bh
)((__v8bf
)__A
/ (__v8bf
)__B
);
443 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
444 _mm_mask_divne_pbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
, __m128bh __B
) {
445 return (__m128bh
)__builtin_ia32_selectpbf_128(
446 (__mmask8
)__U
, (__v8bf
)_mm_divne_pbh(__A
, __B
), (__v8bf
)__W
);
449 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
450 _mm_maskz_divne_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
) {
451 return (__m128bh
)__builtin_ia32_selectpbf_128((__mmask8
)__U
,
452 (__v8bf
)_mm_divne_pbh(__A
, __B
),
453 (__v8bf
)_mm_setzero_pbh());
456 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_max_pbh(__m256bh __A
,
458 return (__m256bh
)__builtin_ia32_vmaxpbf16256((__v16bf
)__A
, (__v16bf
)__B
);
461 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
462 _mm256_mask_max_pbh(__m256bh __W
, __mmask16 __U
, __m256bh __A
, __m256bh __B
) {
463 return (__m256bh
)__builtin_ia32_selectpbf_256(
464 (__mmask16
)__U
, (__v16bf
)_mm256_max_pbh(__A
, __B
), (__v16bf
)__W
);
467 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
468 _mm256_maskz_max_pbh(__mmask16 __U
, __m256bh __A
, __m256bh __B
) {
469 return (__m256bh
)__builtin_ia32_selectpbf_256(
470 (__mmask16
)__U
, (__v16bf
)_mm256_max_pbh(__A
, __B
),
471 (__v16bf
)_mm256_setzero_pbh());
474 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_max_pbh(__m128bh __A
,
476 return (__m128bh
)__builtin_ia32_vmaxpbf16128((__v8bf
)__A
, (__v8bf
)__B
);
479 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
480 _mm_mask_max_pbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
, __m128bh __B
) {
481 return (__m128bh
)__builtin_ia32_selectpbf_128(
482 (__mmask8
)__U
, (__v8bf
)_mm_max_pbh(__A
, __B
), (__v8bf
)__W
);
485 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
486 _mm_maskz_max_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
) {
487 return (__m128bh
)__builtin_ia32_selectpbf_128(
488 (__mmask8
)__U
, (__v8bf
)_mm_max_pbh(__A
, __B
), (__v8bf
)_mm_setzero_pbh());
491 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_min_pbh(__m256bh __A
,
493 return (__m256bh
)__builtin_ia32_vminpbf16256((__v16bf
)__A
, (__v16bf
)__B
);
496 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
497 _mm256_mask_min_pbh(__m256bh __W
, __mmask16 __U
, __m256bh __A
, __m256bh __B
) {
498 return (__m256bh
)__builtin_ia32_selectpbf_256(
499 (__mmask16
)__U
, (__v16bf
)_mm256_min_pbh(__A
, __B
), (__v16bf
)__W
);
502 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
503 _mm256_maskz_min_pbh(__mmask16 __U
, __m256bh __A
, __m256bh __B
) {
504 return (__m256bh
)__builtin_ia32_selectpbf_256(
505 (__mmask16
)__U
, (__v16bf
)_mm256_min_pbh(__A
, __B
),
506 (__v16bf
)_mm256_setzero_pbh());
509 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_min_pbh(__m128bh __A
,
511 return (__m128bh
)__builtin_ia32_vminpbf16128((__v8bf
)__A
, (__v8bf
)__B
);
514 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
515 _mm_mask_min_pbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
, __m128bh __B
) {
516 return (__m128bh
)__builtin_ia32_selectpbf_128(
517 (__mmask8
)__U
, (__v8bf
)_mm_min_pbh(__A
, __B
), (__v8bf
)__W
);
520 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
521 _mm_maskz_min_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
) {
522 return (__m128bh
)__builtin_ia32_selectpbf_128(
523 (__mmask8
)__U
, (__v8bf
)_mm_min_pbh(__A
, __B
), (__v8bf
)_mm_setzero_pbh());
526 static __inline__
int __DEFAULT_FN_ATTRS128
_mm_comeqsbh(__m128bh A
,
528 return __builtin_ia32_vcomsbf16eq((__v8bf
)A
, (__v8bf
)B
);
531 static __inline__
int __DEFAULT_FN_ATTRS128
_mm_comltsbh(__m128bh A
,
533 return __builtin_ia32_vcomsbf16lt((__v8bf
)A
, (__v8bf
)B
);
536 static __inline__
int __DEFAULT_FN_ATTRS128
_mm_comlesbh(__m128bh A
,
538 return __builtin_ia32_vcomsbf16le((__v8bf
)A
, (__v8bf
)B
);
541 static __inline__
int __DEFAULT_FN_ATTRS128
_mm_comgtsbh(__m128bh A
,
543 return __builtin_ia32_vcomsbf16gt((__v8bf
)A
, (__v8bf
)B
);
546 static __inline__
int __DEFAULT_FN_ATTRS128
_mm_comgesbh(__m128bh A
,
548 return __builtin_ia32_vcomsbf16ge((__v8bf
)A
, (__v8bf
)B
);
551 static __inline__
int __DEFAULT_FN_ATTRS128
_mm_comneqsbh(__m128bh A
,
553 return __builtin_ia32_vcomsbf16neq((__v8bf
)A
, (__v8bf
)B
);
556 #define _mm256_cmp_pbh_mask(__A, __B, __P) \
557 ((__mmask16)__builtin_ia32_vcmppbf16256_mask((__v16bf)(__m256bh)(__A), \
558 (__v16bf)(__m256bh)(__B), \
559 (int)(__P), (__mmask16) - 1))
561 #define _mm256_mask_cmp_pbh_mask(__U, __A, __B, __P) \
562 ((__mmask16)__builtin_ia32_vcmppbf16256_mask((__v16bf)(__m256bh)(__A), \
563 (__v16bf)(__m256bh)(__B), \
564 (int)(__P), (__mmask16)(__U)))
566 #define _mm_cmp_pbh_mask(__A, __B, __P) \
567 ((__mmask8)__builtin_ia32_vcmppbf16128_mask((__v8bf)(__m128bh)(__A), \
568 (__v8bf)(__m128bh)(__B), \
569 (int)(__P), (__mmask8) - 1))
571 #define _mm_mask_cmp_pbh_mask(__U, __A, __B, __P) \
572 ((__mmask8)__builtin_ia32_vcmppbf16128_mask((__v8bf)(__m128bh)(__A), \
573 (__v8bf)(__m128bh)(__B), \
574 (int)(__P), (__mmask8)(__U)))
576 #define _mm256_mask_fpclass_pbh_mask(__U, __A, imm) \
577 ((__mmask16)__builtin_ia32_vfpclasspbf16256_mask( \
578 (__v16bf)(__m256bh)(__A), (int)(imm), (__mmask16)(__U)))
580 #define _mm256_fpclass_pbh_mask(__A, imm) \
581 ((__mmask16)__builtin_ia32_vfpclasspbf16256_mask( \
582 (__v16bf)(__m256bh)(__A), (int)(imm), (__mmask16) - 1))
584 #define _mm_mask_fpclass_pbh_mask(__U, __A, imm) \
585 ((__mmask8)__builtin_ia32_vfpclasspbf16128_mask( \
586 (__v8bf)(__m128bh)(__A), (int)(imm), (__mmask8)(__U)))
588 #define _mm_fpclass_pbh_mask(__A, imm) \
589 ((__mmask8)__builtin_ia32_vfpclasspbf16128_mask((__v8bf)(__m128bh)(__A), \
590 (int)(imm), (__mmask8) - 1))
592 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
593 _mm256_scalef_pbh(__m256bh __A
, __m256bh __B
) {
594 return (__m256bh
)__builtin_ia32_vscalefpbf16256_mask(
595 (__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)_mm256_undefined_pbh(),
599 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_scalef_pbh(
600 __m256bh __W
, __mmask16 __U
, __m256bh __A
, __m256bh __B
) {
601 return (__m256bh
)__builtin_ia32_vscalefpbf16256_mask(
602 (__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__W
, (__mmask16
)__U
);
605 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
606 _mm256_maskz_scalef_pbh(__mmask16 __U
, __m256bh __A
, __m256bh __B
) {
607 return (__m256bh
)__builtin_ia32_vscalefpbf16256_mask(
608 (__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)_mm256_setzero_pbh(),
612 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_scalef_pbh(__m128bh __A
,
614 return (__m128bh
)__builtin_ia32_vscalefpbf16128_mask(
615 (__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)_mm_undefined_pbh(), (__mmask8
)-1);
618 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
619 _mm_mask_scalef_pbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
, __m128bh __B
) {
620 return (__m128bh
)__builtin_ia32_vscalefpbf16128_mask(
621 (__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__W
, (__mmask8
)__U
);
624 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
625 _mm_maskz_scalef_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
) {
626 return (__m128bh
)__builtin_ia32_vscalefpbf16128_mask(
627 (__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)_mm_setzero_pbh(), (__mmask8
)__U
);
630 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_rcp_pbh(__m256bh __A
) {
631 return (__m256bh
)__builtin_ia32_vrcppbf16256_mask(
632 (__v16bf
)__A
, (__v16bf
)_mm256_undefined_pbh(), (__mmask16
)-1);
635 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
636 _mm256_mask_rcp_pbh(__m256bh __W
, __mmask16 __U
, __m256bh __A
) {
637 return (__m256bh
)__builtin_ia32_vrcppbf16256_mask((__v16bf
)__A
, (__v16bf
)__W
,
641 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
642 _mm256_maskz_rcp_pbh(__mmask16 __U
, __m256bh __A
) {
643 return (__m256bh
)__builtin_ia32_vrcppbf16256_mask(
644 (__v16bf
)__A
, (__v16bf
)_mm256_setzero_pbh(), (__mmask16
)__U
);
647 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_rcp_pbh(__m128bh __A
) {
648 return (__m128bh
)__builtin_ia32_vrcppbf16128_mask(
649 (__v8bf
)__A
, (__v8bf
)_mm_undefined_pbh(), (__mmask8
)-1);
652 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
653 _mm_mask_rcp_pbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
) {
654 return (__m128bh
)__builtin_ia32_vrcppbf16128_mask((__v8bf
)__A
, (__v8bf
)__W
,
658 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
659 _mm_maskz_rcp_pbh(__mmask8 __U
, __m128bh __A
) {
660 return (__m128bh
)__builtin_ia32_vrcppbf16128_mask(
661 (__v8bf
)__A
, (__v8bf
)_mm_setzero_pbh(), (__mmask8
)__U
);
664 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
665 _mm256_getexp_pbh(__m256bh __A
) {
666 return (__m256bh
)__builtin_ia32_vgetexppbf16256_mask(
667 (__v16bf
)__A
, (__v16bf
)_mm256_undefined_pbh(), (__mmask16
)-1);
670 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
671 _mm256_mask_getexp_pbh(__m256bh __W
, __mmask16 __U
, __m256bh __A
) {
672 return (__m256bh
)__builtin_ia32_vgetexppbf16256_mask(
673 (__v16bf
)__A
, (__v16bf
)__W
, (__mmask16
)__U
);
676 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
677 _mm256_maskz_getexp_pbh(__mmask16 __U
, __m256bh __A
) {
678 return (__m256bh
)__builtin_ia32_vgetexppbf16256_mask(
679 (__v16bf
)__A
, (__v16bf
)_mm256_setzero_pbh(), (__mmask16
)__U
);
682 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_getexp_pbh(__m128bh __A
) {
683 return (__m128bh
)__builtin_ia32_vgetexppbf16128_mask(
684 (__v8bf
)__A
, (__v8bf
)_mm_undefined_pbh(), (__mmask8
)-1);
687 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
688 _mm_mask_getexp_pbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
) {
689 return (__m128bh
)__builtin_ia32_vgetexppbf16128_mask((__v8bf
)__A
, (__v8bf
)__W
,
693 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
694 _mm_maskz_getexp_pbh(__mmask8 __U
, __m128bh __A
) {
695 return (__m128bh
)__builtin_ia32_vgetexppbf16128_mask(
696 (__v8bf
)__A
, (__v8bf
)_mm_setzero_pbh(), (__mmask8
)__U
);
699 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
700 _mm256_rsqrt_pbh(__m256bh __A
) {
701 return (__m256bh
)__builtin_ia32_vrsqrtpbf16256_mask(
702 (__v16bf
)__A
, (__v16bf
)_mm256_undefined_pbh(), (__mmask16
)-1);
705 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
706 _mm256_mask_rsqrt_pbh(__m256bh __W
, __mmask16 __U
, __m256bh __A
) {
707 return (__m256bh
)__builtin_ia32_vrsqrtpbf16256_mask(
708 (__v16bf
)__A
, (__v16bf
)__W
, (__mmask16
)__U
);
711 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
712 _mm256_maskz_rsqrt_pbh(__mmask16 __U
, __m256bh __A
) {
713 return (__m256bh
)__builtin_ia32_vrsqrtpbf16256_mask(
714 (__v16bf
)__A
, (__v16bf
)_mm256_setzero_pbh(), (__mmask16
)__U
);
717 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_rsqrt_pbh(__m128bh __A
) {
718 return (__m128bh
)__builtin_ia32_vrsqrtpbf16128_mask(
719 (__v8bf
)__A
, (__v8bf
)_mm_undefined_pbh(), (__mmask8
)-1);
722 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
723 _mm_mask_rsqrt_pbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
) {
724 return (__m128bh
)__builtin_ia32_vrsqrtpbf16128_mask((__v8bf
)__A
, (__v8bf
)__W
,
728 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
729 _mm_maskz_rsqrt_pbh(__mmask8 __U
, __m128bh __A
) {
730 return (__m128bh
)__builtin_ia32_vrsqrtpbf16128_mask(
731 (__v8bf
)__A
, (__v8bf
)_mm_setzero_pbh(), (__mmask8
)__U
);
/* reducene / roundscalene immediate-operand macros; the final mask-argument
 * lines were dropped in extraction and are restored here. */
#define _mm256_reducene_pbh(__A, imm)                                          \
  ((__m256bh)__builtin_ia32_vreducenepbf16256_mask(                            \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_undefined_pbh(),   \
      (__mmask16)-1))

#define _mm256_mask_reducene_pbh(__W, __U, __A, imm)                           \
  ((__m256bh)__builtin_ia32_vreducenepbf16256_mask(                            \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)(__m256bh)(__W),          \
      (__mmask16)(__U)))

#define _mm256_maskz_reducene_pbh(__U, __A, imm)                               \
  ((__m256bh)__builtin_ia32_vreducenepbf16256_mask(                            \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_setzero_pbh(),     \
      (__mmask16)(__U)))

#define _mm_reducene_pbh(__A, imm)                                             \
  ((__m128bh)__builtin_ia32_vreducenepbf16128_mask(                            \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_undefined_pbh(),        \
      (__mmask8)-1))

#define _mm_mask_reducene_pbh(__W, __U, __A, imm)                              \
  ((__m128bh)__builtin_ia32_vreducenepbf16128_mask(                            \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)(__m128bh)(__W),            \
      (__mmask8)(__U)))

#define _mm_maskz_reducene_pbh(__U, __A, imm)                                  \
  ((__m128bh)__builtin_ia32_vreducenepbf16128_mask(                            \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_setzero_pbh(),          \
      (__mmask8)(__U)))

#define _mm256_roundscalene_pbh(__A, imm)                                      \
  ((__m256bh)__builtin_ia32_vrndscalenepbf16_256_mask(                         \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_setzero_pbh(),     \
      (__mmask16)-1))

#define _mm256_mask_roundscalene_pbh(__W, __U, __A, imm)                       \
  ((__m256bh)__builtin_ia32_vrndscalenepbf16_256_mask(                         \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)(__m256bh)(__W),          \
      (__mmask16)(__U)))

#define _mm256_maskz_roundscalene_pbh(__U, __A, imm)                           \
  ((__m256bh)__builtin_ia32_vrndscalenepbf16_256_mask(                         \
      (__v16bf)(__m256bh)(__A), (int)(imm), (__v16bf)_mm256_setzero_pbh(),     \
      (__mmask16)(__U)))

#define _mm_roundscalene_pbh(__A, imm)                                         \
  ((__m128bh)__builtin_ia32_vrndscalenepbf16_128_mask(                         \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_setzero_pbh(),          \
      (__mmask8)-1))

#define _mm_mask_roundscalene_pbh(__W, __U, __A, imm)                          \
  ((__m128bh)__builtin_ia32_vrndscalenepbf16_128_mask(                         \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)(__m128bh)(__W),            \
      (__mmask8)(__U)))

#define _mm_maskz_roundscalene_pbh(__U, __A, imm)                              \
  ((__m128bh)__builtin_ia32_vrndscalenepbf16_128_mask(                         \
      (__v8bf)(__m128bh)(__A), (int)(imm), (__v8bf)_mm_setzero_pbh(),          \
      (__mmask8)(__U)))
794 #define _mm256_getmant_pbh(__A, __B, __C) \
795 ((__m256bh)__builtin_ia32_vgetmantpbf16256_mask( \
796 (__v16bf)(__m256bh)(__A), (int)(((__C) << 2) | (__B)), \
797 (__v16bf)_mm256_undefined_pbh(), (__mmask16) - 1))
799 #define _mm256_mask_getmant_pbh(__W, __U, __A, __B, __C) \
800 ((__m256bh)__builtin_ia32_vgetmantpbf16256_mask( \
801 (__v16bf)(__m256bh)(__A), (int)(((__C) << 2) | (__B)), \
802 (__v16bf)(__m256bh)(__W), (__mmask16)(__U)))
804 #define _mm256_maskz_getmant_pbh(__U, __A, __B, __C) \
805 ((__m256bh)__builtin_ia32_vgetmantpbf16256_mask( \
806 (__v16bf)(__m256bh)(__A), (int)(((__C) << 2) | (__B)), \
807 (__v16bf)_mm256_setzero_pbh(), (__mmask16)(__U)))
809 #define _mm_getmant_pbh(__A, __B, __C) \
810 ((__m128bh)__builtin_ia32_vgetmantpbf16128_mask( \
811 (__v8bf)(__m128bh)(__A), (int)(((__C) << 2) | (__B)), \
812 (__v8bf)_mm_undefined_pbh(), (__mmask8) - 1))
814 #define _mm_mask_getmant_pbh(__W, __U, __A, __B, __C) \
815 ((__m128bh)__builtin_ia32_vgetmantpbf16128_mask( \
816 (__v8bf)(__m128bh)(__A), (int)(((__C) << 2) | (__B)), \
817 (__v8bf)(__m128bh)(__W), (__mmask8)(__U)))
819 #define _mm_maskz_getmant_pbh(__U, __A, __B, __C) \
820 ((__m128bh)__builtin_ia32_vgetmantpbf16128_mask( \
821 (__v8bf)(__m128bh)(__A), (int)(((__C) << 2) | (__B)), \
822 (__v8bf)_mm_setzero_pbh(), (__mmask8)(__U)))
824 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_sqrt_pbh(__m256bh __A
) {
825 return (__m256bh
)__builtin_ia32_vsqrtnepbf16256((__v16bf
)__A
);
828 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
829 _mm256_mask_sqrt_pbh(__m256bh __W
, __mmask16 __U
, __m256bh __A
) {
830 return (__m256bh
)__builtin_ia32_selectpbf_256(
831 (__mmask16
)__U
, (__v16bf
)_mm256_sqrt_pbh(__A
), (__v16bf
)__W
);
834 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
835 _mm256_maskz_sqrt_pbh(__mmask16 __U
, __m256bh __A
) {
836 return (__m256bh
)__builtin_ia32_selectpbf_256((__mmask16
)__U
,
837 (__v16bf
)_mm256_sqrt_pbh(__A
),
838 (__v16bf
)_mm256_setzero_pbh());
841 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_sqrt_pbh(__m128bh __A
) {
842 return (__m128bh
)__builtin_ia32_vsqrtnepbf16((__v8bf
)__A
);
845 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
846 _mm_mask_sqrt_pbh(__m128bh __W
, __mmask8 __U
, __m128bh __A
) {
847 return (__m128bh
)__builtin_ia32_selectpbf_128(
848 (__mmask8
)__U
, (__v8bf
)_mm_sqrt_pbh(__A
), (__v8bf
)__W
);
851 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
852 _mm_maskz_sqrt_pbh(__mmask8 __U
, __m128bh __A
) {
853 return (__m128bh
)__builtin_ia32_selectpbf_128(
854 (__mmask8
)__U
, (__v8bf
)_mm_sqrt_pbh(__A
), (__v8bf
)_mm_setzero_pbh());
857 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
858 _mm256_fmaddne_pbh(__m256bh __A
, __m256bh __B
, __m256bh __C
) {
859 return (__m256bh
)__builtin_ia32_vfmaddnepbh256((__v16bf
)__A
, (__v16bf
)__B
,
863 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_fmaddne_pbh(
864 __m256bh __A
, __mmask16 __U
, __m256bh __B
, __m256bh __C
) {
865 return (__m256bh
)__builtin_ia32_selectpbf_256(
867 _mm256_fmaddne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
871 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask3_fmaddne_pbh(
872 __m256bh __A
, __m256bh __B
, __m256bh __C
, __mmask16 __U
) {
873 return (__m256bh
)__builtin_ia32_selectpbf_256(
875 _mm256_fmaddne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
879 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_fmaddne_pbh(
880 __mmask16 __U
, __m256bh __A
, __m256bh __B
, __m256bh __C
) {
881 return (__m256bh
)__builtin_ia32_selectpbf_256(
883 _mm256_fmaddne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
884 (__v16bf
)_mm256_setzero_pbh());
887 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
888 _mm256_fmsubne_pbh(__m256bh __A
, __m256bh __B
, __m256bh __C
) {
889 return (__m256bh
)__builtin_ia32_vfmaddnepbh256((__v16bf
)__A
, (__v16bf
)__B
,
893 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_fmsubne_pbh(
894 __m256bh __A
, __mmask16 __U
, __m256bh __B
, __m256bh __C
) {
895 return (__m256bh
)__builtin_ia32_selectpbf_256(
897 _mm256_fmsubne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
901 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask3_fmsubne_pbh(
902 __m256bh __A
, __m256bh __B
, __m256bh __C
, __mmask16 __U
) {
903 return (__m256bh
)__builtin_ia32_selectpbf_256(
905 _mm256_fmsubne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
909 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_fmsubne_pbh(
910 __mmask16 __U
, __m256bh __A
, __m256bh __B
, __m256bh __C
) {
911 return (__m256bh
)__builtin_ia32_selectpbf_256(
913 _mm256_fmsubne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
914 (__v16bf
)_mm256_setzero_pbh());
917 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
918 _mm256_fnmaddne_pbh(__m256bh __A
, __m256bh __B
, __m256bh __C
) {
919 return (__m256bh
)__builtin_ia32_vfmaddnepbh256((__v16bf
)__A
, -(__v16bf
)__B
,
923 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_fnmaddne_pbh(
924 __m256bh __A
, __mmask16 __U
, __m256bh __B
, __m256bh __C
) {
925 return (__m256bh
)__builtin_ia32_selectpbf_256(
927 _mm256_fnmaddne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
931 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask3_fnmaddne_pbh(
932 __m256bh __A
, __m256bh __B
, __m256bh __C
, __mmask16 __U
) {
933 return (__m256bh
)__builtin_ia32_selectpbf_256(
935 _mm256_fnmaddne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
939 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_fnmaddne_pbh(
940 __mmask16 __U
, __m256bh __A
, __m256bh __B
, __m256bh __C
) {
941 return (__m256bh
)__builtin_ia32_selectpbf_256(
943 _mm256_fnmaddne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
944 (__v16bf
)_mm256_setzero_pbh());
947 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
948 _mm256_fnmsubne_pbh(__m256bh __A
, __m256bh __B
, __m256bh __C
) {
949 return (__m256bh
)__builtin_ia32_vfmaddnepbh256((__v16bf
)__A
, -(__v16bf
)__B
,
953 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask_fnmsubne_pbh(
954 __m256bh __A
, __mmask16 __U
, __m256bh __B
, __m256bh __C
) {
955 return (__m256bh
)__builtin_ia32_selectpbf_256(
957 _mm256_fnmsubne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
961 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_mask3_fnmsubne_pbh(
962 __m256bh __A
, __m256bh __B
, __m256bh __C
, __mmask16 __U
) {
963 return (__m256bh
)__builtin_ia32_selectpbf_256(
965 _mm256_fnmsubne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
969 static __inline__ __m256bh __DEFAULT_FN_ATTRS256
_mm256_maskz_fnmsubne_pbh(
970 __mmask16 __U
, __m256bh __A
, __m256bh __B
, __m256bh __C
) {
971 return (__m256bh
)__builtin_ia32_selectpbf_256(
973 _mm256_fnmsubne_pbh((__v16bf
)__A
, (__v16bf
)__B
, (__v16bf
)__C
),
974 (__v16bf
)_mm256_setzero_pbh());
977 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_fmaddne_pbh(__m128bh __A
,
980 return (__m128bh
)__builtin_ia32_vfmaddnepbh128((__v8bf
)__A
, (__v8bf
)__B
,
984 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
985 _mm_mask_fmaddne_pbh(__m128bh __A
, __mmask8 __U
, __m128bh __B
, __m128bh __C
) {
986 return (__m128bh
)__builtin_ia32_selectpbf_128(
987 (__mmask8
)__U
, _mm_fmaddne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
991 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
992 _mm_mask3_fmaddne_pbh(__m128bh __A
, __m128bh __B
, __m128bh __C
, __mmask8 __U
) {
993 return (__m128bh
)__builtin_ia32_selectpbf_128(
994 (__mmask8
)__U
, _mm_fmaddne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
998 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
999 _mm_maskz_fmaddne_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
, __m128bh __C
) {
1000 return (__m128bh
)__builtin_ia32_selectpbf_128(
1001 (__mmask8
)__U
, _mm_fmaddne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
1002 (__v8bf
)_mm_setzero_pbh());
1005 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
_mm_fmsubne_pbh(__m128bh __A
,
1008 return (__m128bh
)__builtin_ia32_vfmaddnepbh128((__v8bf
)__A
, (__v8bf
)__B
,
1012 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
1013 _mm_mask_fmsubne_pbh(__m128bh __A
, __mmask8 __U
, __m128bh __B
, __m128bh __C
) {
1014 return (__m128bh
)__builtin_ia32_selectpbf_128(
1015 (__mmask8
)__U
, _mm_fmsubne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
1019 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
1020 _mm_mask3_fmsubne_pbh(__m128bh __A
, __m128bh __B
, __m128bh __C
, __mmask8 __U
) {
1021 return (__m128bh
)__builtin_ia32_selectpbf_128(
1022 (__mmask8
)__U
, _mm_fmsubne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
1026 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
1027 _mm_maskz_fmsubne_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
, __m128bh __C
) {
1028 return (__m128bh
)__builtin_ia32_selectpbf_128(
1029 (__mmask8
)__U
, _mm_fmsubne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
1030 (__v8bf
)_mm_setzero_pbh());
1033 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
1034 _mm_fnmaddne_pbh(__m128bh __A
, __m128bh __B
, __m128bh __C
) {
1035 return (__m128bh
)__builtin_ia32_vfmaddnepbh128((__v8bf
)__A
, -(__v8bf
)__B
,
1039 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
1040 _mm_mask_fnmaddne_pbh(__m128bh __A
, __mmask8 __U
, __m128bh __B
, __m128bh __C
) {
1041 return (__m128bh
)__builtin_ia32_selectpbf_128(
1042 (__mmask8
)__U
, _mm_fnmaddne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
1046 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
1047 _mm_mask3_fnmaddne_pbh(__m128bh __A
, __m128bh __B
, __m128bh __C
, __mmask8 __U
) {
1048 return (__m128bh
)__builtin_ia32_selectpbf_128(
1049 (__mmask8
)__U
, _mm_fnmaddne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
1053 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
1054 _mm_maskz_fnmaddne_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
, __m128bh __C
) {
1055 return (__m128bh
)__builtin_ia32_selectpbf_128(
1056 (__mmask8
)__U
, _mm_fnmaddne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
1057 (__v8bf
)_mm_setzero_pbh());
1060 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
1061 _mm_fnmsubne_pbh(__m128bh __A
, __m128bh __B
, __m128bh __C
) {
1062 return (__m128bh
)__builtin_ia32_vfmaddnepbh128((__v8bf
)__A
, -(__v8bf
)__B
,
1066 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
1067 _mm_mask_fnmsubne_pbh(__m128bh __A
, __mmask8 __U
, __m128bh __B
, __m128bh __C
) {
1068 return (__m128bh
)__builtin_ia32_selectpbf_128(
1069 (__mmask8
)__U
, _mm_fnmsubne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
1073 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
1074 _mm_mask3_fnmsubne_pbh(__m128bh __A
, __m128bh __B
, __m128bh __C
, __mmask8 __U
) {
1075 return (__m128bh
)__builtin_ia32_selectpbf_128(
1076 (__mmask8
)__U
, _mm_fnmsubne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
1080 static __inline__ __m128bh __DEFAULT_FN_ATTRS128
1081 _mm_maskz_fnmsubne_pbh(__mmask8 __U
, __m128bh __A
, __m128bh __B
, __m128bh __C
) {
1082 return (__m128bh
)__builtin_ia32_selectpbf_128(
1083 (__mmask8
)__U
, _mm_fnmsubne_pbh((__v8bf
)__A
, (__v8bf
)__B
, (__v8bf
)__C
),
1084 (__v8bf
)_mm_setzero_pbh());
/* Clean up the file-local attribute helper macros defined at the top of this
   header so they do not leak to including translation units. */
#undef __DEFAULT_FN_ATTRS128
#undef __DEFAULT_FN_ATTRS256