[SDAG] Use BatchAAResults for querying alias analysis (AA) results (#123934)
[llvm-project.git] / clang / lib / Headers / avx10_2_512convertintrin.h
blob60a5b1ef4548d858e84e287d49b69cadb68806ab
1 /*===--------- avx10_2_512convertintrin.h - AVX10_2_512CONVERT -------------===
3 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 * See https://llvm.org/LICENSE.txt for license information.
5 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 *===-----------------------------------------------------------------------===
8 */
9 #ifndef __IMMINTRIN_H
10 #error \
11 "Never use <avx10_2_512convertintrin.h> directly; include <immintrin.h> instead."
12 #endif // __IMMINTRIN_H
14 #ifdef __SSE2__
16 #ifndef __AVX10_2_512CONVERTINTRIN_H
17 #define __AVX10_2_512CONVERTINTRIN_H
19 /* Define the default attributes for the functions in this file. */
20 #define __DEFAULT_FN_ATTRS512 \
21 __attribute__((__always_inline__, __nodebug__, __target__("avx10.2-512"), \
22 __min_vector_width__(512)))
24 static __inline__ __m512h __DEFAULT_FN_ATTRS512 _mm512_cvtx2ps_ph(__m512 __A,
25 __m512 __B) {
26 return (__m512h)__builtin_ia32_vcvt2ps2phx512_mask(
27 (__v16sf)__A, (__v16sf)__B, (__v32hf)_mm512_setzero_ph(), (__mmask32)(-1),
28 _MM_FROUND_CUR_DIRECTION);
31 static __inline__ __m512h __DEFAULT_FN_ATTRS512
32 _mm512_mask_cvtx2ps_ph(__m512h __W, __mmask32 __U, __m512 __A, __m512 __B) {
33 return (__m512h)__builtin_ia32_vcvt2ps2phx512_mask(
34 (__v16sf)__A, (__v16sf)__B, (__v32hf)__W, (__mmask32)__U,
35 _MM_FROUND_CUR_DIRECTION);
38 static __inline__ __m512h __DEFAULT_FN_ATTRS512
39 _mm512_maskz_cvtx2ps_ph(__mmask32 __U, __m512 __A, __m512 __B) {
40 return (__m512h)__builtin_ia32_vcvt2ps2phx512_mask(
41 (__v16sf)__A, (__v16sf)__B, (__v32hf)_mm512_setzero_ph(), (__mmask32)__U,
42 _MM_FROUND_CUR_DIRECTION);
// Rounding-control variants of _mm512_cvtx2ps_ph: R is a _MM_FROUND_*
// rounding-mode constant used instead of _MM_FROUND_CUR_DIRECTION. These must
// be macros because R has to be a compile-time immediate for the builtin.
#define _mm512_cvtx_round2ps_ph(A, B, R)                                       \
  ((__m512h)__builtin_ia32_vcvt2ps2phx512_mask(                                \
      (__v16sf)(A), (__v16sf)(B), (__v32hf)_mm512_undefined_ph(),              \
      (__mmask32)(-1), (const int)(R)))

#define _mm512_mask_cvtx_round2ps_ph(W, U, A, B, R)                            \
  ((__m512h)__builtin_ia32_vcvt2ps2phx512_mask((__v16sf)(A), (__v16sf)(B),     \
                                               (__v32hf)(W), (__mmask32)(U),   \
                                               (const int)(R)))

#define _mm512_maskz_cvtx_round2ps_ph(U, A, B, R)                              \
  ((__m512h)__builtin_ia32_vcvt2ps2phx512_mask(                                \
      (__v16sf)(A), (__v16sf)(B), (__v32hf)_mm512_setzero_ph(),                \
      (__mmask32)(U), (const int)(R)))
60 static __inline__ __m256i __DEFAULT_FN_ATTRS512
61 _mm512_cvtbiasph_pbf8(__m512i __A, __m512h __B) {
62 return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
63 (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
64 (__mmask32)-1);
67 static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph_pbf8(
68 __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
69 return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
70 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
73 static __inline__ __m256i __DEFAULT_FN_ATTRS512
74 _mm512_maskz_cvtbiasph_pbf8(__mmask32 __U, __m512i __A, __m512h __B) {
75 return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
76 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
77 (__mmask32)__U);
80 static __inline__ __m256i __DEFAULT_FN_ATTRS512
81 _mm512_cvtbiassph_pbf8(__m512i __A, __m512h __B) {
82 return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
83 (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
84 (__mmask32)-1);
87 static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiassph_pbf8(
88 __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
89 return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
90 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
93 static __inline__ __m256i __DEFAULT_FN_ATTRS512
94 _mm512_maskz_cvtbiassph_pbf8(__mmask32 __U, __m512i __A, __m512h __B) {
95 return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
96 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
97 (__mmask32)__U);
100 static __inline__ __m256i __DEFAULT_FN_ATTRS512
101 _mm512_cvtbiasph_phf8(__m512i __A, __m512h __B) {
102 return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
103 (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
104 (__mmask32)-1);
107 static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiasph_phf8(
108 __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
109 return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
110 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
113 static __inline__ __m256i __DEFAULT_FN_ATTRS512
114 _mm512_maskz_cvtbiasph_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
115 return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
116 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
117 (__mmask32)__U);
120 static __inline__ __m256i __DEFAULT_FN_ATTRS512
121 _mm512_cvtbiassph_phf8(__m512i __A, __m512h __B) {
122 return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
123 (__v64qi)__A, (__v32hf)__B, (__v32qi)_mm256_undefined_si256(),
124 (__mmask32)-1);
127 static __inline__ __m256i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtbiassph_phf8(
128 __m256i __W, __mmask32 __U, __m512i __A, __m512h __B) {
129 return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
130 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (__mmask32)__U);
133 static __inline__ __m256i __DEFAULT_FN_ATTRS512
134 _mm512_maskz_cvtbiassph_phf8(__mmask32 __U, __m512i __A, __m512h __B) {
135 return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
136 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)_mm256_setzero_si256(),
137 (__mmask32)__U);
140 static __inline__ __m512i __DEFAULT_FN_ATTRS512
141 _mm512_cvtne2ph_pbf8(__m512h __A, __m512h __B) {
142 return (__m512i)__builtin_ia32_vcvtne2ph2bf8_512((__v32hf)(__A),
143 (__v32hf)(__B));
146 static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtne2ph_pbf8(
147 __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
148 return (__m512i)__builtin_ia32_selectb_512(
149 (__mmask64)__U, (__v64qi)_mm512_cvtne2ph_pbf8(__A, __B), (__v64qi)__W);
152 static __inline__ __m512i __DEFAULT_FN_ATTRS512
153 _mm512_maskz_cvtne2ph_pbf8(__mmask64 __U, __m512h __A, __m512h __B) {
154 return (__m512i)__builtin_ia32_selectb_512(
155 (__mmask64)__U, (__v64qi)_mm512_cvtne2ph_pbf8(__A, __B),
156 (__v64qi)(__m512i)_mm512_setzero_si512());
159 static __inline__ __m512i __DEFAULT_FN_ATTRS512
160 _mm512_cvtnes2ph_pbf8(__m512h __A, __m512h __B) {
161 return (__m512i)__builtin_ia32_vcvtne2ph2bf8s_512((__v32hf)(__A),
162 (__v32hf)(__B));
165 static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtnes2ph_pbf8(
166 __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
167 return (__m512i)__builtin_ia32_selectb_512(
168 (__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_pbf8(__A, __B), (__v64qi)__W);
171 static __inline__ __m512i __DEFAULT_FN_ATTRS512
172 _mm512_maskz_cvtnes2ph_pbf8(__mmask64 __U, __m512h __A, __m512h __B) {
173 return (__m512i)__builtin_ia32_selectb_512(
174 (__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_pbf8(__A, __B),
175 (__v64qi)(__m512i)_mm512_setzero_si512());
178 static __inline__ __m512i __DEFAULT_FN_ATTRS512
179 _mm512_cvtne2ph_phf8(__m512h __A, __m512h __B) {
180 return (__m512i)__builtin_ia32_vcvtne2ph2hf8_512((__v32hf)(__A),
181 (__v32hf)(__B));
184 static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtne2ph_phf8(
185 __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
186 return (__m512i)__builtin_ia32_selectb_512(
187 (__mmask64)__U, (__v64qi)_mm512_cvtne2ph_phf8(__A, __B), (__v64qi)__W);
190 static __inline__ __m512i __DEFAULT_FN_ATTRS512
191 _mm512_maskz_cvtne2ph_phf8(__mmask64 __U, __m512h __A, __m512h __B) {
192 return (__m512i)__builtin_ia32_selectb_512(
193 (__mmask64)__U, (__v64qi)_mm512_cvtne2ph_phf8(__A, __B),
194 (__v64qi)(__m512i)_mm512_setzero_si512());
197 static __inline__ __m512i __DEFAULT_FN_ATTRS512
198 _mm512_cvtnes2ph_phf8(__m512h __A, __m512h __B) {
199 return (__m512i)__builtin_ia32_vcvtne2ph2hf8s_512((__v32hf)(__A),
200 (__v32hf)(__B));
203 static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_cvtnes2ph_phf8(
204 __m512i __W, __mmask64 __U, __m512h __A, __m512h __B) {
205 return (__m512i)__builtin_ia32_selectb_512(
206 (__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_phf8(__A, __B), (__v64qi)__W);
209 static __inline__ __m512i __DEFAULT_FN_ATTRS512
210 _mm512_maskz_cvtnes2ph_phf8(__mmask64 __U, __m512h __A, __m512h __B) {
211 return (__m512i)__builtin_ia32_selectb_512(
212 (__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_phf8(__A, __B),
213 (__v64qi)(__m512i)_mm512_setzero_si512());
216 static __inline__ __m512h __DEFAULT_FN_ATTRS512
217 _mm512_cvtnehf8_ph(__m256i __A) {
218 return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
219 (__v32qi)__A, (__v32hf)(__m512h)_mm512_undefined_ph(), (__mmask32)-1);
222 static __inline__ __m512h __DEFAULT_FN_ATTRS512
223 _mm512_mask_cvtnehf8_ph(__m512h __W, __mmask32 __U, __m256i __A) {
224 return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
225 (__v32qi)__A, (__v32hf)(__m512h)__W, (__mmask32)__U);
228 static __inline__ __m512h __DEFAULT_FN_ATTRS512
229 _mm512_maskz_cvtnehf8_ph(__mmask32 __U, __m256i __A) {
230 return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
231 (__v32qi)__A, (__v32hf)(__m512h)_mm512_setzero_ph(), (__mmask32)__U);
234 static __inline__ __m256i __DEFAULT_FN_ATTRS512
235 _mm512_cvtneph_pbf8(__m512h __A) {
236 return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
237 (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
240 static __inline__ __m256i __DEFAULT_FN_ATTRS512
241 _mm512_mask_cvtneph_pbf8(__m256i __W, __mmask32 __U, __m512h __A) {
242 return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
243 (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
246 static __inline__ __m256i __DEFAULT_FN_ATTRS512
247 _mm512_maskz_cvtneph_pbf8(__mmask32 __U, __m512h __A) {
248 return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
249 (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
252 static __inline__ __m256i __DEFAULT_FN_ATTRS512
253 _mm512_cvtnesph_pbf8(__m512h __A) {
254 return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
255 (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
258 static __inline__ __m256i __DEFAULT_FN_ATTRS512
259 _mm512_mask_cvtnesph_pbf8(__m256i __W, __mmask32 __U, __m512h __A) {
260 return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
261 (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
264 static __inline__ __m256i __DEFAULT_FN_ATTRS512
265 _mm512_maskz_cvtnesph_pbf8(__mmask32 __U, __m512h __A) {
266 return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
267 (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
270 static __inline__ __m256i __DEFAULT_FN_ATTRS512
271 _mm512_cvtneph_phf8(__m512h __A) {
272 return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
273 (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
276 static __inline__ __m256i __DEFAULT_FN_ATTRS512
277 _mm512_mask_cvtneph_phf8(__m256i __W, __mmask32 __U, __m512h __A) {
278 return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
279 (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
282 static __inline__ __m256i __DEFAULT_FN_ATTRS512
283 _mm512_maskz_cvtneph_phf8(__mmask32 __U, __m512h __A) {
284 return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
285 (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
288 static __inline__ __m256i __DEFAULT_FN_ATTRS512
289 _mm512_cvtnesph_phf8(__m512h __A) {
290 return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
291 (__v32hf)__A, (__v32qi)(__m256i)_mm256_undefined_si256(), (__mmask32)-1);
294 static __inline__ __m256i __DEFAULT_FN_ATTRS512
295 _mm512_mask_cvtnesph_phf8(__m256i __W, __mmask32 __U, __m512h __A) {
296 return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
297 (__v32hf)__A, (__v32qi)(__m256i)__W, (__mmask32)__U);
300 static __inline__ __m256i __DEFAULT_FN_ATTRS512
301 _mm512_maskz_cvtnesph_phf8(__mmask32 __U, __m512h __A) {
302 return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
303 (__v32hf)__A, (__v32qi)(__m256i)_mm256_setzero_si256(), (__mmask32)__U);
306 static __inline __m512h __DEFAULT_FN_ATTRS512 _mm512_cvtpbf8_ph(__m256i __A) {
307 return _mm512_castsi512_ph(_mm512_slli_epi16(_mm512_cvtepi8_epi16(__A), 8));
310 static __inline __m512h __DEFAULT_FN_ATTRS512
311 _mm512_mask_cvtpbf8_ph(__m512h __S, __mmask32 __U, __m256i __A) {
312 return _mm512_castsi512_ph(
313 _mm512_mask_slli_epi16((__m512i)__S, __U, _mm512_cvtepi8_epi16(__A), 8));
316 static __inline __m512h __DEFAULT_FN_ATTRS512
317 _mm512_maskz_cvtpbf8_ph(__mmask32 __U, __m256i __A) {
318 return _mm512_castsi512_ph(
319 _mm512_slli_epi16(_mm512_maskz_cvtepi8_epi16(__U, __A), 8));
322 #undef __DEFAULT_FN_ATTRS512
324 #endif // __AVX10_2_512CONVERTINTRIN_H
325 #endif // __SSE2__