/*===---- wasm_simd128.h - WebAssembly portable SIMD intrinsics ------------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */
10 #ifndef __WASM_SIMD128_H
11 #define __WASM_SIMD128_H
17 typedef int32_t v128_t
__attribute__((__vector_size__(16), __aligned__(16)));
19 // Internal types determined by clang builtin definitions
20 typedef int32_t __v128_u
__attribute__((__vector_size__(16), __aligned__(1)));
21 typedef signed char __i8x16
22 __attribute__((__vector_size__(16), __aligned__(16)));
23 typedef unsigned char __u8x16
24 __attribute__((__vector_size__(16), __aligned__(16)));
25 typedef short __i16x8
__attribute__((__vector_size__(16), __aligned__(16)));
26 typedef unsigned short __u16x8
27 __attribute__((__vector_size__(16), __aligned__(16)));
28 typedef int __i32x4
__attribute__((__vector_size__(16), __aligned__(16)));
29 typedef unsigned int __u32x4
30 __attribute__((__vector_size__(16), __aligned__(16)));
31 typedef long long __i64x2
__attribute__((__vector_size__(16), __aligned__(16)));
32 typedef unsigned long long __u64x2
33 __attribute__((__vector_size__(16), __aligned__(16)));
34 typedef float __f32x4
__attribute__((__vector_size__(16), __aligned__(16)));
35 typedef double __f64x2
__attribute__((__vector_size__(16), __aligned__(16)));
36 typedef __fp16 __f16x8
__attribute__((__vector_size__(16), __aligned__(16)));
38 typedef signed char __i8x8
__attribute__((__vector_size__(8), __aligned__(8)));
39 typedef unsigned char __u8x8
40 __attribute__((__vector_size__(8), __aligned__(8)));
41 typedef short __i16x4
__attribute__((__vector_size__(8), __aligned__(8)));
42 typedef unsigned short __u16x4
43 __attribute__((__vector_size__(8), __aligned__(8)));
44 typedef int __i32x2
__attribute__((__vector_size__(8), __aligned__(8)));
45 typedef unsigned int __u32x2
46 __attribute__((__vector_size__(8), __aligned__(8)));
47 typedef float __f32x2
__attribute__((__vector_size__(8), __aligned__(8)));
49 #define __DEFAULT_FN_ATTRS \
50 __attribute__((__always_inline__, __nodebug__, __target__("simd128"), \
51 __min_vector_width__(128)))
53 #define __REQUIRE_CONSTANT(c) \
54 __attribute__((__diagnose_if__(!__builtin_constant_p(c), \
55 #c " must be constant", "error")))
57 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load(const void *__mem
) {
58 // UB-free unaligned access copied from xmmintrin.h
59 struct __wasm_v128_load_struct
{
61 } __attribute__((__packed__
, __may_alias__
));
62 return ((const struct __wasm_v128_load_struct
*)__mem
)->__v
;
65 static __inline__ v128_t __DEFAULT_FN_ATTRS
66 wasm_v128_load8_splat(const void *__mem
) {
67 struct __wasm_v128_load8_splat_struct
{
69 } __attribute__((__packed__
, __may_alias__
));
70 uint8_t __v
= ((const struct __wasm_v128_load8_splat_struct
*)__mem
)->__v
;
71 return (v128_t
)(__u8x16
){__v
, __v
, __v
, __v
, __v
, __v
, __v
, __v
,
72 __v
, __v
, __v
, __v
, __v
, __v
, __v
, __v
};
75 static __inline__ v128_t __DEFAULT_FN_ATTRS
76 wasm_v128_load16_splat(const void *__mem
) {
77 struct __wasm_v128_load16_splat_struct
{
79 } __attribute__((__packed__
, __may_alias__
));
80 uint16_t __v
= ((const struct __wasm_v128_load16_splat_struct
*)__mem
)->__v
;
81 return (v128_t
)(__u16x8
){__v
, __v
, __v
, __v
, __v
, __v
, __v
, __v
};
84 static __inline__ v128_t __DEFAULT_FN_ATTRS
85 wasm_v128_load32_splat(const void *__mem
) {
86 struct __wasm_v128_load32_splat_struct
{
88 } __attribute__((__packed__
, __may_alias__
));
89 uint32_t __v
= ((const struct __wasm_v128_load32_splat_struct
*)__mem
)->__v
;
90 return (v128_t
)(__u32x4
){__v
, __v
, __v
, __v
};
93 static __inline__ v128_t __DEFAULT_FN_ATTRS
94 wasm_v128_load64_splat(const void *__mem
) {
95 struct __wasm_v128_load64_splat_struct
{
97 } __attribute__((__packed__
, __may_alias__
));
98 uint64_t __v
= ((const struct __wasm_v128_load64_splat_struct
*)__mem
)->__v
;
99 return (v128_t
)(__u64x2
){__v
, __v
};
102 static __inline__ v128_t __DEFAULT_FN_ATTRS
103 wasm_i16x8_load8x8(const void *__mem
) {
104 struct __wasm_i16x8_load8x8_struct
{
106 } __attribute__((__packed__
, __may_alias__
));
107 __i8x8 __v
= ((const struct __wasm_i16x8_load8x8_struct
*)__mem
)->__v
;
108 return (v128_t
) __builtin_convertvector(__v
, __i16x8
);
111 static __inline__ v128_t __DEFAULT_FN_ATTRS
112 wasm_u16x8_load8x8(const void *__mem
) {
113 struct __wasm_u16x8_load8x8_struct
{
115 } __attribute__((__packed__
, __may_alias__
));
116 __u8x8 __v
= ((const struct __wasm_u16x8_load8x8_struct
*)__mem
)->__v
;
117 return (v128_t
) __builtin_convertvector(__v
, __u16x8
);
120 static __inline__ v128_t __DEFAULT_FN_ATTRS
121 wasm_i32x4_load16x4(const void *__mem
) {
122 struct __wasm_i32x4_load16x4_struct
{
124 } __attribute__((__packed__
, __may_alias__
));
125 __i16x4 __v
= ((const struct __wasm_i32x4_load16x4_struct
*)__mem
)->__v
;
126 return (v128_t
) __builtin_convertvector(__v
, __i32x4
);
129 static __inline__ v128_t __DEFAULT_FN_ATTRS
130 wasm_u32x4_load16x4(const void *__mem
) {
131 struct __wasm_u32x4_load16x4_struct
{
133 } __attribute__((__packed__
, __may_alias__
));
134 __u16x4 __v
= ((const struct __wasm_u32x4_load16x4_struct
*)__mem
)->__v
;
135 return (v128_t
) __builtin_convertvector(__v
, __u32x4
);
138 static __inline__ v128_t __DEFAULT_FN_ATTRS
139 wasm_i64x2_load32x2(const void *__mem
) {
140 struct __wasm_i64x2_load32x2_struct
{
142 } __attribute__((__packed__
, __may_alias__
));
143 __i32x2 __v
= ((const struct __wasm_i64x2_load32x2_struct
*)__mem
)->__v
;
144 return (v128_t
) __builtin_convertvector(__v
, __i64x2
);
147 static __inline__ v128_t __DEFAULT_FN_ATTRS
148 wasm_u64x2_load32x2(const void *__mem
) {
149 struct __wasm_u64x2_load32x2_struct
{
151 } __attribute__((__packed__
, __may_alias__
));
152 __u32x2 __v
= ((const struct __wasm_u64x2_load32x2_struct
*)__mem
)->__v
;
153 return (v128_t
) __builtin_convertvector(__v
, __u64x2
);
156 static __inline__ v128_t __DEFAULT_FN_ATTRS
157 wasm_v128_load32_zero(const void *__mem
) {
158 struct __wasm_v128_load32_zero_struct
{
160 } __attribute__((__packed__
, __may_alias__
));
161 int32_t __v
= ((const struct __wasm_v128_load32_zero_struct
*)__mem
)->__v
;
162 return (v128_t
)(__i32x4
){__v
, 0, 0, 0};
165 static __inline__ v128_t __DEFAULT_FN_ATTRS
166 wasm_v128_load64_zero(const void *__mem
) {
167 struct __wasm_v128_load64_zero_struct
{
169 } __attribute__((__packed__
, __may_alias__
));
170 int64_t __v
= ((const struct __wasm_v128_load64_zero_struct
*)__mem
)->__v
;
171 return (v128_t
)(__i64x2
){__v
, 0};
174 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load8_lane(
175 const void *__mem
, v128_t __vec
, int __i
) __REQUIRE_CONSTANT(__i
) {
176 struct __wasm_v128_load8_lane_struct
{
178 } __attribute__((__packed__
, __may_alias__
));
179 int8_t __v
= ((const struct __wasm_v128_load8_lane_struct
*)__mem
)->__v
;
180 __i8x16 __ret
= (__i8x16
)__vec
;
182 return (v128_t
)__ret
;
185 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load16_lane(
186 const void *__mem
, v128_t __vec
, int __i
) __REQUIRE_CONSTANT(__i
) {
187 struct __wasm_v128_load16_lane_struct
{
189 } __attribute__((__packed__
, __may_alias__
));
190 int16_t __v
= ((const struct __wasm_v128_load16_lane_struct
*)__mem
)->__v
;
191 __i16x8 __ret
= (__i16x8
)__vec
;
193 return (v128_t
)__ret
;
196 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load32_lane(
197 const void *__mem
, v128_t __vec
, int __i
) __REQUIRE_CONSTANT(__i
) {
198 struct __wasm_v128_load32_lane_struct
{
200 } __attribute__((__packed__
, __may_alias__
));
201 int32_t __v
= ((const struct __wasm_v128_load32_lane_struct
*)__mem
)->__v
;
202 __i32x4 __ret
= (__i32x4
)__vec
;
204 return (v128_t
)__ret
;
207 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load64_lane(
208 const void *__mem
, v128_t __vec
, int __i
) __REQUIRE_CONSTANT(__i
) {
209 struct __wasm_v128_load64_lane_struct
{
211 } __attribute__((__packed__
, __may_alias__
));
212 int64_t __v
= ((const struct __wasm_v128_load64_lane_struct
*)__mem
)->__v
;
213 __i64x2 __ret
= (__i64x2
)__vec
;
215 return (v128_t
)__ret
;
218 static __inline__
void __DEFAULT_FN_ATTRS
wasm_v128_store(void *__mem
,
220 // UB-free unaligned access copied from xmmintrin.h
221 struct __wasm_v128_store_struct
{
223 } __attribute__((__packed__
, __may_alias__
));
224 ((struct __wasm_v128_store_struct
*)__mem
)->__v
= __a
;
227 static __inline__
void __DEFAULT_FN_ATTRS
wasm_v128_store8_lane(void *__mem
,
230 __REQUIRE_CONSTANT(__i
) {
231 struct __wasm_v128_store8_lane_struct
{
233 } __attribute__((__packed__
, __may_alias__
));
234 ((struct __wasm_v128_store8_lane_struct
*)__mem
)->__v
= ((__i8x16
)__vec
)[__i
];
237 static __inline__
void __DEFAULT_FN_ATTRS
wasm_v128_store16_lane(void *__mem
,
240 __REQUIRE_CONSTANT(__i
) {
241 struct __wasm_v128_store16_lane_struct
{
243 } __attribute__((__packed__
, __may_alias__
));
244 ((struct __wasm_v128_store16_lane_struct
*)__mem
)->__v
=
245 ((__i16x8
)__vec
)[__i
];
248 static __inline__
void __DEFAULT_FN_ATTRS
wasm_v128_store32_lane(void *__mem
,
251 __REQUIRE_CONSTANT(__i
) {
252 struct __wasm_v128_store32_lane_struct
{
254 } __attribute__((__packed__
, __may_alias__
));
255 ((struct __wasm_v128_store32_lane_struct
*)__mem
)->__v
=
256 ((__i32x4
)__vec
)[__i
];
259 static __inline__
void __DEFAULT_FN_ATTRS
wasm_v128_store64_lane(void *__mem
,
262 __REQUIRE_CONSTANT(__i
) {
263 struct __wasm_v128_store64_lane_struct
{
265 } __attribute__((__packed__
, __may_alias__
));
266 ((struct __wasm_v128_store64_lane_struct
*)__mem
)->__v
=
267 ((__i64x2
)__vec
)[__i
];
270 static __inline__ v128_t __DEFAULT_FN_ATTRS
271 wasm_i8x16_make(int8_t __c0
, int8_t __c1
, int8_t __c2
, int8_t __c3
, int8_t __c4
,
272 int8_t __c5
, int8_t __c6
, int8_t __c7
, int8_t __c8
, int8_t __c9
,
273 int8_t __c10
, int8_t __c11
, int8_t __c12
, int8_t __c13
,
274 int8_t __c14
, int8_t __c15
) {
275 return (v128_t
)(__i8x16
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
,
276 __c6
, __c7
, __c8
, __c9
, __c10
, __c11
,
277 __c12
, __c13
, __c14
, __c15
};
280 static __inline__ v128_t __DEFAULT_FN_ATTRS
281 wasm_u8x16_make(uint8_t __c0
, uint8_t __c1
, uint8_t __c2
, uint8_t __c3
,
282 uint8_t __c4
, uint8_t __c5
, uint8_t __c6
, uint8_t __c7
,
283 uint8_t __c8
, uint8_t __c9
, uint8_t __c10
, uint8_t __c11
,
284 uint8_t __c12
, uint8_t __c13
, uint8_t __c14
, uint8_t __c15
) {
285 return (v128_t
)(__u8x16
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
,
286 __c6
, __c7
, __c8
, __c9
, __c10
, __c11
,
287 __c12
, __c13
, __c14
, __c15
};
290 static __inline__ v128_t __DEFAULT_FN_ATTRS
291 wasm_i16x8_make(int16_t __c0
, int16_t __c1
, int16_t __c2
, int16_t __c3
,
292 int16_t __c4
, int16_t __c5
, int16_t __c6
, int16_t __c7
) {
293 return (v128_t
)(__i16x8
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
, __c6
, __c7
};
296 static __inline__ v128_t __DEFAULT_FN_ATTRS
297 wasm_u16x8_make(uint16_t __c0
, uint16_t __c1
, uint16_t __c2
, uint16_t __c3
,
298 uint16_t __c4
, uint16_t __c5
, uint16_t __c6
, uint16_t __c7
) {
299 return (v128_t
)(__u16x8
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
, __c6
, __c7
};
302 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_make(int32_t __c0
,
306 return (v128_t
)(__i32x4
){__c0
, __c1
, __c2
, __c3
};
309 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_make(uint32_t __c0
,
313 return (v128_t
)(__u32x4
){__c0
, __c1
, __c2
, __c3
};
316 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_make(int64_t __c0
,
318 return (v128_t
)(__i64x2
){__c0
, __c1
};
321 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_make(uint64_t __c0
,
323 return (v128_t
)(__u64x2
){__c0
, __c1
};
326 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_make(float __c0
,
330 return (v128_t
)(__f32x4
){__c0
, __c1
, __c2
, __c3
};
333 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_make(double __c0
,
335 return (v128_t
)(__f64x2
){__c0
, __c1
};
338 static __inline__ v128_t __DEFAULT_FN_ATTRS
339 wasm_i8x16_const(int8_t __c0
, int8_t __c1
, int8_t __c2
, int8_t __c3
,
340 int8_t __c4
, int8_t __c5
, int8_t __c6
, int8_t __c7
,
341 int8_t __c8
, int8_t __c9
, int8_t __c10
, int8_t __c11
,
342 int8_t __c12
, int8_t __c13
, int8_t __c14
, int8_t __c15
)
343 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
344 __REQUIRE_CONSTANT(__c3
) __REQUIRE_CONSTANT(__c4
)
345 __REQUIRE_CONSTANT(__c5
) __REQUIRE_CONSTANT(__c6
)
346 __REQUIRE_CONSTANT(__c7
) __REQUIRE_CONSTANT(__c8
)
347 __REQUIRE_CONSTANT(__c9
) __REQUIRE_CONSTANT(__c10
)
348 __REQUIRE_CONSTANT(__c11
) __REQUIRE_CONSTANT(__c12
)
349 __REQUIRE_CONSTANT(__c13
) __REQUIRE_CONSTANT(__c14
)
350 __REQUIRE_CONSTANT(__c15
) {
351 return (v128_t
)(__i8x16
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
,
352 __c6
, __c7
, __c8
, __c9
, __c10
, __c11
,
353 __c12
, __c13
, __c14
, __c15
};
356 static __inline__ v128_t __DEFAULT_FN_ATTRS
357 wasm_u8x16_const(uint8_t __c0
, uint8_t __c1
, uint8_t __c2
, uint8_t __c3
,
358 uint8_t __c4
, uint8_t __c5
, uint8_t __c6
, uint8_t __c7
,
359 uint8_t __c8
, uint8_t __c9
, uint8_t __c10
, uint8_t __c11
,
360 uint8_t __c12
, uint8_t __c13
, uint8_t __c14
, uint8_t __c15
)
361 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
362 __REQUIRE_CONSTANT(__c3
) __REQUIRE_CONSTANT(__c4
)
363 __REQUIRE_CONSTANT(__c5
) __REQUIRE_CONSTANT(__c6
)
364 __REQUIRE_CONSTANT(__c7
) __REQUIRE_CONSTANT(__c8
)
365 __REQUIRE_CONSTANT(__c9
) __REQUIRE_CONSTANT(__c10
)
366 __REQUIRE_CONSTANT(__c11
) __REQUIRE_CONSTANT(__c12
)
367 __REQUIRE_CONSTANT(__c13
) __REQUIRE_CONSTANT(__c14
)
368 __REQUIRE_CONSTANT(__c15
) {
369 return (v128_t
)(__u8x16
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
,
370 __c6
, __c7
, __c8
, __c9
, __c10
, __c11
,
371 __c12
, __c13
, __c14
, __c15
};
374 static __inline__ v128_t __DEFAULT_FN_ATTRS
375 wasm_i16x8_const(int16_t __c0
, int16_t __c1
, int16_t __c2
, int16_t __c3
,
376 int16_t __c4
, int16_t __c5
, int16_t __c6
, int16_t __c7
)
377 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
378 __REQUIRE_CONSTANT(__c3
) __REQUIRE_CONSTANT(__c4
)
379 __REQUIRE_CONSTANT(__c5
) __REQUIRE_CONSTANT(__c6
)
380 __REQUIRE_CONSTANT(__c7
) {
381 return (v128_t
)(__i16x8
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
, __c6
, __c7
};
384 static __inline__ v128_t __DEFAULT_FN_ATTRS
385 wasm_u16x8_const(uint16_t __c0
, uint16_t __c1
, uint16_t __c2
, uint16_t __c3
,
386 uint16_t __c4
, uint16_t __c5
, uint16_t __c6
, uint16_t __c7
)
387 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
388 __REQUIRE_CONSTANT(__c3
) __REQUIRE_CONSTANT(__c4
)
389 __REQUIRE_CONSTANT(__c5
) __REQUIRE_CONSTANT(__c6
)
390 __REQUIRE_CONSTANT(__c7
) {
391 return (v128_t
)(__u16x8
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
, __c6
, __c7
};
394 static __inline__ v128_t __DEFAULT_FN_ATTRS
395 wasm_i32x4_const(int32_t __c0
, int32_t __c1
, int32_t __c2
, int32_t __c3
)
396 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
397 __REQUIRE_CONSTANT(__c3
) {
398 return (v128_t
)(__i32x4
){__c0
, __c1
, __c2
, __c3
};
401 static __inline__ v128_t __DEFAULT_FN_ATTRS
402 wasm_u32x4_const(uint32_t __c0
, uint32_t __c1
, uint32_t __c2
, uint32_t __c3
)
403 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
404 __REQUIRE_CONSTANT(__c3
) {
405 return (v128_t
)(__u32x4
){__c0
, __c1
, __c2
, __c3
};
408 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_const(int64_t __c0
,
410 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) {
411 return (v128_t
)(__i64x2
){__c0
, __c1
};
414 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_const(uint64_t __c0
,
416 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) {
417 return (v128_t
)(__u64x2
){__c0
, __c1
};
420 static __inline__ v128_t __DEFAULT_FN_ATTRS
421 wasm_f32x4_const(float __c0
, float __c1
, float __c2
, float __c3
)
422 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
423 __REQUIRE_CONSTANT(__c3
) {
424 return (v128_t
)(__f32x4
){__c0
, __c1
, __c2
, __c3
};
427 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_const(double __c0
,
429 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) {
430 return (v128_t
)(__f64x2
){__c0
, __c1
};
433 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_const_splat(int8_t __c
)
434 __REQUIRE_CONSTANT(__c
) {
435 return (v128_t
)(__i8x16
){__c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
,
436 __c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
};
439 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_const_splat(uint8_t __c
)
440 __REQUIRE_CONSTANT(__c
) {
441 return (v128_t
)(__u8x16
){__c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
,
442 __c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
};
445 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_const_splat(int16_t __c
)
446 __REQUIRE_CONSTANT(__c
) {
447 return (v128_t
)(__i16x8
){__c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
};
450 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_const_splat(uint16_t __c
)
451 __REQUIRE_CONSTANT(__c
) {
452 return (v128_t
)(__u16x8
){__c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
};
455 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_const_splat(int32_t __c
)
456 __REQUIRE_CONSTANT(__c
) {
457 return (v128_t
)(__i32x4
){__c
, __c
, __c
, __c
};
460 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_const_splat(uint32_t __c
)
461 __REQUIRE_CONSTANT(__c
) {
462 return (v128_t
)(__u32x4
){__c
, __c
, __c
, __c
};
465 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_const_splat(int64_t __c
)
466 __REQUIRE_CONSTANT(__c
) {
467 return (v128_t
)(__i64x2
){__c
, __c
};
470 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_const_splat(uint64_t __c
)
471 __REQUIRE_CONSTANT(__c
) {
472 return (v128_t
)(__u64x2
){__c
, __c
};
475 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_const_splat(float __c
)
476 __REQUIRE_CONSTANT(__c
) {
477 return (v128_t
)(__f32x4
){__c
, __c
, __c
, __c
};
480 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_const_splat(double __c
)
481 __REQUIRE_CONSTANT(__c
) {
482 return (v128_t
)(__f64x2
){__c
, __c
};
485 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_splat(int8_t __a
) {
486 return (v128_t
)(__i8x16
){__a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
,
487 __a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
};
490 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_splat(uint8_t __a
) {
491 return (v128_t
)(__u8x16
){__a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
,
492 __a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
};
495 static __inline__
int8_t __DEFAULT_FN_ATTRS
wasm_i8x16_extract_lane(v128_t __a
,
497 __REQUIRE_CONSTANT(__i
) {
498 return ((__i8x16
)__a
)[__i
];
501 static __inline__
uint8_t __DEFAULT_FN_ATTRS
wasm_u8x16_extract_lane(v128_t __a
,
503 __REQUIRE_CONSTANT(__i
) {
504 return ((__u8x16
)__a
)[__i
];
507 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_replace_lane(v128_t __a
,
510 __REQUIRE_CONSTANT(__i
) {
511 __i8x16 __v
= (__i8x16
)__a
;
516 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_replace_lane(v128_t __a
,
519 __REQUIRE_CONSTANT(__i
) {
520 __u8x16 __v
= (__u8x16
)__a
;
525 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_splat(int16_t __a
) {
526 return (v128_t
)(__i16x8
){__a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
};
529 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_splat(uint16_t __a
) {
530 return (v128_t
)(__u16x8
){__a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
};
533 static __inline__
int16_t __DEFAULT_FN_ATTRS
wasm_i16x8_extract_lane(v128_t __a
,
535 __REQUIRE_CONSTANT(__i
) {
536 return ((__i16x8
)__a
)[__i
];
539 static __inline__
uint16_t __DEFAULT_FN_ATTRS
540 wasm_u16x8_extract_lane(v128_t __a
, int __i
) __REQUIRE_CONSTANT(__i
) {
541 return ((__u16x8
)__a
)[__i
];
544 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_replace_lane(v128_t __a
,
547 __REQUIRE_CONSTANT(__i
) {
548 __i16x8 __v
= (__i16x8
)__a
;
553 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_replace_lane(
554 v128_t __a
, int __i
, uint16_t __b
) __REQUIRE_CONSTANT(__i
) {
555 __u16x8 __v
= (__u16x8
)__a
;
560 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_splat(int32_t __a
) {
561 return (v128_t
)(__i32x4
){__a
, __a
, __a
, __a
};
564 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_splat(uint32_t __a
) {
565 return (v128_t
)(__u32x4
){__a
, __a
, __a
, __a
};
568 static __inline__
int32_t __DEFAULT_FN_ATTRS
wasm_i32x4_extract_lane(v128_t __a
,
570 __REQUIRE_CONSTANT(__i
) {
571 return ((__i32x4
)__a
)[__i
];
574 static __inline__
uint32_t __DEFAULT_FN_ATTRS
575 wasm_u32x4_extract_lane(v128_t __a
, int __i
) __REQUIRE_CONSTANT(__i
) {
576 return ((__u32x4
)__a
)[__i
];
579 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_replace_lane(v128_t __a
,
582 __REQUIRE_CONSTANT(__i
) {
583 __i32x4 __v
= (__i32x4
)__a
;
588 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_replace_lane(
589 v128_t __a
, int __i
, uint32_t __b
) __REQUIRE_CONSTANT(__i
) {
590 __u32x4 __v
= (__u32x4
)__a
;
595 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_splat(int64_t __a
) {
596 return (v128_t
)(__i64x2
){__a
, __a
};
599 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_splat(uint64_t __a
) {
600 return (v128_t
)(__u64x2
){__a
, __a
};
603 static __inline__
int64_t __DEFAULT_FN_ATTRS
wasm_i64x2_extract_lane(v128_t __a
,
605 __REQUIRE_CONSTANT(__i
) {
606 return ((__i64x2
)__a
)[__i
];
609 static __inline__
uint64_t __DEFAULT_FN_ATTRS
610 wasm_u64x2_extract_lane(v128_t __a
, int __i
) __REQUIRE_CONSTANT(__i
) {
611 return ((__u64x2
)__a
)[__i
];
614 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_replace_lane(v128_t __a
,
617 __REQUIRE_CONSTANT(__i
) {
618 __i64x2 __v
= (__i64x2
)__a
;
623 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_replace_lane(
624 v128_t __a
, int __i
, uint64_t __b
) __REQUIRE_CONSTANT(__i
) {
625 __u64x2 __v
= (__u64x2
)__a
;
630 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_splat(float __a
) {
631 return (v128_t
)(__f32x4
){__a
, __a
, __a
, __a
};
634 static __inline__
float __DEFAULT_FN_ATTRS
wasm_f32x4_extract_lane(v128_t __a
,
636 __REQUIRE_CONSTANT(__i
) {
637 return ((__f32x4
)__a
)[__i
];
640 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_replace_lane(v128_t __a
,
643 __REQUIRE_CONSTANT(__i
) {
644 __f32x4 __v
= (__f32x4
)__a
;
649 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_splat(double __a
) {
650 return (v128_t
)(__f64x2
){__a
, __a
};
653 static __inline__
double __DEFAULT_FN_ATTRS
wasm_f64x2_extract_lane(v128_t __a
,
655 __REQUIRE_CONSTANT(__i
) {
656 return ((__f64x2
)__a
)[__i
];
659 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_replace_lane(v128_t __a
,
662 __REQUIRE_CONSTANT(__i
) {
663 __f64x2 __v
= (__f64x2
)__a
;
668 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_eq(v128_t __a
,
670 return (v128_t
)((__i8x16
)__a
== (__i8x16
)__b
);
673 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_ne(v128_t __a
,
675 return (v128_t
)((__i8x16
)__a
!= (__i8x16
)__b
);
678 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_lt(v128_t __a
,
680 return (v128_t
)((__i8x16
)__a
< (__i8x16
)__b
);
683 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_lt(v128_t __a
,
685 return (v128_t
)((__u8x16
)__a
< (__u8x16
)__b
);
688 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_gt(v128_t __a
,
690 return (v128_t
)((__i8x16
)__a
> (__i8x16
)__b
);
693 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_gt(v128_t __a
,
695 return (v128_t
)((__u8x16
)__a
> (__u8x16
)__b
);
698 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_le(v128_t __a
,
700 return (v128_t
)((__i8x16
)__a
<= (__i8x16
)__b
);
703 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_le(v128_t __a
,
705 return (v128_t
)((__u8x16
)__a
<= (__u8x16
)__b
);
708 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_ge(v128_t __a
,
710 return (v128_t
)((__i8x16
)__a
>= (__i8x16
)__b
);
713 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_ge(v128_t __a
,
715 return (v128_t
)((__u8x16
)__a
>= (__u8x16
)__b
);
718 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_eq(v128_t __a
,
720 return (v128_t
)((__i16x8
)__a
== (__i16x8
)__b
);
723 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_ne(v128_t __a
,
725 return (v128_t
)((__u16x8
)__a
!= (__u16x8
)__b
);
728 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_lt(v128_t __a
,
730 return (v128_t
)((__i16x8
)__a
< (__i16x8
)__b
);
733 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_lt(v128_t __a
,
735 return (v128_t
)((__u16x8
)__a
< (__u16x8
)__b
);
738 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_gt(v128_t __a
,
740 return (v128_t
)((__i16x8
)__a
> (__i16x8
)__b
);
743 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_gt(v128_t __a
,
745 return (v128_t
)((__u16x8
)__a
> (__u16x8
)__b
);
748 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_le(v128_t __a
,
750 return (v128_t
)((__i16x8
)__a
<= (__i16x8
)__b
);
753 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_le(v128_t __a
,
755 return (v128_t
)((__u16x8
)__a
<= (__u16x8
)__b
);
758 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_ge(v128_t __a
,
760 return (v128_t
)((__i16x8
)__a
>= (__i16x8
)__b
);
763 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_ge(v128_t __a
,
765 return (v128_t
)((__u16x8
)__a
>= (__u16x8
)__b
);
768 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_eq(v128_t __a
,
770 return (v128_t
)((__i32x4
)__a
== (__i32x4
)__b
);
773 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_ne(v128_t __a
,
775 return (v128_t
)((__i32x4
)__a
!= (__i32x4
)__b
);
778 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_lt(v128_t __a
,
780 return (v128_t
)((__i32x4
)__a
< (__i32x4
)__b
);
783 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_lt(v128_t __a
,
785 return (v128_t
)((__u32x4
)__a
< (__u32x4
)__b
);
788 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_gt(v128_t __a
,
790 return (v128_t
)((__i32x4
)__a
> (__i32x4
)__b
);
793 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_gt(v128_t __a
,
795 return (v128_t
)((__u32x4
)__a
> (__u32x4
)__b
);
798 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_le(v128_t __a
,
800 return (v128_t
)((__i32x4
)__a
<= (__i32x4
)__b
);
803 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_le(v128_t __a
,
805 return (v128_t
)((__u32x4
)__a
<= (__u32x4
)__b
);
808 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_ge(v128_t __a
,
810 return (v128_t
)((__i32x4
)__a
>= (__i32x4
)__b
);
813 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_ge(v128_t __a
,
815 return (v128_t
)((__u32x4
)__a
>= (__u32x4
)__b
);
818 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_eq(v128_t __a
,
820 return (v128_t
)((__i64x2
)__a
== (__i64x2
)__b
);
823 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_ne(v128_t __a
,
825 return (v128_t
)((__i64x2
)__a
!= (__i64x2
)__b
);
828 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_lt(v128_t __a
,
830 return (v128_t
)((__i64x2
)__a
< (__i64x2
)__b
);
833 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_gt(v128_t __a
,
835 return (v128_t
)((__i64x2
)__a
> (__i64x2
)__b
);
838 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_le(v128_t __a
,
840 return (v128_t
)((__i64x2
)__a
<= (__i64x2
)__b
);
843 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_ge(v128_t __a
,
845 return (v128_t
)((__i64x2
)__a
>= (__i64x2
)__b
);
848 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_eq(v128_t __a
,
850 return (v128_t
)((__f32x4
)__a
== (__f32x4
)__b
);
853 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_ne(v128_t __a
,
855 return (v128_t
)((__f32x4
)__a
!= (__f32x4
)__b
);
858 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_lt(v128_t __a
,
860 return (v128_t
)((__f32x4
)__a
< (__f32x4
)__b
);
863 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_gt(v128_t __a
,
865 return (v128_t
)((__f32x4
)__a
> (__f32x4
)__b
);
868 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_le(v128_t __a
,
870 return (v128_t
)((__f32x4
)__a
<= (__f32x4
)__b
);
873 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_ge(v128_t __a
,
875 return (v128_t
)((__f32x4
)__a
>= (__f32x4
)__b
);
878 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_eq(v128_t __a
,
880 return (v128_t
)((__f64x2
)__a
== (__f64x2
)__b
);
883 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_ne(v128_t __a
,
885 return (v128_t
)((__f64x2
)__a
!= (__f64x2
)__b
);
888 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_lt(v128_t __a
,
890 return (v128_t
)((__f64x2
)__a
< (__f64x2
)__b
);
893 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_gt(v128_t __a
,
895 return (v128_t
)((__f64x2
)__a
> (__f64x2
)__b
);
898 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_le(v128_t __a
,
900 return (v128_t
)((__f64x2
)__a
<= (__f64x2
)__b
);
903 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_ge(v128_t __a
,
905 return (v128_t
)((__f64x2
)__a
>= (__f64x2
)__b
);
908 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_not(v128_t __a
) {
912 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_and(v128_t __a
,
917 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_or(v128_t __a
,
922 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_xor(v128_t __a
,
927 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_andnot(v128_t __a
,
932 static __inline__
bool __DEFAULT_FN_ATTRS
wasm_v128_any_true(v128_t __a
) {
933 return __builtin_wasm_any_true_v128((__i8x16
)__a
);
936 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_bitselect(v128_t __a
,
939 return (v128_t
)__builtin_wasm_bitselect((__i32x4
)__a
, (__i32x4
)__b
,
943 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_abs(v128_t __a
) {
944 return (v128_t
)__builtin_wasm_abs_i8x16((__i8x16
)__a
);
947 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_neg(v128_t __a
) {
948 return (v128_t
)(-(__u8x16
)__a
);
951 static __inline__
bool __DEFAULT_FN_ATTRS
wasm_i8x16_all_true(v128_t __a
) {
952 return __builtin_wasm_all_true_i8x16((__i8x16
)__a
);
955 static __inline__
uint32_t __DEFAULT_FN_ATTRS
wasm_i8x16_bitmask(v128_t __a
) {
956 return __builtin_wasm_bitmask_i8x16((__i8x16
)__a
);
959 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_popcnt(v128_t __a
) {
960 return (v128_t
)__builtin_elementwise_popcount((__i8x16
)__a
);
963 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_shl(v128_t __a
,
965 return (v128_t
)((__i8x16
)__a
<< (__b
& 0x7));
968 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_shr(v128_t __a
,
970 return (v128_t
)((__i8x16
)__a
>> (__b
& 0x7));
973 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_shr(v128_t __a
,
975 return (v128_t
)((__u8x16
)__a
>> (__b
& 0x7));
978 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_add(v128_t __a
,
980 return (v128_t
)((__u8x16
)__a
+ (__u8x16
)__b
);
983 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_add_sat(v128_t __a
,
985 return (v128_t
)__builtin_elementwise_add_sat((__i8x16
)__a
, (__i8x16
)__b
);
988 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_add_sat(v128_t __a
,
990 return (v128_t
)__builtin_elementwise_add_sat((__u8x16
)__a
, (__u8x16
)__b
);
993 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_sub(v128_t __a
,
995 return (v128_t
)((__u8x16
)__a
- (__u8x16
)__b
);
998 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_sub_sat(v128_t __a
,
1000 return (v128_t
)__builtin_elementwise_sub_sat((__i8x16
)__a
, (__i8x16
)__b
);
1003 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_sub_sat(v128_t __a
,
1005 return (v128_t
)__builtin_elementwise_sub_sat((__u8x16
)__a
, (__u8x16
)__b
);
1008 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_min(v128_t __a
,
1010 return (v128_t
)__builtin_elementwise_min((__i8x16
)__a
, (__i8x16
)__b
);
1013 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_min(v128_t __a
,
1015 return (v128_t
)__builtin_elementwise_min((__u8x16
)__a
, (__u8x16
)__b
);
1018 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_max(v128_t __a
,
1020 return (v128_t
)__builtin_elementwise_max((__i8x16
)__a
, (__i8x16
)__b
);
1023 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_max(v128_t __a
,
1025 return (v128_t
)__builtin_elementwise_max((__u8x16
)__a
, (__u8x16
)__b
);
1028 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_avgr(v128_t __a
,
1030 return (v128_t
)__builtin_wasm_avgr_u_i8x16((__u8x16
)__a
, (__u8x16
)__b
);
1033 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_abs(v128_t __a
) {
1034 return (v128_t
)__builtin_wasm_abs_i16x8((__i16x8
)__a
);
1037 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_neg(v128_t __a
) {
1038 return (v128_t
)(-(__u16x8
)__a
);
1041 static __inline__
bool __DEFAULT_FN_ATTRS
wasm_i16x8_all_true(v128_t __a
) {
1042 return __builtin_wasm_all_true_i16x8((__i16x8
)__a
);
1045 static __inline__
uint32_t __DEFAULT_FN_ATTRS
wasm_i16x8_bitmask(v128_t __a
) {
1046 return __builtin_wasm_bitmask_i16x8((__i16x8
)__a
);
1049 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_shl(v128_t __a
,
1051 return (v128_t
)((__i16x8
)__a
<< (__b
& 0xF));
1054 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_shr(v128_t __a
,
1056 return (v128_t
)((__i16x8
)__a
>> (__b
& 0xF));
1059 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_shr(v128_t __a
,
1061 return (v128_t
)((__u16x8
)__a
>> (__b
& 0xF));
1064 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_add(v128_t __a
,
1066 return (v128_t
)((__u16x8
)__a
+ (__u16x8
)__b
);
1069 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_add_sat(v128_t __a
,
1071 return (v128_t
)__builtin_elementwise_add_sat((__i16x8
)__a
, (__i16x8
)__b
);
1074 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_add_sat(v128_t __a
,
1076 return (v128_t
)__builtin_elementwise_add_sat((__u16x8
)__a
, (__u16x8
)__b
);
1079 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_sub(v128_t __a
,
1081 return (v128_t
)((__i16x8
)__a
- (__i16x8
)__b
);
1084 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_sub_sat(v128_t __a
,
1086 return (v128_t
)__builtin_elementwise_sub_sat((__i16x8
)__a
, (__i16x8
)__b
);
1089 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_sub_sat(v128_t __a
,
1091 return (v128_t
)__builtin_elementwise_sub_sat((__u16x8
)__a
, (__u16x8
)__b
);
1094 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_mul(v128_t __a
,
1096 return (v128_t
)((__u16x8
)__a
* (__u16x8
)__b
);
1099 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_min(v128_t __a
,
1101 return (v128_t
)__builtin_elementwise_min((__i16x8
)__a
, (__i16x8
)__b
);
1104 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_min(v128_t __a
,
1106 return (v128_t
)__builtin_elementwise_min((__u16x8
)__a
, (__u16x8
)__b
);
1109 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_max(v128_t __a
,
1111 return (v128_t
)__builtin_elementwise_max((__i16x8
)__a
, (__i16x8
)__b
);
1114 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_max(v128_t __a
,
1116 return (v128_t
)__builtin_elementwise_max((__u16x8
)__a
, (__u16x8
)__b
);
1119 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_avgr(v128_t __a
,
1121 return (v128_t
)__builtin_wasm_avgr_u_i16x8((__u16x8
)__a
, (__u16x8
)__b
);
1124 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_abs(v128_t __a
) {
1125 return (v128_t
)__builtin_wasm_abs_i32x4((__i32x4
)__a
);
1128 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_neg(v128_t __a
) {
1129 return (v128_t
)(-(__u32x4
)__a
);
1132 static __inline__
bool __DEFAULT_FN_ATTRS
wasm_i32x4_all_true(v128_t __a
) {
1133 return __builtin_wasm_all_true_i32x4((__i32x4
)__a
);
1136 static __inline__
uint32_t __DEFAULT_FN_ATTRS
wasm_i32x4_bitmask(v128_t __a
) {
1137 return __builtin_wasm_bitmask_i32x4((__i32x4
)__a
);
1140 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_shl(v128_t __a
,
1142 return (v128_t
)((__i32x4
)__a
<< (__b
& 0x1F));
1145 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_shr(v128_t __a
,
1147 return (v128_t
)((__i32x4
)__a
>> (__b
& 0x1F));
1150 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_shr(v128_t __a
,
1152 return (v128_t
)((__u32x4
)__a
>> (__b
& 0x1F));
1155 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_add(v128_t __a
,
1157 return (v128_t
)((__u32x4
)__a
+ (__u32x4
)__b
);
1160 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_sub(v128_t __a
,
1162 return (v128_t
)((__u32x4
)__a
- (__u32x4
)__b
);
1165 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_mul(v128_t __a
,
1167 return (v128_t
)((__u32x4
)__a
* (__u32x4
)__b
);
1170 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_min(v128_t __a
,
1172 return (v128_t
)__builtin_elementwise_min((__i32x4
)__a
, (__i32x4
)__b
);
1175 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_min(v128_t __a
,
1177 return (v128_t
)__builtin_elementwise_min((__u32x4
)__a
, (__u32x4
)__b
);
1180 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_max(v128_t __a
,
1182 return (v128_t
)__builtin_elementwise_max((__i32x4
)__a
, (__i32x4
)__b
);
1185 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_max(v128_t __a
,
1187 return (v128_t
)__builtin_elementwise_max((__u32x4
)__a
, (__u32x4
)__b
);
1190 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_dot_i16x8(v128_t __a
,
1192 return (v128_t
)__builtin_wasm_dot_s_i32x4_i16x8((__i16x8
)__a
, (__i16x8
)__b
);
1195 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_abs(v128_t __a
) {
1196 return (v128_t
)__builtin_wasm_abs_i64x2((__i64x2
)__a
);
1199 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_neg(v128_t __a
) {
1200 return (v128_t
)(-(__u64x2
)__a
);
1203 static __inline__
bool __DEFAULT_FN_ATTRS
wasm_i64x2_all_true(v128_t __a
) {
1204 return __builtin_wasm_all_true_i64x2((__i64x2
)__a
);
1207 static __inline__
uint32_t __DEFAULT_FN_ATTRS
wasm_i64x2_bitmask(v128_t __a
) {
1208 return __builtin_wasm_bitmask_i64x2((__i64x2
)__a
);
1211 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_shl(v128_t __a
,
1213 return (v128_t
)((__i64x2
)__a
<< ((int64_t)__b
& 0x3F));
1216 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_shr(v128_t __a
,
1218 return (v128_t
)((__i64x2
)__a
>> ((int64_t)__b
& 0x3F));
1221 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_shr(v128_t __a
,
1223 return (v128_t
)((__u64x2
)__a
>> ((int64_t)__b
& 0x3F));
1226 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_add(v128_t __a
,
1228 return (v128_t
)((__u64x2
)__a
+ (__u64x2
)__b
);
1231 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_sub(v128_t __a
,
1233 return (v128_t
)((__u64x2
)__a
- (__u64x2
)__b
);
1236 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_mul(v128_t __a
,
1238 return (v128_t
)((__u64x2
)__a
* (__u64x2
)__b
);
1241 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_abs(v128_t __a
) {
1242 return (v128_t
)__builtin_wasm_abs_f32x4((__f32x4
)__a
);
1245 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_neg(v128_t __a
) {
1246 return (v128_t
)(-(__f32x4
)__a
);
1249 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_sqrt(v128_t __a
) {
1250 return (v128_t
)__builtin_wasm_sqrt_f32x4((__f32x4
)__a
);
1253 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_ceil(v128_t __a
) {
1254 return (v128_t
)__builtin_wasm_ceil_f32x4((__f32x4
)__a
);
1257 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_floor(v128_t __a
) {
1258 return (v128_t
)__builtin_wasm_floor_f32x4((__f32x4
)__a
);
1261 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_trunc(v128_t __a
) {
1262 return (v128_t
)__builtin_wasm_trunc_f32x4((__f32x4
)__a
);
1265 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_nearest(v128_t __a
) {
1266 return (v128_t
)__builtin_wasm_nearest_f32x4((__f32x4
)__a
);
1269 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_add(v128_t __a
,
1271 return (v128_t
)((__f32x4
)__a
+ (__f32x4
)__b
);
1274 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_sub(v128_t __a
,
1276 return (v128_t
)((__f32x4
)__a
- (__f32x4
)__b
);
1279 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_mul(v128_t __a
,
1281 return (v128_t
)((__f32x4
)__a
* (__f32x4
)__b
);
1284 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_div(v128_t __a
,
1286 return (v128_t
)((__f32x4
)__a
/ (__f32x4
)__b
);
1289 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_min(v128_t __a
,
1291 return (v128_t
)__builtin_wasm_min_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1294 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_max(v128_t __a
,
1296 return (v128_t
)__builtin_wasm_max_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1299 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_pmin(v128_t __a
,
1301 return (v128_t
)__builtin_wasm_pmin_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1304 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_pmax(v128_t __a
,
1306 return (v128_t
)__builtin_wasm_pmax_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1309 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_abs(v128_t __a
) {
1310 return (v128_t
)__builtin_wasm_abs_f64x2((__f64x2
)__a
);
1313 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_neg(v128_t __a
) {
1314 return (v128_t
)(-(__f64x2
)__a
);
1317 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_sqrt(v128_t __a
) {
1318 return (v128_t
)__builtin_wasm_sqrt_f64x2((__f64x2
)__a
);
1321 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_ceil(v128_t __a
) {
1322 return (v128_t
)__builtin_wasm_ceil_f64x2((__f64x2
)__a
);
1325 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_floor(v128_t __a
) {
1326 return (v128_t
)__builtin_wasm_floor_f64x2((__f64x2
)__a
);
1329 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_trunc(v128_t __a
) {
1330 return (v128_t
)__builtin_wasm_trunc_f64x2((__f64x2
)__a
);
1333 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_nearest(v128_t __a
) {
1334 return (v128_t
)__builtin_wasm_nearest_f64x2((__f64x2
)__a
);
1337 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_add(v128_t __a
,
1339 return (v128_t
)((__f64x2
)__a
+ (__f64x2
)__b
);
1342 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_sub(v128_t __a
,
1344 return (v128_t
)((__f64x2
)__a
- (__f64x2
)__b
);
1347 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_mul(v128_t __a
,
1349 return (v128_t
)((__f64x2
)__a
* (__f64x2
)__b
);
1352 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_div(v128_t __a
,
1354 return (v128_t
)((__f64x2
)__a
/ (__f64x2
)__b
);
1357 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_min(v128_t __a
,
1359 return (v128_t
)__builtin_wasm_min_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1362 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_max(v128_t __a
,
1364 return (v128_t
)__builtin_wasm_max_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1367 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_pmin(v128_t __a
,
1369 return (v128_t
)__builtin_wasm_pmin_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1372 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_pmax(v128_t __a
,
1374 return (v128_t
)__builtin_wasm_pmax_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1377 static __inline__ v128_t __DEFAULT_FN_ATTRS
1378 wasm_i32x4_trunc_sat_f32x4(v128_t __a
) {
1379 return (v128_t
)__builtin_wasm_trunc_saturate_s_i32x4_f32x4((__f32x4
)__a
);
1382 static __inline__ v128_t __DEFAULT_FN_ATTRS
1383 wasm_u32x4_trunc_sat_f32x4(v128_t __a
) {
1384 return (v128_t
)__builtin_wasm_trunc_saturate_u_i32x4_f32x4((__f32x4
)__a
);
1387 static __inline__ v128_t __DEFAULT_FN_ATTRS
1388 wasm_f32x4_convert_i32x4(v128_t __a
) {
1389 return (v128_t
) __builtin_convertvector((__i32x4
)__a
, __f32x4
);
1392 static __inline__ v128_t __DEFAULT_FN_ATTRS
1393 wasm_f32x4_convert_u32x4(v128_t __a
) {
1394 return (v128_t
) __builtin_convertvector((__u32x4
)__a
, __f32x4
);
1397 static __inline__ v128_t __DEFAULT_FN_ATTRS
1398 wasm_f64x2_convert_low_i32x4(v128_t __a
) {
1399 return (v128_t
) __builtin_convertvector((__i32x2
){__a
[0], __a
[1]}, __f64x2
);
1402 static __inline__ v128_t __DEFAULT_FN_ATTRS
1403 wasm_f64x2_convert_low_u32x4(v128_t __a
) {
1404 return (v128_t
) __builtin_convertvector((__u32x2
){__a
[0], __a
[1]}, __f64x2
);
1407 static __inline__ v128_t __DEFAULT_FN_ATTRS
1408 wasm_i32x4_trunc_sat_f64x2_zero(v128_t __a
) {
1409 return (v128_t
)__builtin_wasm_trunc_sat_s_zero_f64x2_i32x4((__f64x2
)__a
);
1412 static __inline__ v128_t __DEFAULT_FN_ATTRS
1413 wasm_u32x4_trunc_sat_f64x2_zero(v128_t __a
) {
1414 return (v128_t
)__builtin_wasm_trunc_sat_u_zero_f64x2_i32x4((__f64x2
)__a
);
1417 static __inline__ v128_t __DEFAULT_FN_ATTRS
1418 wasm_f32x4_demote_f64x2_zero(v128_t __a
) {
1419 return (v128_t
) __builtin_convertvector(
1420 __builtin_shufflevector((__f64x2
)__a
, (__f64x2
){0, 0}, 0, 1, 2, 3),
1424 static __inline__ v128_t __DEFAULT_FN_ATTRS
1425 wasm_f64x2_promote_low_f32x4(v128_t __a
) {
1426 return (v128_t
) __builtin_convertvector(
1427 (__f32x2
){((__f32x4
)__a
)[0], ((__f32x4
)__a
)[1]}, __f64x2
);
// Compile-time shuffles. Lane indices must be constants; the wider-lane
// variants expand each lane index into its constituent byte indices.

#define wasm_i8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7, __c8, __c9, __c10, __c11, __c12, __c13,       \
                           __c14, __c15)                                       \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), __c0, __c1, __c2, __c3, __c4, __c5,      \
      __c6, __c7, __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15))

#define wasm_i16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7)                                               \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*2, (__c0)*2 + 1, (__c1)*2,        \
      (__c1)*2 + 1, (__c2)*2, (__c2)*2 + 1, (__c3)*2, (__c3)*2 + 1, (__c4)*2,  \
      (__c4)*2 + 1, (__c5)*2, (__c5)*2 + 1, (__c6)*2, (__c6)*2 + 1, (__c7)*2,  \
      (__c7)*2 + 1))

#define wasm_i32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)                   \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*4, (__c0)*4 + 1, (__c0)*4 + 2,    \
      (__c0)*4 + 3, (__c1)*4, (__c1)*4 + 1, (__c1)*4 + 2, (__c1)*4 + 3,        \
      (__c2)*4, (__c2)*4 + 1, (__c2)*4 + 2, (__c2)*4 + 3, (__c3)*4,            \
      (__c3)*4 + 1, (__c3)*4 + 2, (__c3)*4 + 3))

#define wasm_i64x2_shuffle(__a, __b, __c0, __c1)                               \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*8, (__c0)*8 + 1, (__c0)*8 + 2,    \
      (__c0)*8 + 3, (__c0)*8 + 4, (__c0)*8 + 5, (__c0)*8 + 6, (__c0)*8 + 7,    \
      (__c1)*8, (__c1)*8 + 1, (__c1)*8 + 2, (__c1)*8 + 3, (__c1)*8 + 4,        \
      (__c1)*8 + 5, (__c1)*8 + 6, (__c1)*8 + 7))
1459 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_swizzle(v128_t __a
,
1461 return (v128_t
)__builtin_wasm_swizzle_i8x16((__i8x16
)__a
, (__i8x16
)__b
);
1464 static __inline__ v128_t __DEFAULT_FN_ATTRS
1465 wasm_i8x16_narrow_i16x8(v128_t __a
, v128_t __b
) {
1466 return (v128_t
)__builtin_wasm_narrow_s_i8x16_i16x8((__i16x8
)__a
,
1470 static __inline__ v128_t __DEFAULT_FN_ATTRS
1471 wasm_u8x16_narrow_i16x8(v128_t __a
, v128_t __b
) {
1472 return (v128_t
)__builtin_wasm_narrow_u_i8x16_i16x8((__i16x8
)__a
,
1476 static __inline__ v128_t __DEFAULT_FN_ATTRS
1477 wasm_i16x8_narrow_i32x4(v128_t __a
, v128_t __b
) {
1478 return (v128_t
)__builtin_wasm_narrow_s_i16x8_i32x4((__i32x4
)__a
,
1482 static __inline__ v128_t __DEFAULT_FN_ATTRS
1483 wasm_u16x8_narrow_i32x4(v128_t __a
, v128_t __b
) {
1484 return (v128_t
)__builtin_wasm_narrow_u_i16x8_i32x4((__i32x4
)__a
,
1488 static __inline__ v128_t __DEFAULT_FN_ATTRS
1489 wasm_i16x8_extend_low_i8x16(v128_t __a
) {
1490 return (v128_t
) __builtin_convertvector(
1491 (__i8x8
){((__i8x16
)__a
)[0], ((__i8x16
)__a
)[1], ((__i8x16
)__a
)[2],
1492 ((__i8x16
)__a
)[3], ((__i8x16
)__a
)[4], ((__i8x16
)__a
)[5],
1493 ((__i8x16
)__a
)[6], ((__i8x16
)__a
)[7]},
1497 static __inline__ v128_t __DEFAULT_FN_ATTRS
1498 wasm_i16x8_extend_high_i8x16(v128_t __a
) {
1499 return (v128_t
) __builtin_convertvector(
1500 (__i8x8
){((__i8x16
)__a
)[8], ((__i8x16
)__a
)[9], ((__i8x16
)__a
)[10],
1501 ((__i8x16
)__a
)[11], ((__i8x16
)__a
)[12], ((__i8x16
)__a
)[13],
1502 ((__i8x16
)__a
)[14], ((__i8x16
)__a
)[15]},
1506 static __inline__ v128_t __DEFAULT_FN_ATTRS
1507 wasm_u16x8_extend_low_u8x16(v128_t __a
) {
1508 return (v128_t
) __builtin_convertvector(
1509 (__u8x8
){((__u8x16
)__a
)[0], ((__u8x16
)__a
)[1], ((__u8x16
)__a
)[2],
1510 ((__u8x16
)__a
)[3], ((__u8x16
)__a
)[4], ((__u8x16
)__a
)[5],
1511 ((__u8x16
)__a
)[6], ((__u8x16
)__a
)[7]},
1515 static __inline__ v128_t __DEFAULT_FN_ATTRS
1516 wasm_u16x8_extend_high_u8x16(v128_t __a
) {
1517 return (v128_t
) __builtin_convertvector(
1518 (__u8x8
){((__u8x16
)__a
)[8], ((__u8x16
)__a
)[9], ((__u8x16
)__a
)[10],
1519 ((__u8x16
)__a
)[11], ((__u8x16
)__a
)[12], ((__u8x16
)__a
)[13],
1520 ((__u8x16
)__a
)[14], ((__u8x16
)__a
)[15]},
1524 static __inline__ v128_t __DEFAULT_FN_ATTRS
1525 wasm_i32x4_extend_low_i16x8(v128_t __a
) {
1526 return (v128_t
) __builtin_convertvector(
1527 (__i16x4
){((__i16x8
)__a
)[0], ((__i16x8
)__a
)[1], ((__i16x8
)__a
)[2],
1532 static __inline__ v128_t __DEFAULT_FN_ATTRS
1533 wasm_i32x4_extend_high_i16x8(v128_t __a
) {
1534 return (v128_t
) __builtin_convertvector(
1535 (__i16x4
){((__i16x8
)__a
)[4], ((__i16x8
)__a
)[5], ((__i16x8
)__a
)[6],
1540 static __inline__ v128_t __DEFAULT_FN_ATTRS
1541 wasm_u32x4_extend_low_u16x8(v128_t __a
) {
1542 return (v128_t
) __builtin_convertvector(
1543 (__u16x4
){((__u16x8
)__a
)[0], ((__u16x8
)__a
)[1], ((__u16x8
)__a
)[2],
1548 static __inline__ v128_t __DEFAULT_FN_ATTRS
1549 wasm_u32x4_extend_high_u16x8(v128_t __a
) {
1550 return (v128_t
) __builtin_convertvector(
1551 (__u16x4
){((__u16x8
)__a
)[4], ((__u16x8
)__a
)[5], ((__u16x8
)__a
)[6],
1556 static __inline__ v128_t __DEFAULT_FN_ATTRS
1557 wasm_i64x2_extend_low_i32x4(v128_t __a
) {
1558 return (v128_t
) __builtin_convertvector(
1559 (__i32x2
){((__i32x4
)__a
)[0], ((__i32x4
)__a
)[1]}, __i64x2
);
1562 static __inline__ v128_t __DEFAULT_FN_ATTRS
1563 wasm_i64x2_extend_high_i32x4(v128_t __a
) {
1564 return (v128_t
) __builtin_convertvector(
1565 (__i32x2
){((__i32x4
)__a
)[2], ((__i32x4
)__a
)[3]}, __i64x2
);
1568 static __inline__ v128_t __DEFAULT_FN_ATTRS
1569 wasm_u64x2_extend_low_u32x4(v128_t __a
) {
1570 return (v128_t
) __builtin_convertvector(
1571 (__u32x2
){((__u32x4
)__a
)[0], ((__u32x4
)__a
)[1]}, __u64x2
);
1574 static __inline__ v128_t __DEFAULT_FN_ATTRS
1575 wasm_u64x2_extend_high_u32x4(v128_t __a
) {
1576 return (v128_t
) __builtin_convertvector(
1577 (__u32x2
){((__u32x4
)__a
)[2], ((__u32x4
)__a
)[3]}, __u64x2
);
1580 static __inline__ v128_t __DEFAULT_FN_ATTRS
1581 wasm_i16x8_extadd_pairwise_i8x16(v128_t __a
) {
1582 return (v128_t
)__builtin_wasm_extadd_pairwise_i8x16_s_i16x8((__i8x16
)__a
);
1585 static __inline__ v128_t __DEFAULT_FN_ATTRS
1586 wasm_u16x8_extadd_pairwise_u8x16(v128_t __a
) {
1587 return (v128_t
)__builtin_wasm_extadd_pairwise_i8x16_u_i16x8((__u8x16
)__a
);
1590 static __inline__ v128_t __DEFAULT_FN_ATTRS
1591 wasm_i32x4_extadd_pairwise_i16x8(v128_t __a
) {
1592 return (v128_t
)__builtin_wasm_extadd_pairwise_i16x8_s_i32x4((__i16x8
)__a
);
1595 static __inline__ v128_t __DEFAULT_FN_ATTRS
1596 wasm_u32x4_extadd_pairwise_u16x8(v128_t __a
) {
1597 return (v128_t
)__builtin_wasm_extadd_pairwise_i16x8_u_i32x4((__u16x8
)__a
);
1600 static __inline__ v128_t __DEFAULT_FN_ATTRS
1601 wasm_i16x8_extmul_low_i8x16(v128_t __a
, v128_t __b
) {
1602 return (v128_t
)((__i16x8
)wasm_i16x8_extend_low_i8x16(__a
) *
1603 (__i16x8
)wasm_i16x8_extend_low_i8x16(__b
));
1606 static __inline__ v128_t __DEFAULT_FN_ATTRS
1607 wasm_i16x8_extmul_high_i8x16(v128_t __a
, v128_t __b
) {
1608 return (v128_t
)((__i16x8
)wasm_i16x8_extend_high_i8x16(__a
) *
1609 (__i16x8
)wasm_i16x8_extend_high_i8x16(__b
));
1612 static __inline__ v128_t __DEFAULT_FN_ATTRS
1613 wasm_u16x8_extmul_low_u8x16(v128_t __a
, v128_t __b
) {
1614 return (v128_t
)((__u16x8
)wasm_u16x8_extend_low_u8x16(__a
) *
1615 (__u16x8
)wasm_u16x8_extend_low_u8x16(__b
));
1618 static __inline__ v128_t __DEFAULT_FN_ATTRS
1619 wasm_u16x8_extmul_high_u8x16(v128_t __a
, v128_t __b
) {
1620 return (v128_t
)((__u16x8
)wasm_u16x8_extend_high_u8x16(__a
) *
1621 (__u16x8
)wasm_u16x8_extend_high_u8x16(__b
));
1624 static __inline__ v128_t __DEFAULT_FN_ATTRS
1625 wasm_i32x4_extmul_low_i16x8(v128_t __a
, v128_t __b
) {
1626 return (v128_t
)((__i32x4
)wasm_i32x4_extend_low_i16x8(__a
) *
1627 (__i32x4
)wasm_i32x4_extend_low_i16x8(__b
));
1630 static __inline__ v128_t __DEFAULT_FN_ATTRS
1631 wasm_i32x4_extmul_high_i16x8(v128_t __a
, v128_t __b
) {
1632 return (v128_t
)((__i32x4
)wasm_i32x4_extend_high_i16x8(__a
) *
1633 (__i32x4
)wasm_i32x4_extend_high_i16x8(__b
));
1636 static __inline__ v128_t __DEFAULT_FN_ATTRS
1637 wasm_u32x4_extmul_low_u16x8(v128_t __a
, v128_t __b
) {
1638 return (v128_t
)((__u32x4
)wasm_u32x4_extend_low_u16x8(__a
) *
1639 (__u32x4
)wasm_u32x4_extend_low_u16x8(__b
));
1642 static __inline__ v128_t __DEFAULT_FN_ATTRS
1643 wasm_u32x4_extmul_high_u16x8(v128_t __a
, v128_t __b
) {
1644 return (v128_t
)((__u32x4
)wasm_u32x4_extend_high_u16x8(__a
) *
1645 (__u32x4
)wasm_u32x4_extend_high_u16x8(__b
));
1648 static __inline__ v128_t __DEFAULT_FN_ATTRS
1649 wasm_i64x2_extmul_low_i32x4(v128_t __a
, v128_t __b
) {
1650 return (v128_t
)((__i64x2
)wasm_i64x2_extend_low_i32x4(__a
) *
1651 (__i64x2
)wasm_i64x2_extend_low_i32x4(__b
));
1654 static __inline__ v128_t __DEFAULT_FN_ATTRS
1655 wasm_i64x2_extmul_high_i32x4(v128_t __a
, v128_t __b
) {
1656 return (v128_t
)((__i64x2
)wasm_i64x2_extend_high_i32x4(__a
) *
1657 (__i64x2
)wasm_i64x2_extend_high_i32x4(__b
));
1660 static __inline__ v128_t __DEFAULT_FN_ATTRS
1661 wasm_u64x2_extmul_low_u32x4(v128_t __a
, v128_t __b
) {
1662 return (v128_t
)((__u64x2
)wasm_u64x2_extend_low_u32x4(__a
) *
1663 (__u64x2
)wasm_u64x2_extend_low_u32x4(__b
));
1666 static __inline__ v128_t __DEFAULT_FN_ATTRS
1667 wasm_u64x2_extmul_high_u32x4(v128_t __a
, v128_t __b
) {
1668 return (v128_t
)((__u64x2
)wasm_u64x2_extend_high_u32x4(__a
) *
1669 (__u64x2
)wasm_u64x2_extend_high_u32x4(__b
));
1672 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_q15mulr_sat(v128_t __a
,
1674 return (v128_t
)__builtin_wasm_q15mulr_sat_s_i16x8((__i16x8
)__a
, (__i16x8
)__b
);
// Old intrinsic names supported to ease transitioning to the standard names. Do
// not use these; they will be removed in the near future.

#define __DEPRECATED_FN_ATTRS(__replacement)                                   \
  __DEFAULT_FN_ATTRS __attribute__(                                            \
      (deprecated("use " __replacement " instead", __replacement)))

#define __WASM_STR(X) #X

// The macro form of the deprecation warning is only emitted when the user
// opts in via __DEPRECATED; otherwise the macro expands to nothing.
#ifdef __DEPRECATED
#define __DEPRECATED_WASM_MACRO(__name, __replacement)                         \
  _Pragma(__WASM_STR(GCC warning(                                              \
      "'" __name "' is deprecated: use '" __replacement "' instead")))
#else
#define __DEPRECATED_WASM_MACRO(__name, __replacement)
#endif
1694 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_v128_load8_splat")
1695 wasm_v8x16_load_splat(const void *__mem
) {
1696 return wasm_v128_load8_splat(__mem
);
1699 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_v128_load16_splat")
1700 wasm_v16x8_load_splat(const void *__mem
) {
1701 return wasm_v128_load16_splat(__mem
);
1704 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_v128_load32_splat")
1705 wasm_v32x4_load_splat(const void *__mem
) {
1706 return wasm_v128_load32_splat(__mem
);
1709 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_v128_load64_splat")
1710 wasm_v64x2_load_splat(const void *__mem
) {
1711 return wasm_v128_load64_splat(__mem
);
1714 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i16x8_load8x8")
1715 wasm_i16x8_load_8x8(const void *__mem
) {
1716 return wasm_i16x8_load8x8(__mem
);
1719 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u16x8_load8x8")
1720 wasm_u16x8_load_8x8(const void *__mem
) {
1721 return wasm_u16x8_load8x8(__mem
);
1724 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i32x4_load16x4")
1725 wasm_i32x4_load_16x4(const void *__mem
) {
1726 return wasm_i32x4_load16x4(__mem
);
1729 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u32x4_load16x4")
1730 wasm_u32x4_load_16x4(const void *__mem
) {
1731 return wasm_u32x4_load16x4(__mem
);
1734 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i64x2_load32x2")
1735 wasm_i64x2_load_32x2(const void *__mem
) {
1736 return wasm_i64x2_load32x2(__mem
);
1739 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u64x2_load32x2")
1740 wasm_u64x2_load_32x2(const void *__mem
) {
1741 return wasm_u64x2_load32x2(__mem
);
1744 #define wasm_v8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
1745 __c7, __c8, __c9, __c10, __c11, __c12, __c13, \
1747 __DEPRECATED_WASM_MACRO("wasm_v8x16_shuffle", "wasm_i8x16_shuffle") \
1748 wasm_i8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7, \
1749 __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15)
1751 #define wasm_v16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
1753 __DEPRECATED_WASM_MACRO("wasm_v16x8_shuffle", "wasm_i16x8_shuffle") \
1754 wasm_i16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7)
1756 #define wasm_v32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3) \
1757 __DEPRECATED_WASM_MACRO("wasm_v32x4_shuffle", "wasm_i32x4_shuffle") \
1758 wasm_i32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)
1760 #define wasm_v64x2_shuffle(__a, __b, __c0, __c1) \
1761 __DEPRECATED_WASM_MACRO("wasm_v64x2_shuffle", "wasm_i64x2_shuffle") \
1762 wasm_i64x2_shuffle(__a, __b, __c0, __c1)
1764 // Relaxed SIMD intrinsics
1766 #define __RELAXED_FN_ATTRS \
1767 __attribute__((__always_inline__, __nodebug__, __target__("relaxed-simd"), \
1768 __min_vector_width__(128)))
1770 static __inline__ v128_t __RELAXED_FN_ATTRS
1771 wasm_f32x4_relaxed_madd(v128_t __a
, v128_t __b
, v128_t __c
) {
1772 return (v128_t
)__builtin_wasm_relaxed_madd_f32x4((__f32x4
)__a
, (__f32x4
)__b
,
1776 static __inline__ v128_t __RELAXED_FN_ATTRS
1777 wasm_f32x4_relaxed_nmadd(v128_t __a
, v128_t __b
, v128_t __c
) {
1778 return (v128_t
)__builtin_wasm_relaxed_nmadd_f32x4((__f32x4
)__a
, (__f32x4
)__b
,
1782 static __inline__ v128_t __RELAXED_FN_ATTRS
1783 wasm_f64x2_relaxed_madd(v128_t __a
, v128_t __b
, v128_t __c
) {
1784 return (v128_t
)__builtin_wasm_relaxed_madd_f64x2((__f64x2
)__a
, (__f64x2
)__b
,
1788 static __inline__ v128_t __RELAXED_FN_ATTRS
1789 wasm_f64x2_relaxed_nmadd(v128_t __a
, v128_t __b
, v128_t __c
) {
1790 return (v128_t
)__builtin_wasm_relaxed_nmadd_f64x2((__f64x2
)__a
, (__f64x2
)__b
,
1794 static __inline__ v128_t __RELAXED_FN_ATTRS
1795 wasm_i8x16_relaxed_laneselect(v128_t __a
, v128_t __b
, v128_t __m
) {
1796 return (v128_t
)__builtin_wasm_relaxed_laneselect_i8x16(
1797 (__i8x16
)__a
, (__i8x16
)__b
, (__i8x16
)__m
);
1800 static __inline__ v128_t __RELAXED_FN_ATTRS
1801 wasm_i16x8_relaxed_laneselect(v128_t __a
, v128_t __b
, v128_t __m
) {
1802 return (v128_t
)__builtin_wasm_relaxed_laneselect_i16x8(
1803 (__i16x8
)__a
, (__i16x8
)__b
, (__i16x8
)__m
);
1806 static __inline__ v128_t __RELAXED_FN_ATTRS
1807 wasm_i32x4_relaxed_laneselect(v128_t __a
, v128_t __b
, v128_t __m
) {
1808 return (v128_t
)__builtin_wasm_relaxed_laneselect_i32x4(
1809 (__i32x4
)__a
, (__i32x4
)__b
, (__i32x4
)__m
);
1812 static __inline__ v128_t __RELAXED_FN_ATTRS
1813 wasm_i64x2_relaxed_laneselect(v128_t __a
, v128_t __b
, v128_t __m
) {
1814 return (v128_t
)__builtin_wasm_relaxed_laneselect_i64x2(
1815 (__i64x2
)__a
, (__i64x2
)__b
, (__i64x2
)__m
);
1818 static __inline__ v128_t __RELAXED_FN_ATTRS
1819 wasm_i8x16_relaxed_swizzle(v128_t __a
, v128_t __s
) {
1820 return (v128_t
)__builtin_wasm_relaxed_swizzle_i8x16((__i8x16
)__a
,
1824 static __inline__ v128_t __RELAXED_FN_ATTRS
wasm_f32x4_relaxed_min(v128_t __a
,
1826 return (v128_t
)__builtin_wasm_relaxed_min_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1829 static __inline__ v128_t __RELAXED_FN_ATTRS
wasm_f32x4_relaxed_max(v128_t __a
,
1831 return (v128_t
)__builtin_wasm_relaxed_max_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1834 static __inline__ v128_t __RELAXED_FN_ATTRS
wasm_f64x2_relaxed_min(v128_t __a
,
1836 return (v128_t
)__builtin_wasm_relaxed_min_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1839 static __inline__ v128_t __RELAXED_FN_ATTRS
wasm_f64x2_relaxed_max(v128_t __a
,
1841 return (v128_t
)__builtin_wasm_relaxed_max_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1844 static __inline__ v128_t __RELAXED_FN_ATTRS
1845 wasm_i32x4_relaxed_trunc_f32x4(v128_t __a
) {
1846 return (v128_t
)__builtin_wasm_relaxed_trunc_s_i32x4_f32x4((__f32x4
)__a
);
1849 static __inline__ v128_t __RELAXED_FN_ATTRS
1850 wasm_u32x4_relaxed_trunc_f32x4(v128_t __a
) {
1851 return (v128_t
)__builtin_wasm_relaxed_trunc_u_i32x4_f32x4((__f32x4
)__a
);
1854 static __inline__ v128_t __RELAXED_FN_ATTRS
1855 wasm_i32x4_relaxed_trunc_f64x2_zero(v128_t __a
) {
1856 return (v128_t
)__builtin_wasm_relaxed_trunc_s_zero_i32x4_f64x2((__f64x2
)__a
);
1859 static __inline__ v128_t __RELAXED_FN_ATTRS
1860 wasm_u32x4_relaxed_trunc_f64x2_zero(v128_t __a
) {
1861 return (v128_t
)__builtin_wasm_relaxed_trunc_u_zero_i32x4_f64x2((__f64x2
)__a
);
1864 static __inline__ v128_t __RELAXED_FN_ATTRS
1865 wasm_i16x8_relaxed_q15mulr(v128_t __a
, v128_t __b
) {
1866 return (v128_t
)__builtin_wasm_relaxed_q15mulr_s_i16x8((__i16x8
)__a
,
1870 static __inline__ v128_t __RELAXED_FN_ATTRS
1871 wasm_i16x8_relaxed_dot_i8x16_i7x16(v128_t __a
, v128_t __b
) {
1872 return (v128_t
)__builtin_wasm_relaxed_dot_i8x16_i7x16_s_i16x8((__i8x16
)__a
,
1876 static __inline__ v128_t __RELAXED_FN_ATTRS
1877 wasm_i32x4_relaxed_dot_i8x16_i7x16_add(v128_t __a
, v128_t __b
, v128_t __c
) {
1878 return (v128_t
)__builtin_wasm_relaxed_dot_i8x16_i7x16_add_s_i32x4(
1879 (__i8x16
)__a
, (__i8x16
)__b
, (__i32x4
)__c
);
1883 #define __FP16_FN_ATTRS \
1884 __attribute__((__always_inline__, __nodebug__, __target__("fp16"), \
1885 __min_vector_width__(128)))
1887 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_splat(float __a
) {
1888 return (v128_t
)__builtin_wasm_splat_f16x8(__a
);
1891 #ifdef __wasm_fp16__
1892 // TODO Replace the following macros with regular C functions and use normal
1893 // target-independent vector code like the other replace/extract instructions.
1895 #define wasm_f16x8_extract_lane(__a, __i) \
1896 (__builtin_wasm_extract_lane_f16x8((__f16x8)(__a), __i))
1898 #define wasm_f16x8_replace_lane(__a, __i, __b) \
1899 ((v128_t)__builtin_wasm_replace_lane_f16x8((__f16x8)(__a), __i, __b))
1903 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_abs(v128_t __a
) {
1904 return (v128_t
)__builtin_wasm_abs_f16x8((__f16x8
)__a
);
1907 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_neg(v128_t __a
) {
1908 return (v128_t
)(-(__f16x8
)__a
);
1911 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_sqrt(v128_t __a
) {
1912 return (v128_t
)__builtin_wasm_sqrt_f16x8((__f16x8
)__a
);
1915 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_ceil(v128_t __a
) {
1916 return (v128_t
)__builtin_wasm_ceil_f16x8((__f16x8
)__a
);
1919 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_floor(v128_t __a
) {
1920 return (v128_t
)__builtin_wasm_floor_f16x8((__f16x8
)__a
);
1923 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_trunc(v128_t __a
) {
1924 return (v128_t
)__builtin_wasm_trunc_f16x8((__f16x8
)__a
);
1927 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_nearest(v128_t __a
) {
1928 return (v128_t
)__builtin_wasm_nearest_f16x8((__f16x8
)__a
);
1931 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_eq(v128_t __a
, v128_t __b
) {
1932 return (v128_t
)((__f16x8
)__a
== (__f16x8
)__b
);
1935 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_ne(v128_t __a
, v128_t __b
) {
1936 return (v128_t
)((__f16x8
)__a
!= (__f16x8
)__b
);
1939 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_lt(v128_t __a
, v128_t __b
) {
1940 return (v128_t
)((__f16x8
)__a
< (__f16x8
)__b
);
1943 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_gt(v128_t __a
, v128_t __b
) {
1944 return (v128_t
)((__f16x8
)__a
> (__f16x8
)__b
);
1947 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_le(v128_t __a
, v128_t __b
) {
1948 return (v128_t
)((__f16x8
)__a
<= (__f16x8
)__b
);
1951 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_ge(v128_t __a
, v128_t __b
) {
1952 return (v128_t
)((__f16x8
)__a
>= (__f16x8
)__b
);
1955 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_add(v128_t __a
,
1957 return (v128_t
)((__f16x8
)__a
+ (__f16x8
)__b
);
1960 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_sub(v128_t __a
,
1962 return (v128_t
)((__f16x8
)__a
- (__f16x8
)__b
);
1965 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_mul(v128_t __a
,
1967 return (v128_t
)((__f16x8
)__a
* (__f16x8
)__b
);
1970 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_div(v128_t __a
,
1972 return (v128_t
)((__f16x8
)__a
/ (__f16x8
)__b
);
1975 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_min(v128_t __a
,
1977 return (v128_t
)__builtin_wasm_min_f16x8((__f16x8
)__a
, (__f16x8
)__b
);
1980 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_max(v128_t __a
,
1982 return (v128_t
)__builtin_wasm_max_f16x8((__f16x8
)__a
, (__f16x8
)__b
);
1985 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_pmin(v128_t __a
,
1987 return (v128_t
)__builtin_wasm_pmin_f16x8((__f16x8
)__a
, (__f16x8
)__b
);
1990 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_pmax(v128_t __a
,
1992 return (v128_t
)__builtin_wasm_pmax_f16x8((__f16x8
)__a
, (__f16x8
)__b
);
1995 static __inline__ v128_t __FP16_FN_ATTRS
1996 wasm_i16x8_trunc_sat_f16x8(v128_t __a
) {
1997 return (v128_t
)__builtin_wasm_trunc_saturate_s_i16x8_f16x8((__f16x8
)__a
);
2000 static __inline__ v128_t __FP16_FN_ATTRS
2001 wasm_u16x8_trunc_sat_f16x8(v128_t __a
) {
2002 return (v128_t
)__builtin_wasm_trunc_saturate_u_i16x8_f16x8((__f16x8
)__a
);
2005 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_convert_i16x8(v128_t __a
) {
2006 return (v128_t
) __builtin_convertvector((__i16x8
)__a
, __f16x8
);
2009 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_convert_u16x8(v128_t __a
) {
2010 return (v128_t
) __builtin_convertvector((__u16x8
)__a
, __f16x8
);
2013 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_relaxed_madd(v128_t __a
,
2016 return (v128_t
)__builtin_wasm_relaxed_madd_f16x8((__f16x8
)__a
, (__f16x8
)__b
,
2020 static __inline__ v128_t __FP16_FN_ATTRS
wasm_f16x8_relaxed_nmadd(v128_t __a
,
2023 return (v128_t
)__builtin_wasm_relaxed_nmadd_f16x8((__f16x8
)__a
, (__f16x8
)__b
,
2027 // Deprecated intrinsics
2029 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i8x16_swizzle")
2030 wasm_v8x16_swizzle(v128_t __a
, v128_t __b
) {
2031 return wasm_i8x16_swizzle(__a
, __b
);
2034 static __inline__
bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
2035 wasm_i8x16_any_true(v128_t __a
) {
2036 return wasm_v128_any_true(__a
);
2039 static __inline__
bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
2040 wasm_i16x8_any_true(v128_t __a
) {
2041 return wasm_v128_any_true(__a
);
2044 static __inline__
bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
2045 wasm_i32x4_any_true(v128_t __a
) {
2046 return wasm_v128_any_true(__a
);
2049 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i8x16_add_sat")
2050 wasm_i8x16_add_saturate(v128_t __a
, v128_t __b
) {
2051 return wasm_i8x16_add_sat(__a
, __b
);
2054 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u8x16_add_sat")
2055 wasm_u8x16_add_saturate(v128_t __a
, v128_t __b
) {
2056 return wasm_u8x16_add_sat(__a
, __b
);
2059 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i8x16_sub_sat")
2060 wasm_i8x16_sub_saturate(v128_t __a
, v128_t __b
) {
2061 return wasm_i8x16_sub_sat(__a
, __b
);
2064 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u8x16_sub_sat")
2065 wasm_u8x16_sub_saturate(v128_t __a
, v128_t __b
) {
2066 return wasm_u8x16_sub_sat(__a
, __b
);
2069 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i16x8_add_sat")
2070 wasm_i16x8_add_saturate(v128_t __a
, v128_t __b
) {
2071 return wasm_i16x8_add_sat(__a
, __b
);
2074 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u16x8_add_sat")
2075 wasm_u16x8_add_saturate(v128_t __a
, v128_t __b
) {
2076 return wasm_u16x8_add_sat(__a
, __b
);
2079 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i16x8_sub_sat")
2080 wasm_i16x8_sub_saturate(v128_t __a
, v128_t __b
) {
2081 return wasm_i16x8_sub_sat(__a
, __b
);
2084 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u16x8_sub_sat")
2085 wasm_u16x8_sub_saturate(v128_t __a
, v128_t __b
) {
2086 return wasm_u16x8_sub_sat(__a
, __b
);
2089 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i16x8_extend_low_i8x16")
2090 wasm_i16x8_widen_low_i8x16(v128_t __a
) {
2091 return wasm_i16x8_extend_low_i8x16(__a
);
2094 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i16x8_extend_high_i8x16")
2095 wasm_i16x8_widen_high_i8x16(v128_t __a
) {
2096 return wasm_i16x8_extend_high_i8x16(__a
);
2099 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u16x8_extend_low_u8x16")
2100 wasm_i16x8_widen_low_u8x16(v128_t __a
) {
2101 return wasm_u16x8_extend_low_u8x16(__a
);
2104 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u16x8_extend_high_u8x16")
2105 wasm_i16x8_widen_high_u8x16(v128_t __a
) {
2106 return wasm_u16x8_extend_high_u8x16(__a
);
2109 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i32x4_extend_low_i16x8")
2110 wasm_i32x4_widen_low_i16x8(v128_t __a
) {
2111 return wasm_i32x4_extend_low_i16x8(__a
);
2114 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i32x4_extend_high_i16x8")
2115 wasm_i32x4_widen_high_i16x8(v128_t __a
) {
2116 return wasm_i32x4_extend_high_i16x8(__a
);
2119 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u32x4_extend_low_u16x8")
2120 wasm_i32x4_widen_low_u16x8(v128_t __a
) {
2121 return wasm_u32x4_extend_low_u16x8(__a
);
2124 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u32x4_extend_high_u16x8")
2125 wasm_i32x4_widen_high_u16x8(v128_t __a
) {
2126 return wasm_u32x4_extend_high_u16x8(__a
);
2129 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i32x4_trunc_sat_f32x4")
2130 wasm_i32x4_trunc_saturate_f32x4(v128_t __a
) {
2131 return wasm_i32x4_trunc_sat_f32x4(__a
);
2134 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u32x4_trunc_sat_f32x4")
2135 wasm_u32x4_trunc_saturate_f32x4(v128_t __a
) {
2136 return wasm_u32x4_trunc_sat_f32x4(__a
);
2139 // Undefine helper macros
2140 #undef __DEFAULT_FN_ATTRS
2141 #undef __DEPRECATED_FN_ATTRS
2143 #endif // __WASM_SIMD128_H