1 /*===---- wasm_simd128.h - WebAssembly portable SIMD intrinsics ------------===
3 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 * See https://llvm.org/LICENSE.txt for license information.
5 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 *===-----------------------------------------------------------------------===
10 #ifndef __WASM_SIMD128_H
11 #define __WASM_SIMD128_H
// The generic 128-bit vector type used in the public API. Lane
// interpretation is supplied by each intrinsic, not by the type.
typedef int32_t v128_t __attribute__((__vector_size__(16), __aligned__(16)));

// Internal types determined by clang builtin definitions.
// __v128_u is 1-byte aligned so it can alias arbitrarily aligned memory
// inside the packed load/store helper structs.
typedef int32_t __v128_u __attribute__((__vector_size__(16), __aligned__(1)));
typedef signed char __i8x16
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned char __u8x16
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef short __i16x8 __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned short __u16x8
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef int __i32x4 __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned int __u32x4
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef long long __i64x2 __attribute__((__vector_size__(16), __aligned__(16)));
typedef unsigned long long __u64x2
    __attribute__((__vector_size__(16), __aligned__(16)));
typedef float __f32x4 __attribute__((__vector_size__(16), __aligned__(16)));
typedef double __f64x2 __attribute__((__vector_size__(16), __aligned__(16)));

// Half-width (64-bit) vectors: the in-memory source operands of the
// widening loads (load8x8, load16x4, load32x2).
typedef signed char __i8x8 __attribute__((__vector_size__(8), __aligned__(8)));
typedef unsigned char __u8x8
    __attribute__((__vector_size__(8), __aligned__(8)));
typedef short __i16x4 __attribute__((__vector_size__(8), __aligned__(8)));
typedef unsigned short __u16x4
    __attribute__((__vector_size__(8), __aligned__(8)));
typedef int __i32x2 __attribute__((__vector_size__(8), __aligned__(8)));
typedef unsigned int __u32x2
    __attribute__((__vector_size__(8), __aligned__(8)));
typedef float __f32x2 __attribute__((__vector_size__(8), __aligned__(8)));
// Attributes applied to every intrinsic wrapper: always inline, no debug
// info for the wrapper itself, requires the "simd128" target feature, and
// permits 128-bit vector codegen.
#define __DEFAULT_FN_ATTRS                                                     \
  __attribute__((__always_inline__, __nodebug__, __target__("simd128"),        \
                 __min_vector_width__(128)))

// Emits a compile-time error when the argument is not an integer constant
// expression (lane indices and the *_const initializers require constants).
#define __REQUIRE_CONSTANT(c)                                                  \
  __attribute__((__diagnose_if__(!__builtin_constant_p(c),                     \
                                 #c " must be constant", "error")))
56 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load(const void *__mem
) {
57 // UB-free unaligned access copied from xmmintrin.h
58 struct __wasm_v128_load_struct
{
60 } __attribute__((__packed__
, __may_alias__
));
61 return ((const struct __wasm_v128_load_struct
*)__mem
)->__v
;
64 static __inline__ v128_t __DEFAULT_FN_ATTRS
65 wasm_v128_load8_splat(const void *__mem
) {
66 struct __wasm_v128_load8_splat_struct
{
68 } __attribute__((__packed__
, __may_alias__
));
69 uint8_t __v
= ((const struct __wasm_v128_load8_splat_struct
*)__mem
)->__v
;
70 return (v128_t
)(__u8x16
){__v
, __v
, __v
, __v
, __v
, __v
, __v
, __v
,
71 __v
, __v
, __v
, __v
, __v
, __v
, __v
, __v
};
74 static __inline__ v128_t __DEFAULT_FN_ATTRS
75 wasm_v128_load16_splat(const void *__mem
) {
76 struct __wasm_v128_load16_splat_struct
{
78 } __attribute__((__packed__
, __may_alias__
));
79 uint16_t __v
= ((const struct __wasm_v128_load16_splat_struct
*)__mem
)->__v
;
80 return (v128_t
)(__u16x8
){__v
, __v
, __v
, __v
, __v
, __v
, __v
, __v
};
83 static __inline__ v128_t __DEFAULT_FN_ATTRS
84 wasm_v128_load32_splat(const void *__mem
) {
85 struct __wasm_v128_load32_splat_struct
{
87 } __attribute__((__packed__
, __may_alias__
));
88 uint32_t __v
= ((const struct __wasm_v128_load32_splat_struct
*)__mem
)->__v
;
89 return (v128_t
)(__u32x4
){__v
, __v
, __v
, __v
};
92 static __inline__ v128_t __DEFAULT_FN_ATTRS
93 wasm_v128_load64_splat(const void *__mem
) {
94 struct __wasm_v128_load64_splat_struct
{
96 } __attribute__((__packed__
, __may_alias__
));
97 uint64_t __v
= ((const struct __wasm_v128_load64_splat_struct
*)__mem
)->__v
;
98 return (v128_t
)(__u64x2
){__v
, __v
};
101 static __inline__ v128_t __DEFAULT_FN_ATTRS
102 wasm_i16x8_load8x8(const void *__mem
) {
103 struct __wasm_i16x8_load8x8_struct
{
105 } __attribute__((__packed__
, __may_alias__
));
106 __i8x8 __v
= ((const struct __wasm_i16x8_load8x8_struct
*)__mem
)->__v
;
107 return (v128_t
) __builtin_convertvector(__v
, __i16x8
);
110 static __inline__ v128_t __DEFAULT_FN_ATTRS
111 wasm_u16x8_load8x8(const void *__mem
) {
112 struct __wasm_u16x8_load8x8_struct
{
114 } __attribute__((__packed__
, __may_alias__
));
115 __u8x8 __v
= ((const struct __wasm_u16x8_load8x8_struct
*)__mem
)->__v
;
116 return (v128_t
) __builtin_convertvector(__v
, __u16x8
);
119 static __inline__ v128_t __DEFAULT_FN_ATTRS
120 wasm_i32x4_load16x4(const void *__mem
) {
121 struct __wasm_i32x4_load16x4_struct
{
123 } __attribute__((__packed__
, __may_alias__
));
124 __i16x4 __v
= ((const struct __wasm_i32x4_load16x4_struct
*)__mem
)->__v
;
125 return (v128_t
) __builtin_convertvector(__v
, __i32x4
);
128 static __inline__ v128_t __DEFAULT_FN_ATTRS
129 wasm_u32x4_load16x4(const void *__mem
) {
130 struct __wasm_u32x4_load16x4_struct
{
132 } __attribute__((__packed__
, __may_alias__
));
133 __u16x4 __v
= ((const struct __wasm_u32x4_load16x4_struct
*)__mem
)->__v
;
134 return (v128_t
) __builtin_convertvector(__v
, __u32x4
);
137 static __inline__ v128_t __DEFAULT_FN_ATTRS
138 wasm_i64x2_load32x2(const void *__mem
) {
139 struct __wasm_i64x2_load32x2_struct
{
141 } __attribute__((__packed__
, __may_alias__
));
142 __i32x2 __v
= ((const struct __wasm_i64x2_load32x2_struct
*)__mem
)->__v
;
143 return (v128_t
) __builtin_convertvector(__v
, __i64x2
);
146 static __inline__ v128_t __DEFAULT_FN_ATTRS
147 wasm_u64x2_load32x2(const void *__mem
) {
148 struct __wasm_u64x2_load32x2_struct
{
150 } __attribute__((__packed__
, __may_alias__
));
151 __u32x2 __v
= ((const struct __wasm_u64x2_load32x2_struct
*)__mem
)->__v
;
152 return (v128_t
) __builtin_convertvector(__v
, __u64x2
);
155 static __inline__ v128_t __DEFAULT_FN_ATTRS
156 wasm_v128_load32_zero(const void *__mem
) {
157 struct __wasm_v128_load32_zero_struct
{
159 } __attribute__((__packed__
, __may_alias__
));
160 int32_t __v
= ((const struct __wasm_v128_load32_zero_struct
*)__mem
)->__v
;
161 return (v128_t
)(__i32x4
){__v
, 0, 0, 0};
164 static __inline__ v128_t __DEFAULT_FN_ATTRS
165 wasm_v128_load64_zero(const void *__mem
) {
166 struct __wasm_v128_load64_zero_struct
{
168 } __attribute__((__packed__
, __may_alias__
));
169 int64_t __v
= ((const struct __wasm_v128_load64_zero_struct
*)__mem
)->__v
;
170 return (v128_t
)(__i64x2
){__v
, 0};
173 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load8_lane(
174 const void *__mem
, v128_t __vec
, int __i
) __REQUIRE_CONSTANT(__i
) {
175 struct __wasm_v128_load8_lane_struct
{
177 } __attribute__((__packed__
, __may_alias__
));
178 int8_t __v
= ((const struct __wasm_v128_load8_lane_struct
*)__mem
)->__v
;
179 __i8x16 __ret
= (__i8x16
)__vec
;
181 return (v128_t
)__ret
;
184 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load16_lane(
185 const void *__mem
, v128_t __vec
, int __i
) __REQUIRE_CONSTANT(__i
) {
186 struct __wasm_v128_load16_lane_struct
{
188 } __attribute__((__packed__
, __may_alias__
));
189 int16_t __v
= ((const struct __wasm_v128_load16_lane_struct
*)__mem
)->__v
;
190 __i16x8 __ret
= (__i16x8
)__vec
;
192 return (v128_t
)__ret
;
195 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load32_lane(
196 const void *__mem
, v128_t __vec
, int __i
) __REQUIRE_CONSTANT(__i
) {
197 struct __wasm_v128_load32_lane_struct
{
199 } __attribute__((__packed__
, __may_alias__
));
200 int32_t __v
= ((const struct __wasm_v128_load32_lane_struct
*)__mem
)->__v
;
201 __i32x4 __ret
= (__i32x4
)__vec
;
203 return (v128_t
)__ret
;
206 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_load64_lane(
207 const void *__mem
, v128_t __vec
, int __i
) __REQUIRE_CONSTANT(__i
) {
208 struct __wasm_v128_load64_lane_struct
{
210 } __attribute__((__packed__
, __may_alias__
));
211 int64_t __v
= ((const struct __wasm_v128_load64_lane_struct
*)__mem
)->__v
;
212 __i64x2 __ret
= (__i64x2
)__vec
;
214 return (v128_t
)__ret
;
217 static __inline__
void __DEFAULT_FN_ATTRS
wasm_v128_store(void *__mem
,
219 // UB-free unaligned access copied from xmmintrin.h
220 struct __wasm_v128_store_struct
{
222 } __attribute__((__packed__
, __may_alias__
));
223 ((struct __wasm_v128_store_struct
*)__mem
)->__v
= __a
;
226 static __inline__
void __DEFAULT_FN_ATTRS
wasm_v128_store8_lane(void *__mem
,
229 __REQUIRE_CONSTANT(__i
) {
230 struct __wasm_v128_store8_lane_struct
{
232 } __attribute__((__packed__
, __may_alias__
));
233 ((struct __wasm_v128_store8_lane_struct
*)__mem
)->__v
= ((__i8x16
)__vec
)[__i
];
236 static __inline__
void __DEFAULT_FN_ATTRS
wasm_v128_store16_lane(void *__mem
,
239 __REQUIRE_CONSTANT(__i
) {
240 struct __wasm_v128_store16_lane_struct
{
242 } __attribute__((__packed__
, __may_alias__
));
243 ((struct __wasm_v128_store16_lane_struct
*)__mem
)->__v
=
244 ((__i16x8
)__vec
)[__i
];
247 static __inline__
void __DEFAULT_FN_ATTRS
wasm_v128_store32_lane(void *__mem
,
250 __REQUIRE_CONSTANT(__i
) {
251 struct __wasm_v128_store32_lane_struct
{
253 } __attribute__((__packed__
, __may_alias__
));
254 ((struct __wasm_v128_store32_lane_struct
*)__mem
)->__v
=
255 ((__i32x4
)__vec
)[__i
];
258 static __inline__
void __DEFAULT_FN_ATTRS
wasm_v128_store64_lane(void *__mem
,
261 __REQUIRE_CONSTANT(__i
) {
262 struct __wasm_v128_store64_lane_struct
{
264 } __attribute__((__packed__
, __may_alias__
));
265 ((struct __wasm_v128_store64_lane_struct
*)__mem
)->__v
=
266 ((__i64x2
)__vec
)[__i
];
269 static __inline__ v128_t __DEFAULT_FN_ATTRS
270 wasm_i8x16_make(int8_t __c0
, int8_t __c1
, int8_t __c2
, int8_t __c3
, int8_t __c4
,
271 int8_t __c5
, int8_t __c6
, int8_t __c7
, int8_t __c8
, int8_t __c9
,
272 int8_t __c10
, int8_t __c11
, int8_t __c12
, int8_t __c13
,
273 int8_t __c14
, int8_t __c15
) {
274 return (v128_t
)(__i8x16
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
,
275 __c6
, __c7
, __c8
, __c9
, __c10
, __c11
,
276 __c12
, __c13
, __c14
, __c15
};
279 static __inline__ v128_t __DEFAULT_FN_ATTRS
280 wasm_u8x16_make(uint8_t __c0
, uint8_t __c1
, uint8_t __c2
, uint8_t __c3
,
281 uint8_t __c4
, uint8_t __c5
, uint8_t __c6
, uint8_t __c7
,
282 uint8_t __c8
, uint8_t __c9
, uint8_t __c10
, uint8_t __c11
,
283 uint8_t __c12
, uint8_t __c13
, uint8_t __c14
, uint8_t __c15
) {
284 return (v128_t
)(__u8x16
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
,
285 __c6
, __c7
, __c8
, __c9
, __c10
, __c11
,
286 __c12
, __c13
, __c14
, __c15
};
289 static __inline__ v128_t __DEFAULT_FN_ATTRS
290 wasm_i16x8_make(int16_t __c0
, int16_t __c1
, int16_t __c2
, int16_t __c3
,
291 int16_t __c4
, int16_t __c5
, int16_t __c6
, int16_t __c7
) {
292 return (v128_t
)(__i16x8
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
, __c6
, __c7
};
295 static __inline__ v128_t __DEFAULT_FN_ATTRS
296 wasm_u16x8_make(uint16_t __c0
, uint16_t __c1
, uint16_t __c2
, uint16_t __c3
,
297 uint16_t __c4
, uint16_t __c5
, uint16_t __c6
, uint16_t __c7
) {
298 return (v128_t
)(__u16x8
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
, __c6
, __c7
};
301 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_make(int32_t __c0
,
305 return (v128_t
)(__i32x4
){__c0
, __c1
, __c2
, __c3
};
308 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_make(uint32_t __c0
,
312 return (v128_t
)(__u32x4
){__c0
, __c1
, __c2
, __c3
};
315 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_make(int64_t __c0
,
317 return (v128_t
)(__i64x2
){__c0
, __c1
};
320 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_make(uint64_t __c0
,
322 return (v128_t
)(__u64x2
){__c0
, __c1
};
325 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_make(float __c0
,
329 return (v128_t
)(__f32x4
){__c0
, __c1
, __c2
, __c3
};
332 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_make(double __c0
,
334 return (v128_t
)(__f64x2
){__c0
, __c1
};
337 static __inline__ v128_t __DEFAULT_FN_ATTRS
338 wasm_i8x16_const(int8_t __c0
, int8_t __c1
, int8_t __c2
, int8_t __c3
,
339 int8_t __c4
, int8_t __c5
, int8_t __c6
, int8_t __c7
,
340 int8_t __c8
, int8_t __c9
, int8_t __c10
, int8_t __c11
,
341 int8_t __c12
, int8_t __c13
, int8_t __c14
, int8_t __c15
)
342 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
343 __REQUIRE_CONSTANT(__c3
) __REQUIRE_CONSTANT(__c4
)
344 __REQUIRE_CONSTANT(__c5
) __REQUIRE_CONSTANT(__c6
)
345 __REQUIRE_CONSTANT(__c7
) __REQUIRE_CONSTANT(__c8
)
346 __REQUIRE_CONSTANT(__c9
) __REQUIRE_CONSTANT(__c10
)
347 __REQUIRE_CONSTANT(__c11
) __REQUIRE_CONSTANT(__c12
)
348 __REQUIRE_CONSTANT(__c13
) __REQUIRE_CONSTANT(__c14
)
349 __REQUIRE_CONSTANT(__c15
) {
350 return (v128_t
)(__i8x16
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
,
351 __c6
, __c7
, __c8
, __c9
, __c10
, __c11
,
352 __c12
, __c13
, __c14
, __c15
};
355 static __inline__ v128_t __DEFAULT_FN_ATTRS
356 wasm_u8x16_const(uint8_t __c0
, uint8_t __c1
, uint8_t __c2
, uint8_t __c3
,
357 uint8_t __c4
, uint8_t __c5
, uint8_t __c6
, uint8_t __c7
,
358 uint8_t __c8
, uint8_t __c9
, uint8_t __c10
, uint8_t __c11
,
359 uint8_t __c12
, uint8_t __c13
, uint8_t __c14
, uint8_t __c15
)
360 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
361 __REQUIRE_CONSTANT(__c3
) __REQUIRE_CONSTANT(__c4
)
362 __REQUIRE_CONSTANT(__c5
) __REQUIRE_CONSTANT(__c6
)
363 __REQUIRE_CONSTANT(__c7
) __REQUIRE_CONSTANT(__c8
)
364 __REQUIRE_CONSTANT(__c9
) __REQUIRE_CONSTANT(__c10
)
365 __REQUIRE_CONSTANT(__c11
) __REQUIRE_CONSTANT(__c12
)
366 __REQUIRE_CONSTANT(__c13
) __REQUIRE_CONSTANT(__c14
)
367 __REQUIRE_CONSTANT(__c15
) {
368 return (v128_t
)(__u8x16
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
,
369 __c6
, __c7
, __c8
, __c9
, __c10
, __c11
,
370 __c12
, __c13
, __c14
, __c15
};
373 static __inline__ v128_t __DEFAULT_FN_ATTRS
374 wasm_i16x8_const(int16_t __c0
, int16_t __c1
, int16_t __c2
, int16_t __c3
,
375 int16_t __c4
, int16_t __c5
, int16_t __c6
, int16_t __c7
)
376 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
377 __REQUIRE_CONSTANT(__c3
) __REQUIRE_CONSTANT(__c4
)
378 __REQUIRE_CONSTANT(__c5
) __REQUIRE_CONSTANT(__c6
)
379 __REQUIRE_CONSTANT(__c7
) {
380 return (v128_t
)(__i16x8
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
, __c6
, __c7
};
383 static __inline__ v128_t __DEFAULT_FN_ATTRS
384 wasm_u16x8_const(uint16_t __c0
, uint16_t __c1
, uint16_t __c2
, uint16_t __c3
,
385 uint16_t __c4
, uint16_t __c5
, uint16_t __c6
, uint16_t __c7
)
386 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
387 __REQUIRE_CONSTANT(__c3
) __REQUIRE_CONSTANT(__c4
)
388 __REQUIRE_CONSTANT(__c5
) __REQUIRE_CONSTANT(__c6
)
389 __REQUIRE_CONSTANT(__c7
) {
390 return (v128_t
)(__u16x8
){__c0
, __c1
, __c2
, __c3
, __c4
, __c5
, __c6
, __c7
};
393 static __inline__ v128_t __DEFAULT_FN_ATTRS
394 wasm_i32x4_const(int32_t __c0
, int32_t __c1
, int32_t __c2
, int32_t __c3
)
395 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
396 __REQUIRE_CONSTANT(__c3
) {
397 return (v128_t
)(__i32x4
){__c0
, __c1
, __c2
, __c3
};
400 static __inline__ v128_t __DEFAULT_FN_ATTRS
401 wasm_u32x4_const(uint32_t __c0
, uint32_t __c1
, uint32_t __c2
, uint32_t __c3
)
402 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
403 __REQUIRE_CONSTANT(__c3
) {
404 return (v128_t
)(__u32x4
){__c0
, __c1
, __c2
, __c3
};
407 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_const(int64_t __c0
,
409 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) {
410 return (v128_t
)(__i64x2
){__c0
, __c1
};
413 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_const(uint64_t __c0
,
415 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) {
416 return (v128_t
)(__u64x2
){__c0
, __c1
};
419 static __inline__ v128_t __DEFAULT_FN_ATTRS
420 wasm_f32x4_const(float __c0
, float __c1
, float __c2
, float __c3
)
421 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) __REQUIRE_CONSTANT(__c2
)
422 __REQUIRE_CONSTANT(__c3
) {
423 return (v128_t
)(__f32x4
){__c0
, __c1
, __c2
, __c3
};
426 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_const(double __c0
,
428 __REQUIRE_CONSTANT(__c0
) __REQUIRE_CONSTANT(__c1
) {
429 return (v128_t
)(__f64x2
){__c0
, __c1
};
432 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_const_splat(int8_t __c
)
433 __REQUIRE_CONSTANT(__c
) {
434 return (v128_t
)(__i8x16
){__c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
,
435 __c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
};
438 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_const_splat(uint8_t __c
)
439 __REQUIRE_CONSTANT(__c
) {
440 return (v128_t
)(__u8x16
){__c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
,
441 __c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
};
444 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_const_splat(int16_t __c
)
445 __REQUIRE_CONSTANT(__c
) {
446 return (v128_t
)(__i16x8
){__c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
};
449 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_const_splat(uint16_t __c
)
450 __REQUIRE_CONSTANT(__c
) {
451 return (v128_t
)(__u16x8
){__c
, __c
, __c
, __c
, __c
, __c
, __c
, __c
};
454 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_const_splat(int32_t __c
)
455 __REQUIRE_CONSTANT(__c
) {
456 return (v128_t
)(__i32x4
){__c
, __c
, __c
, __c
};
459 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_const_splat(uint32_t __c
)
460 __REQUIRE_CONSTANT(__c
) {
461 return (v128_t
)(__u32x4
){__c
, __c
, __c
, __c
};
464 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_const_splat(int64_t __c
)
465 __REQUIRE_CONSTANT(__c
) {
466 return (v128_t
)(__i64x2
){__c
, __c
};
469 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_const_splat(uint64_t __c
)
470 __REQUIRE_CONSTANT(__c
) {
471 return (v128_t
)(__u64x2
){__c
, __c
};
474 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_const_splat(float __c
)
475 __REQUIRE_CONSTANT(__c
) {
476 return (v128_t
)(__f32x4
){__c
, __c
, __c
, __c
};
479 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_const_splat(double __c
)
480 __REQUIRE_CONSTANT(__c
) {
481 return (v128_t
)(__f64x2
){__c
, __c
};
484 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_splat(int8_t __a
) {
485 return (v128_t
)(__i8x16
){__a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
,
486 __a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
};
489 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_splat(uint8_t __a
) {
490 return (v128_t
)(__u8x16
){__a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
,
491 __a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
};
494 static __inline__
int8_t __DEFAULT_FN_ATTRS
wasm_i8x16_extract_lane(v128_t __a
,
496 __REQUIRE_CONSTANT(__i
) {
497 return ((__i8x16
)__a
)[__i
];
500 static __inline__
uint8_t __DEFAULT_FN_ATTRS
wasm_u8x16_extract_lane(v128_t __a
,
502 __REQUIRE_CONSTANT(__i
) {
503 return ((__u8x16
)__a
)[__i
];
506 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_replace_lane(v128_t __a
,
509 __REQUIRE_CONSTANT(__i
) {
510 __i8x16 __v
= (__i8x16
)__a
;
515 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_replace_lane(v128_t __a
,
518 __REQUIRE_CONSTANT(__i
) {
519 __u8x16 __v
= (__u8x16
)__a
;
524 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_splat(int16_t __a
) {
525 return (v128_t
)(__i16x8
){__a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
};
528 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_splat(uint16_t __a
) {
529 return (v128_t
)(__u16x8
){__a
, __a
, __a
, __a
, __a
, __a
, __a
, __a
};
532 static __inline__
int16_t __DEFAULT_FN_ATTRS
wasm_i16x8_extract_lane(v128_t __a
,
534 __REQUIRE_CONSTANT(__i
) {
535 return ((__i16x8
)__a
)[__i
];
538 static __inline__
uint16_t __DEFAULT_FN_ATTRS
539 wasm_u16x8_extract_lane(v128_t __a
, int __i
) __REQUIRE_CONSTANT(__i
) {
540 return ((__u16x8
)__a
)[__i
];
543 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_replace_lane(v128_t __a
,
546 __REQUIRE_CONSTANT(__i
) {
547 __i16x8 __v
= (__i16x8
)__a
;
552 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_replace_lane(
553 v128_t __a
, int __i
, uint16_t __b
) __REQUIRE_CONSTANT(__i
) {
554 __u16x8 __v
= (__u16x8
)__a
;
559 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_splat(int32_t __a
) {
560 return (v128_t
)(__i32x4
){__a
, __a
, __a
, __a
};
563 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_splat(uint32_t __a
) {
564 return (v128_t
)(__u32x4
){__a
, __a
, __a
, __a
};
567 static __inline__
int32_t __DEFAULT_FN_ATTRS
wasm_i32x4_extract_lane(v128_t __a
,
569 __REQUIRE_CONSTANT(__i
) {
570 return ((__i32x4
)__a
)[__i
];
573 static __inline__
uint32_t __DEFAULT_FN_ATTRS
574 wasm_u32x4_extract_lane(v128_t __a
, int __i
) __REQUIRE_CONSTANT(__i
) {
575 return ((__u32x4
)__a
)[__i
];
578 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_replace_lane(v128_t __a
,
581 __REQUIRE_CONSTANT(__i
) {
582 __i32x4 __v
= (__i32x4
)__a
;
587 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_replace_lane(
588 v128_t __a
, int __i
, uint32_t __b
) __REQUIRE_CONSTANT(__i
) {
589 __u32x4 __v
= (__u32x4
)__a
;
594 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_splat(int64_t __a
) {
595 return (v128_t
)(__i64x2
){__a
, __a
};
598 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_splat(uint64_t __a
) {
599 return (v128_t
)(__u64x2
){__a
, __a
};
602 static __inline__
int64_t __DEFAULT_FN_ATTRS
wasm_i64x2_extract_lane(v128_t __a
,
604 __REQUIRE_CONSTANT(__i
) {
605 return ((__i64x2
)__a
)[__i
];
608 static __inline__
uint64_t __DEFAULT_FN_ATTRS
609 wasm_u64x2_extract_lane(v128_t __a
, int __i
) __REQUIRE_CONSTANT(__i
) {
610 return ((__u64x2
)__a
)[__i
];
613 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_replace_lane(v128_t __a
,
616 __REQUIRE_CONSTANT(__i
) {
617 __i64x2 __v
= (__i64x2
)__a
;
622 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_replace_lane(
623 v128_t __a
, int __i
, uint64_t __b
) __REQUIRE_CONSTANT(__i
) {
624 __u64x2 __v
= (__u64x2
)__a
;
629 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_splat(float __a
) {
630 return (v128_t
)(__f32x4
){__a
, __a
, __a
, __a
};
633 static __inline__
float __DEFAULT_FN_ATTRS
wasm_f32x4_extract_lane(v128_t __a
,
635 __REQUIRE_CONSTANT(__i
) {
636 return ((__f32x4
)__a
)[__i
];
639 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_replace_lane(v128_t __a
,
642 __REQUIRE_CONSTANT(__i
) {
643 __f32x4 __v
= (__f32x4
)__a
;
648 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_splat(double __a
) {
649 return (v128_t
)(__f64x2
){__a
, __a
};
652 static __inline__
double __DEFAULT_FN_ATTRS
wasm_f64x2_extract_lane(v128_t __a
,
654 __REQUIRE_CONSTANT(__i
) {
655 return ((__f64x2
)__a
)[__i
];
658 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_replace_lane(v128_t __a
,
661 __REQUIRE_CONSTANT(__i
) {
662 __f64x2 __v
= (__f64x2
)__a
;
667 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_eq(v128_t __a
,
669 return (v128_t
)((__i8x16
)__a
== (__i8x16
)__b
);
672 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_ne(v128_t __a
,
674 return (v128_t
)((__i8x16
)__a
!= (__i8x16
)__b
);
677 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_lt(v128_t __a
,
679 return (v128_t
)((__i8x16
)__a
< (__i8x16
)__b
);
682 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_lt(v128_t __a
,
684 return (v128_t
)((__u8x16
)__a
< (__u8x16
)__b
);
687 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_gt(v128_t __a
,
689 return (v128_t
)((__i8x16
)__a
> (__i8x16
)__b
);
692 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_gt(v128_t __a
,
694 return (v128_t
)((__u8x16
)__a
> (__u8x16
)__b
);
697 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_le(v128_t __a
,
699 return (v128_t
)((__i8x16
)__a
<= (__i8x16
)__b
);
702 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_le(v128_t __a
,
704 return (v128_t
)((__u8x16
)__a
<= (__u8x16
)__b
);
707 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_ge(v128_t __a
,
709 return (v128_t
)((__i8x16
)__a
>= (__i8x16
)__b
);
712 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_ge(v128_t __a
,
714 return (v128_t
)((__u8x16
)__a
>= (__u8x16
)__b
);
717 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_eq(v128_t __a
,
719 return (v128_t
)((__i16x8
)__a
== (__i16x8
)__b
);
722 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_ne(v128_t __a
,
724 return (v128_t
)((__u16x8
)__a
!= (__u16x8
)__b
);
727 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_lt(v128_t __a
,
729 return (v128_t
)((__i16x8
)__a
< (__i16x8
)__b
);
732 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_lt(v128_t __a
,
734 return (v128_t
)((__u16x8
)__a
< (__u16x8
)__b
);
737 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_gt(v128_t __a
,
739 return (v128_t
)((__i16x8
)__a
> (__i16x8
)__b
);
742 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_gt(v128_t __a
,
744 return (v128_t
)((__u16x8
)__a
> (__u16x8
)__b
);
747 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_le(v128_t __a
,
749 return (v128_t
)((__i16x8
)__a
<= (__i16x8
)__b
);
752 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_le(v128_t __a
,
754 return (v128_t
)((__u16x8
)__a
<= (__u16x8
)__b
);
757 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_ge(v128_t __a
,
759 return (v128_t
)((__i16x8
)__a
>= (__i16x8
)__b
);
762 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_ge(v128_t __a
,
764 return (v128_t
)((__u16x8
)__a
>= (__u16x8
)__b
);
767 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_eq(v128_t __a
,
769 return (v128_t
)((__i32x4
)__a
== (__i32x4
)__b
);
772 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_ne(v128_t __a
,
774 return (v128_t
)((__i32x4
)__a
!= (__i32x4
)__b
);
777 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_lt(v128_t __a
,
779 return (v128_t
)((__i32x4
)__a
< (__i32x4
)__b
);
782 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_lt(v128_t __a
,
784 return (v128_t
)((__u32x4
)__a
< (__u32x4
)__b
);
787 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_gt(v128_t __a
,
789 return (v128_t
)((__i32x4
)__a
> (__i32x4
)__b
);
792 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_gt(v128_t __a
,
794 return (v128_t
)((__u32x4
)__a
> (__u32x4
)__b
);
797 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_le(v128_t __a
,
799 return (v128_t
)((__i32x4
)__a
<= (__i32x4
)__b
);
802 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_le(v128_t __a
,
804 return (v128_t
)((__u32x4
)__a
<= (__u32x4
)__b
);
807 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_ge(v128_t __a
,
809 return (v128_t
)((__i32x4
)__a
>= (__i32x4
)__b
);
812 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_ge(v128_t __a
,
814 return (v128_t
)((__u32x4
)__a
>= (__u32x4
)__b
);
817 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_eq(v128_t __a
,
819 return (v128_t
)((__i64x2
)__a
== (__i64x2
)__b
);
822 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_ne(v128_t __a
,
824 return (v128_t
)((__i64x2
)__a
!= (__i64x2
)__b
);
827 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_lt(v128_t __a
,
829 return (v128_t
)((__i64x2
)__a
< (__i64x2
)__b
);
832 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_gt(v128_t __a
,
834 return (v128_t
)((__i64x2
)__a
> (__i64x2
)__b
);
837 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_le(v128_t __a
,
839 return (v128_t
)((__i64x2
)__a
<= (__i64x2
)__b
);
842 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_ge(v128_t __a
,
844 return (v128_t
)((__i64x2
)__a
>= (__i64x2
)__b
);
847 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_eq(v128_t __a
,
849 return (v128_t
)((__f32x4
)__a
== (__f32x4
)__b
);
852 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_ne(v128_t __a
,
854 return (v128_t
)((__f32x4
)__a
!= (__f32x4
)__b
);
857 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_lt(v128_t __a
,
859 return (v128_t
)((__f32x4
)__a
< (__f32x4
)__b
);
862 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_gt(v128_t __a
,
864 return (v128_t
)((__f32x4
)__a
> (__f32x4
)__b
);
867 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_le(v128_t __a
,
869 return (v128_t
)((__f32x4
)__a
<= (__f32x4
)__b
);
872 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_ge(v128_t __a
,
874 return (v128_t
)((__f32x4
)__a
>= (__f32x4
)__b
);
877 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_eq(v128_t __a
,
879 return (v128_t
)((__f64x2
)__a
== (__f64x2
)__b
);
882 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_ne(v128_t __a
,
884 return (v128_t
)((__f64x2
)__a
!= (__f64x2
)__b
);
887 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_lt(v128_t __a
,
889 return (v128_t
)((__f64x2
)__a
< (__f64x2
)__b
);
892 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_gt(v128_t __a
,
894 return (v128_t
)((__f64x2
)__a
> (__f64x2
)__b
);
897 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_le(v128_t __a
,
899 return (v128_t
)((__f64x2
)__a
<= (__f64x2
)__b
);
902 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_ge(v128_t __a
,
904 return (v128_t
)((__f64x2
)__a
>= (__f64x2
)__b
);
907 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_not(v128_t __a
) {
911 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_and(v128_t __a
,
916 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_or(v128_t __a
,
921 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_xor(v128_t __a
,
926 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_andnot(v128_t __a
,
931 static __inline__
bool __DEFAULT_FN_ATTRS
wasm_v128_any_true(v128_t __a
) {
932 return __builtin_wasm_any_true_v128((__i8x16
)__a
);
935 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_v128_bitselect(v128_t __a
,
938 return (v128_t
)__builtin_wasm_bitselect((__i32x4
)__a
, (__i32x4
)__b
,
942 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_abs(v128_t __a
) {
943 return (v128_t
)__builtin_wasm_abs_i8x16((__i8x16
)__a
);
946 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_neg(v128_t __a
) {
947 return (v128_t
)(-(__u8x16
)__a
);
950 static __inline__
bool __DEFAULT_FN_ATTRS
wasm_i8x16_all_true(v128_t __a
) {
951 return __builtin_wasm_all_true_i8x16((__i8x16
)__a
);
954 static __inline__
uint32_t __DEFAULT_FN_ATTRS
wasm_i8x16_bitmask(v128_t __a
) {
955 return __builtin_wasm_bitmask_i8x16((__i8x16
)__a
);
958 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_popcnt(v128_t __a
) {
959 return (v128_t
)__builtin_wasm_popcnt_i8x16((__i8x16
)__a
);
962 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_shl(v128_t __a
,
964 return (v128_t
)((__i8x16
)__a
<< (__b
& 0x7));
967 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_shr(v128_t __a
,
969 return (v128_t
)((__i8x16
)__a
>> (__b
& 0x7));
972 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_shr(v128_t __a
,
974 return (v128_t
)((__u8x16
)__a
>> (__b
& 0x7));
977 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_add(v128_t __a
,
979 return (v128_t
)((__u8x16
)__a
+ (__u8x16
)__b
);
982 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_add_sat(v128_t __a
,
984 return (v128_t
)__builtin_wasm_add_sat_s_i8x16((__i8x16
)__a
, (__i8x16
)__b
);
987 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_add_sat(v128_t __a
,
989 return (v128_t
)__builtin_wasm_add_sat_u_i8x16((__u8x16
)__a
, (__u8x16
)__b
);
992 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_sub(v128_t __a
,
994 return (v128_t
)((__u8x16
)__a
- (__u8x16
)__b
);
997 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_sub_sat(v128_t __a
,
999 return (v128_t
)__builtin_wasm_sub_sat_s_i8x16((__i8x16
)__a
, (__i8x16
)__b
);
1002 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_sub_sat(v128_t __a
,
1004 return (v128_t
)__builtin_wasm_sub_sat_u_i8x16((__u8x16
)__a
, (__u8x16
)__b
);
1007 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_min(v128_t __a
,
1009 return (v128_t
)__builtin_wasm_min_s_i8x16((__i8x16
)__a
, (__i8x16
)__b
);
1012 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_min(v128_t __a
,
1014 return (v128_t
)__builtin_wasm_min_u_i8x16((__u8x16
)__a
, (__u8x16
)__b
);
1017 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_max(v128_t __a
,
1019 return (v128_t
)__builtin_wasm_max_s_i8x16((__i8x16
)__a
, (__i8x16
)__b
);
1022 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_max(v128_t __a
,
1024 return (v128_t
)__builtin_wasm_max_u_i8x16((__u8x16
)__a
, (__u8x16
)__b
);
1027 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u8x16_avgr(v128_t __a
,
1029 return (v128_t
)__builtin_wasm_avgr_u_i8x16((__u8x16
)__a
, (__u8x16
)__b
);
1032 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_abs(v128_t __a
) {
1033 return (v128_t
)__builtin_wasm_abs_i16x8((__i16x8
)__a
);
1036 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_neg(v128_t __a
) {
1037 return (v128_t
)(-(__u16x8
)__a
);
1040 static __inline__
bool __DEFAULT_FN_ATTRS
wasm_i16x8_all_true(v128_t __a
) {
1041 return __builtin_wasm_all_true_i16x8((__i16x8
)__a
);
1044 static __inline__
uint32_t __DEFAULT_FN_ATTRS
wasm_i16x8_bitmask(v128_t __a
) {
1045 return __builtin_wasm_bitmask_i16x8((__i16x8
)__a
);
1048 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_shl(v128_t __a
,
1050 return (v128_t
)((__i16x8
)__a
<< (__b
& 0xF));
1053 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_shr(v128_t __a
,
1055 return (v128_t
)((__i16x8
)__a
>> (__b
& 0xF));
1058 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_shr(v128_t __a
,
1060 return (v128_t
)((__u16x8
)__a
>> (__b
& 0xF));
1063 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_add(v128_t __a
,
1065 return (v128_t
)((__u16x8
)__a
+ (__u16x8
)__b
);
1068 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_add_sat(v128_t __a
,
1070 return (v128_t
)__builtin_wasm_add_sat_s_i16x8((__i16x8
)__a
, (__i16x8
)__b
);
1073 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_add_sat(v128_t __a
,
1075 return (v128_t
)__builtin_wasm_add_sat_u_i16x8((__u16x8
)__a
, (__u16x8
)__b
);
1078 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_sub(v128_t __a
,
1080 return (v128_t
)((__i16x8
)__a
- (__i16x8
)__b
);
1083 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_sub_sat(v128_t __a
,
1085 return (v128_t
)__builtin_wasm_sub_sat_s_i16x8((__i16x8
)__a
, (__i16x8
)__b
);
1088 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_sub_sat(v128_t __a
,
1090 return (v128_t
)__builtin_wasm_sub_sat_u_i16x8((__u16x8
)__a
, (__u16x8
)__b
);
1093 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_mul(v128_t __a
,
1095 return (v128_t
)((__u16x8
)__a
* (__u16x8
)__b
);
1098 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_min(v128_t __a
,
1100 return (v128_t
)__builtin_wasm_min_s_i16x8((__i16x8
)__a
, (__i16x8
)__b
);
1103 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_min(v128_t __a
,
1105 return (v128_t
)__builtin_wasm_min_u_i16x8((__u16x8
)__a
, (__u16x8
)__b
);
1108 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_max(v128_t __a
,
1110 return (v128_t
)__builtin_wasm_max_s_i16x8((__i16x8
)__a
, (__i16x8
)__b
);
1113 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_max(v128_t __a
,
1115 return (v128_t
)__builtin_wasm_max_u_i16x8((__u16x8
)__a
, (__u16x8
)__b
);
1118 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u16x8_avgr(v128_t __a
,
1120 return (v128_t
)__builtin_wasm_avgr_u_i16x8((__u16x8
)__a
, (__u16x8
)__b
);
1123 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_abs(v128_t __a
) {
1124 return (v128_t
)__builtin_wasm_abs_i32x4((__i32x4
)__a
);
1127 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_neg(v128_t __a
) {
1128 return (v128_t
)(-(__u32x4
)__a
);
1131 static __inline__
bool __DEFAULT_FN_ATTRS
wasm_i32x4_all_true(v128_t __a
) {
1132 return __builtin_wasm_all_true_i32x4((__i32x4
)__a
);
1135 static __inline__
uint32_t __DEFAULT_FN_ATTRS
wasm_i32x4_bitmask(v128_t __a
) {
1136 return __builtin_wasm_bitmask_i32x4((__i32x4
)__a
);
1139 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_shl(v128_t __a
,
1141 return (v128_t
)((__i32x4
)__a
<< (__b
& 0x1F));
1144 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_shr(v128_t __a
,
1146 return (v128_t
)((__i32x4
)__a
>> (__b
& 0x1F));
1149 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_shr(v128_t __a
,
1151 return (v128_t
)((__u32x4
)__a
>> (__b
& 0x1F));
1154 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_add(v128_t __a
,
1156 return (v128_t
)((__u32x4
)__a
+ (__u32x4
)__b
);
1159 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_sub(v128_t __a
,
1161 return (v128_t
)((__u32x4
)__a
- (__u32x4
)__b
);
1164 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_mul(v128_t __a
,
1166 return (v128_t
)((__u32x4
)__a
* (__u32x4
)__b
);
1169 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_min(v128_t __a
,
1171 return (v128_t
)__builtin_wasm_min_s_i32x4((__i32x4
)__a
, (__i32x4
)__b
);
1174 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_min(v128_t __a
,
1176 return (v128_t
)__builtin_wasm_min_u_i32x4((__u32x4
)__a
, (__u32x4
)__b
);
1179 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_max(v128_t __a
,
1181 return (v128_t
)__builtin_wasm_max_s_i32x4((__i32x4
)__a
, (__i32x4
)__b
);
1184 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u32x4_max(v128_t __a
,
1186 return (v128_t
)__builtin_wasm_max_u_i32x4((__u32x4
)__a
, (__u32x4
)__b
);
1189 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i32x4_dot_i16x8(v128_t __a
,
1191 return (v128_t
)__builtin_wasm_dot_s_i32x4_i16x8((__i16x8
)__a
, (__i16x8
)__b
);
1194 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_abs(v128_t __a
) {
1195 return (v128_t
)__builtin_wasm_abs_i64x2((__i64x2
)__a
);
1198 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_neg(v128_t __a
) {
1199 return (v128_t
)(-(__u64x2
)__a
);
1202 static __inline__
bool __DEFAULT_FN_ATTRS
wasm_i64x2_all_true(v128_t __a
) {
1203 return __builtin_wasm_all_true_i64x2((__i64x2
)__a
);
1206 static __inline__
uint32_t __DEFAULT_FN_ATTRS
wasm_i64x2_bitmask(v128_t __a
) {
1207 return __builtin_wasm_bitmask_i64x2((__i64x2
)__a
);
1210 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_shl(v128_t __a
,
1212 return (v128_t
)((__i64x2
)__a
<< ((int64_t)__b
& 0x3F));
1215 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_shr(v128_t __a
,
1217 return (v128_t
)((__i64x2
)__a
>> ((int64_t)__b
& 0x3F));
1220 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_u64x2_shr(v128_t __a
,
1222 return (v128_t
)((__u64x2
)__a
>> ((int64_t)__b
& 0x3F));
1225 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_add(v128_t __a
,
1227 return (v128_t
)((__u64x2
)__a
+ (__u64x2
)__b
);
1230 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_sub(v128_t __a
,
1232 return (v128_t
)((__u64x2
)__a
- (__u64x2
)__b
);
1235 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i64x2_mul(v128_t __a
,
1237 return (v128_t
)((__u64x2
)__a
* (__u64x2
)__b
);
1240 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_abs(v128_t __a
) {
1241 return (v128_t
)__builtin_wasm_abs_f32x4((__f32x4
)__a
);
1244 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_neg(v128_t __a
) {
1245 return (v128_t
)(-(__f32x4
)__a
);
1248 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_sqrt(v128_t __a
) {
1249 return (v128_t
)__builtin_wasm_sqrt_f32x4((__f32x4
)__a
);
1252 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_ceil(v128_t __a
) {
1253 return (v128_t
)__builtin_wasm_ceil_f32x4((__f32x4
)__a
);
1256 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_floor(v128_t __a
) {
1257 return (v128_t
)__builtin_wasm_floor_f32x4((__f32x4
)__a
);
1260 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_trunc(v128_t __a
) {
1261 return (v128_t
)__builtin_wasm_trunc_f32x4((__f32x4
)__a
);
1264 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_nearest(v128_t __a
) {
1265 return (v128_t
)__builtin_wasm_nearest_f32x4((__f32x4
)__a
);
1268 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_add(v128_t __a
,
1270 return (v128_t
)((__f32x4
)__a
+ (__f32x4
)__b
);
1273 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_sub(v128_t __a
,
1275 return (v128_t
)((__f32x4
)__a
- (__f32x4
)__b
);
1278 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_mul(v128_t __a
,
1280 return (v128_t
)((__f32x4
)__a
* (__f32x4
)__b
);
1283 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_div(v128_t __a
,
1285 return (v128_t
)((__f32x4
)__a
/ (__f32x4
)__b
);
1288 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_min(v128_t __a
,
1290 return (v128_t
)__builtin_wasm_min_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1293 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_max(v128_t __a
,
1295 return (v128_t
)__builtin_wasm_max_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1298 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_pmin(v128_t __a
,
1300 return (v128_t
)__builtin_wasm_pmin_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1303 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f32x4_pmax(v128_t __a
,
1305 return (v128_t
)__builtin_wasm_pmax_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1308 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_abs(v128_t __a
) {
1309 return (v128_t
)__builtin_wasm_abs_f64x2((__f64x2
)__a
);
1312 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_neg(v128_t __a
) {
1313 return (v128_t
)(-(__f64x2
)__a
);
1316 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_sqrt(v128_t __a
) {
1317 return (v128_t
)__builtin_wasm_sqrt_f64x2((__f64x2
)__a
);
1320 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_ceil(v128_t __a
) {
1321 return (v128_t
)__builtin_wasm_ceil_f64x2((__f64x2
)__a
);
1324 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_floor(v128_t __a
) {
1325 return (v128_t
)__builtin_wasm_floor_f64x2((__f64x2
)__a
);
1328 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_trunc(v128_t __a
) {
1329 return (v128_t
)__builtin_wasm_trunc_f64x2((__f64x2
)__a
);
1332 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_nearest(v128_t __a
) {
1333 return (v128_t
)__builtin_wasm_nearest_f64x2((__f64x2
)__a
);
1336 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_add(v128_t __a
,
1338 return (v128_t
)((__f64x2
)__a
+ (__f64x2
)__b
);
1341 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_sub(v128_t __a
,
1343 return (v128_t
)((__f64x2
)__a
- (__f64x2
)__b
);
1346 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_mul(v128_t __a
,
1348 return (v128_t
)((__f64x2
)__a
* (__f64x2
)__b
);
1351 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_div(v128_t __a
,
1353 return (v128_t
)((__f64x2
)__a
/ (__f64x2
)__b
);
1356 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_min(v128_t __a
,
1358 return (v128_t
)__builtin_wasm_min_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1361 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_max(v128_t __a
,
1363 return (v128_t
)__builtin_wasm_max_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1366 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_pmin(v128_t __a
,
1368 return (v128_t
)__builtin_wasm_pmin_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1371 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_f64x2_pmax(v128_t __a
,
1373 return (v128_t
)__builtin_wasm_pmax_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1376 static __inline__ v128_t __DEFAULT_FN_ATTRS
1377 wasm_i32x4_trunc_sat_f32x4(v128_t __a
) {
1378 return (v128_t
)__builtin_wasm_trunc_saturate_s_i32x4_f32x4((__f32x4
)__a
);
1381 static __inline__ v128_t __DEFAULT_FN_ATTRS
1382 wasm_u32x4_trunc_sat_f32x4(v128_t __a
) {
1383 return (v128_t
)__builtin_wasm_trunc_saturate_u_i32x4_f32x4((__f32x4
)__a
);
1386 static __inline__ v128_t __DEFAULT_FN_ATTRS
1387 wasm_f32x4_convert_i32x4(v128_t __a
) {
1388 return (v128_t
) __builtin_convertvector((__i32x4
)__a
, __f32x4
);
1391 static __inline__ v128_t __DEFAULT_FN_ATTRS
1392 wasm_f32x4_convert_u32x4(v128_t __a
) {
1393 return (v128_t
) __builtin_convertvector((__u32x4
)__a
, __f32x4
);
1396 static __inline__ v128_t __DEFAULT_FN_ATTRS
1397 wasm_f64x2_convert_low_i32x4(v128_t __a
) {
1398 return (v128_t
) __builtin_convertvector((__i32x2
){__a
[0], __a
[1]}, __f64x2
);
1401 static __inline__ v128_t __DEFAULT_FN_ATTRS
1402 wasm_f64x2_convert_low_u32x4(v128_t __a
) {
1403 return (v128_t
) __builtin_convertvector((__u32x2
){__a
[0], __a
[1]}, __f64x2
);
1406 static __inline__ v128_t __DEFAULT_FN_ATTRS
1407 wasm_i32x4_trunc_sat_f64x2_zero(v128_t __a
) {
1408 return (v128_t
)__builtin_wasm_trunc_sat_s_zero_f64x2_i32x4((__f64x2
)__a
);
1411 static __inline__ v128_t __DEFAULT_FN_ATTRS
1412 wasm_u32x4_trunc_sat_f64x2_zero(v128_t __a
) {
1413 return (v128_t
)__builtin_wasm_trunc_sat_u_zero_f64x2_i32x4((__f64x2
)__a
);
1416 static __inline__ v128_t __DEFAULT_FN_ATTRS
1417 wasm_f32x4_demote_f64x2_zero(v128_t __a
) {
1418 return (v128_t
) __builtin_convertvector(
1419 __builtin_shufflevector((__f64x2
)__a
, (__f64x2
){0, 0}, 0, 1, 2, 3),
1423 static __inline__ v128_t __DEFAULT_FN_ATTRS
1424 wasm_f64x2_promote_low_f32x4(v128_t __a
) {
1425 return (v128_t
) __builtin_convertvector(
1426 (__f32x2
){((__f32x4
)__a
)[0], ((__f32x4
)__a
)[1]}, __f64x2
);
// Compile-time shuffles. Lane indices must be constant expressions; the wider
// variants expand each lane index into the corresponding byte indices.
#define wasm_i8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7, __c8, __c9, __c10, __c11, __c12, __c13,       \
                           __c14, __c15)                                       \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), __c0, __c1, __c2, __c3, __c4, __c5,      \
      __c6, __c7, __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15))

#define wasm_i16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7)                                               \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*2, (__c0)*2 + 1, (__c1)*2,        \
      (__c1)*2 + 1, (__c2)*2, (__c2)*2 + 1, (__c3)*2, (__c3)*2 + 1, (__c4)*2,  \
      (__c4)*2 + 1, (__c5)*2, (__c5)*2 + 1, (__c6)*2, (__c6)*2 + 1, (__c7)*2,  \
      (__c7)*2 + 1))

#define wasm_i32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)                   \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*4, (__c0)*4 + 1, (__c0)*4 + 2,    \
      (__c0)*4 + 3, (__c1)*4, (__c1)*4 + 1, (__c1)*4 + 2, (__c1)*4 + 3,        \
      (__c2)*4, (__c2)*4 + 1, (__c2)*4 + 2, (__c2)*4 + 3, (__c3)*4,            \
      (__c3)*4 + 1, (__c3)*4 + 2, (__c3)*4 + 3))

#define wasm_i64x2_shuffle(__a, __b, __c0, __c1)                               \
  ((v128_t)__builtin_wasm_shuffle_i8x16(                                       \
      (__i8x16)(__a), (__i8x16)(__b), (__c0)*8, (__c0)*8 + 1, (__c0)*8 + 2,    \
      (__c0)*8 + 3, (__c0)*8 + 4, (__c0)*8 + 5, (__c0)*8 + 6, (__c0)*8 + 7,    \
      (__c1)*8, (__c1)*8 + 1, (__c1)*8 + 2, (__c1)*8 + 3, (__c1)*8 + 4,        \
      (__c1)*8 + 5, (__c1)*8 + 6, (__c1)*8 + 7))
1458 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i8x16_swizzle(v128_t __a
,
1460 return (v128_t
)__builtin_wasm_swizzle_i8x16((__i8x16
)__a
, (__i8x16
)__b
);
1463 static __inline__ v128_t __DEFAULT_FN_ATTRS
1464 wasm_i8x16_narrow_i16x8(v128_t __a
, v128_t __b
) {
1465 return (v128_t
)__builtin_wasm_narrow_s_i8x16_i16x8((__i16x8
)__a
,
1469 static __inline__ v128_t __DEFAULT_FN_ATTRS
1470 wasm_u8x16_narrow_i16x8(v128_t __a
, v128_t __b
) {
1471 return (v128_t
)__builtin_wasm_narrow_u_i8x16_i16x8((__i16x8
)__a
,
1475 static __inline__ v128_t __DEFAULT_FN_ATTRS
1476 wasm_i16x8_narrow_i32x4(v128_t __a
, v128_t __b
) {
1477 return (v128_t
)__builtin_wasm_narrow_s_i16x8_i32x4((__i32x4
)__a
,
1481 static __inline__ v128_t __DEFAULT_FN_ATTRS
1482 wasm_u16x8_narrow_i32x4(v128_t __a
, v128_t __b
) {
1483 return (v128_t
)__builtin_wasm_narrow_u_i16x8_i32x4((__i32x4
)__a
,
1487 static __inline__ v128_t __DEFAULT_FN_ATTRS
1488 wasm_i16x8_extend_low_i8x16(v128_t __a
) {
1489 return (v128_t
) __builtin_convertvector(
1490 (__i8x8
){((__i8x16
)__a
)[0], ((__i8x16
)__a
)[1], ((__i8x16
)__a
)[2],
1491 ((__i8x16
)__a
)[3], ((__i8x16
)__a
)[4], ((__i8x16
)__a
)[5],
1492 ((__i8x16
)__a
)[6], ((__i8x16
)__a
)[7]},
1496 static __inline__ v128_t __DEFAULT_FN_ATTRS
1497 wasm_i16x8_extend_high_i8x16(v128_t __a
) {
1498 return (v128_t
) __builtin_convertvector(
1499 (__i8x8
){((__i8x16
)__a
)[8], ((__i8x16
)__a
)[9], ((__i8x16
)__a
)[10],
1500 ((__i8x16
)__a
)[11], ((__i8x16
)__a
)[12], ((__i8x16
)__a
)[13],
1501 ((__i8x16
)__a
)[14], ((__i8x16
)__a
)[15]},
1505 static __inline__ v128_t __DEFAULT_FN_ATTRS
1506 wasm_u16x8_extend_low_u8x16(v128_t __a
) {
1507 return (v128_t
) __builtin_convertvector(
1508 (__u8x8
){((__u8x16
)__a
)[0], ((__u8x16
)__a
)[1], ((__u8x16
)__a
)[2],
1509 ((__u8x16
)__a
)[3], ((__u8x16
)__a
)[4], ((__u8x16
)__a
)[5],
1510 ((__u8x16
)__a
)[6], ((__u8x16
)__a
)[7]},
1514 static __inline__ v128_t __DEFAULT_FN_ATTRS
1515 wasm_u16x8_extend_high_u8x16(v128_t __a
) {
1516 return (v128_t
) __builtin_convertvector(
1517 (__u8x8
){((__u8x16
)__a
)[8], ((__u8x16
)__a
)[9], ((__u8x16
)__a
)[10],
1518 ((__u8x16
)__a
)[11], ((__u8x16
)__a
)[12], ((__u8x16
)__a
)[13],
1519 ((__u8x16
)__a
)[14], ((__u8x16
)__a
)[15]},
1523 static __inline__ v128_t __DEFAULT_FN_ATTRS
1524 wasm_i32x4_extend_low_i16x8(v128_t __a
) {
1525 return (v128_t
) __builtin_convertvector(
1526 (__i16x4
){((__i16x8
)__a
)[0], ((__i16x8
)__a
)[1], ((__i16x8
)__a
)[2],
1531 static __inline__ v128_t __DEFAULT_FN_ATTRS
1532 wasm_i32x4_extend_high_i16x8(v128_t __a
) {
1533 return (v128_t
) __builtin_convertvector(
1534 (__i16x4
){((__i16x8
)__a
)[4], ((__i16x8
)__a
)[5], ((__i16x8
)__a
)[6],
1539 static __inline__ v128_t __DEFAULT_FN_ATTRS
1540 wasm_u32x4_extend_low_u16x8(v128_t __a
) {
1541 return (v128_t
) __builtin_convertvector(
1542 (__u16x4
){((__u16x8
)__a
)[0], ((__u16x8
)__a
)[1], ((__u16x8
)__a
)[2],
1547 static __inline__ v128_t __DEFAULT_FN_ATTRS
1548 wasm_u32x4_extend_high_u16x8(v128_t __a
) {
1549 return (v128_t
) __builtin_convertvector(
1550 (__u16x4
){((__u16x8
)__a
)[4], ((__u16x8
)__a
)[5], ((__u16x8
)__a
)[6],
1555 static __inline__ v128_t __DEFAULT_FN_ATTRS
1556 wasm_i64x2_extend_low_i32x4(v128_t __a
) {
1557 return (v128_t
) __builtin_convertvector(
1558 (__i32x2
){((__i32x4
)__a
)[0], ((__i32x4
)__a
)[1]}, __i64x2
);
1561 static __inline__ v128_t __DEFAULT_FN_ATTRS
1562 wasm_i64x2_extend_high_i32x4(v128_t __a
) {
1563 return (v128_t
) __builtin_convertvector(
1564 (__i32x2
){((__i32x4
)__a
)[2], ((__i32x4
)__a
)[3]}, __i64x2
);
1567 static __inline__ v128_t __DEFAULT_FN_ATTRS
1568 wasm_u64x2_extend_low_u32x4(v128_t __a
) {
1569 return (v128_t
) __builtin_convertvector(
1570 (__u32x2
){((__u32x4
)__a
)[0], ((__u32x4
)__a
)[1]}, __u64x2
);
1573 static __inline__ v128_t __DEFAULT_FN_ATTRS
1574 wasm_u64x2_extend_high_u32x4(v128_t __a
) {
1575 return (v128_t
) __builtin_convertvector(
1576 (__u32x2
){((__u32x4
)__a
)[2], ((__u32x4
)__a
)[3]}, __u64x2
);
1579 static __inline__ v128_t __DEFAULT_FN_ATTRS
1580 wasm_i16x8_extadd_pairwise_i8x16(v128_t __a
) {
1581 return (v128_t
)__builtin_wasm_extadd_pairwise_i8x16_s_i16x8((__i8x16
)__a
);
1584 static __inline__ v128_t __DEFAULT_FN_ATTRS
1585 wasm_u16x8_extadd_pairwise_u8x16(v128_t __a
) {
1586 return (v128_t
)__builtin_wasm_extadd_pairwise_i8x16_u_i16x8((__u8x16
)__a
);
1589 static __inline__ v128_t __DEFAULT_FN_ATTRS
1590 wasm_i32x4_extadd_pairwise_i16x8(v128_t __a
) {
1591 return (v128_t
)__builtin_wasm_extadd_pairwise_i16x8_s_i32x4((__i16x8
)__a
);
1594 static __inline__ v128_t __DEFAULT_FN_ATTRS
1595 wasm_u32x4_extadd_pairwise_u16x8(v128_t __a
) {
1596 return (v128_t
)__builtin_wasm_extadd_pairwise_i16x8_u_i32x4((__u16x8
)__a
);
1599 static __inline__ v128_t __DEFAULT_FN_ATTRS
1600 wasm_i16x8_extmul_low_i8x16(v128_t __a
, v128_t __b
) {
1601 return (v128_t
)((__i16x8
)wasm_i16x8_extend_low_i8x16(__a
) *
1602 (__i16x8
)wasm_i16x8_extend_low_i8x16(__b
));
1605 static __inline__ v128_t __DEFAULT_FN_ATTRS
1606 wasm_i16x8_extmul_high_i8x16(v128_t __a
, v128_t __b
) {
1607 return (v128_t
)((__i16x8
)wasm_i16x8_extend_high_i8x16(__a
) *
1608 (__i16x8
)wasm_i16x8_extend_high_i8x16(__b
));
1611 static __inline__ v128_t __DEFAULT_FN_ATTRS
1612 wasm_u16x8_extmul_low_u8x16(v128_t __a
, v128_t __b
) {
1613 return (v128_t
)((__u16x8
)wasm_u16x8_extend_low_u8x16(__a
) *
1614 (__u16x8
)wasm_u16x8_extend_low_u8x16(__b
));
1617 static __inline__ v128_t __DEFAULT_FN_ATTRS
1618 wasm_u16x8_extmul_high_u8x16(v128_t __a
, v128_t __b
) {
1619 return (v128_t
)((__u16x8
)wasm_u16x8_extend_high_u8x16(__a
) *
1620 (__u16x8
)wasm_u16x8_extend_high_u8x16(__b
));
1623 static __inline__ v128_t __DEFAULT_FN_ATTRS
1624 wasm_i32x4_extmul_low_i16x8(v128_t __a
, v128_t __b
) {
1625 return (v128_t
)((__i32x4
)wasm_i32x4_extend_low_i16x8(__a
) *
1626 (__i32x4
)wasm_i32x4_extend_low_i16x8(__b
));
1629 static __inline__ v128_t __DEFAULT_FN_ATTRS
1630 wasm_i32x4_extmul_high_i16x8(v128_t __a
, v128_t __b
) {
1631 return (v128_t
)((__i32x4
)wasm_i32x4_extend_high_i16x8(__a
) *
1632 (__i32x4
)wasm_i32x4_extend_high_i16x8(__b
));
1635 static __inline__ v128_t __DEFAULT_FN_ATTRS
1636 wasm_u32x4_extmul_low_u16x8(v128_t __a
, v128_t __b
) {
1637 return (v128_t
)((__u32x4
)wasm_u32x4_extend_low_u16x8(__a
) *
1638 (__u32x4
)wasm_u32x4_extend_low_u16x8(__b
));
1641 static __inline__ v128_t __DEFAULT_FN_ATTRS
1642 wasm_u32x4_extmul_high_u16x8(v128_t __a
, v128_t __b
) {
1643 return (v128_t
)((__u32x4
)wasm_u32x4_extend_high_u16x8(__a
) *
1644 (__u32x4
)wasm_u32x4_extend_high_u16x8(__b
));
1647 static __inline__ v128_t __DEFAULT_FN_ATTRS
1648 wasm_i64x2_extmul_low_i32x4(v128_t __a
, v128_t __b
) {
1649 return (v128_t
)((__i64x2
)wasm_i64x2_extend_low_i32x4(__a
) *
1650 (__i64x2
)wasm_i64x2_extend_low_i32x4(__b
));
1653 static __inline__ v128_t __DEFAULT_FN_ATTRS
1654 wasm_i64x2_extmul_high_i32x4(v128_t __a
, v128_t __b
) {
1655 return (v128_t
)((__i64x2
)wasm_i64x2_extend_high_i32x4(__a
) *
1656 (__i64x2
)wasm_i64x2_extend_high_i32x4(__b
));
1659 static __inline__ v128_t __DEFAULT_FN_ATTRS
1660 wasm_u64x2_extmul_low_u32x4(v128_t __a
, v128_t __b
) {
1661 return (v128_t
)((__u64x2
)wasm_u64x2_extend_low_u32x4(__a
) *
1662 (__u64x2
)wasm_u64x2_extend_low_u32x4(__b
));
1665 static __inline__ v128_t __DEFAULT_FN_ATTRS
1666 wasm_u64x2_extmul_high_u32x4(v128_t __a
, v128_t __b
) {
1667 return (v128_t
)((__u64x2
)wasm_u64x2_extend_high_u32x4(__a
) *
1668 (__u64x2
)wasm_u64x2_extend_high_u32x4(__b
));
1671 static __inline__ v128_t __DEFAULT_FN_ATTRS
wasm_i16x8_q15mulr_sat(v128_t __a
,
1673 return (v128_t
)__builtin_wasm_q15mulr_sat_s_i16x8((__i16x8
)__a
, (__i16x8
)__b
);
// Old intrinsic names supported to ease transitioning to the standard names. Do
// not use these; they will be removed in the near future.

#define __DEPRECATED_FN_ATTRS(__replacement)                                   \
  __DEFAULT_FN_ATTRS __attribute__(                                            \
      (deprecated("use " __replacement " instead", __replacement)))

#define __WASM_STR(X) #X

// Macros cannot carry the deprecated attribute, so emit a GCC warning pragma
// instead when deprecation diagnostics are enabled.
#ifdef __DEPRECATED
#define __DEPRECATED_WASM_MACRO(__name, __replacement)                         \
  _Pragma(__WASM_STR(GCC warning(                                              \
      "'" __name "' is deprecated: use '" __replacement "' instead")))
#else
#define __DEPRECATED_WASM_MACRO(__name, __replacement)
#endif
1693 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_v128_load8_splat")
1694 wasm_v8x16_load_splat(const void *__mem
) {
1695 return wasm_v128_load8_splat(__mem
);
1698 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_v128_load16_splat")
1699 wasm_v16x8_load_splat(const void *__mem
) {
1700 return wasm_v128_load16_splat(__mem
);
1703 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_v128_load32_splat")
1704 wasm_v32x4_load_splat(const void *__mem
) {
1705 return wasm_v128_load32_splat(__mem
);
1708 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_v128_load64_splat")
1709 wasm_v64x2_load_splat(const void *__mem
) {
1710 return wasm_v128_load64_splat(__mem
);
1713 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i16x8_load8x8")
1714 wasm_i16x8_load_8x8(const void *__mem
) {
1715 return wasm_i16x8_load8x8(__mem
);
1718 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u16x8_load8x8")
1719 wasm_u16x8_load_8x8(const void *__mem
) {
1720 return wasm_u16x8_load8x8(__mem
);
1723 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i32x4_load16x4")
1724 wasm_i32x4_load_16x4(const void *__mem
) {
1725 return wasm_i32x4_load16x4(__mem
);
1728 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u32x4_load16x4")
1729 wasm_u32x4_load_16x4(const void *__mem
) {
1730 return wasm_u32x4_load16x4(__mem
);
1733 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i64x2_load32x2")
1734 wasm_i64x2_load_32x2(const void *__mem
) {
1735 return wasm_i64x2_load32x2(__mem
);
1738 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u64x2_load32x2")
1739 wasm_u64x2_load_32x2(const void *__mem
) {
1740 return wasm_u64x2_load32x2(__mem
);
// Deprecated shuffle macro aliases: forward to the renamed standard macros,
// emitting a deprecation warning via __DEPRECATED_WASM_MACRO.
#define wasm_v8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7, __c8, __c9, __c10, __c11, __c12, __c13,       \
                           __c14, __c15)                                       \
  __DEPRECATED_WASM_MACRO("wasm_v8x16_shuffle", "wasm_i8x16_shuffle")          \
  wasm_i8x16_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7, \
                     __c8, __c9, __c10, __c11, __c12, __c13, __c14, __c15)

#define wasm_v16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, \
                           __c7)                                               \
  __DEPRECATED_WASM_MACRO("wasm_v16x8_shuffle", "wasm_i16x8_shuffle")          \
  wasm_i16x8_shuffle(__a, __b, __c0, __c1, __c2, __c3, __c4, __c5, __c6, __c7)

#define wasm_v32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)                   \
  __DEPRECATED_WASM_MACRO("wasm_v32x4_shuffle", "wasm_i32x4_shuffle")          \
  wasm_i32x4_shuffle(__a, __b, __c0, __c1, __c2, __c3)

#define wasm_v64x2_shuffle(__a, __b, __c0, __c1)                               \
  __DEPRECATED_WASM_MACRO("wasm_v64x2_shuffle", "wasm_i64x2_shuffle")          \
  wasm_i64x2_shuffle(__a, __b, __c0, __c1)
// Relaxed SIMD intrinsics

// Same as __DEFAULT_FN_ATTRS but requires the "relaxed-simd" target feature.
#define __RELAXED_FN_ATTRS                                                     \
  __attribute__((__always_inline__, __nodebug__, __target__("relaxed-simd"),   \
                 __min_vector_width__(128)))
1769 static __inline__ v128_t __RELAXED_FN_ATTRS
1770 wasm_f32x4_relaxed_madd(v128_t __a
, v128_t __b
, v128_t __c
) {
1771 return (v128_t
)__builtin_wasm_relaxed_madd_f32x4((__f32x4
)__a
, (__f32x4
)__b
,
1775 static __inline__ v128_t __RELAXED_FN_ATTRS
1776 wasm_f32x4_relaxed_nmadd(v128_t __a
, v128_t __b
, v128_t __c
) {
1777 return (v128_t
)__builtin_wasm_relaxed_nmadd_f32x4((__f32x4
)__a
, (__f32x4
)__b
,
1781 static __inline__ v128_t __RELAXED_FN_ATTRS
1782 wasm_f64x2_relaxed_madd(v128_t __a
, v128_t __b
, v128_t __c
) {
1783 return (v128_t
)__builtin_wasm_relaxed_madd_f64x2((__f64x2
)__a
, (__f64x2
)__b
,
1787 static __inline__ v128_t __RELAXED_FN_ATTRS
1788 wasm_f64x2_relaxed_nmadd(v128_t __a
, v128_t __b
, v128_t __c
) {
1789 return (v128_t
)__builtin_wasm_relaxed_nmadd_f64x2((__f64x2
)__a
, (__f64x2
)__b
,
1793 static __inline__ v128_t __RELAXED_FN_ATTRS
1794 wasm_i8x16_relaxed_laneselect(v128_t __a
, v128_t __b
, v128_t __m
) {
1795 return (v128_t
)__builtin_wasm_relaxed_laneselect_i8x16(
1796 (__i8x16
)__a
, (__i8x16
)__b
, (__i8x16
)__m
);
1799 static __inline__ v128_t __RELAXED_FN_ATTRS
1800 wasm_i16x8_relaxed_laneselect(v128_t __a
, v128_t __b
, v128_t __m
) {
1801 return (v128_t
)__builtin_wasm_relaxed_laneselect_i16x8(
1802 (__i16x8
)__a
, (__i16x8
)__b
, (__i16x8
)__m
);
1805 static __inline__ v128_t __RELAXED_FN_ATTRS
1806 wasm_i32x4_relaxed_laneselect(v128_t __a
, v128_t __b
, v128_t __m
) {
1807 return (v128_t
)__builtin_wasm_relaxed_laneselect_i32x4(
1808 (__i32x4
)__a
, (__i32x4
)__b
, (__i32x4
)__m
);
1811 static __inline__ v128_t __RELAXED_FN_ATTRS
1812 wasm_i64x2_relaxed_laneselect(v128_t __a
, v128_t __b
, v128_t __m
) {
1813 return (v128_t
)__builtin_wasm_relaxed_laneselect_i64x2(
1814 (__i64x2
)__a
, (__i64x2
)__b
, (__i64x2
)__m
);
1817 static __inline__ v128_t __RELAXED_FN_ATTRS
1818 wasm_i8x16_relaxed_swizzle(v128_t __a
, v128_t __s
) {
1819 return (v128_t
)__builtin_wasm_relaxed_swizzle_i8x16((__i8x16
)__a
,
1823 static __inline__ v128_t __RELAXED_FN_ATTRS
wasm_f32x4_relaxed_min(v128_t __a
,
1825 return (v128_t
)__builtin_wasm_relaxed_min_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1828 static __inline__ v128_t __RELAXED_FN_ATTRS
wasm_f32x4_relaxed_max(v128_t __a
,
1830 return (v128_t
)__builtin_wasm_relaxed_max_f32x4((__f32x4
)__a
, (__f32x4
)__b
);
1833 static __inline__ v128_t __RELAXED_FN_ATTRS
wasm_f64x2_relaxed_min(v128_t __a
,
1835 return (v128_t
)__builtin_wasm_relaxed_min_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1838 static __inline__ v128_t __RELAXED_FN_ATTRS
wasm_f64x2_relaxed_max(v128_t __a
,
1840 return (v128_t
)__builtin_wasm_relaxed_max_f64x2((__f64x2
)__a
, (__f64x2
)__b
);
1843 static __inline__ v128_t __RELAXED_FN_ATTRS
1844 wasm_i32x4_relaxed_trunc_f32x4(v128_t __a
) {
1845 return (v128_t
)__builtin_wasm_relaxed_trunc_s_i32x4_f32x4((__f32x4
)__a
);
1848 static __inline__ v128_t __RELAXED_FN_ATTRS
1849 wasm_u32x4_relaxed_trunc_f32x4(v128_t __a
) {
1850 return (v128_t
)__builtin_wasm_relaxed_trunc_u_i32x4_f32x4((__f32x4
)__a
);
1853 static __inline__ v128_t __RELAXED_FN_ATTRS
1854 wasm_i32x4_relaxed_trunc_f64x2_zero(v128_t __a
) {
1855 return (v128_t
)__builtin_wasm_relaxed_trunc_s_zero_i32x4_f64x2((__f64x2
)__a
);
1858 static __inline__ v128_t __RELAXED_FN_ATTRS
1859 wasm_u32x4_relaxed_trunc_f64x2_zero(v128_t __a
) {
1860 return (v128_t
)__builtin_wasm_relaxed_trunc_u_zero_i32x4_f64x2((__f64x2
)__a
);
1863 static __inline__ v128_t __RELAXED_FN_ATTRS
1864 wasm_i16x8_relaxed_q15mulr(v128_t __a
, v128_t __b
) {
1865 return (v128_t
)__builtin_wasm_relaxed_q15mulr_s_i16x8((__i16x8
)__a
,
1869 static __inline__ v128_t __RELAXED_FN_ATTRS
1870 wasm_i16x8_relaxed_dot_i8x16_i7x16(v128_t __a
, v128_t __b
) {
1871 return (v128_t
)__builtin_wasm_relaxed_dot_i8x16_i7x16_s_i16x8((__i8x16
)__a
,
1875 static __inline__ v128_t __RELAXED_FN_ATTRS
1876 wasm_i32x4_relaxed_dot_i8x16_i7x16_add(v128_t __a
, v128_t __b
, v128_t __c
) {
1877 return (v128_t
)__builtin_wasm_relaxed_dot_i8x16_i7x16_add_s_i32x4(
1878 (__i8x16
)__a
, (__i8x16
)__b
, (__i32x4
)__c
);
1881 // Deprecated intrinsics
1883 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i8x16_swizzle")
1884 wasm_v8x16_swizzle(v128_t __a
, v128_t __b
) {
1885 return wasm_i8x16_swizzle(__a
, __b
);
1888 static __inline__
bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
1889 wasm_i8x16_any_true(v128_t __a
) {
1890 return wasm_v128_any_true(__a
);
1893 static __inline__
bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
1894 wasm_i16x8_any_true(v128_t __a
) {
1895 return wasm_v128_any_true(__a
);
1898 static __inline__
bool __DEPRECATED_FN_ATTRS("wasm_v128_any_true")
1899 wasm_i32x4_any_true(v128_t __a
) {
1900 return wasm_v128_any_true(__a
);
1903 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i8x16_add_sat")
1904 wasm_i8x16_add_saturate(v128_t __a
, v128_t __b
) {
1905 return wasm_i8x16_add_sat(__a
, __b
);
1908 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u8x16_add_sat")
1909 wasm_u8x16_add_saturate(v128_t __a
, v128_t __b
) {
1910 return wasm_u8x16_add_sat(__a
, __b
);
1913 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i8x16_sub_sat")
1914 wasm_i8x16_sub_saturate(v128_t __a
, v128_t __b
) {
1915 return wasm_i8x16_sub_sat(__a
, __b
);
1918 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u8x16_sub_sat")
1919 wasm_u8x16_sub_saturate(v128_t __a
, v128_t __b
) {
1920 return wasm_u8x16_sub_sat(__a
, __b
);
1923 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i16x8_add_sat")
1924 wasm_i16x8_add_saturate(v128_t __a
, v128_t __b
) {
1925 return wasm_i16x8_add_sat(__a
, __b
);
1928 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u16x8_add_sat")
1929 wasm_u16x8_add_saturate(v128_t __a
, v128_t __b
) {
1930 return wasm_u16x8_add_sat(__a
, __b
);
1933 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i16x8_sub_sat")
1934 wasm_i16x8_sub_saturate(v128_t __a
, v128_t __b
) {
1935 return wasm_i16x8_sub_sat(__a
, __b
);
1938 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u16x8_sub_sat")
1939 wasm_u16x8_sub_saturate(v128_t __a
, v128_t __b
) {
1940 return wasm_u16x8_sub_sat(__a
, __b
);
1943 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i16x8_extend_low_i8x16")
1944 wasm_i16x8_widen_low_i8x16(v128_t __a
) {
1945 return wasm_i16x8_extend_low_i8x16(__a
);
1948 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i16x8_extend_high_i8x16")
1949 wasm_i16x8_widen_high_i8x16(v128_t __a
) {
1950 return wasm_i16x8_extend_high_i8x16(__a
);
1953 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u16x8_extend_low_u8x16")
1954 wasm_i16x8_widen_low_u8x16(v128_t __a
) {
1955 return wasm_u16x8_extend_low_u8x16(__a
);
1958 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u16x8_extend_high_u8x16")
1959 wasm_i16x8_widen_high_u8x16(v128_t __a
) {
1960 return wasm_u16x8_extend_high_u8x16(__a
);
1963 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i32x4_extend_low_i16x8")
1964 wasm_i32x4_widen_low_i16x8(v128_t __a
) {
1965 return wasm_i32x4_extend_low_i16x8(__a
);
1968 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i32x4_extend_high_i16x8")
1969 wasm_i32x4_widen_high_i16x8(v128_t __a
) {
1970 return wasm_i32x4_extend_high_i16x8(__a
);
1973 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u32x4_extend_low_u16x8")
1974 wasm_i32x4_widen_low_u16x8(v128_t __a
) {
1975 return wasm_u32x4_extend_low_u16x8(__a
);
1978 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u32x4_extend_high_u16x8")
1979 wasm_i32x4_widen_high_u16x8(v128_t __a
) {
1980 return wasm_u32x4_extend_high_u16x8(__a
);
1983 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_i32x4_trunc_sat_f32x4")
1984 wasm_i32x4_trunc_saturate_f32x4(v128_t __a
) {
1985 return wasm_i32x4_trunc_sat_f32x4(__a
);
1988 static __inline__ v128_t
__DEPRECATED_FN_ATTRS("wasm_u32x4_trunc_sat_f32x4")
1989 wasm_u32x4_trunc_saturate_f32x4(v128_t __a
) {
1990 return wasm_u32x4_trunc_sat_f32x4(__a
);
1993 // Undefine helper macros
1994 #undef __DEFAULT_FN_ATTRS
1995 #undef __DEPRECATED_FN_ATTRS
1997 #endif // __WASM_SIMD128_H