/* ACLE support for AArch64 SVE (__ARM_FEATURE_SVE2 intrinsics)
   Copyright (C) 2020-2024 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "memmodel.h"
#include "insn-codes.h"
#include "optabs.h"
#include "recog.h"
#include "expr.h"
#include "basic-block.h"
#include "function.h"
#include "fold-const.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify.h"
#include "explow.h"
#include "emit-rtl.h"
#include "tree-vector-builder.h"
#include "rtx-vector-builder.h"
#include "vec-perm-indices.h"
#include "aarch64-sve-builtins.h"
#include "aarch64-sve-builtins-shapes.h"
#include "aarch64-sve-builtins-base.h"
#include "aarch64-sve-builtins-sve2.h"
#include "aarch64-sve-builtins-functions.h"

using namespace aarch64_sve;

namespace {

/* Return the UNSPEC_CDOT* unspec for rotation amount ROT.  */
static int
unspec_cdot (int rot)
{
  switch (rot)
    {
    case 0: return UNSPEC_CDOT;
    case 90: return UNSPEC_CDOT90;
    case 180: return UNSPEC_CDOT180;
    case 270: return UNSPEC_CDOT270;
    default: gcc_unreachable ();
    }
}

/* Return the UNSPEC_SQRDCMLAH* unspec for rotation amount ROT.  */
static int
unspec_sqrdcmlah (int rot)
{
  switch (rot)
    {
    case 0: return UNSPEC_SQRDCMLAH;
    case 90: return UNSPEC_SQRDCMLAH90;
    case 180: return UNSPEC_SQRDCMLAH180;
    case 270: return UNSPEC_SQRDCMLAH270;
    default: gcc_unreachable ();
    }
}

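/* Illustrative sketch (not from the original file): the rotation
   argument of the ACLE complex intrinsics is a literal 0, 90, 180
   or 270, so a source-level call such as

     acc = svcdot_s32 (acc, x, y, 90);

   reaches the expanders below with ROT == 90, which unspec_cdot
   maps to UNSPEC_CDOT90.  */
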
class svaba_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    rtx_code max_code = e.type_suffix (0).unsigned_p ? UMAX : SMAX;
    machine_mode mode = e.vector_mode (0);
    return e.use_exact_insn (code_for_aarch64_sve2_aba (max_code, mode));
  }
};

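/* Illustrative note: the signedness of the type suffix selects the
   rtx code, so svaba_s8 uses the SMAX-parameterized ABA pattern and
   svaba_u8 the UMAX-parameterized one; the pattern derives the
   signed or unsigned absolute difference from that code.  */
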
class svcdot_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_sve (unspec_cdot (rot),
						   e.vector_mode (0)));
  }
};

class svcdot_lane_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_lane (unspec_cdot (rot),
						    e.vector_mode (0)));
  }
};

class svclamp_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    auto mode = e.tuple_mode (0);
    insn_code icode;
    if (e.type_suffix (0).float_p)
      icode = (e.vectors_per_tuple () > 1
	       ? code_for_aarch64_sve_fclamp_single (mode)
	       : code_for_aarch64_sve_fclamp (mode));
    else
      {
	auto max = e.type_suffix (0).unsigned_p ? UMAX : SMAX;
	icode = (e.vectors_per_tuple () > 1
		 ? code_for_aarch64_sve_clamp_single (max, mode)
		 : code_for_aarch64_sve_clamp (max, mode));
      }
    return e.use_exact_insn (icode);
  }
};

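/* Illustrative note: single-vector calls such as svclamp_s32 use the
   plain clamp/fclamp patterns, while multi-vector (tuple) forms such
   as the SME2 svclamp_single_s32_x2 use the *_single patterns chosen
   above.  (Intrinsic names here are illustrative ACLE spellings, not
   taken from this file.)  */
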
class svcvtn_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    return e.use_exact_insn (code_for_aarch64_sve_cvtn (e.result_mode ()));
  }
};

class svldnt1_gather_impl : public full_width_access
{
public:
  unsigned int
  call_properties (const function_instance &) const override
  {
    return CP_READ_MEMORY;
  }

  rtx
  expand (function_expander &e) const override
  {
    e.prepare_gather_address_operands (1, false);
    machine_mode mem_mode = e.memory_vector_mode ();
    return e.use_exact_insn (code_for_aarch64_gather_ldnt (mem_mode));
  }
};

/* Implements extending forms of svldnt1_gather.  */
class svldnt1_gather_extend_impl : public extending_load
{
public:
  using extending_load::extending_load;

  rtx
  expand (function_expander &e) const override
  {
    e.prepare_gather_address_operands (1, false);
    /* Add a constant predicate for the extension rtx.  */
    e.args.quick_push (CONSTM1_RTX (VNx16BImode));
    insn_code icode = code_for_aarch64_gather_ldnt (extend_rtx_code (),
						    e.vector_mode (0),
						    e.memory_vector_mode ());
    return e.use_exact_insn (icode);
  }
};

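/* Illustrative note: the all-true predicate pushed above only exists
   to satisfy the extension part of the gather pattern; the sign or
   zero extension is applied to every loaded element.  */
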
/* Implements both svmatch and svnmatch; the unspec parameter decides
   between them.  */
class svmatch_svnmatch_impl : public function_base
{
public:
  CONSTEXPR svmatch_svnmatch_impl (int unspec) : m_unspec (unspec) {}

  rtx
  expand (function_expander &e) const override
  {
    /* These are UNSPEC_PRED_Z operations and so need a hint operand.  */
    e.add_ptrue_hint (0, e.gp_mode (0));
    return e.use_exact_insn (code_for_aarch64_pred (m_unspec,
						    e.vector_mode (0)));
  }

  int m_unspec;
};

/* Implements both svmovlb and svmovlt; the unspec parameters decide
   between them.  */
class svmovl_lb_impl : public unspec_based_function_base
{
public:
  using unspec_based_function_base::unspec_based_function_base;

  rtx
  expand (function_expander &e) const override
  {
    e.args.quick_push (const0_rtx);
    return e.map_to_unspecs (m_unspec_for_sint, m_unspec_for_uint,
			     m_unspec_for_fp);
  }
};

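/* For example (illustrative), svmovlb (x) is implemented as a
   widening shift left: the constant 0 pushed above becomes the shift
   amount of the SSHLLB/USHLLB operation, i.e. a move-long is a
   shift-left-long by zero.  */
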
class svpext_lane_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    unsigned int bits = e.type_suffix (0).element_bits;
    return e.use_exact_insn (e.vectors_per_tuple () == 2
			     ? code_for_aarch64_sve_pextx2 (bits)
			     : code_for_aarch64_sve_pext (bits));
  }
};

class svpsel_lane_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    unsigned int bits = e.type_suffix (0).element_bits;
    return e.use_exact_insn (code_for_aarch64_sve_psel (bits));
  }
};

class svqcadd_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    if (rot == 90)
      return e.map_to_unspecs (UNSPEC_SQCADD90, -1, -1);
    if (rot == 270)
      return e.map_to_unspecs (UNSPEC_SQCADD270, -1, -1);
    gcc_unreachable ();
  }
};

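/* Illustrative note: the ACLE only defines rotations 90 and 270 for
   svqcadd, so the expansion above needs no mapping for 0 or 180.  */
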
class svqrdcmlah_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_sve (unspec_sqrdcmlah (rot),
						   e.vector_mode (0)));
  }
};

class svqrdcmlah_lane_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_lane (unspec_sqrdcmlah (rot),
						    e.vector_mode (0)));
  }
};

class svqrshl_impl : public unspec_based_function
{
public:
  CONSTEXPR svqrshl_impl ()
    : unspec_based_function (UNSPEC_SQRSHL, UNSPEC_UQRSHL, -1) {}

  gimple *
  fold (gimple_folder &f) const override
  {
    if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
      {
	if (wi::to_widest (amount) >= 0)
	  {
	    /* The rounding has no effect, and [SU]QSHL has immediate forms
	       that we can use for sensible shift amounts.  */
	    function_instance instance ("svqshl", functions::svqshl,
					shapes::binary_int_opt_n, MODE_n,
					f.type_suffix_ids, GROUP_none, f.pred);
	    return f.redirect_call (instance);
	  }
	else
	  {
	    /* The saturation has no effect, and [SU]RSHL has immediate forms
	       that we can use for sensible shift amounts.  */
	    function_instance instance ("svrshl", functions::svrshl,
					shapes::binary_int_opt_single_n,
					MODE_n, f.type_suffix_ids, GROUP_none,
					f.pred);
	    return f.redirect_call (instance);
	  }
      }
    return NULL;
  }
};

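/* Sketch of the fold above at the source level (illustrative, not
   from the original file):

     svqrshl_n_s32_x (pg, x, 2)   ->  svqshl_n_s32_x (pg, x, 2)
     svqrshl_n_s32_x (pg, x, -2)  ->  svrshl_n_s32_x (pg, x, -2)

   Rounding only affects negative (right) shift amounts and
   saturation only non-negative (left) ones, so each constant case
   can be redirected to the simpler function.  */
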
class svqshl_impl : public unspec_based_function
{
public:
  CONSTEXPR svqshl_impl ()
    : unspec_based_function (UNSPEC_SQSHL, UNSPEC_UQSHL, -1) {}

  gimple *
  fold (gimple_folder &f) const override
  {
    if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
      {
	int element_bits = f.type_suffix (0).element_bits;
	if (wi::to_widest (amount) >= -element_bits
	    && wi::to_widest (amount) < 0)
	  {
	    /* The saturation has no effect for right shifts, so we can
	       use the immediate form of ASR or LSR.  */
	    amount = wide_int_to_tree (TREE_TYPE (amount),
				       -wi::to_wide (amount));
	    function_instance instance ("svasr", functions::svasr,
					shapes::binary_uint_opt_n, MODE_n,
					f.type_suffix_ids, GROUP_none, f.pred);
	    if (f.type_suffix (0).unsigned_p)
	      {
		instance.base_name = "svlsr";
		instance.base = functions::svlsr;
	      }
	    gcall *call = f.redirect_call (instance);
	    gimple_call_set_arg (call, 2, amount);
	    return call;
	  }
      }
    return NULL;
  }
};

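/* Sketch of the fold above (illustrative): a constant amount in
   [-element_bits, -1] is a right shift, for which saturation cannot
   trigger, so for example

     svqshl_n_s32_x (pg, x, -3)  ->  svasr_n_s32_x (pg, x, 3)
     svqshl_n_u32_x (pg, x, -3)  ->  svlsr_n_u32_x (pg, x, 3)

   with the amount negated to fit the unsigned shift operand of
   svasr/svlsr.  */
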
class svrshl_impl : public unspec_based_function
{
public:
  CONSTEXPR svrshl_impl ()
    : unspec_based_function (UNSPEC_SRSHL, UNSPEC_URSHL, -1) {}

  gimple *
  fold (gimple_folder &f) const override
  {
    if (f.vectors_per_tuple () > 1)
      return nullptr;

    if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
      {
	if (wi::to_widest (amount) >= 0)
	  {
	    /* The rounding has no effect, and LSL has immediate forms
	       that we can use for sensible shift amounts.  */
	    function_instance instance ("svlsl", functions::svlsl,
					shapes::binary_uint_opt_n, MODE_n,
					f.type_suffix_ids, GROUP_none, f.pred);
	    gcall *call = f.redirect_call (instance);
	    gimple_call_set_arg (call, 2, amount);
	    return call;
	  }
	int element_bits = f.type_suffix (0).element_bits;
	if (wi::to_widest (amount) >= -element_bits)
	  {
	    /* The shift amount is in range of [SU]RSHR.  */
	    amount = wide_int_to_tree (TREE_TYPE (amount),
				       -wi::to_wide (amount));
	    function_instance instance ("svrshr", functions::svrshr,
					shapes::shift_right_imm, MODE_n,
					f.type_suffix_ids, GROUP_none, f.pred);
	    gcall *call = f.redirect_call (instance);
	    gimple_call_set_arg (call, 2, amount);
	    return call;
	  }
      }
    return NULL;
  }
};

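/* Sketch of the fold above (illustrative): non-negative constant
   amounts need no rounding, while amounts in [-element_bits, -1]
   fit the immediate form of [SU]RSHR, so for example

     svrshl_n_s32_x (pg, x, 2)   ->  svlsl_n_s32_x (pg, x, 2)
     svrshl_n_s32_x (pg, x, -2)  ->  svrshr_n_s32_x (pg, x, 2)  */
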
class svsqadd_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    machine_mode mode = e.vector_mode (0);
    if (e.pred == PRED_x
	&& aarch64_sve_sqadd_sqsub_immediate_p (mode, e.args[2], false))
      return e.map_to_rtx_codes (UNKNOWN, US_PLUS, -1, -1);
    return e.map_to_unspecs (-1, UNSPEC_USQADD, -1);
  }
};

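/* Illustrative note: for an _x call whose final operand is a
   suitable immediate, such as svsqadd_n_u32_x (pg, x, 1), the
   expansion above can use the unpredicated saturating-add rtx code
   (US_PLUS) instead of the predicated UNSPEC_USQADD pattern.  */
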
class svsra_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    rtx_code shift_code = e.type_suffix (0).unsigned_p ? LSHIFTRT : ASHIFTRT;
    machine_mode mode = e.vector_mode (0);
    return e.use_exact_insn (code_for_aarch64_sve_add (shift_code, mode));
  }
};

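/* For example (illustrative), svsra (acc, x, n) is a
   shift-right-and-accumulate: the pattern chosen above adds an
   arithmetic (signed) or logical (unsigned) right shift of X to
   ACC.  */
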
class svstnt1_scatter_impl : public full_width_access
{
public:
  unsigned int
  call_properties (const function_instance &) const override
  {
    return CP_WRITE_MEMORY;
  }

  rtx
  expand (function_expander &e) const override
  {
    e.prepare_gather_address_operands (1, false);
    machine_mode mem_mode = e.memory_vector_mode ();
    return e.use_exact_insn (code_for_aarch64_scatter_stnt (mem_mode));
  }
};

/* Implements truncating forms of svstnt1_scatter.  */
class svstnt1_scatter_truncate_impl : public truncating_store
{
public:
  using truncating_store::truncating_store;

  rtx
  expand (function_expander &e) const override
  {
    e.prepare_gather_address_operands (1, false);
    insn_code icode = code_for_aarch64_scatter_stnt (e.vector_mode (0),
						     e.memory_vector_mode ());
    return e.use_exact_insn (icode);
  }
};

class svtbl2_impl : public quiet<multi_vector_function>
{
public:
  CONSTEXPR svtbl2_impl () : quiet<multi_vector_function> (2) {}

  rtx
  expand (function_expander &e) const override
  {
    return e.use_exact_insn (code_for_aarch64_sve2_tbl2 (e.vector_mode (0)));
  }
};

class svunpk_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    optab op = (e.type_suffix (0).unsigned_p ? zext_optab : sext_optab);
    insn_code icode = convert_optab_handler (op, e.result_mode (),
					     GET_MODE (e.args[0]));
    return e.use_exact_insn (icode);
  }
};

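/* Illustrative note: svunpk widens each element to twice its width,
   so the expansion above simply asks the generic sign/zero-extension
   optab for the instruction that converts the argument's mode to the
   result's mode.  */
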
class svuqadd_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const override
  {
    machine_mode mode = e.vector_mode (0);
    if (e.pred == PRED_x
	&& aarch64_sve_arith_immediate_p (mode, e.args[2], false))
      return e.use_unpred_insn (code_for_aarch64_sve_suqadd_const (mode));
    return e.map_to_unspecs (UNSPEC_SUQADD, -1, -1);
  }
};

/* Implements both svwhilerw and svwhilewr; the unspec parameter decides
   between them.  */
class svwhilerw_svwhilewr_impl : public full_width_access
{
public:
  CONSTEXPR svwhilerw_svwhilewr_impl (int unspec) : m_unspec (unspec) {}

  rtx
  expand (function_expander &e) const override
  {
    for (unsigned int i = 0; i < 2; ++i)
      e.args[i] = e.convert_to_pmode (e.args[i]);
    return e.use_exact_insn (code_for_while (m_unspec, Pmode, e.gp_mode (0)));
  }

  int m_unspec;
};

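/* For example (illustrative), svwhilerw_s32 (ptr1, ptr2) takes two
   pointer arguments; the loop above converts both to Pmode before
   invoking the WHILERW pattern, which (per the ACLE) returns a
   predicate limiting each iteration to elements free of
   read-after-write hazards between the two addresses.  */
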
} /* end anonymous namespace */

namespace aarch64_sve {

FUNCTION (svaba, svaba_impl,)
FUNCTION (svabalb, unspec_based_add_function, (UNSPEC_SABDLB,
					       UNSPEC_UABDLB, -1))
FUNCTION (svabalt, unspec_based_add_function, (UNSPEC_SABDLT,
					       UNSPEC_UABDLT, -1))
FUNCTION (svadclb, unspec_based_function, (-1, UNSPEC_ADCLB, -1))
FUNCTION (svadclt, unspec_based_function, (-1, UNSPEC_ADCLT, -1))
FUNCTION (svaddhnb, unspec_based_function, (UNSPEC_ADDHNB, UNSPEC_ADDHNB, -1))
FUNCTION (svaddhnt, unspec_based_function, (UNSPEC_ADDHNT, UNSPEC_ADDHNT, -1))
FUNCTION (svabdlb, unspec_based_function, (UNSPEC_SABDLB, UNSPEC_UABDLB, -1))
FUNCTION (svabdlt, unspec_based_function, (UNSPEC_SABDLT, UNSPEC_UABDLT, -1))
FUNCTION (svadalp, unspec_based_function, (UNSPEC_SADALP, UNSPEC_UADALP, -1))
FUNCTION (svaddlb, unspec_based_function, (UNSPEC_SADDLB, UNSPEC_UADDLB, -1))
FUNCTION (svaddlbt, unspec_based_function, (UNSPEC_SADDLBT, -1, -1))
FUNCTION (svaddlt, unspec_based_function, (UNSPEC_SADDLT, UNSPEC_UADDLT, -1))
FUNCTION (svaddwb, unspec_based_function, (UNSPEC_SADDWB, UNSPEC_UADDWB, -1))
FUNCTION (svaddwt, unspec_based_function, (UNSPEC_SADDWT, UNSPEC_UADDWT, -1))
FUNCTION (svaddp, unspec_based_pred_function, (UNSPEC_ADDP, UNSPEC_ADDP,
					       UNSPEC_FADDP))
FUNCTION (svaesd, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesd))
FUNCTION (svaese, fixed_insn_function, (CODE_FOR_aarch64_sve2_aese))
FUNCTION (svaesimc, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesimc))
FUNCTION (svaesmc, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesmc))
FUNCTION (svbcax, CODE_FOR_MODE0 (aarch64_sve2_bcax),)
FUNCTION (svbdep, unspec_based_function, (UNSPEC_BDEP, UNSPEC_BDEP, -1))
FUNCTION (svbext, unspec_based_function, (UNSPEC_BEXT, UNSPEC_BEXT, -1))
FUNCTION (svbfmlslb, fixed_insn_function, (CODE_FOR_aarch64_sve_bfmlslbvnx4sf))
FUNCTION (svbfmlslb_lane, fixed_insn_function,
	  (CODE_FOR_aarch64_sve_bfmlslb_lanevnx4sf))
FUNCTION (svbfmlslt, fixed_insn_function, (CODE_FOR_aarch64_sve_bfmlsltvnx4sf))
FUNCTION (svbfmlslt_lane, fixed_insn_function,
	  (CODE_FOR_aarch64_sve_bfmlslt_lanevnx4sf))
FUNCTION (svbgrp, unspec_based_function, (UNSPEC_BGRP, UNSPEC_BGRP, -1))
FUNCTION (svbsl, CODE_FOR_MODE0 (aarch64_sve2_bsl),)
FUNCTION (svbsl1n, CODE_FOR_MODE0 (aarch64_sve2_bsl1n),)
FUNCTION (svbsl2n, CODE_FOR_MODE0 (aarch64_sve2_bsl2n),)
FUNCTION (svcdot, svcdot_impl,)
FUNCTION (svcdot_lane, svcdot_lane_impl,)
FUNCTION (svclamp, svclamp_impl,)
FUNCTION (svcvtlt, unspec_based_function, (-1, -1, UNSPEC_COND_FCVTLT))
FUNCTION (svcvtn, svcvtn_impl,)
FUNCTION (svcvtx, unspec_based_function, (-1, -1, UNSPEC_COND_FCVTX))
FUNCTION (svcvtxnt, CODE_FOR_MODE1 (aarch64_sve2_cvtxnt),)
FUNCTION (sveor3, CODE_FOR_MODE0 (aarch64_sve2_eor3),)
FUNCTION (sveorbt, unspec_based_function, (UNSPEC_EORBT, UNSPEC_EORBT, -1))
FUNCTION (sveortb, unspec_based_function, (UNSPEC_EORTB, UNSPEC_EORTB, -1))
FUNCTION (svhadd, unspec_based_function, (UNSPEC_SHADD, UNSPEC_UHADD, -1))
FUNCTION (svhsub, unspec_based_function, (UNSPEC_SHSUB, UNSPEC_UHSUB, -1))
FUNCTION (svhistcnt, CODE_FOR_MODE0 (aarch64_sve2_histcnt),)
FUNCTION (svhistseg, CODE_FOR_MODE0 (aarch64_sve2_histseg),)
FUNCTION (svhsubr, unspec_based_function_rotated, (UNSPEC_SHSUB,
						   UNSPEC_UHSUB, -1))
FUNCTION (svldnt1_gather, svldnt1_gather_impl,)
FUNCTION (svldnt1sb_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_s8))
FUNCTION (svldnt1sh_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_s16))
FUNCTION (svldnt1sw_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_s32))
FUNCTION (svldnt1ub_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u8))
FUNCTION (svldnt1uh_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u16))
FUNCTION (svldnt1uw_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u32))
FUNCTION (svlogb, unspec_based_function, (-1, -1, UNSPEC_COND_FLOGB))
FUNCTION (svmatch, svmatch_svnmatch_impl, (UNSPEC_MATCH))
FUNCTION (svmaxp, unspec_based_pred_function, (UNSPEC_SMAXP, UNSPEC_UMAXP,
					       UNSPEC_FMAXP))
FUNCTION (svmaxnmp, unspec_based_pred_function, (-1, -1, UNSPEC_FMAXNMP))
FUNCTION (svminp, unspec_based_pred_function, (UNSPEC_SMINP, UNSPEC_UMINP,
					       UNSPEC_FMINP))
FUNCTION (svminnmp, unspec_based_pred_function, (-1, -1, UNSPEC_FMINNMP))
FUNCTION (svmlalb, unspec_based_mla_function, (UNSPEC_SMULLB,
					       UNSPEC_UMULLB, UNSPEC_FMLALB))
FUNCTION (svmlalb_lane, unspec_based_mla_lane_function, (UNSPEC_SMULLB,
							 UNSPEC_UMULLB,
							 UNSPEC_FMLALB))
FUNCTION (svmlalt, unspec_based_mla_function, (UNSPEC_SMULLT,
					       UNSPEC_UMULLT, UNSPEC_FMLALT))
FUNCTION (svmlalt_lane, unspec_based_mla_lane_function, (UNSPEC_SMULLT,
							 UNSPEC_UMULLT,
							 UNSPEC_FMLALT))
FUNCTION (svmlslb, unspec_based_mls_function, (UNSPEC_SMULLB,
					       UNSPEC_UMULLB, UNSPEC_FMLSLB))
FUNCTION (svmlslb_lane, unspec_based_mls_lane_function, (UNSPEC_SMULLB,
							 UNSPEC_UMULLB,
							 UNSPEC_FMLSLB))
FUNCTION (svmlslt, unspec_based_mls_function, (UNSPEC_SMULLT,
					       UNSPEC_UMULLT, UNSPEC_FMLSLT))
FUNCTION (svmlslt_lane, unspec_based_mls_lane_function, (UNSPEC_SMULLT,
							 UNSPEC_UMULLT,
							 UNSPEC_FMLSLT))
FUNCTION (svmovlb, svmovl_lb_impl, (UNSPEC_SSHLLB, UNSPEC_USHLLB, -1))
FUNCTION (svmovlt, svmovl_lb_impl, (UNSPEC_SSHLLT, UNSPEC_USHLLT, -1))
FUNCTION (svmullb, unspec_based_function, (UNSPEC_SMULLB, UNSPEC_UMULLB, -1))
FUNCTION (svmullb_lane, unspec_based_lane_function, (UNSPEC_SMULLB,
						     UNSPEC_UMULLB, -1))
FUNCTION (svmullt, unspec_based_function, (UNSPEC_SMULLT, UNSPEC_UMULLT, -1))
FUNCTION (svmullt_lane, unspec_based_lane_function, (UNSPEC_SMULLT,
						     UNSPEC_UMULLT, -1))
FUNCTION (svnbsl, CODE_FOR_MODE0 (aarch64_sve2_nbsl),)
FUNCTION (svnmatch, svmatch_svnmatch_impl, (UNSPEC_NMATCH))
FUNCTION (svpext_lane, svpext_lane_impl,)
FUNCTION (svpmul, CODE_FOR_MODE0 (aarch64_sve2_pmul),)
FUNCTION (svpmullb, unspec_based_function, (-1, UNSPEC_PMULLB, -1))
FUNCTION (svpmullb_pair, unspec_based_function, (-1, UNSPEC_PMULLB_PAIR, -1))
FUNCTION (svpmullt, unspec_based_function, (-1, UNSPEC_PMULLT, -1))
FUNCTION (svpmullt_pair, unspec_based_function, (-1, UNSPEC_PMULLT_PAIR, -1))
FUNCTION (svpsel_lane, svpsel_lane_impl,)
FUNCTION (svqabs, rtx_code_function, (SS_ABS, UNKNOWN, UNKNOWN))
FUNCTION (svqcadd, svqcadd_impl,)
FUNCTION (svqcvt, integer_conversion, (UNSPEC_SQCVT, UNSPEC_SQCVTU,
				       UNSPEC_UQCVT, -1))
FUNCTION (svqcvtn, integer_conversion, (UNSPEC_SQCVTN, UNSPEC_SQCVTUN,
					UNSPEC_UQCVTN, -1))
FUNCTION (svqdmlalb, unspec_based_qadd_function, (UNSPEC_SQDMULLB, -1, -1))
FUNCTION (svqdmlalb_lane, unspec_based_qadd_lane_function, (UNSPEC_SQDMULLB,
							    -1, -1))
FUNCTION (svqdmlalbt, unspec_based_qadd_function, (UNSPEC_SQDMULLBT, -1, -1))
FUNCTION (svqdmlalt, unspec_based_qadd_function, (UNSPEC_SQDMULLT, -1, -1))
FUNCTION (svqdmlalt_lane, unspec_based_qadd_lane_function, (UNSPEC_SQDMULLT,
							    -1, -1))
FUNCTION (svqdmlslb, unspec_based_qsub_function, (UNSPEC_SQDMULLB, -1, -1))
FUNCTION (svqdmlslb_lane, unspec_based_qsub_lane_function, (UNSPEC_SQDMULLB,
							    -1, -1))
FUNCTION (svqdmlslbt, unspec_based_qsub_function, (UNSPEC_SQDMULLBT, -1, -1))
FUNCTION (svqdmlslt, unspec_based_qsub_function, (UNSPEC_SQDMULLT, -1, -1))
FUNCTION (svqdmlslt_lane, unspec_based_qsub_lane_function, (UNSPEC_SQDMULLT,
							    -1, -1))
FUNCTION (svqdmulh, unspec_based_function, (UNSPEC_SQDMULH, -1, -1))
FUNCTION (svqdmulh_lane, unspec_based_lane_function, (UNSPEC_SQDMULH, -1, -1))
FUNCTION (svqdmullb, unspec_based_function, (UNSPEC_SQDMULLB, -1, -1))
FUNCTION (svqdmullb_lane, unspec_based_lane_function, (UNSPEC_SQDMULLB,
						       -1, -1))
FUNCTION (svqdmullt, unspec_based_function, (UNSPEC_SQDMULLT, -1, -1))
FUNCTION (svqdmullt_lane, unspec_based_lane_function, (UNSPEC_SQDMULLT,
						       -1, -1))
FUNCTION (svqneg, rtx_code_function, (SS_NEG, UNKNOWN, UNKNOWN))
FUNCTION (svqrdcmlah, svqrdcmlah_impl,)
FUNCTION (svqrdcmlah_lane, svqrdcmlah_lane_impl,)
FUNCTION (svqrdmulh, unspec_based_function, (UNSPEC_SQRDMULH, -1, -1))
FUNCTION (svqrdmulh_lane, unspec_based_lane_function, (UNSPEC_SQRDMULH,
						       -1, -1))
FUNCTION (svqrdmlah, unspec_based_function, (UNSPEC_SQRDMLAH, -1, -1))
FUNCTION (svqrdmlah_lane, unspec_based_lane_function, (UNSPEC_SQRDMLAH,
						       -1, -1))
FUNCTION (svqrdmlsh, unspec_based_function, (UNSPEC_SQRDMLSH, -1, -1))
FUNCTION (svqrdmlsh_lane, unspec_based_lane_function, (UNSPEC_SQRDMLSH,
						       -1, -1))
FUNCTION (svqrshl, svqrshl_impl,)
FUNCTION (svqrshr, unspec_based_uncond_function, (UNSPEC_SQRSHR,
						  UNSPEC_UQRSHR, -1, 1))
FUNCTION (svqrshrn, unspec_based_uncond_function, (UNSPEC_SQRSHRN,
						   UNSPEC_UQRSHRN, -1, 1))
FUNCTION (svqrshrnb, unspec_based_function, (UNSPEC_SQRSHRNB,
					     UNSPEC_UQRSHRNB, -1))
FUNCTION (svqrshrnt, unspec_based_function, (UNSPEC_SQRSHRNT,
					     UNSPEC_UQRSHRNT, -1))
FUNCTION (svqrshru, unspec_based_uncond_function, (UNSPEC_SQRSHRU, -1, -1, 1))
FUNCTION (svqrshrun, unspec_based_uncond_function, (UNSPEC_SQRSHRUN, -1, -1, 1))
FUNCTION (svqrshrunb, unspec_based_function, (UNSPEC_SQRSHRUNB, -1, -1))
FUNCTION (svqrshrunt, unspec_based_function, (UNSPEC_SQRSHRUNT, -1, -1))
FUNCTION (svqshl, svqshl_impl,)
FUNCTION (svqshlu, unspec_based_function, (UNSPEC_SQSHLU, -1, -1))
FUNCTION (svqshrnb, unspec_based_function, (UNSPEC_SQSHRNB,
					    UNSPEC_UQSHRNB, -1))
FUNCTION (svqshrnt, unspec_based_function, (UNSPEC_SQSHRNT,
					    UNSPEC_UQSHRNT, -1))
FUNCTION (svqshrunb, unspec_based_function, (UNSPEC_SQSHRUNB, -1, -1))
FUNCTION (svqshrunt, unspec_based_function, (UNSPEC_SQSHRUNT, -1, -1))
FUNCTION (svqsubr, rtx_code_function_rotated, (SS_MINUS, US_MINUS, -1))
FUNCTION (svqxtnb, unspec_based_function, (UNSPEC_SQXTNB, UNSPEC_UQXTNB, -1))
FUNCTION (svqxtnt, unspec_based_function, (UNSPEC_SQXTNT, UNSPEC_UQXTNT, -1))
FUNCTION (svqxtunb, unspec_based_function, (UNSPEC_SQXTUNB, -1, -1))
FUNCTION (svqxtunt, unspec_based_function, (UNSPEC_SQXTUNT, -1, -1))
FUNCTION (svraddhnb, unspec_based_function, (UNSPEC_RADDHNB,
					     UNSPEC_RADDHNB, -1))
FUNCTION (svraddhnt, unspec_based_function, (UNSPEC_RADDHNT,
					     UNSPEC_RADDHNT, -1))
FUNCTION (svrax1, fixed_insn_function, (CODE_FOR_aarch64_sve2_rax1))
FUNCTION (svrevd, unspec_based_function, (UNSPEC_REVD, UNSPEC_REVD,
					  UNSPEC_REVD))
FUNCTION (svrhadd, unspec_based_function, (UNSPEC_SRHADD, UNSPEC_URHADD, -1))
FUNCTION (svrshl, svrshl_impl,)
FUNCTION (svrshr, unspec_based_function, (UNSPEC_SRSHR, UNSPEC_URSHR, -1))
FUNCTION (svrshrnb, unspec_based_function, (UNSPEC_RSHRNB, UNSPEC_RSHRNB, -1))
FUNCTION (svrshrnt, unspec_based_function, (UNSPEC_RSHRNT, UNSPEC_RSHRNT, -1))
FUNCTION (svrsra, unspec_based_add_function, (UNSPEC_SRSHR, UNSPEC_URSHR, -1))
FUNCTION (svrsubhnb, unspec_based_function, (UNSPEC_RSUBHNB,
					     UNSPEC_RSUBHNB, -1))
FUNCTION (svrsubhnt, unspec_based_function, (UNSPEC_RSUBHNT,
					     UNSPEC_RSUBHNT, -1))
FUNCTION (svsbclb, unspec_based_function, (-1, UNSPEC_SBCLB, -1))
FUNCTION (svsbclt, unspec_based_function, (-1, UNSPEC_SBCLT, -1))
FUNCTION (svshllb, unspec_based_function, (UNSPEC_SSHLLB, UNSPEC_USHLLB, -1))
FUNCTION (svshllt, unspec_based_function, (UNSPEC_SSHLLT, UNSPEC_USHLLT, -1))
FUNCTION (svshrnb, unspec_based_function, (UNSPEC_SHRNB, UNSPEC_SHRNB, -1))
FUNCTION (svshrnt, unspec_based_function, (UNSPEC_SHRNT, UNSPEC_SHRNT, -1))
FUNCTION (svsli, unspec_based_function, (UNSPEC_SLI, UNSPEC_SLI, -1))
FUNCTION (svsm4e, fixed_insn_function, (CODE_FOR_aarch64_sve2_sm4e))
FUNCTION (svsm4ekey, fixed_insn_function, (CODE_FOR_aarch64_sve2_sm4ekey))
FUNCTION (svsqadd, svsqadd_impl,)
FUNCTION (svsra, svsra_impl,)
FUNCTION (svsri, unspec_based_function, (UNSPEC_SRI, UNSPEC_SRI, -1))
FUNCTION (svstnt1_scatter, svstnt1_scatter_impl,)
FUNCTION (svstnt1b_scatter, svstnt1_scatter_truncate_impl, (QImode))
FUNCTION (svstnt1h_scatter, svstnt1_scatter_truncate_impl, (HImode))
FUNCTION (svstnt1w_scatter, svstnt1_scatter_truncate_impl, (SImode))
FUNCTION (svsubhnb, unspec_based_function, (UNSPEC_SUBHNB, UNSPEC_SUBHNB, -1))
FUNCTION (svsubhnt, unspec_based_function, (UNSPEC_SUBHNT, UNSPEC_SUBHNT, -1))
FUNCTION (svsublb, unspec_based_function, (UNSPEC_SSUBLB, UNSPEC_USUBLB, -1))
FUNCTION (svsublbt, unspec_based_function, (UNSPEC_SSUBLBT, -1, -1))
FUNCTION (svsublt, unspec_based_function, (UNSPEC_SSUBLT, UNSPEC_USUBLT, -1))
FUNCTION (svsubltb, unspec_based_function, (UNSPEC_SSUBLTB, -1, -1))
FUNCTION (svsubwb, unspec_based_function, (UNSPEC_SSUBWB, UNSPEC_USUBWB, -1))
FUNCTION (svsubwt, unspec_based_function, (UNSPEC_SSUBWT, UNSPEC_USUBWT, -1))
FUNCTION (svtbl2, svtbl2_impl,)
FUNCTION (svtbx, CODE_FOR_MODE0 (aarch64_sve2_tbx),)
FUNCTION (svunpk, svunpk_impl,)
FUNCTION (svuqadd, svuqadd_impl,)
FUNCTION (svuzp, multireg_permute, (UNSPEC_UZP))
FUNCTION (svuzpq, multireg_permute, (UNSPEC_UZPQ))
FUNCTION (svzip, multireg_permute, (UNSPEC_ZIP))
FUNCTION (svzipq, multireg_permute, (UNSPEC_ZIPQ))
FUNCTION (svwhilege, while_comparison, (UNSPEC_WHILEGE, UNSPEC_WHILEHS))
FUNCTION (svwhilegt, while_comparison, (UNSPEC_WHILEGT, UNSPEC_WHILEHI))
FUNCTION (svwhilerw, svwhilerw_svwhilewr_impl, (UNSPEC_WHILERW))
FUNCTION (svwhilewr, svwhilerw_svwhilewr_impl, (UNSPEC_WHILEWR))
FUNCTION (svxar, CODE_FOR_MODE0 (aarch64_sve2_xar),)

} /* end namespace aarch64_sve */