1 /* Description of builtins used by the ARM backend.
2 Copyright (C) 2014-2025 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #define IN_TARGET_CODE 1
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "function.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple-expr.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "profile-count.h"
33 #include "optabs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "explow.h"
40 #include "expr.h"
41 #include "langhooks.h"
42 #include "case-cfn-macros.h"
43 #include "sbitmap.h"
44 #include "stringpool.h"
45 #include "arm-builtins.h"
46 #include "stringpool.h"
47 #include "attribs.h"
49 #define SIMD_MAX_BUILTIN_ARGS 7
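/* Note on the qualifier tables that follow: each array describes one
   builtin prototype, with entry 0 giving the return type and the
   remaining entries the argument types, in the order consumed by
   arm_init_builtin later in this file.  */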
51 /* The qualifier_internal allows generation of a unary builtin from
52 a pattern with a third pseudo-operand such as a match_scratch.
53 T (T). */
54 static enum arm_type_qualifiers
55 arm_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
56 = { qualifier_none, qualifier_none, qualifier_internal };
57 #define UNOP_QUALIFIERS (arm_unop_qualifiers)
59 /* unsigned T (unsigned T). */
60 static enum arm_type_qualifiers
61 arm_bswap_qualifiers[SIMD_MAX_BUILTIN_ARGS]
62 = { qualifier_unsigned, qualifier_unsigned };
63 #define BSWAP_QUALIFIERS (arm_bswap_qualifiers)
65 /* T (T, T [maybe_immediate]). */
66 static enum arm_type_qualifiers
67 arm_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
68 = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
69 #define BINOP_QUALIFIERS (arm_binop_qualifiers)
71 /* T (T, T, T). */
72 static enum arm_type_qualifiers
73 arm_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
74 = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
75 #define TERNOP_QUALIFIERS (arm_ternop_qualifiers)
77 /* unsigned T (unsigned T, unsigned T, unsigned T). */
78 static enum arm_type_qualifiers
79 arm_unsigned_uternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
80 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
81 qualifier_unsigned };
82 #define UTERNOP_QUALIFIERS (arm_unsigned_uternop_qualifiers)
84 /* T (T, unsigned T, T). */
85 static enum arm_type_qualifiers
86 arm_usternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
87 = { qualifier_none, qualifier_none, qualifier_unsigned,
88 qualifier_none };
89 #define USTERNOP_QUALIFIERS (arm_usternop_qualifiers)
91 /* T (T, immediate). */
92 static enum arm_type_qualifiers
93 arm_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
94 = { qualifier_none, qualifier_none, qualifier_immediate };
95 #define BINOP_IMM_QUALIFIERS (arm_binop_imm_qualifiers)
97 /* T (T, unsigned immediate). */
98 static enum arm_type_qualifiers
99 arm_sat_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
100 = { qualifier_none, qualifier_none, qualifier_unsigned_immediate };
101 #define SAT_BINOP_UNSIGNED_IMM_QUALIFIERS \
102 (arm_sat_binop_imm_qualifiers)
104 /* unsigned T (T, unsigned immediate). */
105 static enum arm_type_qualifiers
106 arm_unsigned_sat_binop_unsigned_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
107 = { qualifier_unsigned, qualifier_none, qualifier_unsigned_immediate };
108 #define UNSIGNED_SAT_BINOP_UNSIGNED_IMM_QUALIFIERS \
109 (arm_unsigned_sat_binop_unsigned_imm_qualifiers)
111 /* T (T, lane index). */
112 static enum arm_type_qualifiers
113 arm_getlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
114 = { qualifier_none, qualifier_none, qualifier_lane_index };
115 #define GETLANE_QUALIFIERS (arm_getlane_qualifiers)
117 /* T (T, T, T, immediate). */
118 static enum arm_type_qualifiers
119 arm_mac_n_qualifiers[SIMD_MAX_BUILTIN_ARGS]
120 = { qualifier_none, qualifier_none, qualifier_none,
121 qualifier_none, qualifier_immediate };
122 #define MAC_N_QUALIFIERS (arm_mac_n_qualifiers)
124 /* T (T, T, T, lane index). */
125 static enum arm_type_qualifiers
126 arm_mac_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
127 = { qualifier_none, qualifier_none, qualifier_none,
128 qualifier_none, qualifier_lane_index };
129 #define MAC_LANE_QUALIFIERS (arm_mac_lane_qualifiers)
131 /* T (T, T, T, lane pair index). */
132 static enum arm_type_qualifiers
133 arm_mac_lane_pair_qualifiers[SIMD_MAX_BUILTIN_ARGS]
134 = { qualifier_none, qualifier_none, qualifier_none,
135 qualifier_none, qualifier_lane_pair_index };
136 #define MAC_LANE_PAIR_QUALIFIERS (arm_mac_lane_pair_qualifiers)
138 /* unsigned T (unsigned T, unsigned T, unsigned T, lane index). */
139 static enum arm_type_qualifiers
140 arm_umac_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
141 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
142 qualifier_unsigned, qualifier_lane_index };
143 #define UMAC_LANE_QUALIFIERS (arm_umac_lane_qualifiers)
145 /* T (T, unsigned T, T, lane index). */
146 static enum arm_type_qualifiers
147 arm_usmac_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
148 = { qualifier_none, qualifier_none, qualifier_unsigned,
149 qualifier_none, qualifier_lane_quadtup_index };
150 #define USMAC_LANE_QUADTUP_QUALIFIERS (arm_usmac_lane_quadtup_qualifiers)
152 /* T (T, T, unsigned T, lane index). */
153 static enum arm_type_qualifiers
154 arm_sumac_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
155 = { qualifier_none, qualifier_none, qualifier_none,
156 qualifier_unsigned, qualifier_lane_quadtup_index };
157 #define SUMAC_LANE_QUADTUP_QUALIFIERS (arm_sumac_lane_quadtup_qualifiers)
159 /* T (T, T, immediate). */
160 static enum arm_type_qualifiers
161 arm_ternop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
162 = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
163 #define TERNOP_IMM_QUALIFIERS (arm_ternop_imm_qualifiers)
165 /* T (T, T, lane index). */
166 static enum arm_type_qualifiers
167 arm_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
168 = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
169 #define SETLANE_QUALIFIERS (arm_setlane_qualifiers)
171 /* T (T, T). */
172 static enum arm_type_qualifiers
173 arm_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
174 = { qualifier_none, qualifier_none, qualifier_none };
175 #define COMBINE_QUALIFIERS (arm_combine_qualifiers)
177 /* T ([T element type] *). */
178 static enum arm_type_qualifiers
179 arm_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
180 = { qualifier_none, qualifier_const_pointer_map_mode };
181 #define LOAD1_QUALIFIERS (arm_load1_qualifiers)
183 /* T ([T element type] *, T, immediate). */
184 static enum arm_type_qualifiers
185 arm_load1_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
186 = { qualifier_none, qualifier_const_pointer_map_mode,
187 qualifier_none, qualifier_struct_load_store_lane_index };
188 #define LOAD1LANE_QUALIFIERS (arm_load1_lane_qualifiers)
190 /* unsigned T (unsigned T, unsigned T, unsigned T). */
191 static enum arm_type_qualifiers
192 arm_unsigned_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
193 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
194 qualifier_unsigned };
195 #define UBINOP_QUALIFIERS (arm_unsigned_binop_qualifiers)
197 /* void (unsigned immediate, unsigned immediate, unsigned immediate,
198 unsigned immediate, unsigned immediate, unsigned immediate). */
199 static enum arm_type_qualifiers
200 arm_cdp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
201 = { qualifier_void, qualifier_unsigned_immediate,
202 qualifier_unsigned_immediate,
203 qualifier_unsigned_immediate,
204 qualifier_unsigned_immediate,
205 qualifier_unsigned_immediate,
206 qualifier_unsigned_immediate };
207 #define CDP_QUALIFIERS \
208 (arm_cdp_qualifiers)
210 /* void (unsigned immediate, unsigned immediate, const void *). */
211 static enum arm_type_qualifiers
212 arm_ldc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
213 = { qualifier_void, qualifier_unsigned_immediate,
214 qualifier_unsigned_immediate, qualifier_const_void_pointer };
215 #define LDC_QUALIFIERS \
216 (arm_ldc_qualifiers)
218 /* void (unsigned immediate, unsigned immediate, void *). */
219 static enum arm_type_qualifiers
220 arm_stc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
221 = { qualifier_void, qualifier_unsigned_immediate,
222 qualifier_unsigned_immediate, qualifier_void_pointer };
223 #define STC_QUALIFIERS \
224 (arm_stc_qualifiers)
226 /* void (unsigned immediate, unsigned immediate, T, unsigned immediate,
227 unsigned immediate, unsigned immediate). */
228 static enum arm_type_qualifiers
229 arm_mcr_qualifiers[SIMD_MAX_BUILTIN_ARGS]
230 = { qualifier_void, qualifier_unsigned_immediate,
231 qualifier_unsigned_immediate, qualifier_none,
232 qualifier_unsigned_immediate, qualifier_unsigned_immediate,
233 qualifier_unsigned_immediate };
234 #define MCR_QUALIFIERS \
235 (arm_mcr_qualifiers)
237 /* T (unsigned immediate, unsigned immediate, unsigned immediate,
238 unsigned immediate, unsigned immediate). */
239 static enum arm_type_qualifiers
240 arm_mrc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
241 = { qualifier_none, qualifier_unsigned_immediate,
242 qualifier_unsigned_immediate, qualifier_unsigned_immediate,
243 qualifier_unsigned_immediate, qualifier_unsigned_immediate };
244 #define MRC_QUALIFIERS \
245 (arm_mrc_qualifiers)
247 /* void (unsigned immediate, unsigned immediate, T, unsigned immediate). */
248 static enum arm_type_qualifiers
249 arm_mcrr_qualifiers[SIMD_MAX_BUILTIN_ARGS]
250 = { qualifier_void, qualifier_unsigned_immediate,
251 qualifier_unsigned_immediate, qualifier_none,
252 qualifier_unsigned_immediate };
253 #define MCRR_QUALIFIERS \
254 (arm_mcrr_qualifiers)
256 /* T (unsigned immediate, unsigned immediate, unsigned immediate). */
257 static enum arm_type_qualifiers
258 arm_mrrc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
259 = { qualifier_none, qualifier_unsigned_immediate,
260 qualifier_unsigned_immediate, qualifier_unsigned_immediate };
261 #define MRRC_QUALIFIERS \
262 (arm_mrrc_qualifiers)
264 /* T (immediate, unsigned immediate). */
265 static enum arm_type_qualifiers
266 arm_cx_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
267 = { qualifier_none, qualifier_immediate, qualifier_unsigned_immediate };
268 #define CX_IMM_QUALIFIERS (arm_cx_imm_qualifiers)
270 /* T (immediate, T, unsigned immediate). */
271 static enum arm_type_qualifiers
272 arm_cx_unary_qualifiers[SIMD_MAX_BUILTIN_ARGS]
273 = { qualifier_none, qualifier_immediate, qualifier_none,
274 qualifier_unsigned_immediate };
275 #define CX_UNARY_QUALIFIERS (arm_cx_unary_qualifiers)
277 /* T (immediate, T, T, unsigned immediate). */
278 static enum arm_type_qualifiers
279 arm_cx_binary_qualifiers[SIMD_MAX_BUILTIN_ARGS]
280 = { qualifier_none, qualifier_immediate,
281 qualifier_none, qualifier_none,
282 qualifier_unsigned_immediate };
283 #define CX_BINARY_QUALIFIERS (arm_cx_binary_qualifiers)
285 /* T (immediate, T, T, T, unsigned immediate). */
286 static enum arm_type_qualifiers
287 arm_cx_ternary_qualifiers[SIMD_MAX_BUILTIN_ARGS]
288 = { qualifier_none, qualifier_immediate,
289 qualifier_none, qualifier_none, qualifier_none,
290 qualifier_unsigned_immediate };
291 #define CX_TERNARY_QUALIFIERS (arm_cx_ternary_qualifiers)
293 /* T (immediate, T, unsigned immediate, predicate). */
294 static enum arm_type_qualifiers
295 arm_cx_unary_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
296 = { qualifier_none, qualifier_immediate, qualifier_none,
297 qualifier_unsigned_immediate,
298 qualifier_predicate };
299 #define CX_UNARY_UNONE_QUALIFIERS (arm_cx_unary_unone_qualifiers)
301 /* T (immediate, T, T, unsigned immediate, predicate). */
302 static enum arm_type_qualifiers
303 arm_cx_binary_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
304 = { qualifier_none, qualifier_immediate,
305 qualifier_none, qualifier_none,
306 qualifier_unsigned_immediate,
307 qualifier_predicate };
308 #define CX_BINARY_UNONE_QUALIFIERS (arm_cx_binary_unone_qualifiers)
310 /* T (immediate, T, T, T, unsigned immediate, predicate). */
311 static enum arm_type_qualifiers
312 arm_cx_ternary_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
313 = { qualifier_none, qualifier_immediate,
314 qualifier_none, qualifier_none, qualifier_none,
315 qualifier_unsigned_immediate,
316 qualifier_predicate };
317 #define CX_TERNARY_UNONE_QUALIFIERS (arm_cx_ternary_unone_qualifiers)
319 /* The first argument (return type) of a store should be void type,
320 which we represent with qualifier_void. Their first operand will be
321 a DImode pointer to the location to store to, so we must use
322 qualifier_map_mode | qualifier_pointer to build a pointer to the
323 element type of the vector.
325 void ([T element type] *, T). */
326 static enum arm_type_qualifiers
327 arm_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
328 = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
329 #define STORE1_QUALIFIERS (arm_store1_qualifiers)
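/* As a concrete illustration of the above, a v8qi store declared with
   STORE1_QUALIFIERS ends up with a prototype along the lines of
   void (signed char *, int8x8_t): the pointer argument is built from
   the element mode of the vector, not from the DImode pointer operand
   of the RTL pattern.  */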
331 /* Qualifiers for MVE builtins. */
333 static enum arm_type_qualifiers
334 arm_unop_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
335 = { qualifier_none, qualifier_none };
336 #define UNOP_NONE_NONE_QUALIFIERS \
337 (arm_unop_none_none_qualifiers)
339 static enum arm_type_qualifiers
340 arm_unop_none_snone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
341 = { qualifier_none, qualifier_none };
342 #define UNOP_NONE_SNONE_QUALIFIERS \
343 (arm_unop_none_snone_qualifiers)
345 static enum arm_type_qualifiers
346 arm_unop_none_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
347 = { qualifier_none, qualifier_unsigned };
348 #define UNOP_NONE_UNONE_QUALIFIERS \
349 (arm_unop_none_unone_qualifiers)
351 static enum arm_type_qualifiers
352 arm_unop_snone_snone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
353 = { qualifier_none, qualifier_none };
354 #define UNOP_SNONE_SNONE_QUALIFIERS \
355 (arm_unop_snone_snone_qualifiers)
357 static enum arm_type_qualifiers
358 arm_unop_snone_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
359 = { qualifier_none, qualifier_none };
360 #define UNOP_SNONE_NONE_QUALIFIERS \
361 (arm_unop_snone_none_qualifiers)
363 static enum arm_type_qualifiers
364 arm_unop_snone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
365 = { qualifier_none, qualifier_immediate };
366 #define UNOP_SNONE_IMM_QUALIFIERS \
367 (arm_unop_snone_imm_qualifiers)
369 static enum arm_type_qualifiers
370 arm_unop_unone_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
371 = { qualifier_unsigned, qualifier_none };
372 #define UNOP_UNONE_NONE_QUALIFIERS \
373 (arm_unop_unone_none_qualifiers)
375 static enum arm_type_qualifiers
376 arm_unop_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
377 = { qualifier_unsigned, qualifier_unsigned };
378 #define UNOP_UNONE_UNONE_QUALIFIERS \
379 (arm_unop_unone_unone_qualifiers)
381 static enum arm_type_qualifiers
382 arm_unop_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
383 = { qualifier_unsigned, qualifier_immediate };
384 #define UNOP_UNONE_IMM_QUALIFIERS \
385 (arm_unop_unone_imm_qualifiers)
387 static enum arm_type_qualifiers
388 arm_unop_pred_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
389 = { qualifier_predicate, qualifier_unsigned };
390 #define UNOP_PRED_UNONE_QUALIFIERS \
391 (arm_unop_pred_unone_qualifiers)
393 static enum arm_type_qualifiers
394 arm_unop_pred_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
395 = { qualifier_predicate, qualifier_predicate };
396 #define UNOP_PRED_PRED_QUALIFIERS \
397 (arm_unop_pred_pred_qualifiers)
400 static enum arm_type_qualifiers
401 arm_binop_none_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
402 = { qualifier_none, qualifier_none, qualifier_none };
403 #define BINOP_NONE_NONE_NONE_QUALIFIERS \
404 (arm_binop_none_none_none_qualifiers)
406 static enum arm_type_qualifiers
407 arm_binop_none_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
408 = { qualifier_none, qualifier_none, qualifier_immediate };
409 #define BINOP_NONE_NONE_IMM_QUALIFIERS \
410 (arm_binop_none_none_imm_qualifiers)
412 static enum arm_type_qualifiers
413 arm_binop_none_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
414 = { qualifier_none, qualifier_unsigned, qualifier_immediate };
415 #define BINOP_NONE_UNONE_IMM_QUALIFIERS \
416 (arm_binop_none_unone_imm_qualifiers)
418 static enum arm_type_qualifiers
419 arm_binop_none_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
420 = { qualifier_none, qualifier_unsigned, qualifier_unsigned };
421 #define BINOP_NONE_UNONE_UNONE_QUALIFIERS \
422 (arm_binop_none_unone_unone_qualifiers)
424 static enum arm_type_qualifiers
425 arm_binop_unone_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
426 = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
427 #define BINOP_UNONE_UNONE_IMM_QUALIFIERS \
428 (arm_binop_unone_unone_imm_qualifiers)
430 static enum arm_type_qualifiers
431 arm_binop_unone_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
432 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
433 #define BINOP_UNONE_UNONE_UNONE_QUALIFIERS \
434 (arm_binop_unone_unone_unone_qualifiers)
436 static enum arm_type_qualifiers
437 arm_binop_pred_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
438 = { qualifier_predicate, qualifier_unsigned, qualifier_unsigned };
439 #define BINOP_PRED_UNONE_UNONE_QUALIFIERS \
440 (arm_binop_pred_unone_unone_qualifiers)
442 static enum arm_type_qualifiers
443 arm_binop_pred_unone_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
444 = { qualifier_predicate, qualifier_unsigned, qualifier_predicate };
445 #define BINOP_PRED_UNONE_PRED_QUALIFIERS \
446 (arm_binop_pred_unone_pred_qualifiers)
448 static enum arm_type_qualifiers
449 arm_binop_unone_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
450 = { qualifier_unsigned, qualifier_none, qualifier_immediate };
451 #define BINOP_UNONE_NONE_IMM_QUALIFIERS \
452 (arm_binop_unone_none_imm_qualifiers)
454 static enum arm_type_qualifiers
455 arm_binop_pred_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
456 = { qualifier_predicate, qualifier_none, qualifier_none };
457 #define BINOP_PRED_NONE_NONE_QUALIFIERS \
458 (arm_binop_pred_none_none_qualifiers)
460 static enum arm_type_qualifiers
461 arm_binop_unone_unone_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
462 = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
463 #define BINOP_UNONE_UNONE_NONE_QUALIFIERS \
464 (arm_binop_unone_unone_none_qualifiers)
466 static enum arm_type_qualifiers
467 arm_ternop_unone_unone_unone_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
468 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
469 qualifier_immediate };
470 #define TERNOP_UNONE_UNONE_UNONE_IMM_QUALIFIERS \
471 (arm_ternop_unone_unone_unone_imm_qualifiers)
473 static enum arm_type_qualifiers
474 arm_ternop_unone_unone_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
475 = { qualifier_unsigned, qualifier_unsigned, qualifier_none, qualifier_none };
476 #define TERNOP_UNONE_UNONE_NONE_NONE_QUALIFIERS \
477 (arm_ternop_unone_unone_none_none_qualifiers)
479 static enum arm_type_qualifiers
480 arm_ternop_unone_unone_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
481 = { qualifier_unsigned, qualifier_unsigned, qualifier_none,
482 qualifier_immediate };
483 #define TERNOP_UNONE_UNONE_NONE_IMM_QUALIFIERS \
484 (arm_ternop_unone_unone_none_imm_qualifiers)
486 static enum arm_type_qualifiers
487 arm_ternop_unone_unone_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
488 = { qualifier_unsigned, qualifier_unsigned, qualifier_none,
489 qualifier_predicate };
490 #define TERNOP_UNONE_UNONE_NONE_PRED_QUALIFIERS \
491 (arm_ternop_unone_unone_none_pred_qualifiers)
493 static enum arm_type_qualifiers
494 arm_ternop_unone_unone_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
495 = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate,
496 qualifier_predicate };
497 #define TERNOP_UNONE_UNONE_IMM_PRED_QUALIFIERS \
498 (arm_ternop_unone_unone_imm_pred_qualifiers)
500 static enum arm_type_qualifiers
501 arm_ternop_pred_none_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
502 = { qualifier_predicate, qualifier_none, qualifier_none, qualifier_predicate };
503 #define TERNOP_PRED_NONE_NONE_PRED_QUALIFIERS \
504 (arm_ternop_pred_none_none_pred_qualifiers)
506 static enum arm_type_qualifiers
507 arm_ternop_none_none_none_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
508 = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
509 #define TERNOP_NONE_NONE_NONE_IMM_QUALIFIERS \
510 (arm_ternop_none_none_none_imm_qualifiers)
512 static enum arm_type_qualifiers
513 arm_ternop_none_none_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
514 = { qualifier_none, qualifier_none, qualifier_none, qualifier_predicate };
515 #define TERNOP_NONE_NONE_NONE_PRED_QUALIFIERS \
516 (arm_ternop_none_none_none_pred_qualifiers)
518 static enum arm_type_qualifiers
519 arm_ternop_none_none_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
520 = { qualifier_none, qualifier_none, qualifier_immediate, qualifier_predicate };
521 #define TERNOP_NONE_NONE_IMM_PRED_QUALIFIERS \
522 (arm_ternop_none_none_imm_pred_qualifiers)
524 static enum arm_type_qualifiers
525 arm_ternop_none_none_unone_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
526 = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_predicate };
527 #define TERNOP_NONE_NONE_UNONE_PRED_QUALIFIERS \
528 (arm_ternop_none_none_unone_pred_qualifiers)
530 static enum arm_type_qualifiers
531 arm_ternop_unone_unone_unone_unone_qualifiers[SIMD_MAX_BUILTIN_ARGS]
532 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
533 qualifier_unsigned };
534 #define TERNOP_UNONE_UNONE_UNONE_UNONE_QUALIFIERS \
535 (arm_ternop_unone_unone_unone_unone_qualifiers)
537 static enum arm_type_qualifiers
538 arm_ternop_unone_unone_unone_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
539 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
540 qualifier_predicate };
541 #define TERNOP_UNONE_UNONE_UNONE_PRED_QUALIFIERS \
542 (arm_ternop_unone_unone_unone_pred_qualifiers)
544 static enum arm_type_qualifiers
545 arm_ternop_pred_unone_unone_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
546 = { qualifier_predicate, qualifier_unsigned, qualifier_unsigned,
547 qualifier_predicate };
548 #define TERNOP_PRED_UNONE_UNONE_PRED_QUALIFIERS \
549 (arm_ternop_pred_unone_unone_pred_qualifiers)
551 static enum arm_type_qualifiers
552 arm_ternop_none_none_none_none_qualifiers[SIMD_MAX_BUILTIN_ARGS]
553 = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
554 #define TERNOP_NONE_NONE_NONE_NONE_QUALIFIERS \
555 (arm_ternop_none_none_none_none_qualifiers)
557 static enum arm_type_qualifiers
558 arm_quadop_unone_unone_none_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
559 = { qualifier_unsigned, qualifier_unsigned, qualifier_none, qualifier_none,
560 qualifier_predicate };
561 #define QUADOP_UNONE_UNONE_NONE_NONE_PRED_QUALIFIERS \
562 (arm_quadop_unone_unone_none_none_pred_qualifiers)
564 static enum arm_type_qualifiers
565 arm_quadop_none_none_none_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
566 = { qualifier_none, qualifier_none, qualifier_none, qualifier_none,
567 qualifier_predicate };
568 #define QUADOP_NONE_NONE_NONE_NONE_PRED_QUALIFIERS \
569 (arm_quadop_none_none_none_none_pred_qualifiers)
571 static enum arm_type_qualifiers
572 arm_quadop_none_none_none_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
573 = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate,
574 qualifier_predicate };
575 #define QUADOP_NONE_NONE_NONE_IMM_PRED_QUALIFIERS \
576 (arm_quadop_none_none_none_imm_pred_qualifiers)
578 static enum arm_type_qualifiers
579 arm_quadop_unone_unone_unone_unone_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
580 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
581 qualifier_unsigned, qualifier_predicate };
582 #define QUADOP_UNONE_UNONE_UNONE_UNONE_PRED_QUALIFIERS \
583 (arm_quadop_unone_unone_unone_unone_pred_qualifiers)
585 static enum arm_type_qualifiers
586 arm_quadop_unone_unone_none_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
587 = { qualifier_unsigned, qualifier_unsigned, qualifier_none,
588 qualifier_immediate, qualifier_predicate };
589 #define QUADOP_UNONE_UNONE_NONE_IMM_PRED_QUALIFIERS \
590 (arm_quadop_unone_unone_none_imm_pred_qualifiers)
592 static enum arm_type_qualifiers
593 arm_quadop_none_none_unone_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
594 = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_immediate,
595 qualifier_predicate };
596 #define QUADOP_NONE_NONE_UNONE_IMM_PRED_QUALIFIERS \
597 (arm_quadop_none_none_unone_imm_pred_qualifiers)
599 static enum arm_type_qualifiers
600 arm_quadop_unone_unone_unone_imm_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
601 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
602 qualifier_immediate, qualifier_predicate };
603 #define QUADOP_UNONE_UNONE_UNONE_IMM_PRED_QUALIFIERS \
604 (arm_quadop_unone_unone_unone_imm_pred_qualifiers)
606 static enum arm_type_qualifiers
607 arm_quadop_unone_unone_unone_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
608 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
609 qualifier_none, qualifier_predicate };
610 #define QUADOP_UNONE_UNONE_UNONE_NONE_PRED_QUALIFIERS \
611 (arm_quadop_unone_unone_unone_none_pred_qualifiers)
613 static enum arm_type_qualifiers
614 arm_lsll_qualifiers[SIMD_MAX_BUILTIN_ARGS]
615 = { qualifier_unsigned, qualifier_unsigned, qualifier_none};
616 #define LSLL_QUALIFIERS (arm_lsll_qualifiers)
618 static enum arm_type_qualifiers
619 arm_uqshl_qualifiers[SIMD_MAX_BUILTIN_ARGS]
620 = { qualifier_unsigned, qualifier_unsigned, qualifier_const};
621 #define UQSHL_QUALIFIERS (arm_uqshl_qualifiers)
623 static enum arm_type_qualifiers
624 arm_asrl_qualifiers[SIMD_MAX_BUILTIN_ARGS]
625 = { qualifier_none, qualifier_none, qualifier_none};
626 #define ASRL_QUALIFIERS (arm_asrl_qualifiers)
628 static enum arm_type_qualifiers
629 arm_sqshl_qualifiers[SIMD_MAX_BUILTIN_ARGS]
630 = { qualifier_unsigned, qualifier_unsigned, qualifier_const};
631 #define SQSHL_QUALIFIERS (arm_sqshl_qualifiers)
633 static enum arm_type_qualifiers
634 arm_binop_none_none_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
635 = { qualifier_none, qualifier_none, qualifier_predicate };
636 #define BINOP_NONE_NONE_PRED_QUALIFIERS \
637 (arm_binop_none_none_pred_qualifiers)
639 static enum arm_type_qualifiers
640 arm_binop_unone_unone_pred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
641 = { qualifier_unsigned, qualifier_unsigned, qualifier_predicate };
642 #define BINOP_UNONE_UNONE_PRED_QUALIFIERS \
643 (arm_binop_unone_unone_pred_qualifiers)
645 /* End of Qualifiers for MVE builtins. */
647 /* void ([T element type] *, T, immediate). */
648 static enum arm_type_qualifiers
649 arm_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
650 = { qualifier_void, qualifier_pointer_map_mode,
651 qualifier_none, qualifier_struct_load_store_lane_index };
652 #define STORE1LANE_QUALIFIERS (arm_storestruct_lane_qualifiers)
654 /* int (void). */
655 static enum arm_type_qualifiers
656 arm_sat_occurred_qualifiers[SIMD_MAX_BUILTIN_ARGS]
657 = { qualifier_none, qualifier_void };
658 #define SAT_OCCURRED_QUALIFIERS (arm_sat_occurred_qualifiers)
660 /* void (int). */
661 static enum arm_type_qualifiers
662 arm_set_sat_qualifiers[SIMD_MAX_BUILTIN_ARGS]
663 = { qualifier_void, qualifier_none };
664 #define SET_SAT_QUALIFIERS (arm_set_sat_qualifiers)
666 #define v2qi_UP E_V2QImode
667 #define v4bi_UP E_V4BImode
668 #define v8bi_UP E_V8BImode
669 #define v16bi_UP E_V16BImode
670 #define v8qi_UP E_V8QImode
671 #define v4hi_UP E_V4HImode
672 #define v4hf_UP E_V4HFmode
673 #define v4bf_UP E_V4BFmode
674 #define v2si_UP E_V2SImode
675 #define v2sf_UP E_V2SFmode
676 #define v2bf_UP E_V2BFmode
677 #define di_UP E_DImode
678 #define v16qi_UP E_V16QImode
679 #define v8hi_UP E_V8HImode
680 #define v8hf_UP E_V8HFmode
681 #define v8bf_UP E_V8BFmode
682 #define v4si_UP E_V4SImode
683 #define v4sf_UP E_V4SFmode
684 #define v2di_UP E_V2DImode
685 #define ti_UP E_TImode
686 #define ei_UP E_EImode
687 #define oi_UP E_OImode
688 #define hf_UP E_HFmode
689 #define bf_UP E_BFmode
690 #define si_UP E_SImode
691 #define hi_UP E_HImode
692 #define void_UP E_VOIDmode
693 #define sf_UP E_SFmode
694 #define UP(X) X##_UP
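/* For example, UP (v8qi) expands to v8qi_UP, i.e. E_V8QImode; the VARn
   macros below rely on this to turn a mode suffix into a machine_mode.  */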
696 typedef struct {
697 const char *name;
698 machine_mode mode;
699 const enum insn_code code;
700 unsigned int fcode;
701 enum arm_type_qualifiers *qualifiers;
702 } arm_builtin_datum;
704 constexpr insn_code CODE_FOR_neon_sdotv8qi = CODE_FOR_neon_sdotv2siv8qi;
705 constexpr insn_code CODE_FOR_neon_udotv8qi = CODE_FOR_neon_udotv2siv8qi;
706 constexpr insn_code CODE_FOR_neon_usdotv8qi = CODE_FOR_neon_usdotv2siv8qi;
707 constexpr insn_code CODE_FOR_neon_sdotv16qi = CODE_FOR_neon_sdotv4siv16qi;
708 constexpr insn_code CODE_FOR_neon_udotv16qi = CODE_FOR_neon_udotv4siv16qi;
709 constexpr insn_code CODE_FOR_neon_usdotv16qi = CODE_FOR_neon_usdotv4siv16qi;
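/* These aliases map the single-mode dot-product names generated by the
   CF macro below (e.g. CODE_FOR_neon_sdotv8qi) onto the actual two-mode
   insn pattern names, which encode both the accumulator and input modes.  */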
711 #define CF(N,X) CODE_FOR_neon_##N##X
713 #define VAR1(T, N, A) \
714 {#N #A, UP (A), CF (N, A), 0, T##_QUALIFIERS},
715 #define VAR2(T, N, A, B) \
716 VAR1 (T, N, A) \
717 VAR1 (T, N, B)
718 #define VAR3(T, N, A, B, C) \
719 VAR2 (T, N, A, B) \
720 VAR1 (T, N, C)
721 #define VAR4(T, N, A, B, C, D) \
722 VAR3 (T, N, A, B, C) \
723 VAR1 (T, N, D)
724 #define VAR5(T, N, A, B, C, D, E) \
725 VAR4 (T, N, A, B, C, D) \
726 VAR1 (T, N, E)
727 #define VAR6(T, N, A, B, C, D, E, F) \
728 VAR5 (T, N, A, B, C, D, E) \
729 VAR1 (T, N, F)
730 #define VAR7(T, N, A, B, C, D, E, F, G) \
731 VAR6 (T, N, A, B, C, D, E, F) \
732 VAR1 (T, N, G)
733 #define VAR8(T, N, A, B, C, D, E, F, G, H) \
734 VAR7 (T, N, A, B, C, D, E, F, G) \
735 VAR1 (T, N, H)
736 #define VAR9(T, N, A, B, C, D, E, F, G, H, I) \
737 VAR8 (T, N, A, B, C, D, E, F, G, H) \
738 VAR1 (T, N, I)
739 #define VAR10(T, N, A, B, C, D, E, F, G, H, I, J) \
740 VAR9 (T, N, A, B, C, D, E, F, G, H, I) \
741 VAR1 (T, N, J)
742 #define VAR11(T, N, A, B, C, D, E, F, G, H, I, J, K) \
743 VAR10 (T, N, A, B, C, D, E, F, G, H, I, J) \
744 VAR1 (T, N, K)
745 #define VAR12(T, N, A, B, C, D, E, F, G, H, I, J, K, L) \
746 VAR11 (T, N, A, B, C, D, E, F, G, H, I, J, K) \
747 VAR1 (T, N, L)
748 #define VAR13(T, N, A, B, C, D, E, F, G, H, I, J, K, L, M) \
749 VAR12 (T, N, A, B, C, D, E, F, G, H, I, J, K, L) \
750 VAR1 (T, N, M)
751 #define VAR14(T, N, A, B, C, D, E, F, G, H, I, J, K, L, M, O) \
752 VAR13 (T, N, A, B, C, D, E, F, G, H, I, J, K, L, M) \
753 VAR1 (T, N, O)
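/* As an illustration (not necessarily an actual entry in the .def files),
   a line such as
     VAR2 (BINOP, vadd, v8qi, v16qi)
   expands to two arm_builtin_datum initializers:
     {"vaddv8qi", E_V8QImode, CODE_FOR_neon_vaddv8qi, 0, arm_binop_qualifiers},
     {"vaddv16qi", E_V16QImode, CODE_FOR_neon_vaddv16qi, 0, arm_binop_qualifiers},
   since BINOP_QUALIFIERS is defined above as (arm_binop_qualifiers).  */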
755 /* The builtin data can be found in arm_neon_builtins.def, arm_vfp_builtins.def
756 and arm_acle_builtins.def. The entries in arm_neon_builtins.def require
757 TARGET_NEON to be true. The feature tests are checked when the builtins are
758 expanded.
760 The mode entries in the following table correspond to the "key" type of the
761 instruction variant, i.e. equivalent to that which would be specified after
762 the assembler mnemonic for neon instructions, which usually refers to the
763 last vector operand. The modes listed per instruction should be the same as
764 those defined for that instruction's pattern, for instance in neon.md. */
766 static arm_builtin_datum vfp_builtin_data[] =
768 #include "arm_vfp_builtins.def"
771 static arm_builtin_datum neon_builtin_data[] =
773 #include "arm_neon_builtins.def"
776 #undef CF
777 #define CF(N,X) CODE_FOR_mve_##N##X
778 static arm_builtin_datum mve_builtin_data[] =
780 #include "arm_mve_builtins.def"
783 #undef CF
784 #undef VAR1
785 #define VAR1(T, N, A) \
786 {#N, UP (A), CODE_FOR_arm_##N, 0, T##_QUALIFIERS},
788 static arm_builtin_datum acle_builtin_data[] =
790 #include "arm_acle_builtins.def"
793 #undef VAR1
794 /* IMM_MAX sets the maximum valid value of the CDE immediate operand.
795 ECF_FLAG sets the flag used for set_call_expr_flags. */
796 #define VAR1(T, N, A, IMM_MAX, ECF_FLAG) \
797 {{#N #A, UP (A), CODE_FOR_arm_##N##A, 0, T##_QUALIFIERS}, IMM_MAX, ECF_FLAG},
799 typedef struct {
800 arm_builtin_datum base;
801 unsigned int imm_max;
802 int ecf_flag;
803 } arm_builtin_cde_datum;
805 static arm_builtin_cde_datum cde_builtin_data[] =
807 #include "arm_cde_builtins.def"
810 #undef VAR1
811 #define VAR1(T, N, X) \
812 ARM_BUILTIN_NEON_##N##X,
814 enum arm_builtins
816 ARM_BUILTIN_GETWCGR0,
817 ARM_BUILTIN_GETWCGR1,
818 ARM_BUILTIN_GETWCGR2,
819 ARM_BUILTIN_GETWCGR3,
821 ARM_BUILTIN_SETWCGR0,
822 ARM_BUILTIN_SETWCGR1,
823 ARM_BUILTIN_SETWCGR2,
824 ARM_BUILTIN_SETWCGR3,
826 ARM_BUILTIN_WZERO,
828 ARM_BUILTIN_WAVG2BR,
829 ARM_BUILTIN_WAVG2HR,
830 ARM_BUILTIN_WAVG2B,
831 ARM_BUILTIN_WAVG2H,
833 ARM_BUILTIN_WACCB,
834 ARM_BUILTIN_WACCH,
835 ARM_BUILTIN_WACCW,
837 ARM_BUILTIN_WMACS,
838 ARM_BUILTIN_WMACSZ,
839 ARM_BUILTIN_WMACU,
840 ARM_BUILTIN_WMACUZ,
842 ARM_BUILTIN_WSADB,
843 ARM_BUILTIN_WSADBZ,
844 ARM_BUILTIN_WSADH,
845 ARM_BUILTIN_WSADHZ,
847 ARM_BUILTIN_WALIGNI,
848 ARM_BUILTIN_WALIGNR0,
849 ARM_BUILTIN_WALIGNR1,
850 ARM_BUILTIN_WALIGNR2,
851 ARM_BUILTIN_WALIGNR3,
853 ARM_BUILTIN_TMIA,
854 ARM_BUILTIN_TMIAPH,
855 ARM_BUILTIN_TMIABB,
856 ARM_BUILTIN_TMIABT,
857 ARM_BUILTIN_TMIATB,
858 ARM_BUILTIN_TMIATT,
860 ARM_BUILTIN_TMOVMSKB,
861 ARM_BUILTIN_TMOVMSKH,
862 ARM_BUILTIN_TMOVMSKW,
864 ARM_BUILTIN_TBCSTB,
865 ARM_BUILTIN_TBCSTH,
866 ARM_BUILTIN_TBCSTW,
868 ARM_BUILTIN_WMADDS,
869 ARM_BUILTIN_WMADDU,
871 ARM_BUILTIN_WPACKHSS,
872 ARM_BUILTIN_WPACKWSS,
873 ARM_BUILTIN_WPACKDSS,
874 ARM_BUILTIN_WPACKHUS,
875 ARM_BUILTIN_WPACKWUS,
876 ARM_BUILTIN_WPACKDUS,
878 ARM_BUILTIN_WADDB,
879 ARM_BUILTIN_WADDH,
880 ARM_BUILTIN_WADDW,
881 ARM_BUILTIN_WADDSSB,
882 ARM_BUILTIN_WADDSSH,
883 ARM_BUILTIN_WADDSSW,
884 ARM_BUILTIN_WADDUSB,
885 ARM_BUILTIN_WADDUSH,
886 ARM_BUILTIN_WADDUSW,
887 ARM_BUILTIN_WSUBB,
888 ARM_BUILTIN_WSUBH,
889 ARM_BUILTIN_WSUBW,
890 ARM_BUILTIN_WSUBSSB,
891 ARM_BUILTIN_WSUBSSH,
892 ARM_BUILTIN_WSUBSSW,
893 ARM_BUILTIN_WSUBUSB,
894 ARM_BUILTIN_WSUBUSH,
895 ARM_BUILTIN_WSUBUSW,
897 ARM_BUILTIN_WAND,
898 ARM_BUILTIN_WANDN,
899 ARM_BUILTIN_WOR,
900 ARM_BUILTIN_WXOR,
902 ARM_BUILTIN_WCMPEQB,
903 ARM_BUILTIN_WCMPEQH,
904 ARM_BUILTIN_WCMPEQW,
905 ARM_BUILTIN_WCMPGTUB,
906 ARM_BUILTIN_WCMPGTUH,
907 ARM_BUILTIN_WCMPGTUW,
908 ARM_BUILTIN_WCMPGTSB,
909 ARM_BUILTIN_WCMPGTSH,
910 ARM_BUILTIN_WCMPGTSW,
912 ARM_BUILTIN_TEXTRMSB,
913 ARM_BUILTIN_TEXTRMSH,
914 ARM_BUILTIN_TEXTRMSW,
915 ARM_BUILTIN_TEXTRMUB,
916 ARM_BUILTIN_TEXTRMUH,
917 ARM_BUILTIN_TEXTRMUW,
918 ARM_BUILTIN_TINSRB,
919 ARM_BUILTIN_TINSRH,
920 ARM_BUILTIN_TINSRW,
922 ARM_BUILTIN_WMAXSW,
923 ARM_BUILTIN_WMAXSH,
924 ARM_BUILTIN_WMAXSB,
925 ARM_BUILTIN_WMAXUW,
926 ARM_BUILTIN_WMAXUH,
927 ARM_BUILTIN_WMAXUB,
928 ARM_BUILTIN_WMINSW,
929 ARM_BUILTIN_WMINSH,
930 ARM_BUILTIN_WMINSB,
931 ARM_BUILTIN_WMINUW,
932 ARM_BUILTIN_WMINUH,
933 ARM_BUILTIN_WMINUB,
935 ARM_BUILTIN_WMULUM,
936 ARM_BUILTIN_WMULSM,
937 ARM_BUILTIN_WMULUL,
939 ARM_BUILTIN_PSADBH,
940 ARM_BUILTIN_WSHUFH,
942 ARM_BUILTIN_WSLLH,
943 ARM_BUILTIN_WSLLW,
944 ARM_BUILTIN_WSLLD,
945 ARM_BUILTIN_WSRAH,
946 ARM_BUILTIN_WSRAW,
947 ARM_BUILTIN_WSRAD,
948 ARM_BUILTIN_WSRLH,
949 ARM_BUILTIN_WSRLW,
950 ARM_BUILTIN_WSRLD,
951 ARM_BUILTIN_WRORH,
952 ARM_BUILTIN_WRORW,
953 ARM_BUILTIN_WRORD,
954 ARM_BUILTIN_WSLLHI,
955 ARM_BUILTIN_WSLLWI,
956 ARM_BUILTIN_WSLLDI,
957 ARM_BUILTIN_WSRAHI,
958 ARM_BUILTIN_WSRAWI,
959 ARM_BUILTIN_WSRADI,
960 ARM_BUILTIN_WSRLHI,
961 ARM_BUILTIN_WSRLWI,
962 ARM_BUILTIN_WSRLDI,
963 ARM_BUILTIN_WRORHI,
964 ARM_BUILTIN_WRORWI,
965 ARM_BUILTIN_WRORDI,
967 ARM_BUILTIN_WUNPCKIHB,
968 ARM_BUILTIN_WUNPCKIHH,
969 ARM_BUILTIN_WUNPCKIHW,
970 ARM_BUILTIN_WUNPCKILB,
971 ARM_BUILTIN_WUNPCKILH,
972 ARM_BUILTIN_WUNPCKILW,
974 ARM_BUILTIN_WUNPCKEHSB,
975 ARM_BUILTIN_WUNPCKEHSH,
976 ARM_BUILTIN_WUNPCKEHSW,
977 ARM_BUILTIN_WUNPCKEHUB,
978 ARM_BUILTIN_WUNPCKEHUH,
979 ARM_BUILTIN_WUNPCKEHUW,
980 ARM_BUILTIN_WUNPCKELSB,
981 ARM_BUILTIN_WUNPCKELSH,
982 ARM_BUILTIN_WUNPCKELSW,
983 ARM_BUILTIN_WUNPCKELUB,
984 ARM_BUILTIN_WUNPCKELUH,
985 ARM_BUILTIN_WUNPCKELUW,
987 ARM_BUILTIN_WABSB,
988 ARM_BUILTIN_WABSH,
989 ARM_BUILTIN_WABSW,
991 ARM_BUILTIN_WADDSUBHX,
992 ARM_BUILTIN_WSUBADDHX,
994 ARM_BUILTIN_WABSDIFFB,
995 ARM_BUILTIN_WABSDIFFH,
996 ARM_BUILTIN_WABSDIFFW,
998 ARM_BUILTIN_WADDCH,
999 ARM_BUILTIN_WADDCW,
1001 ARM_BUILTIN_WAVG4,
1002 ARM_BUILTIN_WAVG4R,
1004 ARM_BUILTIN_WMADDSX,
1005 ARM_BUILTIN_WMADDUX,
1007 ARM_BUILTIN_WMADDSN,
1008 ARM_BUILTIN_WMADDUN,
1010 ARM_BUILTIN_WMULWSM,
1011 ARM_BUILTIN_WMULWUM,
1013 ARM_BUILTIN_WMULWSMR,
1014 ARM_BUILTIN_WMULWUMR,
1016 ARM_BUILTIN_WMULWL,
1018 ARM_BUILTIN_WMULSMR,
1019 ARM_BUILTIN_WMULUMR,
1021 ARM_BUILTIN_WQMULM,
1022 ARM_BUILTIN_WQMULMR,
1024 ARM_BUILTIN_WQMULWM,
1025 ARM_BUILTIN_WQMULWMR,
1027 ARM_BUILTIN_WADDBHUSM,
1028 ARM_BUILTIN_WADDBHUSL,
1030 ARM_BUILTIN_WQMIABB,
1031 ARM_BUILTIN_WQMIABT,
1032 ARM_BUILTIN_WQMIATB,
1033 ARM_BUILTIN_WQMIATT,
1035 ARM_BUILTIN_WQMIABBN,
1036 ARM_BUILTIN_WQMIABTN,
1037 ARM_BUILTIN_WQMIATBN,
1038 ARM_BUILTIN_WQMIATTN,
1040 ARM_BUILTIN_WMIABB,
1041 ARM_BUILTIN_WMIABT,
1042 ARM_BUILTIN_WMIATB,
1043 ARM_BUILTIN_WMIATT,
1045 ARM_BUILTIN_WMIABBN,
1046 ARM_BUILTIN_WMIABTN,
1047 ARM_BUILTIN_WMIATBN,
1048 ARM_BUILTIN_WMIATTN,
1050 ARM_BUILTIN_WMIAWBB,
1051 ARM_BUILTIN_WMIAWBT,
1052 ARM_BUILTIN_WMIAWTB,
1053 ARM_BUILTIN_WMIAWTT,
1055 ARM_BUILTIN_WMIAWBBN,
1056 ARM_BUILTIN_WMIAWBTN,
1057 ARM_BUILTIN_WMIAWTBN,
1058 ARM_BUILTIN_WMIAWTTN,
1060 ARM_BUILTIN_WMERGE,
1062 ARM_BUILTIN_GET_FPSCR,
1063 ARM_BUILTIN_SET_FPSCR,
1064 ARM_BUILTIN_GET_FPSCR_NZCVQC,
1065 ARM_BUILTIN_SET_FPSCR_NZCVQC,
1067 ARM_BUILTIN_CMSE_NONSECURE_CALLER,
1068 ARM_BUILTIN_SIMD_LANE_CHECK,
1070 #undef CRYPTO1
1071 #undef CRYPTO2
1072 #undef CRYPTO3
1074 #define CRYPTO1(L, U, M1, M2) \
1075 ARM_BUILTIN_CRYPTO_##U,
1076 #define CRYPTO2(L, U, M1, M2, M3) \
1077 ARM_BUILTIN_CRYPTO_##U,
1078 #define CRYPTO3(L, U, M1, M2, M3, M4) \
1079 ARM_BUILTIN_CRYPTO_##U,
1081 ARM_BUILTIN_CRYPTO_BASE,
1083 #include "crypto.def"
1085 #undef CRYPTO1
1086 #undef CRYPTO2
1087 #undef CRYPTO3
1089 ARM_BUILTIN_VFP_BASE,
1091 #include "arm_vfp_builtins.def"
1093 ARM_BUILTIN_NEON_BASE,
1095 #include "arm_neon_builtins.def"
1097 #undef VAR1
1098 #define VAR1(T, N, X) \
1099 ARM_BUILTIN_##N,
1101 ARM_BUILTIN_ACLE_BASE,
1102 ARM_BUILTIN_SAT_IMM_CHECK = ARM_BUILTIN_ACLE_BASE,
1104 #include "arm_acle_builtins.def"
1106 #undef VAR1
1107 #define VAR1(T, N, X, ... ) \
1108 ARM_BUILTIN_##N##X,
1110 ARM_BUILTIN_CDE_BASE,
1112 #include "arm_cde_builtins.def"
1114 ARM_BUILTIN_MVE_BASE,
1116 #undef VAR1
1117 #define VAR1(T, N, X) \
1118 ARM_BUILTIN_MVE_##N##X,
1119 #include "arm_mve_builtins.def"
1121 ARM_BUILTIN_MAX
1124 #define ARM_BUILTIN_VFP_PATTERN_START \
1125 (ARM_BUILTIN_VFP_BASE + 1)
1127 #define ARM_BUILTIN_NEON_PATTERN_START \
1128 (ARM_BUILTIN_NEON_BASE + 1)
1130 #define ARM_BUILTIN_MVE_PATTERN_START \
1131 (ARM_BUILTIN_MVE_BASE + 1)
1133 #define ARM_BUILTIN_ACLE_PATTERN_START \
1134 (ARM_BUILTIN_ACLE_BASE + 1)
1136 #define ARM_BUILTIN_CDE_PATTERN_START \
1137 (ARM_BUILTIN_CDE_BASE + 1)
1139 #define ARM_BUILTIN_CDE_PATTERN_END \
1140 (ARM_BUILTIN_CDE_BASE + ARRAY_SIZE (cde_builtin_data))
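/* Each *_PATTERN_START above is simply the first function code after the
   corresponding *_BASE marker in the arm_builtins enumeration, so every
   builtin group occupies a contiguous fcode range starting there.  */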
1142 #undef CF
1143 #undef VAR1
1144 #undef VAR2
1145 #undef VAR3
1146 #undef VAR4
1147 #undef VAR5
1148 #undef VAR6
1149 #undef VAR7
1150 #undef VAR8
1151 #undef VAR9
1152 #undef VAR10
1154 static GTY(()) tree arm_builtin_decls[ARM_BUILTIN_MAX];
1156 #define NUM_DREG_TYPES 5
1157 #define NUM_QREG_TYPES 6
1159 /* Internal scalar builtin types. These types are used to support
1160 neon intrinsic builtins. They are _not_ user-visible types. Therefore
1161 the mangling for these types is implementation defined. */
1162 const char *arm_scalar_builtin_types[] = {
1163 "__builtin_neon_qi",
1164 "__builtin_neon_hi",
1165 "__builtin_neon_si",
1166 "__builtin_neon_sf",
1167 "__builtin_neon_di",
1168 "__builtin_neon_df",
1169 "__builtin_neon_ti",
1170 "__builtin_neon_uqi",
1171 "__builtin_neon_uhi",
1172 "__builtin_neon_usi",
1173 "__builtin_neon_udi",
1174 "__builtin_neon_ei",
1175 "__builtin_neon_oi",
1176 "__builtin_neon_ci",
1177 "__builtin_neon_xi",
1178 "__builtin_neon_bf",
1179 NULL
1182 #define ENTRY(E, M, Q, S, T, G) \
1183 {E, \
1184 "__simd" #S "_" #T "_t", \
1185 #G "__simd" #S "_" #T "_t", \
1186 NULL_TREE, NULL_TREE, M##mode, qualifier_##Q},
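/* Purely as an illustration of the ENTRY expansion (the real entries live
   in arm-simd-builtin-types.def), ENTRY (Int8x8_t, V8QI, none, 64, int8, 15)
   would expand to
     {Int8x8_t, "__simd64_int8_t", "15__simd64_int8_t",
      NULL_TREE, NULL_TREE, V8QImode, qualifier_none},
   i.e. the enum index, the user-visible type name, its mangled name, two
   tree fields filled in during initialization, the vector mode and the
   qualifier.  */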
1187 struct arm_simd_type_info arm_simd_types [] = {
1188 #include "arm-simd-builtin-types.def"
1190 #undef ENTRY
1192 /* The user-visible __fp16 type. */
1193 tree arm_fp16_type_node = NULL_TREE;
1195 /* Back-end node type for brain float (bfloat) types. */
1196 tree arm_bf16_type_node = NULL_TREE;
1197 tree arm_bf16_ptr_type_node = NULL_TREE;
1199 static tree arm_simd_intOI_type_node = NULL_TREE;
1200 static tree arm_simd_intEI_type_node = NULL_TREE;
1201 static tree arm_simd_intCI_type_node = NULL_TREE;
1202 static tree arm_simd_intXI_type_node = NULL_TREE;
1203 static tree arm_simd_polyQI_type_node = NULL_TREE;
1204 static tree arm_simd_polyHI_type_node = NULL_TREE;
1205 static tree arm_simd_polyDI_type_node = NULL_TREE;
1206 static tree arm_simd_polyTI_type_node = NULL_TREE;
1208 /* Wrapper around add_builtin_function. NAME is the name of the built-in
1209 function, TYPE is the function type, CODE is the function subcode
1210 (relative to ARM_BUILTIN_GENERAL), and ATTRS is the function
1211 attributes. */
1212 static tree
1213 arm_general_add_builtin_function (const char* name, tree type,
1214 unsigned int code, tree attrs = NULL_TREE)
1216 code = (code << ARM_BUILTIN_SHIFT) | ARM_BUILTIN_GENERAL;
1217 return add_builtin_function (name, type, code, BUILT_IN_MD, NULL, attrs);
1220 static const char *
1221 arm_mangle_builtin_scalar_type (const_tree type)
1223 int i = 0;
1225 while (arm_scalar_builtin_types[i] != NULL)
1227 const char *name = arm_scalar_builtin_types[i];
1229 if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1230 && DECL_NAME (TYPE_NAME (type))
1231 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
1232 return arm_scalar_builtin_types[i];
1233 i++;
1235 return NULL;
1238 static const char *
1239 arm_mangle_builtin_vector_type (const_tree type)
1241 tree attrs = TYPE_ATTRIBUTES (type);
1242 if (tree attr = lookup_attribute ("Advanced SIMD type", attrs))
1244 tree mangled_name = TREE_VALUE (TREE_VALUE (attr));
1245 return IDENTIFIER_POINTER (mangled_name);
1248 return NULL;
1251 const char *
1252 arm_mangle_builtin_type (const_tree type)
1254 const char *mangle;
1255 /* Walk through all the Arm builtins types tables to filter out the
1256 incoming type. */
1257 if ((mangle = arm_mangle_builtin_vector_type (type))
1258 || (mangle = arm_mangle_builtin_scalar_type (type)))
1259 return mangle;
1261 return NULL;
1264 static tree
1265 arm_simd_builtin_std_type (machine_mode mode,
1266 enum arm_type_qualifiers q)
1268 #define QUAL_TYPE(M) \
1269 ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
1270 switch (mode)
1272 case E_QImode:
1273 return QUAL_TYPE (QI);
1274 case E_HImode:
1275 return QUAL_TYPE (HI);
1276 case E_SImode:
1277 return QUAL_TYPE (SI);
1278 case E_DImode:
1279 return QUAL_TYPE (DI);
1280 case E_TImode:
1281 return QUAL_TYPE (TI);
1282 case E_OImode:
1283 return arm_simd_intOI_type_node;
1284 case E_EImode:
1285 return arm_simd_intEI_type_node;
1286 case E_CImode:
1287 return arm_simd_intCI_type_node;
1288 case E_XImode:
1289 return arm_simd_intXI_type_node;
1290 case E_HFmode:
1291 return arm_fp16_type_node;
1292 case E_SFmode:
1293 return float_type_node;
1294 case E_DFmode:
1295 return double_type_node;
1296 case E_BFmode:
1297 return arm_bf16_type_node;
1298 default:
1299 gcc_unreachable ();
1301 #undef QUAL_TYPE
1304 static tree
1305 arm_lookup_simd_builtin_type (machine_mode mode,
1306 enum arm_type_qualifiers q)
1308 int i;
1309 int nelts = ARRAY_SIZE (arm_simd_types);
1311 /* Non-poly scalar modes map to standard types not in the table. */
1312 if (q != qualifier_poly && !VECTOR_MODE_P (mode))
1313 return arm_simd_builtin_std_type (mode, q);
1315 for (i = 0; i < nelts; i++)
1316 if (arm_simd_types[i].mode == mode
1317 && arm_simd_types[i].q == q)
1318 return arm_simd_types[i].itype;
1320 /* Note that we won't have caught the underlying type for poly64x2_t
1321 in the above table. This gets default mangling. */
1323 return NULL_TREE;
1326 static tree
1327 arm_simd_builtin_type (machine_mode mode, arm_type_qualifiers qualifiers)
1329 if ((qualifiers & qualifier_poly) != 0)
1330 return arm_lookup_simd_builtin_type (mode, qualifier_poly);
1331 else if ((qualifiers & qualifier_unsigned) != 0)
1332 return arm_lookup_simd_builtin_type (mode, qualifier_unsigned);
1333 else if ((qualifiers & qualifier_predicate) != 0)
1334 return unsigned_intHI_type_node;
1335 else
1336 return arm_lookup_simd_builtin_type (mode, qualifier_none);
1339 static void
1340 arm_init_simd_builtin_types (void)
1342 int i;
1343 int nelts = ARRAY_SIZE (arm_simd_types);
1344 tree tdecl;
1346 /* Poly types are a world of their own. In order to maintain legacy
1347 ABI, they get initialized using the old interface, and don't get
1348 an entry in our mangling table; consequently, they get default
1349 mangling. As a further gotcha, poly8_t and poly16_t are signed
1350 types, poly64_t and poly128_t are unsigned types. */
1351 if (!TARGET_HAVE_MVE)
1353 arm_simd_polyQI_type_node
1354 = build_distinct_type_copy (intQI_type_node);
1355 (*lang_hooks.types.register_builtin_type) (arm_simd_polyQI_type_node,
1356 "__builtin_neon_poly8");
1357 arm_simd_polyHI_type_node
1358 = build_distinct_type_copy (intHI_type_node);
1359 (*lang_hooks.types.register_builtin_type) (arm_simd_polyHI_type_node,
1360 "__builtin_neon_poly16");
1361 arm_simd_polyDI_type_node
1362 = build_distinct_type_copy (unsigned_intDI_type_node);
1363 (*lang_hooks.types.register_builtin_type) (arm_simd_polyDI_type_node,
1364 "__builtin_neon_poly64");
1365 arm_simd_polyTI_type_node
1366 = build_distinct_type_copy (unsigned_intTI_type_node);
1367 (*lang_hooks.types.register_builtin_type) (arm_simd_polyTI_type_node,
1368 "__builtin_neon_poly128");
1369 /* Init poly vector element types with scalar poly types. */
1370 arm_simd_types[Poly8x8_t].eltype = arm_simd_polyQI_type_node;
1371 arm_simd_types[Poly8x16_t].eltype = arm_simd_polyQI_type_node;
1372 arm_simd_types[Poly16x4_t].eltype = arm_simd_polyHI_type_node;
1373 arm_simd_types[Poly16x8_t].eltype = arm_simd_polyHI_type_node;
1374 /* Note: poly64x2_t is defined in arm_neon.h, to ensure it gets default
1375 mangling. */
1377 /* Prevent front-ends from transforming poly vectors into string
1378 literals. */
1379 TYPE_STRING_FLAG (arm_simd_polyQI_type_node) = false;
1380 TYPE_STRING_FLAG (arm_simd_polyHI_type_node) = false;
1382 /* Init all the element types built by the front-end. */
1383 arm_simd_types[Int8x8_t].eltype = get_typenode_from_name (INT8_TYPE);
1384 arm_simd_types[Int8x16_t].eltype = get_typenode_from_name (INT8_TYPE);
1385 arm_simd_types[Int16x4_t].eltype = get_typenode_from_name (INT16_TYPE);
1386 arm_simd_types[Int16x8_t].eltype = get_typenode_from_name (INT16_TYPE);
1387 arm_simd_types[Int32x2_t].eltype = get_typenode_from_name (INT32_TYPE);
1388 arm_simd_types[Int32x4_t].eltype = get_typenode_from_name (INT32_TYPE);
1389 arm_simd_types[Int64x2_t].eltype = get_typenode_from_name (INT64_TYPE);
1390 arm_simd_types[Uint8x8_t].eltype = get_typenode_from_name (UINT8_TYPE);
1391 arm_simd_types[Uint8x16_t].eltype = get_typenode_from_name (UINT8_TYPE);
1392 arm_simd_types[Uint16x4_t].eltype = get_typenode_from_name (UINT16_TYPE);
1393 arm_simd_types[Uint16x8_t].eltype = get_typenode_from_name (UINT16_TYPE);
1394 arm_simd_types[Uint32x2_t].eltype = get_typenode_from_name (UINT32_TYPE);
1395 arm_simd_types[Uint32x4_t].eltype = get_typenode_from_name (UINT32_TYPE);
1396 arm_simd_types[Uint64x2_t].eltype = get_typenode_from_name (UINT64_TYPE);
1398 /* Note: poly64x2_t is defined in arm_neon.h, to ensure it gets default
1399 mangling. */
1401 /* Continue with standard types. */
1402 /* The __builtin_simd{64,128}_float16 types are kept private unless
1403 we have a scalar __fp16 type. */
1404 arm_simd_types[Float16x4_t].eltype = arm_fp16_type_node;
1405 arm_simd_types[Float16x8_t].eltype = arm_fp16_type_node;
1406 arm_simd_types[Float32x2_t].eltype = float_type_node;
1407 arm_simd_types[Float32x4_t].eltype = float_type_node;
1409 /* Init Bfloat vector types with underlying __bf16 scalar type. */
1410 arm_simd_types[Bfloat16x2_t].eltype = arm_bf16_type_node;
1411 arm_simd_types[Bfloat16x4_t].eltype = arm_bf16_type_node;
1412 arm_simd_types[Bfloat16x8_t].eltype = arm_bf16_type_node;
1414 for (i = 0; i < nelts; i++)
1416 tree eltype = arm_simd_types[i].eltype;
1417 machine_mode mode = arm_simd_types[i].mode;
1419 if (eltype == NULL
1420 /* VECTOR_BOOL is not supported unless MVE is activated;
1421 without MVE, build_truth_vector_type_for_mode
1422 would crash. */
1423 && ((GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL)
1424 || !TARGET_HAVE_MVE))
1425 continue;
1426 if (arm_simd_types[i].itype == NULL)
1428 tree type;
1429 if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
1431 /* Handle MVE predicates: they are internally stored as
1432 16 bits, but are used as vectors of 1, 2 or 4-bit
1433 elements. */
1434 type = build_truth_vector_type_for_mode (GET_MODE_NUNITS (mode),
1435 mode);
1436 eltype = TREE_TYPE (type);
1438 else
1439 type = build_vector_type (eltype, GET_MODE_NUNITS (mode));
1441 type = build_distinct_type_copy (type);
1442 SET_TYPE_STRUCTURAL_EQUALITY (type);
1444 tree mangled_name = get_identifier (arm_simd_types[i].mangle);
1445 tree value = tree_cons (NULL_TREE, mangled_name, NULL_TREE);
1446 TYPE_ATTRIBUTES (type)
1447 = tree_cons (get_identifier ("Advanced SIMD type"), value,
1448 TYPE_ATTRIBUTES (type));
1449 arm_simd_types[i].itype = type;
1452 tdecl = add_builtin_type (arm_simd_types[i].name,
1453 arm_simd_types[i].itype);
1454 TYPE_NAME (arm_simd_types[i].itype) = tdecl;
1455 SET_TYPE_STRUCTURAL_EQUALITY (arm_simd_types[i].itype);
1458 #define AARCH_BUILD_SIGNED_TYPE(mode) \
1459 make_signed_type (GET_MODE_PRECISION (mode));
1460 arm_simd_intOI_type_node = AARCH_BUILD_SIGNED_TYPE (OImode);
1461 arm_simd_intEI_type_node = AARCH_BUILD_SIGNED_TYPE (EImode);
1462 arm_simd_intCI_type_node = AARCH_BUILD_SIGNED_TYPE (CImode);
1463 arm_simd_intXI_type_node = AARCH_BUILD_SIGNED_TYPE (XImode);
1464 #undef AARCH_BUILD_SIGNED_TYPE
1466 tdecl = add_builtin_type
1467 ("__builtin_neon_ei" , arm_simd_intEI_type_node);
1468 TYPE_NAME (arm_simd_intEI_type_node) = tdecl;
1469 tdecl = add_builtin_type
1470 ("__builtin_neon_oi" , arm_simd_intOI_type_node);
1471 TYPE_NAME (arm_simd_intOI_type_node) = tdecl;
1472 tdecl = add_builtin_type
1473 ("__builtin_neon_ci" , arm_simd_intCI_type_node);
1474 TYPE_NAME (arm_simd_intCI_type_node) = tdecl;
1475 tdecl = add_builtin_type
1476 ("__builtin_neon_xi" , arm_simd_intXI_type_node);
1477 TYPE_NAME (arm_simd_intXI_type_node) = tdecl;
1480 static void
1481 arm_init_simd_builtin_scalar_types (void)
1483 /* Define typedefs for all the standard scalar types. */
1484 (*lang_hooks.types.register_builtin_type) (intQI_type_node,
1485 "__builtin_neon_qi");
1486 (*lang_hooks.types.register_builtin_type) (intHI_type_node,
1487 "__builtin_neon_hi");
1488 (*lang_hooks.types.register_builtin_type) (intSI_type_node,
1489 "__builtin_neon_si");
1490 (*lang_hooks.types.register_builtin_type) (float_type_node,
1491 "__builtin_neon_sf");
1492 (*lang_hooks.types.register_builtin_type) (intDI_type_node,
1493 "__builtin_neon_di");
1494 (*lang_hooks.types.register_builtin_type) (double_type_node,
1495 "__builtin_neon_df");
1496 (*lang_hooks.types.register_builtin_type) (intTI_type_node,
1497 "__builtin_neon_ti");
1498 (*lang_hooks.types.register_builtin_type) (arm_bf16_type_node,
1499 "__builtin_neon_bf");
1500 /* Unsigned integer types for various mode sizes. */
1501 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
1502 "__builtin_neon_uqi");
1503 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
1504 "__builtin_neon_uhi");
1505 (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
1506 "__builtin_neon_usi");
1507 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
1508 "__builtin_neon_udi");
1509 (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
1510 "__builtin_neon_uti");
1513 /* Set up a builtin. It will use information stored in the argument struct D to
1514 derive the builtin's type signature and name. It will append the name in D
1515 to the PREFIX passed and use these to create a builtin declaration that is
1516 then stored in 'arm_builtin_decls' under index FCODE. This FCODE is also
1517 written back to D for future use. */
1519 static void
1520 arm_init_builtin (unsigned int fcode, arm_builtin_datum *d,
1521 const char * prefix)
1523 bool print_type_signature_p = false;
1524 char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
1525 char namebuf[60];
1526 tree ftype = NULL;
1527 tree fndecl = NULL;
1529 d->fcode = fcode;
1531 /* We must track two variables here. op_num is
1532 the operand number as in the RTL pattern. This is
1533 required to access the mode (e.g. V4SF mode) of the
1534 argument, from which the base type can be derived.
1535 arg_num is an index in to the qualifiers data, which
1536 gives qualifiers to the type (e.g. const unsigned).
1537 The reason these two variables may differ by one is the
1538 void return type. While all return types take the 0th entry
1539 in the qualifiers array, there is no operand for them in the
1540 RTL pattern. */
1541 int op_num = insn_data[d->code].n_operands - 1;
1542 int arg_num = d->qualifiers[0] & qualifier_void
1543 ? op_num + 1
1544 : op_num;
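/* For instance, a STORE1-style builtin ({void, pointer, T} qualifiers)
   typically expands from an insn with two operands, so op_num starts at
   1 while arg_num starts at 2; arg_num thereby stays in step with the
   qualifiers array, whose slot 0 describes the (void) return type.  */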
1545 tree return_type = void_type_node, args = void_list_node;
1546 tree eltype;
1548 /* Build a function type directly from the insn_data for this
1549 builtin. The build_function_type () function takes care of
1550 removing duplicates for us. */
1551 for (; op_num >= 0; arg_num--, op_num--)
1553 machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
1554 enum arm_type_qualifiers qualifiers = d->qualifiers[arg_num];
1556 if (qualifiers & qualifier_unsigned)
1558 type_signature[arg_num] = 'u';
1559 print_type_signature_p = true;
1561 else if (qualifiers & qualifier_poly)
1563 type_signature[arg_num] = 'p';
1564 print_type_signature_p = true;
1566 else
1567 type_signature[arg_num] = 's';
1569 /* Skip an internal operand for vget_{low, high}. */
1570 if (qualifiers & qualifier_internal)
1571 continue;
1573 /* Some builtins have different user-facing types
1574 for certain arguments, encoded in d->mode. */
1575 if (qualifiers & qualifier_map_mode)
1576 op_mode = d->mode;
1578 /* MVE Predicates use HImode as mandated by the ABI: pred16_t is
1579 unsigned short. */
1580 if (qualifiers & qualifier_predicate)
1581 op_mode = HImode;
1583 /* For pointers, we want a pointer to the basic type
1584 of the vector. */
1585 if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
1586 op_mode = GET_MODE_INNER (op_mode);
1588 /* For void pointers we already have nodes constructed by the midend. */
1589 if (qualifiers & qualifier_void_pointer)
1590 eltype = qualifiers & qualifier_const
1591 ? const_ptr_type_node : ptr_type_node;
1592 else
1594 eltype
1595 = arm_simd_builtin_type (op_mode, qualifiers);
1596 gcc_assert (eltype != NULL);
1598 /* Add qualifiers. */
1599 if (qualifiers & qualifier_const)
1600 eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);
1602 if (qualifiers & qualifier_pointer)
1603 eltype = build_pointer_type (eltype);
1605 /* If we have reached arg_num == 0, we are at a non-void
1606 return type. Otherwise, we are still processing
1607 arguments. */
1608 if (arg_num == 0)
1609 return_type = eltype;
1610 else
1611 args = tree_cons (NULL_TREE, eltype, args);
1614 ftype = build_function_type (return_type, args);
1616 gcc_assert (ftype != NULL);
1618 if (print_type_signature_p
1619 && IN_RANGE (fcode, ARM_BUILTIN_VFP_BASE, ARM_BUILTIN_ACLE_BASE - 1))
1620 snprintf (namebuf, sizeof (namebuf), "%s_%s_%s",
1621 prefix, d->name, type_signature);
1622 else
1623 snprintf (namebuf, sizeof (namebuf), "%s_%s",
1624 prefix, d->name);
1626 fndecl = arm_general_add_builtin_function (namebuf, ftype, fcode);
1627 arm_builtin_decls[fcode] = fndecl;
1630 /* Initialize the backend REAL_TYPE type supporting bfloat types. */
1631 static void
1632 arm_init_bf16_types (void)
1634 arm_bf16_type_node = make_node (REAL_TYPE);
1635 TYPE_PRECISION (arm_bf16_type_node) = 16;
1636 SET_TYPE_MODE (arm_bf16_type_node, BFmode);
1637 layout_type (arm_bf16_type_node);
1639 lang_hooks.types.register_builtin_type (arm_bf16_type_node, "__bf16");
1640 arm_bf16_ptr_type_node = build_pointer_type (arm_bf16_type_node);
1643 /* Set up ACLE builtins, even builtins for instructions that are not
1644 in the current target ISA to allow the user to compile particular modules
1645 with different target specific options that differ from the command line
1646 options. Such builtins will be rejected in arm_general_expand_builtin. */
1648 static void
1649 arm_init_acle_builtins (void)
1651 unsigned int i, fcode = ARM_BUILTIN_ACLE_PATTERN_START;
1653 tree sat_check_fpr = build_function_type_list (void_type_node,
1654 intSI_type_node,
1655 intSI_type_node,
1656 intSI_type_node,
1657 NULL);
1658 arm_builtin_decls[ARM_BUILTIN_SAT_IMM_CHECK]
1659 = arm_general_add_builtin_function ("__builtin_sat_imm_check",
1660 sat_check_fpr,
1661 ARM_BUILTIN_SAT_IMM_CHECK);
1663 for (i = 0; i < ARRAY_SIZE (acle_builtin_data); i++, fcode++)
1665 arm_builtin_datum *d = &acle_builtin_data[i];
1666 arm_init_builtin (fcode, d, "__builtin_arm");
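/* Illustrative only (the wrapper below is a sketch; the real wrappers live
   in arm_acle.h): because registration is unconditional, a module built
   with, say, -march=armv8-a+crc can use

     unsigned crc_step (unsigned c, unsigned d)
     {
       return __builtin_arm_crc32w (c, d);
     }

   while other modules of the same program are built for targets without CRC;
   a call whose effective target lacks the instruction is rejected when the
   builtin is expanded, not at registration time.  */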
1670 static void
1671 arm_init_cde_builtins (void)
1673 unsigned int i, fcode = ARM_BUILTIN_CDE_PATTERN_START;
1674 for (i = 0; i < ARRAY_SIZE (cde_builtin_data); i++, fcode++)
1676 /* Only define CDE floating point builtins if the target has floating
1677 point registers. NOTE: without HARD_FLOAT we don't have MVE, so we
1678 can break out of this loop directly here. */
1679 if (!TARGET_MAYBE_HARD_FLOAT && fcode >= ARM_BUILTIN_vcx1si)
1680 break;
1681 /* Only define CDE/MVE builtins if MVE is available. */
1682 if (!TARGET_HAVE_MVE && fcode >= ARM_BUILTIN_vcx1qv16qi)
1683 break;
1684 arm_builtin_cde_datum *cde = &cde_builtin_data[i];
1685 arm_builtin_datum *d = &cde->base;
1686 arm_init_builtin (fcode, d, "__builtin_arm");
1687 set_call_expr_flags (arm_builtin_decls[fcode], cde->ecf_flag);
1691 /* Set up all the MVE builtins mentioned in the arm_mve_builtins.def file. */
1692 static void
1693 arm_init_mve_builtins (void)
1695 volatile unsigned int i, fcode = ARM_BUILTIN_MVE_PATTERN_START;
1697 arm_init_simd_builtin_scalar_types ();
1698 arm_init_simd_builtin_types ();
1700 /* Add support for __builtin_{get,set}_fpscr_nzcvqc, used by MVE intrinsics
1701 that read and/or write the carry bit. */
1702 tree get_fpscr_nzcvqc = build_function_type_list (intSI_type_node,
1703 NULL);
1704 tree set_fpscr_nzcvqc = build_function_type_list (void_type_node,
1705 intSI_type_node,
1706 NULL);
1707 arm_builtin_decls[ARM_BUILTIN_GET_FPSCR_NZCVQC]
1708 = arm_general_add_builtin_function ("__builtin_arm_get_fpscr_nzcvqc",
1709 get_fpscr_nzcvqc,
1710 ARM_BUILTIN_GET_FPSCR_NZCVQC);
1711 arm_builtin_decls[ARM_BUILTIN_SET_FPSCR_NZCVQC]
1712 = arm_general_add_builtin_function ("__builtin_arm_set_fpscr_nzcvqc",
1713 set_fpscr_nzcvqc,
1714 ARM_BUILTIN_SET_FPSCR_NZCVQC);
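/* A minimal sketch (not code from arm_mve.h) of how an intrinsic that must
   preserve the carry flag can be written in terms of the two builtins
   registered above:

     int fpscr = __builtin_arm_get_fpscr_nzcvqc ();
     ...               vector operation that consumes or produces the carry
     __builtin_arm_set_fpscr_nzcvqc (fpscr);

   i.e. a read-modify-write of the NZCVQC bits around the operation.  */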
1716 for (i = 0; i < ARRAY_SIZE (mve_builtin_data); i++, fcode++)
1718 arm_builtin_datum *d = &mve_builtin_data[i];
1719 arm_init_builtin (fcode, d, "__builtin_mve");
1722 if (in_lto_p)
1724 arm_mve::handle_arm_mve_types_h ();
1725 /* Under LTO, we cannot know whether
1726 __ARM_MVE_PRESERVE_USER_NAMESPACE was defined, so assume it
1727 was not. */
1728 arm_mve::handle_arm_mve_h (false);
1732 /* Set up all the NEON builtins, including builtins for instructions that are
1733 not in the current target ISA, so that the user can compile particular
1734 modules with target-specific options that differ from the command-line
1735 options. Such builtins will be rejected in arm_general_expand_builtin. */
1737 static void
1738 arm_init_neon_builtins (void)
1740 unsigned int i, fcode = ARM_BUILTIN_NEON_PATTERN_START;
1742 arm_init_simd_builtin_types ();
1744 /* Strong typing hasn't been implemented for all AdvSIMD builtin intrinsics.
1745 Therefore we need to preserve the old __builtin scalar types. They can be
1746 removed once all the intrinsics become strongly typed using the qualifier
1747 system. */
1748 arm_init_simd_builtin_scalar_types ();
1750 for (i = 0; i < ARRAY_SIZE (neon_builtin_data); i++, fcode++)
1752 arm_builtin_datum *d = &neon_builtin_data[i];
1753 arm_init_builtin (fcode, d, "__builtin_neon");
1757 /* Set up all the scalar floating point builtins. */
1759 static void
1760 arm_init_vfp_builtins (void)
1762 unsigned int i, fcode = ARM_BUILTIN_VFP_PATTERN_START;
1764 for (i = 0; i < ARRAY_SIZE (vfp_builtin_data); i++, fcode++)
1766 arm_builtin_datum *d = &vfp_builtin_data[i];
1767 arm_init_builtin (fcode, d, "__builtin_neon");
1771 static void
1772 arm_init_crypto_builtins (void)
1774 tree V16UQI_type_node
1775 = arm_simd_builtin_type (V16QImode, qualifier_unsigned);
1777 tree V4USI_type_node
1778 = arm_simd_builtin_type (V4SImode, qualifier_unsigned);
1780 tree v16uqi_ftype_v16uqi
1781 = build_function_type_list (V16UQI_type_node, V16UQI_type_node,
1782 NULL_TREE);
1784 tree v16uqi_ftype_v16uqi_v16uqi
1785 = build_function_type_list (V16UQI_type_node, V16UQI_type_node,
1786 V16UQI_type_node, NULL_TREE);
1788 tree v4usi_ftype_v4usi
1789 = build_function_type_list (V4USI_type_node, V4USI_type_node,
1790 NULL_TREE);
1792 tree v4usi_ftype_v4usi_v4usi
1793 = build_function_type_list (V4USI_type_node, V4USI_type_node,
1794 V4USI_type_node, NULL_TREE);
1796 tree v4usi_ftype_v4usi_v4usi_v4usi
1797 = build_function_type_list (V4USI_type_node, V4USI_type_node,
1798 V4USI_type_node, V4USI_type_node,
1799 NULL_TREE);
1801 tree uti_ftype_udi_udi
1802 = build_function_type_list (unsigned_intTI_type_node,
1803 unsigned_intDI_type_node,
1804 unsigned_intDI_type_node,
1805 NULL_TREE);
1807 #undef CRYPTO1
1808 #undef CRYPTO2
1809 #undef CRYPTO3
1810 #undef C
1811 #undef N
1812 #undef CF
1813 #undef FT1
1814 #undef FT2
1815 #undef FT3
1817 #define C(U) \
1818 ARM_BUILTIN_CRYPTO_##U
1819 #define N(L) \
1820 "__builtin_arm_crypto_"#L
1821 #define FT1(R, A) \
1822 R##_ftype_##A
1823 #define FT2(R, A1, A2) \
1824 R##_ftype_##A1##_##A2
1825 #define FT3(R, A1, A2, A3) \
1826 R##_ftype_##A1##_##A2##_##A3
1827 #define CRYPTO1(L, U, R, A) \
1828 arm_builtin_decls[C (U)] \
1829 = arm_general_add_builtin_function (N (L), FT1 (R, A), C (U));
1830 #define CRYPTO2(L, U, R, A1, A2) \
1831 arm_builtin_decls[C (U)] \
1832 = arm_general_add_builtin_function (N (L), FT2 (R, A1, A2), C (U));
1834 #define CRYPTO3(L, U, R, A1, A2, A3) \
1835 arm_builtin_decls[C (U)] \
1836 = arm_general_add_builtin_function (N (L), FT3 (R, A1, A2, A3), C (U));
1837 #include "crypto.def"
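/* For illustration (a hypothetical entry, not quoted from crypto.def): an
   entry of the form

     CRYPTO2 (aesd, AESD, v16uqi, v16uqi, v16uqi)

   would expand, through the macros above, to

     arm_builtin_decls[ARM_BUILTIN_CRYPTO_AESD]
       = arm_general_add_builtin_function ("__builtin_arm_crypto_aesd",
                                           v16uqi_ftype_v16uqi_v16uqi,
                                           ARM_BUILTIN_CRYPTO_AESD);

   so each .def line both registers the user-visible builtin name and records
   its decl for later expansion.  */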
1839 #undef CRYPTO1
1840 #undef CRYPTO2
1841 #undef CRYPTO3
1842 #undef C
1843 #undef N
1844 #undef FT1
1845 #undef FT2
1846 #undef FT3
1849 #undef NUM_DREG_TYPES
1850 #undef NUM_QREG_TYPES
1852 #define def_mbuiltin(FLAG, NAME, TYPE, CODE) \
1853 do \
1855 if (FLAG == isa_nobit \
1856 || bitmap_bit_p (arm_active_target.isa, FLAG)) \
1858 tree bdecl; \
1859 bdecl = arm_general_add_builtin_function ((NAME), (TYPE), \
1860 (CODE)); \
1861 arm_builtin_decls[CODE] = bdecl; \
1864 while (0)
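/* As an illustration of the macro above (a hypothetical call, chosen to
   match the WADDB entry in bdesc_2arg below):

     def_mbuiltin (isa_bit_iwmmxt, "__builtin_arm_waddb",
                   v8qi_ftype_v8qi_v8qi, ARM_BUILTIN_WADDB);

   registers the builtin only when isa_bit_iwmmxt is in the active target ISA
   (or when the feature bit is isa_nobit), and records the resulting decl in
   arm_builtin_decls.  */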
1866 struct builtin_description
1868 const enum isa_feature feature;
1869 const enum insn_code icode;
1870 const char * const name;
1871 const enum arm_builtins code;
1872 const enum rtx_code comparison;
1873 const unsigned int flag;
1876 static const struct builtin_description bdesc_2arg[] =
1878 #define IWMMXT_BUILTIN(code, string, builtin) \
1879 { isa_bit_iwmmxt, CODE_FOR_##code, \
1880 "__builtin_arm_" string, \
1881 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1883 #define IWMMXT2_BUILTIN(code, string, builtin) \
1884 { isa_bit_iwmmxt2, CODE_FOR_##code, \
1885 "__builtin_arm_" string, \
1886 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1888 IWMMXT_BUILTIN (addv8qi3, "waddb", WADDB)
1889 IWMMXT_BUILTIN (addv4hi3, "waddh", WADDH)
1890 IWMMXT_BUILTIN (addv2si3, "waddw", WADDW)
1891 IWMMXT_BUILTIN (subv8qi3, "wsubb", WSUBB)
1892 IWMMXT_BUILTIN (subv4hi3, "wsubh", WSUBH)
1893 IWMMXT_BUILTIN (subv2si3, "wsubw", WSUBW)
1894 IWMMXT_BUILTIN (ssaddv8qi3, "waddbss", WADDSSB)
1895 IWMMXT_BUILTIN (ssaddv4hi3, "waddhss", WADDSSH)
1896 IWMMXT_BUILTIN (ssaddv2si3, "waddwss", WADDSSW)
1897 IWMMXT_BUILTIN (sssubv8qi3, "wsubbss", WSUBSSB)
1898 IWMMXT_BUILTIN (sssubv4hi3, "wsubhss", WSUBSSH)
1899 IWMMXT_BUILTIN (sssubv2si3, "wsubwss", WSUBSSW)
1900 IWMMXT_BUILTIN (usaddv8qi3, "waddbus", WADDUSB)
1901 IWMMXT_BUILTIN (usaddv4hi3, "waddhus", WADDUSH)
1902 IWMMXT_BUILTIN (usaddv2si3, "waddwus", WADDUSW)
1903 IWMMXT_BUILTIN (ussubv8qi3, "wsubbus", WSUBUSB)
1904 IWMMXT_BUILTIN (ussubv4hi3, "wsubhus", WSUBUSH)
1905 IWMMXT_BUILTIN (ussubv2si3, "wsubwus", WSUBUSW)
1906 IWMMXT_BUILTIN (mulv4hi3, "wmulul", WMULUL)
1907 IWMMXT_BUILTIN (smulv4hi3_highpart, "wmulsm", WMULSM)
1908 IWMMXT_BUILTIN (umulv4hi3_highpart, "wmulum", WMULUM)
1909 IWMMXT_BUILTIN (eqv8qi3, "wcmpeqb", WCMPEQB)
1910 IWMMXT_BUILTIN (eqv4hi3, "wcmpeqh", WCMPEQH)
1911 IWMMXT_BUILTIN (eqv2si3, "wcmpeqw", WCMPEQW)
1912 IWMMXT_BUILTIN (gtuv8qi3, "wcmpgtub", WCMPGTUB)
1913 IWMMXT_BUILTIN (gtuv4hi3, "wcmpgtuh", WCMPGTUH)
1914 IWMMXT_BUILTIN (gtuv2si3, "wcmpgtuw", WCMPGTUW)
1915 IWMMXT_BUILTIN (gtv8qi3, "wcmpgtsb", WCMPGTSB)
1916 IWMMXT_BUILTIN (gtv4hi3, "wcmpgtsh", WCMPGTSH)
1917 IWMMXT_BUILTIN (gtv2si3, "wcmpgtsw", WCMPGTSW)
1918 IWMMXT_BUILTIN (umaxv8qi3, "wmaxub", WMAXUB)
1919 IWMMXT_BUILTIN (smaxv8qi3, "wmaxsb", WMAXSB)
1920 IWMMXT_BUILTIN (umaxv4hi3, "wmaxuh", WMAXUH)
1921 IWMMXT_BUILTIN (smaxv4hi3, "wmaxsh", WMAXSH)
1922 IWMMXT_BUILTIN (umaxv2si3, "wmaxuw", WMAXUW)
1923 IWMMXT_BUILTIN (smaxv2si3, "wmaxsw", WMAXSW)
1924 IWMMXT_BUILTIN (uminv8qi3, "wminub", WMINUB)
1925 IWMMXT_BUILTIN (sminv8qi3, "wminsb", WMINSB)
1926 IWMMXT_BUILTIN (uminv4hi3, "wminuh", WMINUH)
1927 IWMMXT_BUILTIN (sminv4hi3, "wminsh", WMINSH)
1928 IWMMXT_BUILTIN (uminv2si3, "wminuw", WMINUW)
1929 IWMMXT_BUILTIN (sminv2si3, "wminsw", WMINSW)
1930 IWMMXT_BUILTIN (iwmmxt_anddi3, "wand", WAND)
1931 IWMMXT_BUILTIN (iwmmxt_nanddi3, "wandn", WANDN)
1932 IWMMXT_BUILTIN (iwmmxt_iordi3, "wor", WOR)
1933 IWMMXT_BUILTIN (iwmmxt_xordi3, "wxor", WXOR)
1934 IWMMXT_BUILTIN (iwmmxt_uavgv8qi3, "wavg2b", WAVG2B)
1935 IWMMXT_BUILTIN (iwmmxt_uavgv4hi3, "wavg2h", WAVG2H)
1936 IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3, "wavg2br", WAVG2BR)
1937 IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3, "wavg2hr", WAVG2HR)
1938 IWMMXT_BUILTIN (iwmmxt_wunpckilb, "wunpckilb", WUNPCKILB)
1939 IWMMXT_BUILTIN (iwmmxt_wunpckilh, "wunpckilh", WUNPCKILH)
1940 IWMMXT_BUILTIN (iwmmxt_wunpckilw, "wunpckilw", WUNPCKILW)
1941 IWMMXT_BUILTIN (iwmmxt_wunpckihb, "wunpckihb", WUNPCKIHB)
1942 IWMMXT_BUILTIN (iwmmxt_wunpckihh, "wunpckihh", WUNPCKIHH)
1943 IWMMXT_BUILTIN (iwmmxt_wunpckihw, "wunpckihw", WUNPCKIHW)
1944 IWMMXT2_BUILTIN (iwmmxt_waddsubhx, "waddsubhx", WADDSUBHX)
1945 IWMMXT2_BUILTIN (iwmmxt_wsubaddhx, "wsubaddhx", WSUBADDHX)
1946 IWMMXT2_BUILTIN (iwmmxt_wabsdiffb, "wabsdiffb", WABSDIFFB)
1947 IWMMXT2_BUILTIN (iwmmxt_wabsdiffh, "wabsdiffh", WABSDIFFH)
1948 IWMMXT2_BUILTIN (iwmmxt_wabsdiffw, "wabsdiffw", WABSDIFFW)
1949 IWMMXT2_BUILTIN (iwmmxt_avg4, "wavg4", WAVG4)
1950 IWMMXT2_BUILTIN (iwmmxt_avg4r, "wavg4r", WAVG4R)
1951 IWMMXT2_BUILTIN (iwmmxt_wmulwsm, "wmulwsm", WMULWSM)
1952 IWMMXT2_BUILTIN (iwmmxt_wmulwum, "wmulwum", WMULWUM)
1953 IWMMXT2_BUILTIN (iwmmxt_wmulwsmr, "wmulwsmr", WMULWSMR)
1954 IWMMXT2_BUILTIN (iwmmxt_wmulwumr, "wmulwumr", WMULWUMR)
1955 IWMMXT2_BUILTIN (iwmmxt_wmulwl, "wmulwl", WMULWL)
1956 IWMMXT2_BUILTIN (iwmmxt_wmulsmr, "wmulsmr", WMULSMR)
1957 IWMMXT2_BUILTIN (iwmmxt_wmulumr, "wmulumr", WMULUMR)
1958 IWMMXT2_BUILTIN (iwmmxt_wqmulm, "wqmulm", WQMULM)
1959 IWMMXT2_BUILTIN (iwmmxt_wqmulmr, "wqmulmr", WQMULMR)
1960 IWMMXT2_BUILTIN (iwmmxt_wqmulwm, "wqmulwm", WQMULWM)
1961 IWMMXT2_BUILTIN (iwmmxt_wqmulwmr, "wqmulwmr", WQMULWMR)
1962 IWMMXT_BUILTIN (iwmmxt_walignr0, "walignr0", WALIGNR0)
1963 IWMMXT_BUILTIN (iwmmxt_walignr1, "walignr1", WALIGNR1)
1964 IWMMXT_BUILTIN (iwmmxt_walignr2, "walignr2", WALIGNR2)
1965 IWMMXT_BUILTIN (iwmmxt_walignr3, "walignr3", WALIGNR3)
1967 #define IWMMXT_BUILTIN2(code, builtin) \
1968 { isa_bit_iwmmxt, CODE_FOR_##code, NULL, \
1969 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1971 #define IWMMXT2_BUILTIN2(code, builtin) \
1972 { isa_bit_iwmmxt2, CODE_FOR_##code, NULL, \
1973 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1975 IWMMXT2_BUILTIN2 (iwmmxt_waddbhusm, WADDBHUSM)
1976 IWMMXT2_BUILTIN2 (iwmmxt_waddbhusl, WADDBHUSL)
1977 IWMMXT_BUILTIN2 (iwmmxt_wpackhss, WPACKHSS)
1978 IWMMXT_BUILTIN2 (iwmmxt_wpackwss, WPACKWSS)
1979 IWMMXT_BUILTIN2 (iwmmxt_wpackdss, WPACKDSS)
1980 IWMMXT_BUILTIN2 (iwmmxt_wpackhus, WPACKHUS)
1981 IWMMXT_BUILTIN2 (iwmmxt_wpackwus, WPACKWUS)
1982 IWMMXT_BUILTIN2 (iwmmxt_wpackdus, WPACKDUS)
1983 IWMMXT_BUILTIN2 (iwmmxt_wmacuz, WMACUZ)
1984 IWMMXT_BUILTIN2 (iwmmxt_wmacsz, WMACSZ)
1987 #define FP_BUILTIN(L, U) \
1988 {isa_nobit, CODE_FOR_##L, "__builtin_arm_"#L, ARM_BUILTIN_##U, \
1989 UNKNOWN, 0},
1991 FP_BUILTIN (get_fpscr, GET_FPSCR)
1992 FP_BUILTIN (set_fpscr, SET_FPSCR)
1993 #undef FP_BUILTIN
1995 #define CRYPTO_BUILTIN(L, U) \
1996 {isa_nobit, CODE_FOR_crypto_##L, "__builtin_arm_crypto_"#L, \
1997 ARM_BUILTIN_CRYPTO_##U, UNKNOWN, 0},
1998 #undef CRYPTO1
1999 #undef CRYPTO2
2000 #undef CRYPTO3
2001 #define CRYPTO2(L, U, R, A1, A2) CRYPTO_BUILTIN (L, U)
2002 #define CRYPTO1(L, U, R, A)
2003 #define CRYPTO3(L, U, R, A1, A2, A3)
2004 #include "crypto.def"
2005 #undef CRYPTO1
2006 #undef CRYPTO2
2007 #undef CRYPTO3
2011 static const struct builtin_description bdesc_1arg[] =
2013 IWMMXT_BUILTIN (iwmmxt_tmovmskb, "tmovmskb", TMOVMSKB)
2014 IWMMXT_BUILTIN (iwmmxt_tmovmskh, "tmovmskh", TMOVMSKH)
2015 IWMMXT_BUILTIN (iwmmxt_tmovmskw, "tmovmskw", TMOVMSKW)
2016 IWMMXT_BUILTIN (iwmmxt_waccb, "waccb", WACCB)
2017 IWMMXT_BUILTIN (iwmmxt_wacch, "wacch", WACCH)
2018 IWMMXT_BUILTIN (iwmmxt_waccw, "waccw", WACCW)
2019 IWMMXT_BUILTIN (iwmmxt_wunpckehub, "wunpckehub", WUNPCKEHUB)
2020 IWMMXT_BUILTIN (iwmmxt_wunpckehuh, "wunpckehuh", WUNPCKEHUH)
2021 IWMMXT_BUILTIN (iwmmxt_wunpckehuw, "wunpckehuw", WUNPCKEHUW)
2022 IWMMXT_BUILTIN (iwmmxt_wunpckehsb, "wunpckehsb", WUNPCKEHSB)
2023 IWMMXT_BUILTIN (iwmmxt_wunpckehsh, "wunpckehsh", WUNPCKEHSH)
2024 IWMMXT_BUILTIN (iwmmxt_wunpckehsw, "wunpckehsw", WUNPCKEHSW)
2025 IWMMXT_BUILTIN (iwmmxt_wunpckelub, "wunpckelub", WUNPCKELUB)
2026 IWMMXT_BUILTIN (iwmmxt_wunpckeluh, "wunpckeluh", WUNPCKELUH)
2027 IWMMXT_BUILTIN (iwmmxt_wunpckeluw, "wunpckeluw", WUNPCKELUW)
2028 IWMMXT_BUILTIN (iwmmxt_wunpckelsb, "wunpckelsb", WUNPCKELSB)
2029 IWMMXT_BUILTIN (iwmmxt_wunpckelsh, "wunpckelsh", WUNPCKELSH)
2030 IWMMXT_BUILTIN (iwmmxt_wunpckelsw, "wunpckelsw", WUNPCKELSW)
2031 IWMMXT2_BUILTIN (iwmmxt_wabsv8qi3, "wabsb", WABSB)
2032 IWMMXT2_BUILTIN (iwmmxt_wabsv4hi3, "wabsh", WABSH)
2033 IWMMXT2_BUILTIN (iwmmxt_wabsv2si3, "wabsw", WABSW)
2034 IWMMXT_BUILTIN (tbcstv8qi, "tbcstb", TBCSTB)
2035 IWMMXT_BUILTIN (tbcstv4hi, "tbcsth", TBCSTH)
2036 IWMMXT_BUILTIN (tbcstv2si, "tbcstw", TBCSTW)
2038 #define CRYPTO1(L, U, R, A) CRYPTO_BUILTIN (L, U)
2039 #define CRYPTO2(L, U, R, A1, A2)
2040 #define CRYPTO3(L, U, R, A1, A2, A3)
2041 #include "crypto.def"
2042 #undef CRYPTO1
2043 #undef CRYPTO2
2044 #undef CRYPTO3
2047 static const struct builtin_description bdesc_3arg[] =
2049 #define CRYPTO3(L, U, R, A1, A2, A3) CRYPTO_BUILTIN (L, U)
2050 #define CRYPTO1(L, U, R, A)
2051 #define CRYPTO2(L, U, R, A1, A2)
2052 #include "crypto.def"
2053 #undef CRYPTO1
2054 #undef CRYPTO2
2055 #undef CRYPTO3
2057 #undef CRYPTO_BUILTIN
2059 /* Set up all the iWMMXt builtins. This is not called if
2060 TARGET_IWMMXT is zero. */
2062 static void
2063 arm_init_iwmmxt_builtins (void)
2065 const struct builtin_description * d;
2066 size_t i;
2068 tree V2SI_type_node = build_vector_type_for_mode (intSI_type_node, V2SImode);
2069 tree V4HI_type_node = build_vector_type_for_mode (intHI_type_node, V4HImode);
2070 tree V8QI_type_node = build_vector_type_for_mode (intQI_type_node, V8QImode);
2072 tree v8qi_ftype_v8qi_v8qi_int
2073 = build_function_type_list (V8QI_type_node,
2074 V8QI_type_node, V8QI_type_node,
2075 integer_type_node, NULL_TREE);
2076 tree v4hi_ftype_v4hi_int
2077 = build_function_type_list (V4HI_type_node,
2078 V4HI_type_node, integer_type_node, NULL_TREE);
2079 tree v2si_ftype_v2si_int
2080 = build_function_type_list (V2SI_type_node,
2081 V2SI_type_node, integer_type_node, NULL_TREE);
2082 tree v2si_ftype_di_di
2083 = build_function_type_list (V2SI_type_node,
2084 long_long_integer_type_node,
2085 long_long_integer_type_node,
2086 NULL_TREE);
2087 tree di_ftype_di_int
2088 = build_function_type_list (long_long_integer_type_node,
2089 long_long_integer_type_node,
2090 integer_type_node, NULL_TREE);
2091 tree di_ftype_di_int_int
2092 = build_function_type_list (long_long_integer_type_node,
2093 long_long_integer_type_node,
2094 integer_type_node,
2095 integer_type_node, NULL_TREE);
2096 tree int_ftype_v8qi
2097 = build_function_type_list (integer_type_node,
2098 V8QI_type_node, NULL_TREE);
2099 tree int_ftype_v4hi
2100 = build_function_type_list (integer_type_node,
2101 V4HI_type_node, NULL_TREE);
2102 tree int_ftype_v2si
2103 = build_function_type_list (integer_type_node,
2104 V2SI_type_node, NULL_TREE);
2105 tree int_ftype_v8qi_int
2106 = build_function_type_list (integer_type_node,
2107 V8QI_type_node, integer_type_node, NULL_TREE);
2108 tree int_ftype_v4hi_int
2109 = build_function_type_list (integer_type_node,
2110 V4HI_type_node, integer_type_node, NULL_TREE);
2111 tree int_ftype_v2si_int
2112 = build_function_type_list (integer_type_node,
2113 V2SI_type_node, integer_type_node, NULL_TREE);
2114 tree v8qi_ftype_v8qi_int_int
2115 = build_function_type_list (V8QI_type_node,
2116 V8QI_type_node, integer_type_node,
2117 integer_type_node, NULL_TREE);
2118 tree v4hi_ftype_v4hi_int_int
2119 = build_function_type_list (V4HI_type_node,
2120 V4HI_type_node, integer_type_node,
2121 integer_type_node, NULL_TREE);
2122 tree v2si_ftype_v2si_int_int
2123 = build_function_type_list (V2SI_type_node,
2124 V2SI_type_node, integer_type_node,
2125 integer_type_node, NULL_TREE);
2126 /* Miscellaneous. */
2127 tree v8qi_ftype_v4hi_v4hi
2128 = build_function_type_list (V8QI_type_node,
2129 V4HI_type_node, V4HI_type_node, NULL_TREE);
2130 tree v4hi_ftype_v2si_v2si
2131 = build_function_type_list (V4HI_type_node,
2132 V2SI_type_node, V2SI_type_node, NULL_TREE);
2133 tree v8qi_ftype_v4hi_v8qi
2134 = build_function_type_list (V8QI_type_node,
2135 V4HI_type_node, V8QI_type_node, NULL_TREE);
2136 tree v2si_ftype_v4hi_v4hi
2137 = build_function_type_list (V2SI_type_node,
2138 V4HI_type_node, V4HI_type_node, NULL_TREE);
2139 tree v2si_ftype_v8qi_v8qi
2140 = build_function_type_list (V2SI_type_node,
2141 V8QI_type_node, V8QI_type_node, NULL_TREE);
2142 tree v4hi_ftype_v4hi_di
2143 = build_function_type_list (V4HI_type_node,
2144 V4HI_type_node, long_long_integer_type_node,
2145 NULL_TREE);
2146 tree v2si_ftype_v2si_di
2147 = build_function_type_list (V2SI_type_node,
2148 V2SI_type_node, long_long_integer_type_node,
2149 NULL_TREE);
2150 tree di_ftype_void
2151 = build_function_type_list (long_long_unsigned_type_node, NULL_TREE);
2152 tree int_ftype_void
2153 = build_function_type_list (integer_type_node, NULL_TREE);
2154 tree di_ftype_v8qi
2155 = build_function_type_list (long_long_integer_type_node,
2156 V8QI_type_node, NULL_TREE);
2157 tree di_ftype_v4hi
2158 = build_function_type_list (long_long_integer_type_node,
2159 V4HI_type_node, NULL_TREE);
2160 tree di_ftype_v2si
2161 = build_function_type_list (long_long_integer_type_node,
2162 V2SI_type_node, NULL_TREE);
2163 tree v2si_ftype_v4hi
2164 = build_function_type_list (V2SI_type_node,
2165 V4HI_type_node, NULL_TREE);
2166 tree v4hi_ftype_v8qi
2167 = build_function_type_list (V4HI_type_node,
2168 V8QI_type_node, NULL_TREE);
2169 tree v8qi_ftype_v8qi
2170 = build_function_type_list (V8QI_type_node,
2171 V8QI_type_node, NULL_TREE);
2172 tree v4hi_ftype_v4hi
2173 = build_function_type_list (V4HI_type_node,
2174 V4HI_type_node, NULL_TREE);
2175 tree v2si_ftype_v2si
2176 = build_function_type_list (V2SI_type_node,
2177 V2SI_type_node, NULL_TREE);
2179 tree di_ftype_di_v4hi_v4hi
2180 = build_function_type_list (long_long_unsigned_type_node,
2181 long_long_unsigned_type_node,
2182 V4HI_type_node, V4HI_type_node,
2183 NULL_TREE);
2185 tree di_ftype_v4hi_v4hi
2186 = build_function_type_list (long_long_unsigned_type_node,
2187 V4HI_type_node,V4HI_type_node,
2188 NULL_TREE);
2190 tree v2si_ftype_v2si_v4hi_v4hi
2191 = build_function_type_list (V2SI_type_node,
2192 V2SI_type_node, V4HI_type_node,
2193 V4HI_type_node, NULL_TREE);
2195 tree v2si_ftype_v2si_v8qi_v8qi
2196 = build_function_type_list (V2SI_type_node,
2197 V2SI_type_node, V8QI_type_node,
2198 V8QI_type_node, NULL_TREE);
2200 tree di_ftype_di_v2si_v2si
2201 = build_function_type_list (long_long_unsigned_type_node,
2202 long_long_unsigned_type_node,
2203 V2SI_type_node, V2SI_type_node,
2204 NULL_TREE);
2206 tree di_ftype_di_di_int
2207 = build_function_type_list (long_long_unsigned_type_node,
2208 long_long_unsigned_type_node,
2209 long_long_unsigned_type_node,
2210 integer_type_node, NULL_TREE);
2212 tree void_ftype_int
2213 = build_function_type_list (void_type_node,
2214 integer_type_node, NULL_TREE);
2216 tree v8qi_ftype_char
2217 = build_function_type_list (V8QI_type_node,
2218 signed_char_type_node, NULL_TREE);
2220 tree v4hi_ftype_short
2221 = build_function_type_list (V4HI_type_node,
2222 short_integer_type_node, NULL_TREE);
2224 tree v2si_ftype_int
2225 = build_function_type_list (V2SI_type_node,
2226 integer_type_node, NULL_TREE);
2228 /* Normal vector binops. */
2229 tree v8qi_ftype_v8qi_v8qi
2230 = build_function_type_list (V8QI_type_node,
2231 V8QI_type_node, V8QI_type_node, NULL_TREE);
2232 tree v4hi_ftype_v4hi_v4hi
2233 = build_function_type_list (V4HI_type_node,
2234 V4HI_type_node,V4HI_type_node, NULL_TREE);
2235 tree v2si_ftype_v2si_v2si
2236 = build_function_type_list (V2SI_type_node,
2237 V2SI_type_node, V2SI_type_node, NULL_TREE);
2238 tree di_ftype_di_di
2239 = build_function_type_list (long_long_unsigned_type_node,
2240 long_long_unsigned_type_node,
2241 long_long_unsigned_type_node,
2242 NULL_TREE);
2244 /* Add all builtins that are more or less simple operations on two
2245 operands. */
2246 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
2248 /* Use one of the operands; the target can have a different mode for
2249 mask-generating compares. */
2250 machine_mode mode;
2251 tree type;
2253 if (d->name == 0
2254 || !(d->feature == isa_bit_iwmmxt
2255 || d->feature == isa_bit_iwmmxt2))
2256 continue;
2258 mode = insn_data[d->icode].operand[1].mode;
2260 switch (mode)
2262 case E_V8QImode:
2263 type = v8qi_ftype_v8qi_v8qi;
2264 break;
2265 case E_V4HImode:
2266 type = v4hi_ftype_v4hi_v4hi;
2267 break;
2268 case E_V2SImode:
2269 type = v2si_ftype_v2si_v2si;
2270 break;
2271 case E_DImode:
2272 type = di_ftype_di_di;
2273 break;
2275 default:
2276 gcc_unreachable ();
2279 def_mbuiltin (d->feature, d->name, type, d->code);
2282 /* Add the remaining MMX insns with somewhat more complicated types. */
2283 #define iwmmx_mbuiltin(NAME, TYPE, CODE) \
2284 def_mbuiltin (isa_bit_iwmmxt, "__builtin_arm_" NAME, \
2285 (TYPE), ARM_BUILTIN_ ## CODE)
2287 #define iwmmx2_mbuiltin(NAME, TYPE, CODE) \
2288 def_mbuiltin (isa_bit_iwmmxt2, "__builtin_arm_" NAME, \
2289 (TYPE), ARM_BUILTIN_ ## CODE)
2291 iwmmx_mbuiltin ("wzero", di_ftype_void, WZERO);
2292 iwmmx_mbuiltin ("setwcgr0", void_ftype_int, SETWCGR0);
2293 iwmmx_mbuiltin ("setwcgr1", void_ftype_int, SETWCGR1);
2294 iwmmx_mbuiltin ("setwcgr2", void_ftype_int, SETWCGR2);
2295 iwmmx_mbuiltin ("setwcgr3", void_ftype_int, SETWCGR3);
2296 iwmmx_mbuiltin ("getwcgr0", int_ftype_void, GETWCGR0);
2297 iwmmx_mbuiltin ("getwcgr1", int_ftype_void, GETWCGR1);
2298 iwmmx_mbuiltin ("getwcgr2", int_ftype_void, GETWCGR2);
2299 iwmmx_mbuiltin ("getwcgr3", int_ftype_void, GETWCGR3);
2301 iwmmx_mbuiltin ("wsllh", v4hi_ftype_v4hi_di, WSLLH);
2302 iwmmx_mbuiltin ("wsllw", v2si_ftype_v2si_di, WSLLW);
2303 iwmmx_mbuiltin ("wslld", di_ftype_di_di, WSLLD);
2304 iwmmx_mbuiltin ("wsllhi", v4hi_ftype_v4hi_int, WSLLHI);
2305 iwmmx_mbuiltin ("wsllwi", v2si_ftype_v2si_int, WSLLWI);
2306 iwmmx_mbuiltin ("wslldi", di_ftype_di_int, WSLLDI);
2308 iwmmx_mbuiltin ("wsrlh", v4hi_ftype_v4hi_di, WSRLH);
2309 iwmmx_mbuiltin ("wsrlw", v2si_ftype_v2si_di, WSRLW);
2310 iwmmx_mbuiltin ("wsrld", di_ftype_di_di, WSRLD);
2311 iwmmx_mbuiltin ("wsrlhi", v4hi_ftype_v4hi_int, WSRLHI);
2312 iwmmx_mbuiltin ("wsrlwi", v2si_ftype_v2si_int, WSRLWI);
2313 iwmmx_mbuiltin ("wsrldi", di_ftype_di_int, WSRLDI);
2315 iwmmx_mbuiltin ("wsrah", v4hi_ftype_v4hi_di, WSRAH);
2316 iwmmx_mbuiltin ("wsraw", v2si_ftype_v2si_di, WSRAW);
2317 iwmmx_mbuiltin ("wsrad", di_ftype_di_di, WSRAD);
2318 iwmmx_mbuiltin ("wsrahi", v4hi_ftype_v4hi_int, WSRAHI);
2319 iwmmx_mbuiltin ("wsrawi", v2si_ftype_v2si_int, WSRAWI);
2320 iwmmx_mbuiltin ("wsradi", di_ftype_di_int, WSRADI);
2322 iwmmx_mbuiltin ("wrorh", v4hi_ftype_v4hi_di, WRORH);
2323 iwmmx_mbuiltin ("wrorw", v2si_ftype_v2si_di, WRORW);
2324 iwmmx_mbuiltin ("wrord", di_ftype_di_di, WRORD);
2325 iwmmx_mbuiltin ("wrorhi", v4hi_ftype_v4hi_int, WRORHI);
2326 iwmmx_mbuiltin ("wrorwi", v2si_ftype_v2si_int, WRORWI);
2327 iwmmx_mbuiltin ("wrordi", di_ftype_di_int, WRORDI);
2329 iwmmx_mbuiltin ("wshufh", v4hi_ftype_v4hi_int, WSHUFH);
2331 iwmmx_mbuiltin ("wsadb", v2si_ftype_v2si_v8qi_v8qi, WSADB);
2332 iwmmx_mbuiltin ("wsadh", v2si_ftype_v2si_v4hi_v4hi, WSADH);
2333 iwmmx_mbuiltin ("wmadds", v2si_ftype_v4hi_v4hi, WMADDS);
2334 iwmmx2_mbuiltin ("wmaddsx", v2si_ftype_v4hi_v4hi, WMADDSX);
2335 iwmmx2_mbuiltin ("wmaddsn", v2si_ftype_v4hi_v4hi, WMADDSN);
2336 iwmmx_mbuiltin ("wmaddu", v2si_ftype_v4hi_v4hi, WMADDU);
2337 iwmmx2_mbuiltin ("wmaddux", v2si_ftype_v4hi_v4hi, WMADDUX);
2338 iwmmx2_mbuiltin ("wmaddun", v2si_ftype_v4hi_v4hi, WMADDUN);
2339 iwmmx_mbuiltin ("wsadbz", v2si_ftype_v8qi_v8qi, WSADBZ);
2340 iwmmx_mbuiltin ("wsadhz", v2si_ftype_v4hi_v4hi, WSADHZ);
2342 iwmmx_mbuiltin ("textrmsb", int_ftype_v8qi_int, TEXTRMSB);
2343 iwmmx_mbuiltin ("textrmsh", int_ftype_v4hi_int, TEXTRMSH);
2344 iwmmx_mbuiltin ("textrmsw", int_ftype_v2si_int, TEXTRMSW);
2345 iwmmx_mbuiltin ("textrmub", int_ftype_v8qi_int, TEXTRMUB);
2346 iwmmx_mbuiltin ("textrmuh", int_ftype_v4hi_int, TEXTRMUH);
2347 iwmmx_mbuiltin ("textrmuw", int_ftype_v2si_int, TEXTRMUW);
2348 iwmmx_mbuiltin ("tinsrb", v8qi_ftype_v8qi_int_int, TINSRB);
2349 iwmmx_mbuiltin ("tinsrh", v4hi_ftype_v4hi_int_int, TINSRH);
2350 iwmmx_mbuiltin ("tinsrw", v2si_ftype_v2si_int_int, TINSRW);
2352 iwmmx_mbuiltin ("waccb", di_ftype_v8qi, WACCB);
2353 iwmmx_mbuiltin ("wacch", di_ftype_v4hi, WACCH);
2354 iwmmx_mbuiltin ("waccw", di_ftype_v2si, WACCW);
2356 iwmmx_mbuiltin ("tmovmskb", int_ftype_v8qi, TMOVMSKB);
2357 iwmmx_mbuiltin ("tmovmskh", int_ftype_v4hi, TMOVMSKH);
2358 iwmmx_mbuiltin ("tmovmskw", int_ftype_v2si, TMOVMSKW);
2360 iwmmx2_mbuiltin ("waddbhusm", v8qi_ftype_v4hi_v8qi, WADDBHUSM);
2361 iwmmx2_mbuiltin ("waddbhusl", v8qi_ftype_v4hi_v8qi, WADDBHUSL);
2363 iwmmx_mbuiltin ("wpackhss", v8qi_ftype_v4hi_v4hi, WPACKHSS);
2364 iwmmx_mbuiltin ("wpackhus", v8qi_ftype_v4hi_v4hi, WPACKHUS);
2365 iwmmx_mbuiltin ("wpackwus", v4hi_ftype_v2si_v2si, WPACKWUS);
2366 iwmmx_mbuiltin ("wpackwss", v4hi_ftype_v2si_v2si, WPACKWSS);
2367 iwmmx_mbuiltin ("wpackdus", v2si_ftype_di_di, WPACKDUS);
2368 iwmmx_mbuiltin ("wpackdss", v2si_ftype_di_di, WPACKDSS);
2370 iwmmx_mbuiltin ("wunpckehub", v4hi_ftype_v8qi, WUNPCKEHUB);
2371 iwmmx_mbuiltin ("wunpckehuh", v2si_ftype_v4hi, WUNPCKEHUH);
2372 iwmmx_mbuiltin ("wunpckehuw", di_ftype_v2si, WUNPCKEHUW);
2373 iwmmx_mbuiltin ("wunpckehsb", v4hi_ftype_v8qi, WUNPCKEHSB);
2374 iwmmx_mbuiltin ("wunpckehsh", v2si_ftype_v4hi, WUNPCKEHSH);
2375 iwmmx_mbuiltin ("wunpckehsw", di_ftype_v2si, WUNPCKEHSW);
2376 iwmmx_mbuiltin ("wunpckelub", v4hi_ftype_v8qi, WUNPCKELUB);
2377 iwmmx_mbuiltin ("wunpckeluh", v2si_ftype_v4hi, WUNPCKELUH);
2378 iwmmx_mbuiltin ("wunpckeluw", di_ftype_v2si, WUNPCKELUW);
2379 iwmmx_mbuiltin ("wunpckelsb", v4hi_ftype_v8qi, WUNPCKELSB);
2380 iwmmx_mbuiltin ("wunpckelsh", v2si_ftype_v4hi, WUNPCKELSH);
2381 iwmmx_mbuiltin ("wunpckelsw", di_ftype_v2si, WUNPCKELSW);
2383 iwmmx_mbuiltin ("wmacs", di_ftype_di_v4hi_v4hi, WMACS);
2384 iwmmx_mbuiltin ("wmacsz", di_ftype_v4hi_v4hi, WMACSZ);
2385 iwmmx_mbuiltin ("wmacu", di_ftype_di_v4hi_v4hi, WMACU);
2386 iwmmx_mbuiltin ("wmacuz", di_ftype_v4hi_v4hi, WMACUZ);
2388 iwmmx_mbuiltin ("walign", v8qi_ftype_v8qi_v8qi_int, WALIGNI);
2389 iwmmx_mbuiltin ("tmia", di_ftype_di_int_int, TMIA);
2390 iwmmx_mbuiltin ("tmiaph", di_ftype_di_int_int, TMIAPH);
2391 iwmmx_mbuiltin ("tmiabb", di_ftype_di_int_int, TMIABB);
2392 iwmmx_mbuiltin ("tmiabt", di_ftype_di_int_int, TMIABT);
2393 iwmmx_mbuiltin ("tmiatb", di_ftype_di_int_int, TMIATB);
2394 iwmmx_mbuiltin ("tmiatt", di_ftype_di_int_int, TMIATT);
2396 iwmmx2_mbuiltin ("wabsb", v8qi_ftype_v8qi, WABSB);
2397 iwmmx2_mbuiltin ("wabsh", v4hi_ftype_v4hi, WABSH);
2398 iwmmx2_mbuiltin ("wabsw", v2si_ftype_v2si, WABSW);
2400 iwmmx2_mbuiltin ("wqmiabb", v2si_ftype_v2si_v4hi_v4hi, WQMIABB);
2401 iwmmx2_mbuiltin ("wqmiabt", v2si_ftype_v2si_v4hi_v4hi, WQMIABT);
2402 iwmmx2_mbuiltin ("wqmiatb", v2si_ftype_v2si_v4hi_v4hi, WQMIATB);
2403 iwmmx2_mbuiltin ("wqmiatt", v2si_ftype_v2si_v4hi_v4hi, WQMIATT);
2405 iwmmx2_mbuiltin ("wqmiabbn", v2si_ftype_v2si_v4hi_v4hi, WQMIABBN);
2406 iwmmx2_mbuiltin ("wqmiabtn", v2si_ftype_v2si_v4hi_v4hi, WQMIABTN);
2407 iwmmx2_mbuiltin ("wqmiatbn", v2si_ftype_v2si_v4hi_v4hi, WQMIATBN);
2408 iwmmx2_mbuiltin ("wqmiattn", v2si_ftype_v2si_v4hi_v4hi, WQMIATTN);
2410 iwmmx2_mbuiltin ("wmiabb", di_ftype_di_v4hi_v4hi, WMIABB);
2411 iwmmx2_mbuiltin ("wmiabt", di_ftype_di_v4hi_v4hi, WMIABT);
2412 iwmmx2_mbuiltin ("wmiatb", di_ftype_di_v4hi_v4hi, WMIATB);
2413 iwmmx2_mbuiltin ("wmiatt", di_ftype_di_v4hi_v4hi, WMIATT);
2415 iwmmx2_mbuiltin ("wmiabbn", di_ftype_di_v4hi_v4hi, WMIABBN);
2416 iwmmx2_mbuiltin ("wmiabtn", di_ftype_di_v4hi_v4hi, WMIABTN);
2417 iwmmx2_mbuiltin ("wmiatbn", di_ftype_di_v4hi_v4hi, WMIATBN);
2418 iwmmx2_mbuiltin ("wmiattn", di_ftype_di_v4hi_v4hi, WMIATTN);
2420 iwmmx2_mbuiltin ("wmiawbb", di_ftype_di_v2si_v2si, WMIAWBB);
2421 iwmmx2_mbuiltin ("wmiawbt", di_ftype_di_v2si_v2si, WMIAWBT);
2422 iwmmx2_mbuiltin ("wmiawtb", di_ftype_di_v2si_v2si, WMIAWTB);
2423 iwmmx2_mbuiltin ("wmiawtt", di_ftype_di_v2si_v2si, WMIAWTT);
2425 iwmmx2_mbuiltin ("wmiawbbn", di_ftype_di_v2si_v2si, WMIAWBBN);
2426 iwmmx2_mbuiltin ("wmiawbtn", di_ftype_di_v2si_v2si, WMIAWBTN);
2427 iwmmx2_mbuiltin ("wmiawtbn", di_ftype_di_v2si_v2si, WMIAWTBN);
2428 iwmmx2_mbuiltin ("wmiawttn", di_ftype_di_v2si_v2si, WMIAWTTN);
2430 iwmmx2_mbuiltin ("wmerge", di_ftype_di_di_int, WMERGE);
2432 iwmmx_mbuiltin ("tbcstb", v8qi_ftype_char, TBCSTB);
2433 iwmmx_mbuiltin ("tbcsth", v4hi_ftype_short, TBCSTH);
2434 iwmmx_mbuiltin ("tbcstw", v2si_ftype_int, TBCSTW);
2436 #undef iwmmx_mbuiltin
2437 #undef iwmmx2_mbuiltin
2440 static void
2441 arm_init_fp16_builtins (void)
2443 arm_fp16_type_node = make_node (REAL_TYPE);
2444 TYPE_PRECISION (arm_fp16_type_node) = GET_MODE_PRECISION (HFmode);
2445 layout_type (arm_fp16_type_node);
2446 if (arm_fp16_format)
2447 (*lang_hooks.types.register_builtin_type) (arm_fp16_type_node,
2448 "__fp16");
2451 void
2452 arm_init_builtins (void)
2454 if (TARGET_REALLY_IWMMXT)
2455 arm_init_iwmmxt_builtins ();
2457 /* This creates the arm_simd_floatHF_type_node, so it must come before
2458 arm_init_neon_builtins, which uses it. */
2459 arm_init_fp16_builtins ();
2461 arm_init_bf16_types ();
2463 if (TARGET_MAYBE_HARD_FLOAT)
2465 tree lane_check_fpr = build_function_type_list (void_type_node,
2466 intSI_type_node,
2467 intSI_type_node,
2468 NULL);
2469 arm_builtin_decls[ARM_BUILTIN_SIMD_LANE_CHECK]
2470 = arm_general_add_builtin_function ("__builtin_arm_lane_check",
2471 lane_check_fpr,
2472 ARM_BUILTIN_SIMD_LANE_CHECK);
2473 if (TARGET_HAVE_MVE)
2474 arm_init_mve_builtins ();
2475 else
2476 arm_init_neon_builtins ();
2477 arm_init_vfp_builtins ();
2478 arm_init_crypto_builtins ();
2481 if (TARGET_CDE)
2482 arm_init_cde_builtins ();
2484 arm_init_acle_builtins ();
2486 if (TARGET_MAYBE_HARD_FLOAT)
2488 tree ftype_set_fpscr
2489 = build_function_type_list (void_type_node, unsigned_type_node, NULL);
2490 tree ftype_get_fpscr
2491 = build_function_type_list (unsigned_type_node, NULL);
2493 arm_builtin_decls[ARM_BUILTIN_GET_FPSCR]
2494 = arm_general_add_builtin_function ("__builtin_arm_get_fpscr",
2495 ftype_get_fpscr,
2496 ARM_BUILTIN_GET_FPSCR);
2497 arm_builtin_decls[ARM_BUILTIN_SET_FPSCR]
2498 = arm_general_add_builtin_function ("__builtin_arm_set_fpscr",
2499 ftype_set_fpscr,
2500 ARM_BUILTIN_SET_FPSCR);
2503 if (use_cmse)
2505 tree ftype_cmse_nonsecure_caller
2506 = build_function_type_list (unsigned_type_node, NULL);
2507 arm_builtin_decls[ARM_BUILTIN_CMSE_NONSECURE_CALLER]
2508 = arm_general_add_builtin_function ("__builtin_arm_cmse_nonsecure_caller",
2509 ftype_cmse_nonsecure_caller,
2510 ARM_BUILTIN_CMSE_NONSECURE_CALLER);
2514 /* Implement TARGET_BUILTIN_DECL for general builtins. */
2515 tree
2516 arm_general_builtin_decl (unsigned code)
2518 if (code >= ARM_BUILTIN_MAX)
2519 return error_mark_node;
2521 return arm_builtin_decls[code];
2524 /* Implement TARGET_BUILTIN_DECL. */
2525 /* Return the ARM builtin for CODE. */
2526 tree
2527 arm_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2529 unsigned subcode = code >> ARM_BUILTIN_SHIFT;
2530 switch (code & ARM_BUILTIN_CLASS)
2532 case ARM_BUILTIN_GENERAL:
2533 return arm_general_builtin_decl (subcode);
2534 case ARM_BUILTIN_MVE:
2535 return arm_mve::builtin_decl (subcode);
2536 default:
2537 gcc_unreachable ();
2541 /* Errors in the source file can cause expand_expr to return const0_rtx
2542 where we expect a vector. To avoid crashing, use one of the vector
2543 clear instructions. */
2545 static rtx
2546 safe_vector_operand (rtx x, machine_mode mode)
2548 if (x != const0_rtx)
2549 return x;
2550 x = gen_reg_rtx (mode);
2552 emit_insn (gen_iwmmxt_clrdi (mode == DImode ? x
2553 : gen_rtx_SUBREG (DImode, x, 0)));
2554 return x;
2557 /* Function to expand ternary builtins. */
2558 static rtx
2559 arm_expand_ternop_builtin (enum insn_code icode,
2560 tree exp, rtx target)
2562 rtx pat;
2563 tree arg0 = CALL_EXPR_ARG (exp, 0);
2564 tree arg1 = CALL_EXPR_ARG (exp, 1);
2565 tree arg2 = CALL_EXPR_ARG (exp, 2);
2567 rtx op0 = expand_normal (arg0);
2568 rtx op1 = expand_normal (arg1);
2569 rtx op2 = expand_normal (arg2);
2571 machine_mode tmode = insn_data[icode].operand[0].mode;
2572 machine_mode mode0 = insn_data[icode].operand[1].mode;
2573 machine_mode mode1 = insn_data[icode].operand[2].mode;
2574 machine_mode mode2 = insn_data[icode].operand[3].mode;
2576 if (VECTOR_MODE_P (mode0))
2577 op0 = safe_vector_operand (op0, mode0);
2578 if (VECTOR_MODE_P (mode1))
2579 op1 = safe_vector_operand (op1, mode1);
2580 if (VECTOR_MODE_P (mode2))
2581 op2 = safe_vector_operand (op2, mode2);
2583 if (! target
2584 || GET_MODE (target) != tmode
2585 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2586 target = gen_reg_rtx (tmode);
2588 gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
2589 && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode)
2590 && (GET_MODE (op2) == mode2 || GET_MODE (op2) == VOIDmode));
2592 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2593 op0 = copy_to_mode_reg (mode0, op0);
2594 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2595 op1 = copy_to_mode_reg (mode1, op1);
2596 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
2597 op2 = copy_to_mode_reg (mode2, op2);
2599 pat = GEN_FCN (icode) (target, op0, op1, op2);
2600 if (! pat)
2601 return 0;
2602 emit_insn (pat);
2603 return target;
2606 /* Subroutine of arm_general_expand_builtin to take care of binop insns. */
2608 static rtx
2609 arm_expand_binop_builtin (enum insn_code icode,
2610 tree exp, rtx target)
2612 rtx pat;
2613 tree arg0 = CALL_EXPR_ARG (exp, 0);
2614 tree arg1 = CALL_EXPR_ARG (exp, 1);
2615 rtx op0 = expand_normal (arg0);
2616 rtx op1 = expand_normal (arg1);
2617 machine_mode tmode = insn_data[icode].operand[0].mode;
2618 machine_mode mode0 = insn_data[icode].operand[1].mode;
2619 machine_mode mode1 = insn_data[icode].operand[2].mode;
2621 if (VECTOR_MODE_P (mode0))
2622 op0 = safe_vector_operand (op0, mode0);
2623 if (VECTOR_MODE_P (mode1))
2624 op1 = safe_vector_operand (op1, mode1);
2626 if (! target
2627 || GET_MODE (target) != tmode
2628 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2629 target = gen_reg_rtx (tmode);
2631 gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
2632 && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));
2634 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2635 op0 = copy_to_mode_reg (mode0, op0);
2636 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2637 op1 = copy_to_mode_reg (mode1, op1);
2639 pat = GEN_FCN (icode) (target, op0, op1);
2640 if (! pat)
2641 return 0;
2642 emit_insn (pat);
2643 return target;
2646 /* Subroutine of arm_general_expand_builtin to take care of unop insns. */
2648 static rtx
2649 arm_expand_unop_builtin (enum insn_code icode,
2650 tree exp, rtx target, int do_load)
2652 rtx pat;
2653 tree arg0 = CALL_EXPR_ARG (exp, 0);
2654 rtx op0 = expand_normal (arg0);
2655 machine_mode tmode = insn_data[icode].operand[0].mode;
2656 machine_mode mode0 = insn_data[icode].operand[1].mode;
2658 if (! target
2659 || GET_MODE (target) != tmode
2660 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2661 target = gen_reg_rtx (tmode);
2662 if (do_load)
2663 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
2664 else
2666 if (VECTOR_MODE_P (mode0))
2667 op0 = safe_vector_operand (op0, mode0);
2669 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2670 op0 = copy_to_mode_reg (mode0, op0);
2673 pat = GEN_FCN (icode) (target, op0);
2675 if (! pat)
2676 return 0;
2677 emit_insn (pat);
2678 return target;
2681 typedef enum {
2682 ARG_BUILTIN_COPY_TO_REG,
2683 ARG_BUILTIN_CONSTANT,
2684 ARG_BUILTIN_LANE_INDEX,
2685 ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX,
2686 ARG_BUILTIN_LANE_PAIR_INDEX,
2687 ARG_BUILTIN_LANE_QUADTUP_INDEX,
2688 ARG_BUILTIN_NEON_MEMORY,
2689 ARG_BUILTIN_MEMORY,
2690 ARG_BUILTIN_STOP
2691 } builtin_arg;
2694 /* EXP is a pointer argument to a Neon load or store intrinsic. Derive
2695 and return an expression for the accessed memory.
2697 The intrinsic function operates on a block of registers that has
2698 mode REG_MODE. This block contains vectors of mode VECTOR_MODE. The
2699 function references the memory at EXP of type TYPE and in mode
2700 MEM_MODE; this mode may be BLKmode if no more suitable mode is
2701 available. */
2703 static tree
2704 neon_dereference_pointer (tree exp, tree type, machine_mode mem_mode,
2705 machine_mode reg_mode,
2706 machine_mode vector_mode)
2708 HOST_WIDE_INT reg_size, vector_size, nvectors, nelems;
2709 tree elem_type, upper_bound, array_type;
2711 /* Work out the size of the register block in bytes. */
2712 reg_size = GET_MODE_SIZE (reg_mode);
2714 /* Work out the size of each vector in bytes. */
2715 vector_size = GET_MODE_SIZE (vector_mode);
2717 /* Work out how many vectors there are. */
2718 gcc_assert (reg_size % vector_size == 0);
2719 nvectors = reg_size / vector_size;
2721 /* Work out the type of each element. */
2722 gcc_assert (POINTER_TYPE_P (type));
2723 elem_type = TREE_TYPE (type);
2725 /* Work out how many elements are being loaded or stored.
2726 MEM_MODE == REG_MODE implies a one-to-one mapping between register
2727 and memory elements; anything else implies a lane load or store. */
2728 if (mem_mode == reg_mode)
2729 nelems = vector_size * nvectors / int_size_in_bytes (elem_type);
2730 else
2731 nelems = nvectors;
2733 /* Create a type that describes the full access. */
2734 upper_bound = build_int_cst (size_type_node, nelems - 1);
2735 array_type = build_array_type (elem_type, build_index_type (upper_bound));
2737 /* Dereference EXP using that type. */
2738 return fold_build2 (MEM_REF, array_type, exp,
2739 build_int_cst (build_pointer_type (array_type), 0));
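/* A worked example (illustrative; the exact modes come from the insn
   patterns): for a full vld2_f32 of two 64-bit vectors, REG_MODE and
   MEM_MODE are both TImode and VECTOR_MODE is V2SFmode, so

     reg_size = 16, vector_size = 8, nvectors = 2, elem_type = float,
     nelems   = 8 * 2 / 4 = 4

   and the access is described as float[4].  For the corresponding lane
   load, MEM_MODE differs from REG_MODE and nelems = nvectors = 2, i.e. one
   element per loaded vector.  */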
2742 /* EXP is a pointer argument to a vector scatter store intrinsic.
2744 Consider the following example:
2745 VSTRW<v>.<dt> Qd, [Qm{, #+/-<imm>}]!
2746 When <Qm> is used as the base register for the target address,
2747 this function is used to derive and return an expression for the
2748 accessed memory.
2750 The intrinsic function operates on a block of registers that has mode
2751 REG_MODE. This block contains vectors of mode VECTOR_MODE. The function
2752 references the memory at EXP of type TYPE. */
2755 static tree
2756 mve_dereference_pointer (tree exp, tree type, machine_mode reg_mode,
2757 machine_mode vector_mode)
2759 HOST_WIDE_INT reg_size, vector_size, nelems;
2760 tree elem_type, upper_bound, array_type;
2762 /* Work out the size of each vector in bytes. */
2763 vector_size = GET_MODE_SIZE (vector_mode);
2765 /* Work out the size of the register block in bytes. */
2766 reg_size = GET_MODE_SIZE (reg_mode);
2768 /* Work out the type of each element. */
2769 gcc_assert (POINTER_TYPE_P (type));
2770 elem_type = TREE_TYPE (type);
2772 nelems = reg_size / vector_size;
2774 /* Create a type that describes the full access. */
2775 upper_bound = build_int_cst (size_type_node, nelems - 1);
2776 array_type = build_array_type (elem_type, build_index_type (upper_bound));
2778 /* Dereference EXP using that type. */
2779 return fold_build2 (MEM_REF, array_type, exp,
2780 build_int_cst (build_pointer_type (array_type), 0));
2783 /* Subroutine of arm_general_expand_builtin_1: expand the arguments ARGS of the builtin FCODE, emit the pattern given by ICODE, and return the result in TARGET iff HAVE_RETVAL is nonzero. */
2784 static rtx
2785 arm_general_expand_builtin_args (rtx target, machine_mode map_mode, int fcode,
2786 int icode, int have_retval, tree exp,
2787 builtin_arg *args)
2789 rtx pat;
2790 tree arg[SIMD_MAX_BUILTIN_ARGS];
2791 rtx op[SIMD_MAX_BUILTIN_ARGS];
2792 machine_mode tmode = insn_data[icode].operand[0].mode;
2793 machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
2794 tree formals;
2795 int argc = 0;
2796 rtx_insn * insn;
2798 if (have_retval
2799 && (!target
2800 || GET_MODE (target) != tmode
2801 || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
2802 target = gen_reg_rtx (tmode);
2804 formals = TYPE_ARG_TYPES (TREE_TYPE (arm_builtin_decls[fcode]));
2806 for (;;)
2808 builtin_arg thisarg = args[argc];
2810 if (thisarg == ARG_BUILTIN_STOP)
2811 break;
2812 else
2814 int opno = argc + have_retval;
2815 arg[argc] = CALL_EXPR_ARG (exp, argc);
2816 mode[argc] = insn_data[icode].operand[opno].mode;
2817 if (thisarg == ARG_BUILTIN_NEON_MEMORY)
2819 machine_mode other_mode
2820 = insn_data[icode].operand[1 - opno].mode;
2821 if (TARGET_HAVE_MVE && mode[argc] != other_mode)
2823 arg[argc] = mve_dereference_pointer (arg[argc],
2824 TREE_VALUE (formals),
2825 other_mode, map_mode);
2827 else
2828 arg[argc] = neon_dereference_pointer (arg[argc],
2829 TREE_VALUE (formals),
2830 mode[argc], other_mode,
2831 map_mode);
2834 /* Use EXPAND_MEMORY for ARG_BUILTIN_MEMORY and
2835 ARG_BUILTIN_NEON_MEMORY to ensure a MEM_P is returned. */
2836 op[argc] = expand_expr (arg[argc], NULL_RTX, VOIDmode,
2837 ((thisarg == ARG_BUILTIN_MEMORY
2838 || thisarg == ARG_BUILTIN_NEON_MEMORY)
2839 ? EXPAND_MEMORY : EXPAND_NORMAL));
2841 switch (thisarg)
2843 case ARG_BUILTIN_MEMORY:
2844 case ARG_BUILTIN_COPY_TO_REG:
2845 if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
2846 op[argc] = convert_memory_address (Pmode, op[argc]);
2848 /* MVE uses mve_pred16_t (aka HImode) for vectors of
2849 predicates, but internally we use V16BI/V8BI/V4BI/V2QI for
2850 MVE predicate modes. */
2851 if (TARGET_HAVE_MVE && VALID_MVE_PRED_MODE (mode[argc]))
2852 op[argc] = gen_lowpart (mode[argc], op[argc]);
2854 gcc_assert (GET_MODE (op[argc]) == mode[argc]
2855 || (GET_MODE(op[argc]) == E_VOIDmode
2856 && CONSTANT_P (op[argc])));
2857 if (!(*insn_data[icode].operand[opno].predicate)
2858 (op[argc], mode[argc]))
2859 op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
2860 break;
2862 case ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX:
2863 gcc_assert (argc > 1);
2864 if (CONST_INT_P (op[argc]))
2866 neon_lane_bounds (op[argc], 0,
2867 GET_MODE_NUNITS (map_mode), exp);
2868 /* Keep to GCC-vector-extension lane indices in the RTL. */
2869 op[argc] =
2870 GEN_INT (NEON_ENDIAN_LANE_N (map_mode, INTVAL (op[argc])));
2872 goto constant_arg;
2874 case ARG_BUILTIN_LANE_INDEX:
2875 /* Previous argument must be a vector, which this indexes. */
2876 gcc_assert (argc > 0);
2877 if (CONST_INT_P (op[argc]))
2879 machine_mode vmode = mode[argc - 1];
2880 neon_lane_bounds (op[argc], 0, GET_MODE_NUNITS (vmode), exp);
2882 /* If the lane index isn't a constant then error out. */
2883 goto constant_arg;
2885 case ARG_BUILTIN_LANE_PAIR_INDEX:
2886 /* Previous argument must be a vector, which this indexes. The
2887 indexing will always select i and i+1 out of the vector, which
2888 puts a limit on i. */
2889 gcc_assert (argc > 0);
2890 if (CONST_INT_P (op[argc]))
2892 machine_mode vmode = mode[argc - 1];
2893 neon_lane_bounds (op[argc], 0,
2894 GET_MODE_NUNITS (vmode) / 2, exp);
2896 /* If the lane index isn't a constant then error out. */
2897 goto constant_arg;
2899 case ARG_BUILTIN_LANE_QUADTUP_INDEX:
2900 /* Previous argument must be a vector, which this indexes. */
2901 gcc_assert (argc > 0);
2902 if (CONST_INT_P (op[argc]))
2904 machine_mode vmode = mode[argc - 1];
2905 neon_lane_bounds (op[argc], 0,
2906 GET_MODE_NUNITS (vmode) / 4, exp);
2908 /* If the lane index isn't a constant then error out. */
2909 goto constant_arg;
2911 case ARG_BUILTIN_CONSTANT:
2912 constant_arg:
2913 if (!(*insn_data[icode].operand[opno].predicate)
2914 (op[argc], mode[argc]))
2916 if (IN_RANGE (fcode, ARM_BUILTIN_CDE_PATTERN_START,
2917 ARM_BUILTIN_CDE_PATTERN_END))
2919 if (argc == 0)
2921 unsigned int cp_bit = (CONST_INT_P (op[argc])
2922 ? UINTVAL (op[argc]) : -1);
2923 if (IN_RANGE (cp_bit, 0, ARM_CDE_CONST_COPROC))
2924 error_at (EXPR_LOCATION (exp),
2925 "coprocessor %d is not enabled "
2926 "with +cdecp%d", cp_bit, cp_bit);
2927 else
2928 error_at (EXPR_LOCATION (exp),
2929 "coproc must be a constant immediate in "
2930 "range [0-%d] enabled with %<+cdecp<N>%>",
2931 ARM_CDE_CONST_COPROC);
2933 else
2934 /* Here we mention the builtin name to follow the same
2935 format that the C/C++ frontends use for referencing
2936 a given argument index. */
2937 error_at (EXPR_LOCATION (exp),
2938 "argument %d to %qE must be a constant "
2939 "immediate in range [0-%d]", argc + 1,
2940 arm_builtin_decls[fcode],
2941 cde_builtin_data[fcode -
2942 ARM_BUILTIN_CDE_PATTERN_START].imm_max);
2944 else
2945 error_at (EXPR_LOCATION (exp),
2946 "argument %d must be a constant immediate",
2947 argc + 1);
2948 /* We have failed to expand the pattern and are now safely
2949 in invalid code. But the mid-end will still try to
2950 build an assignment for this node while it expands,
2951 before stopping for the error, so just pass back
2952 TARGET to ensure a valid assignment. */
2953 return target;
2955 break;
2957 case ARG_BUILTIN_NEON_MEMORY:
2958 /* Check if expand failed. */
2959 if (op[argc] == const0_rtx)
2960 return 0;
2961 gcc_assert (MEM_P (op[argc]));
2962 PUT_MODE (op[argc], mode[argc]);
2963 /* ??? arm_neon.h uses the same built-in functions for signed
2964 and unsigned accesses, casting where necessary. This isn't
2965 alias safe. */
2966 set_mem_alias_set (op[argc], 0);
2967 if (!(*insn_data[icode].operand[opno].predicate)
2968 (op[argc], mode[argc]))
2969 op[argc] = (replace_equiv_address
2970 (op[argc],
2971 copy_to_mode_reg (Pmode, XEXP (op[argc], 0))));
2972 break;
2974 case ARG_BUILTIN_STOP:
2975 gcc_unreachable ();
2978 argc++;
2982 if (have_retval)
2983 switch (argc)
2985 case 0:
2986 pat = GEN_FCN (icode) (target);
2987 break;
2988 case 1:
2989 pat = GEN_FCN (icode) (target, op[0]);
2990 break;
2992 case 2:
2993 pat = GEN_FCN (icode) (target, op[0], op[1]);
2994 break;
2996 case 3:
2997 pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
2998 break;
3000 case 4:
3001 pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
3002 break;
3004 case 5:
3005 pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
3006 break;
3008 case 6:
3009 pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4], op[5]);
3010 break;
3012 default:
3013 gcc_unreachable ();
3015 else
3016 switch (argc)
3018 case 1:
3019 pat = GEN_FCN (icode) (op[0]);
3020 break;
3022 case 2:
3023 pat = GEN_FCN (icode) (op[0], op[1]);
3024 break;
3026 case 3:
3027 pat = GEN_FCN (icode) (op[0], op[1], op[2]);
3028 break;
3030 case 4:
3031 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
3032 break;
3034 case 5:
3035 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
3036 break;
3038 case 6:
3039 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
3040 break;
3042 default:
3043 gcc_unreachable ();
3046 if (!pat)
3047 return 0;
3049 /* Check whether our current target implements the pattern chosen for this
3050 builtin and error out if not. */
3051 start_sequence ();
3052 emit_insn (pat);
3053 insn = get_insns ();
3054 end_sequence ();
3056 if (recog_memoized (insn) < 0)
3057 error ("this builtin is not supported for this target");
3058 else
3059 emit_insn (insn);
3061 if (TARGET_HAVE_MVE && VALID_MVE_PRED_MODE (tmode))
3063 rtx HItarget = gen_reg_rtx (HImode);
3064 emit_move_insn (HItarget, gen_lowpart (HImode, target));
3065 return HItarget;
3068 return target;
3071 /* Expand a general builtin. These builtins are "special" because they don't
3072 have symbolic constants defined per-instruction or per instruction-variant.
3073 Instead, the required info is looked up in the ARM_BUILTIN_DATA record that
3074 is passed into the function. */
3076 static rtx
3077 arm_general_expand_builtin_1 (int fcode, tree exp, rtx target,
3078 arm_builtin_datum *d)
3080 enum insn_code icode = d->code;
3081 builtin_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
3082 int num_args = insn_data[d->code].n_operands;
3083 int is_void = 0;
3084 int k;
3085 bool neon = false;
3086 bool mve = false;
3088 if (IN_RANGE (fcode, ARM_BUILTIN_VFP_BASE, ARM_BUILTIN_ACLE_BASE - 1))
3089 neon = true;
3091 if (IN_RANGE (fcode, ARM_BUILTIN_MVE_BASE, ARM_BUILTIN_MAX - 1))
3092 mve = true;
3094 is_void = !!(d->qualifiers[0] & qualifier_void);
3096 num_args += is_void;
3098 for (k = 1; k < num_args; k++)
3100 /* We have four arrays of data, each indexed in a different fashion.
3101 qualifiers - element 0 always describes the function return type.
3102 operands - element 0 is either the operand for return value (if
3103 the function has a non-void return type) or the operand for the
3104 first argument.
3105 expr_args - element 0 always holds the first argument.
3106 args - element 0 is always used for the return type. */
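/* A worked example of the index bookkeeping (for a hypothetical builtin):
   for a non-void binary builtin, is_void == 0, so at k == 1

     qualifiers_k == 1   first argument's qualifier
     operands_k   == 1   insn operand 1 (operand 0 is the result)
     expr_args_k  == 0   first CALL_EXPR argument

   whereas for a void store builtin, is_void == 1 and operands_k == k - 1,
   since the RTL pattern has no result operand.  */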
3107 int qualifiers_k = k;
3108 int operands_k = k - is_void;
3109 int expr_args_k = k - 1;
3111 if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
3112 args[k] = ARG_BUILTIN_LANE_INDEX;
3113 else if (d->qualifiers[qualifiers_k] & qualifier_lane_pair_index)
3114 args[k] = ARG_BUILTIN_LANE_PAIR_INDEX;
3115 else if (d->qualifiers[qualifiers_k] & qualifier_lane_quadtup_index)
3116 args[k] = ARG_BUILTIN_LANE_QUADTUP_INDEX;
3117 else if (d->qualifiers[qualifiers_k] & qualifier_struct_load_store_lane_index)
3118 args[k] = ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX;
3119 else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
3120 args[k] = ARG_BUILTIN_CONSTANT;
3121 else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
3123 rtx arg
3124 = expand_normal (CALL_EXPR_ARG (exp,
3125 (expr_args_k)));
3126 /* Handle constants only if the predicate allows it. */
3127 bool op_const_int_p =
3128 (CONST_INT_P (arg)
3129 && (*insn_data[icode].operand[operands_k].predicate)
3130 (arg, insn_data[icode].operand[operands_k].mode));
3131 args[k] = op_const_int_p ? ARG_BUILTIN_CONSTANT : ARG_BUILTIN_COPY_TO_REG;
3133 else if (d->qualifiers[qualifiers_k] & qualifier_pointer)
3135 if (neon || mve)
3136 args[k] = ARG_BUILTIN_NEON_MEMORY;
3137 else
3138 args[k] = ARG_BUILTIN_MEMORY;
3140 else
3141 args[k] = ARG_BUILTIN_COPY_TO_REG;
3143 args[k] = ARG_BUILTIN_STOP;
3145 /* The interface to arm_general_expand_builtin_args expects a 0 if
3146 the function is void, and a 1 if it is not. */
3147 return arm_general_expand_builtin_args
3148 (target, d->mode, fcode, icode, !is_void, exp,
3149 &args[1]);
3152 /* Expand an ACLE builtin, i.e. those registered only if their respective
3153 target constraints are met. This check happens within
3154 arm_general_expand_builtin_args. */
3156 static rtx
3157 arm_expand_acle_builtin (int fcode, tree exp, rtx target)
3159 if (fcode == ARM_BUILTIN_SAT_IMM_CHECK)
3161 /* Check the saturation immediate bounds. */
3163 rtx min_sat = expand_normal (CALL_EXPR_ARG (exp, 1));
3164 rtx max_sat = expand_normal (CALL_EXPR_ARG (exp, 2));
3165 gcc_assert (CONST_INT_P (min_sat));
3166 gcc_assert (CONST_INT_P (max_sat));
3167 rtx sat_imm = expand_normal (CALL_EXPR_ARG (exp, 0));
3168 if (CONST_INT_P (sat_imm))
3170 if (!IN_RANGE (UINTVAL (sat_imm), UINTVAL (min_sat), UINTVAL (max_sat)))
3171 error_at (EXPR_LOCATION (exp),
3172 "saturation bit range must be in the range [%wd, %wd]",
3173 UINTVAL (min_sat), UINTVAL (max_sat));
3175 else
3176 error_at (EXPR_LOCATION (exp),
3177 "saturation bit range must be a constant immediate");
3178 /* Don't generate any RTL. */
3179 return const0_rtx;
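/* Illustrative only (a sketch of the kind of wrapper that uses this builtin;
   the real wrappers live in the installed ACLE headers): a saturation
   intrinsic with an immediate bit-position argument can be guarded as

     __builtin_sat_imm_check (__sat, 1, 32);

   so that an out-of-range or non-constant __sat is diagnosed here with the
   location of the user's call, while no RTL is generated for the check
   itself.  */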
3182 gcc_assert (fcode != ARM_BUILTIN_CDE_BASE);
3183 arm_builtin_datum *d
3184 = (fcode < ARM_BUILTIN_CDE_BASE)
3185 ? &acle_builtin_data[fcode - ARM_BUILTIN_ACLE_PATTERN_START]
3186 : &cde_builtin_data[fcode - ARM_BUILTIN_CDE_PATTERN_START].base;
3188 return arm_general_expand_builtin_1 (fcode, exp, target, d);
3191 /* Expand an MVE builtin, i.e. those registered only if their respective
3192 target constraints are met. This check happens within
3193 arm_general_expand_builtin. */
3195 static rtx
3196 arm_expand_mve_builtin (int fcode, tree exp, rtx target)
3198 if (fcode >= ARM_BUILTIN_MVE_BASE && !TARGET_HAVE_MVE)
3200 fatal_error (input_location,
3201 "You must enable MVE instructions"
3202 " to use these intrinsics");
3203 return const0_rtx;
3206 arm_builtin_datum *d
3207 = &mve_builtin_data[fcode - ARM_BUILTIN_MVE_PATTERN_START];
3209 return arm_general_expand_builtin_1 (fcode, exp, target, d);
3212 /* Expand a Neon builtin, i.e. those registered only if TARGET_NEON holds.
3213 Most of these are "special" because they don't have symbolic
3214 constants defined per-instruction or per instruction-variant. Instead, the
3215 required info is looked up in the table neon_builtin_data. */
3217 static rtx
3218 arm_expand_neon_builtin (int fcode, tree exp, rtx target)
3220 if (fcode >= ARM_BUILTIN_NEON_BASE && ! TARGET_NEON)
3222 fatal_error (input_location,
3223 "You must enable NEON instructions"
3224 " (e.g. %<-mfloat-abi=softfp%> %<-mfpu=neon%>)"
3225 " to use these intrinsics.");
3226 return const0_rtx;
3229 arm_builtin_datum *d
3230 = &neon_builtin_data[fcode - ARM_BUILTIN_NEON_PATTERN_START];
3232 return arm_general_expand_builtin_1 (fcode, exp, target, d);
3235 /* Expand a VFP builtin. These builtins are treated like
3236 neon builtins except that the data is looked up in table
3237 VFP_BUILTIN_DATA. */
3239 static rtx
3240 arm_expand_vfp_builtin (int fcode, tree exp, rtx target)
3242 if (fcode >= ARM_BUILTIN_VFP_BASE && ! TARGET_HARD_FLOAT)
3244 fatal_error (input_location,
3245 "You must enable VFP instructions"
3246 " to use these intrinsics.");
3247 return const0_rtx;
3250 arm_builtin_datum *d
3251 = &vfp_builtin_data[fcode - ARM_BUILTIN_VFP_PATTERN_START];
3253 return arm_general_expand_builtin_1 (fcode, exp, target, d);
3256 /* Implement TARGET_EXPAND_BUILTIN for general builtins. */
3257 rtx
3258 arm_general_expand_builtin (unsigned int fcode,
3259 tree exp,
3260 rtx target,
3261 int ignore ATTRIBUTE_UNUSED)
3263 const struct builtin_description * d;
3264 enum insn_code icode;
3265 tree arg0;
3266 tree arg1;
3267 tree arg2;
3268 rtx op0;
3269 rtx op1;
3270 rtx op2;
3271 rtx pat;
3272 size_t i;
3273 machine_mode tmode;
3274 machine_mode mode0;
3275 machine_mode mode1;
3276 machine_mode mode2;
3277 int opint;
3278 int selector;
3279 int mask;
3280 int imm;
3282 if (fcode == ARM_BUILTIN_SIMD_LANE_CHECK)
3284 /* Builtin is only to check bounds of the lane passed to some intrinsics
3285 that are implemented with gcc vector extensions in arm_neon.h. */
3287 tree nlanes = CALL_EXPR_ARG (exp, 0);
3288 gcc_assert (TREE_CODE (nlanes) == INTEGER_CST);
3289 rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 1));
3290 if (CONST_INT_P (lane_idx))
3291 neon_lane_bounds (lane_idx, 0, TREE_INT_CST_LOW (nlanes), exp);
3292 else
3293 error_at (EXPR_LOCATION (exp),
3294 "lane index must be a constant immediate");
3295 /* Don't generate any RTL. */
3296 return const0_rtx;
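/* Illustrative only (a sketch; the exact wrappers live in arm_neon.h): a
   lane intrinsic implemented with GCC vector extensions, say a four-lane
   vset_lane variant, can guard its index as

     __builtin_arm_lane_check (4, __lane);

   which diagnoses a bad or non-constant lane number at the user's call site
   and then expands to no code at all.  */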
3298 if (fcode >= ARM_BUILTIN_MVE_BASE)
3299 return arm_expand_mve_builtin (fcode, exp, target);
3301 if (fcode >= ARM_BUILTIN_ACLE_BASE)
3302 return arm_expand_acle_builtin (fcode, exp, target);
3304 if (fcode >= ARM_BUILTIN_NEON_BASE)
3305 return arm_expand_neon_builtin (fcode, exp, target);
3307 if (fcode >= ARM_BUILTIN_VFP_BASE)
3308 return arm_expand_vfp_builtin (fcode, exp, target);
3310 /* Check in the context of the function making the call whether the
3311 builtin is supported. */
3312 if (fcode >= ARM_BUILTIN_CRYPTO_BASE
3313 && (!TARGET_CRYPTO || !TARGET_HARD_FLOAT))
3315 fatal_error (input_location,
3316 "You must enable crypto instructions"
3317 " (e.g. include %<-mfloat-abi=softfp%> "
3318 "%<-mfpu=crypto-neon%>)"
3319 " to use these intrinsics.");
3320 return const0_rtx;
3323 switch (fcode)
3325 case ARM_BUILTIN_GET_FPSCR_NZCVQC:
3326 case ARM_BUILTIN_SET_FPSCR_NZCVQC:
3327 if (fcode == ARM_BUILTIN_GET_FPSCR_NZCVQC)
3329 icode = CODE_FOR_get_fpscr_nzcvqc;
3330 target = gen_reg_rtx (SImode);
3331 emit_insn (GEN_FCN (icode) (target));
3332 return target;
3334 else
3336 icode = CODE_FOR_set_fpscr_nzcvqc;
3337 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
3338 emit_insn (GEN_FCN (icode) (force_reg (SImode, op0)));
3339 return NULL_RTX;
3342 case ARM_BUILTIN_GET_FPSCR:
3343 case ARM_BUILTIN_SET_FPSCR:
3344 if (fcode == ARM_BUILTIN_GET_FPSCR)
3346 icode = CODE_FOR_get_fpscr;
3347 target = gen_reg_rtx (SImode);
3348 pat = GEN_FCN (icode) (target);
3350 else
3352 target = NULL_RTX;
3353 icode = CODE_FOR_set_fpscr;
3354 arg0 = CALL_EXPR_ARG (exp, 0);
3355 op0 = expand_normal (arg0);
3356 pat = GEN_FCN (icode) (force_reg (SImode, op0));
3358 emit_insn (pat);
3359 return target;
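 /* A hedged usage sketch of the two FPSCR builtins expanded above (the same
    builtins that arm_atomic_assign_expand_fenv uses further down):

      unsigned int fpscr = __builtin_arm_get_fpscr ();
      __builtin_arm_set_fpscr (fpscr & ~0x1fu);

    The 0x1f mask clears the cumulative exception flag bits, matching
    ARM_FE_ALL_EXCEPT below; this is an illustration, not a replacement for
    <fenv.h>.  */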
3361 case ARM_BUILTIN_CMSE_NONSECURE_CALLER:
3362 target = gen_reg_rtx (SImode);
3363 op0 = arm_return_addr (0, NULL_RTX);
3364 emit_insn (gen_andsi3 (target, op0, const1_rtx));
3365 op1 = gen_rtx_EQ (SImode, target, const0_rtx);
3366 emit_insn (gen_cstoresi4 (target, op1, target, const0_rtx));
3367 return target;
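 /* Hedged illustration of the expansion above: the result is 1 exactly when
    bit 0 of the return address is clear, which is how the ACLE
    cmse_nonsecure_caller () intrinsic (presumably layered on this builtin in
    arm_cmse.h) reports that a cmse_nonsecure_entry function was called from
    the non-secure state.  */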
3369 case ARM_BUILTIN_TEXTRMSB:
3370 case ARM_BUILTIN_TEXTRMUB:
3371 case ARM_BUILTIN_TEXTRMSH:
3372 case ARM_BUILTIN_TEXTRMUH:
3373 case ARM_BUILTIN_TEXTRMSW:
3374 case ARM_BUILTIN_TEXTRMUW:
3375 icode = (fcode == ARM_BUILTIN_TEXTRMSB ? CODE_FOR_iwmmxt_textrmsb
3376 : fcode == ARM_BUILTIN_TEXTRMUB ? CODE_FOR_iwmmxt_textrmub
3377 : fcode == ARM_BUILTIN_TEXTRMSH ? CODE_FOR_iwmmxt_textrmsh
3378 : fcode == ARM_BUILTIN_TEXTRMUH ? CODE_FOR_iwmmxt_textrmuh
3379 : CODE_FOR_iwmmxt_textrmw);
3381 arg0 = CALL_EXPR_ARG (exp, 0);
3382 arg1 = CALL_EXPR_ARG (exp, 1);
3383 op0 = expand_normal (arg0);
3384 op1 = expand_normal (arg1);
3385 tmode = insn_data[icode].operand[0].mode;
3386 mode0 = insn_data[icode].operand[1].mode;
3387 mode1 = insn_data[icode].operand[2].mode;
3389 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3390 op0 = copy_to_mode_reg (mode0, op0);
3391 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3393 /* @@@ better error message */
3394 error ("selector must be an immediate");
3395 return gen_reg_rtx (tmode);
3398 opint = INTVAL (op1);
3399 if (fcode == ARM_BUILTIN_TEXTRMSB || fcode == ARM_BUILTIN_TEXTRMUB)
3401 if (opint > 7 || opint < 0)
3402 error ("the selector must be in the range 0 to 7");
3404 else if (fcode == ARM_BUILTIN_TEXTRMSH || fcode == ARM_BUILTIN_TEXTRMUH)
3406 if (opint > 3 || opint < 0)
3407 error ("the selector must be in the range 0 to 3");
3409 else /* ARM_BUILTIN_TEXTRMSW || ARM_BUILTIN_TEXTRMUW. */
3411 if (opint > 1 || opint < 0)
3412 error ("the selector must be in the range 0 to 1");
3415 if (target == 0
3416 || GET_MODE (target) != tmode
3417 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3418 target = gen_reg_rtx (tmode);
3419 pat = GEN_FCN (icode) (target, op0, op1);
3420 if (! pat)
3421 return 0;
3422 emit_insn (pat);
3423 return target;
3425 case ARM_BUILTIN_WALIGNI:
3426 /* If op2 is an immediate, use the waligni pattern, else use walignr. */
3427 arg0 = CALL_EXPR_ARG (exp, 0);
3428 arg1 = CALL_EXPR_ARG (exp, 1);
3429 arg2 = CALL_EXPR_ARG (exp, 2);
3430 op0 = expand_normal (arg0);
3431 op1 = expand_normal (arg1);
3432 op2 = expand_normal (arg2);
3433 if (CONST_INT_P (op2))
3435 icode = CODE_FOR_iwmmxt_waligni;
3436 tmode = insn_data[icode].operand[0].mode;
3437 mode0 = insn_data[icode].operand[1].mode;
3438 mode1 = insn_data[icode].operand[2].mode;
3439 mode2 = insn_data[icode].operand[3].mode;
3440 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
3441 op0 = copy_to_mode_reg (mode0, op0);
3442 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
3443 op1 = copy_to_mode_reg (mode1, op1);
3444 gcc_assert ((*insn_data[icode].operand[3].predicate) (op2, mode2));
3445 selector = INTVAL (op2);
3446 if (selector > 7 || selector < 0)
3447 error ("the selector must be in the range 0 to 7");
3449 else
3451 icode = CODE_FOR_iwmmxt_walignr;
3452 tmode = insn_data[icode].operand[0].mode;
3453 mode0 = insn_data[icode].operand[1].mode;
3454 mode1 = insn_data[icode].operand[2].mode;
3455 mode2 = insn_data[icode].operand[3].mode;
3456 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
3457 op0 = copy_to_mode_reg (mode0, op0);
3458 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
3459 op1 = copy_to_mode_reg (mode1, op1);
3460 if (!(*insn_data[icode].operand[3].predicate) (op2, mode2))
3461 op2 = copy_to_mode_reg (mode2, op2);
3463 if (target == 0
3464 || GET_MODE (target) != tmode
3465 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
3466 target = gen_reg_rtx (tmode);
3467 pat = GEN_FCN (icode) (target, op0, op1, op2);
3468 if (!pat)
3469 return 0;
3470 emit_insn (pat);
3471 return target;
3473 case ARM_BUILTIN_TINSRB:
3474 case ARM_BUILTIN_TINSRH:
3475 case ARM_BUILTIN_TINSRW:
3476 case ARM_BUILTIN_WMERGE:
3477 icode = (fcode == ARM_BUILTIN_TINSRB ? CODE_FOR_iwmmxt_tinsrb
3478 : fcode == ARM_BUILTIN_TINSRH ? CODE_FOR_iwmmxt_tinsrh
3479 : fcode == ARM_BUILTIN_WMERGE ? CODE_FOR_iwmmxt_wmerge
3480 : CODE_FOR_iwmmxt_tinsrw);
3481 arg0 = CALL_EXPR_ARG (exp, 0);
3482 arg1 = CALL_EXPR_ARG (exp, 1);
3483 arg2 = CALL_EXPR_ARG (exp, 2);
3484 op0 = expand_normal (arg0);
3485 op1 = expand_normal (arg1);
3486 op2 = expand_normal (arg2);
3487 tmode = insn_data[icode].operand[0].mode;
3488 mode0 = insn_data[icode].operand[1].mode;
3489 mode1 = insn_data[icode].operand[2].mode;
3490 mode2 = insn_data[icode].operand[3].mode;
3492 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3493 op0 = copy_to_mode_reg (mode0, op0);
3494 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3495 op1 = copy_to_mode_reg (mode1, op1);
3496 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3498 error ("selector must be an immediate");
3499 return const0_rtx;
3501 if (icode == CODE_FOR_iwmmxt_wmerge)
3503 selector = INTVAL (op2);
3504 if (selector > 7 || selector < 0)
3505 error ("the selector must be in the range 0 to 7");
3507 if ((icode == CODE_FOR_iwmmxt_tinsrb)
3508 || (icode == CODE_FOR_iwmmxt_tinsrh)
3509 || (icode == CODE_FOR_iwmmxt_tinsrw))
3511 mask = 0x01;
3512 selector = INTVAL (op2);
3513 if (icode == CODE_FOR_iwmmxt_tinsrb && (selector < 0 || selector > 7))
3514 error ("the selector must be in the range 0 to 7");
3515 else if (icode == CODE_FOR_iwmmxt_tinsrh && (selector < 0 || selector > 3))
3516 error ("the selector must be in the range 0 to 3");
3517 else if (icode == CODE_FOR_iwmmxt_tinsrw && (selector < 0 || selector > 1))
3518 error ("the selector must be in the range 0 to 1");
3519 mask <<= selector;
3520 op2 = GEN_INT (mask);
3522 if (target == 0
3523 || GET_MODE (target) != tmode
3524 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3525 target = gen_reg_rtx (tmode);
3526 pat = GEN_FCN (icode) (target, op0, op1, op2);
3527 if (! pat)
3528 return 0;
3529 emit_insn (pat);
3530 return target;
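 /* Worked example of the mask built above (a sketch, not normative): for
    tinsrb with selector 2, mask becomes 0x01 << 2 = 0x04, i.e. a one-hot
    lane mask telling the insn pattern to write only byte lane 2.  */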
3532 case ARM_BUILTIN_SETWCGR0:
3533 case ARM_BUILTIN_SETWCGR1:
3534 case ARM_BUILTIN_SETWCGR2:
3535 case ARM_BUILTIN_SETWCGR3:
3536 icode = (fcode == ARM_BUILTIN_SETWCGR0 ? CODE_FOR_iwmmxt_setwcgr0
3537 : fcode == ARM_BUILTIN_SETWCGR1 ? CODE_FOR_iwmmxt_setwcgr1
3538 : fcode == ARM_BUILTIN_SETWCGR2 ? CODE_FOR_iwmmxt_setwcgr2
3539 : CODE_FOR_iwmmxt_setwcgr3);
3540 arg0 = CALL_EXPR_ARG (exp, 0);
3541 op0 = expand_normal (arg0);
3542 mode0 = insn_data[icode].operand[0].mode;
3543 if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
3544 op0 = copy_to_mode_reg (mode0, op0);
3545 pat = GEN_FCN (icode) (op0);
3546 if (!pat)
3547 return 0;
3548 emit_insn (pat);
3549 return 0;
3551 case ARM_BUILTIN_GETWCGR0:
3552 case ARM_BUILTIN_GETWCGR1:
3553 case ARM_BUILTIN_GETWCGR2:
3554 case ARM_BUILTIN_GETWCGR3:
3555 icode = (fcode == ARM_BUILTIN_GETWCGR0 ? CODE_FOR_iwmmxt_getwcgr0
3556 : fcode == ARM_BUILTIN_GETWCGR1 ? CODE_FOR_iwmmxt_getwcgr1
3557 : fcode == ARM_BUILTIN_GETWCGR2 ? CODE_FOR_iwmmxt_getwcgr2
3558 : CODE_FOR_iwmmxt_getwcgr3);
3559 tmode = insn_data[icode].operand[0].mode;
3560 if (target == 0
3561 || GET_MODE (target) != tmode
3562 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
3563 target = gen_reg_rtx (tmode);
3564 pat = GEN_FCN (icode) (target);
3565 if (!pat)
3566 return 0;
3567 emit_insn (pat);
3568 return target;
3570 case ARM_BUILTIN_WSHUFH:
3571 icode = CODE_FOR_iwmmxt_wshufh;
3572 arg0 = CALL_EXPR_ARG (exp, 0);
3573 arg1 = CALL_EXPR_ARG (exp, 1);
3574 op0 = expand_normal (arg0);
3575 op1 = expand_normal (arg1);
3576 tmode = insn_data[icode].operand[0].mode;
3577 mode1 = insn_data[icode].operand[1].mode;
3578 mode2 = insn_data[icode].operand[2].mode;
3580 if (! (*insn_data[icode].operand[1].predicate) (op0, mode1))
3581 op0 = copy_to_mode_reg (mode1, op0);
3582 if (! (*insn_data[icode].operand[2].predicate) (op1, mode2))
3584 error ("mask must be an immediate");
3585 return const0_rtx;
3587 selector = INTVAL (op1);
3588 if (selector < 0 || selector > 255)
3589 error ("the mask must be in the range 0 to 255");
3590 if (target == 0
3591 || GET_MODE (target) != tmode
3592 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3593 target = gen_reg_rtx (tmode);
3594 pat = GEN_FCN (icode) (target, op0, op1);
3595 if (! pat)
3596 return 0;
3597 emit_insn (pat);
3598 return target;
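 /* Hedged illustration of the 8-bit WSHUFH mask validated above, assuming
    the usual encoding in which bits [2i+1:2i] select the source half-word
    for result half-word i: a mask of 0x1b (binary 00 01 10 11) reverses the
    four half-words, while 0x00 broadcasts half-word 0.  */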
3600 case ARM_BUILTIN_WMADDS:
3601 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmadds, exp, target);
3602 case ARM_BUILTIN_WMADDSX:
3603 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsx, exp, target);
3604 case ARM_BUILTIN_WMADDSN:
3605 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsn, exp, target);
3606 case ARM_BUILTIN_WMADDU:
3607 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddu, exp, target);
3608 case ARM_BUILTIN_WMADDUX:
3609 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddux, exp, target);
3610 case ARM_BUILTIN_WMADDUN:
3611 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddun, exp, target);
3612 case ARM_BUILTIN_WSADBZ:
3613 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz, exp, target);
3614 case ARM_BUILTIN_WSADHZ:
3615 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz, exp, target);
3617 /* Several three-argument builtins. */
3618 case ARM_BUILTIN_WMACS:
3619 case ARM_BUILTIN_WMACU:
3620 case ARM_BUILTIN_TMIA:
3621 case ARM_BUILTIN_TMIAPH:
3622 case ARM_BUILTIN_TMIATT:
3623 case ARM_BUILTIN_TMIATB:
3624 case ARM_BUILTIN_TMIABT:
3625 case ARM_BUILTIN_TMIABB:
3626 case ARM_BUILTIN_WQMIABB:
3627 case ARM_BUILTIN_WQMIABT:
3628 case ARM_BUILTIN_WQMIATB:
3629 case ARM_BUILTIN_WQMIATT:
3630 case ARM_BUILTIN_WQMIABBN:
3631 case ARM_BUILTIN_WQMIABTN:
3632 case ARM_BUILTIN_WQMIATBN:
3633 case ARM_BUILTIN_WQMIATTN:
3634 case ARM_BUILTIN_WMIABB:
3635 case ARM_BUILTIN_WMIABT:
3636 case ARM_BUILTIN_WMIATB:
3637 case ARM_BUILTIN_WMIATT:
3638 case ARM_BUILTIN_WMIABBN:
3639 case ARM_BUILTIN_WMIABTN:
3640 case ARM_BUILTIN_WMIATBN:
3641 case ARM_BUILTIN_WMIATTN:
3642 case ARM_BUILTIN_WMIAWBB:
3643 case ARM_BUILTIN_WMIAWBT:
3644 case ARM_BUILTIN_WMIAWTB:
3645 case ARM_BUILTIN_WMIAWTT:
3646 case ARM_BUILTIN_WMIAWBBN:
3647 case ARM_BUILTIN_WMIAWBTN:
3648 case ARM_BUILTIN_WMIAWTBN:
3649 case ARM_BUILTIN_WMIAWTTN:
3650 case ARM_BUILTIN_WSADB:
3651 case ARM_BUILTIN_WSADH:
3652 icode = (fcode == ARM_BUILTIN_WMACS ? CODE_FOR_iwmmxt_wmacs
3653 : fcode == ARM_BUILTIN_WMACU ? CODE_FOR_iwmmxt_wmacu
3654 : fcode == ARM_BUILTIN_TMIA ? CODE_FOR_iwmmxt_tmia
3655 : fcode == ARM_BUILTIN_TMIAPH ? CODE_FOR_iwmmxt_tmiaph
3656 : fcode == ARM_BUILTIN_TMIABB ? CODE_FOR_iwmmxt_tmiabb
3657 : fcode == ARM_BUILTIN_TMIABT ? CODE_FOR_iwmmxt_tmiabt
3658 : fcode == ARM_BUILTIN_TMIATB ? CODE_FOR_iwmmxt_tmiatb
3659 : fcode == ARM_BUILTIN_TMIATT ? CODE_FOR_iwmmxt_tmiatt
3660 : fcode == ARM_BUILTIN_WQMIABB ? CODE_FOR_iwmmxt_wqmiabb
3661 : fcode == ARM_BUILTIN_WQMIABT ? CODE_FOR_iwmmxt_wqmiabt
3662 : fcode == ARM_BUILTIN_WQMIATB ? CODE_FOR_iwmmxt_wqmiatb
3663 : fcode == ARM_BUILTIN_WQMIATT ? CODE_FOR_iwmmxt_wqmiatt
3664 : fcode == ARM_BUILTIN_WQMIABBN ? CODE_FOR_iwmmxt_wqmiabbn
3665 : fcode == ARM_BUILTIN_WQMIABTN ? CODE_FOR_iwmmxt_wqmiabtn
3666 : fcode == ARM_BUILTIN_WQMIATBN ? CODE_FOR_iwmmxt_wqmiatbn
3667 : fcode == ARM_BUILTIN_WQMIATTN ? CODE_FOR_iwmmxt_wqmiattn
3668 : fcode == ARM_BUILTIN_WMIABB ? CODE_FOR_iwmmxt_wmiabb
3669 : fcode == ARM_BUILTIN_WMIABT ? CODE_FOR_iwmmxt_wmiabt
3670 : fcode == ARM_BUILTIN_WMIATB ? CODE_FOR_iwmmxt_wmiatb
3671 : fcode == ARM_BUILTIN_WMIATT ? CODE_FOR_iwmmxt_wmiatt
3672 : fcode == ARM_BUILTIN_WMIABBN ? CODE_FOR_iwmmxt_wmiabbn
3673 : fcode == ARM_BUILTIN_WMIABTN ? CODE_FOR_iwmmxt_wmiabtn
3674 : fcode == ARM_BUILTIN_WMIATBN ? CODE_FOR_iwmmxt_wmiatbn
3675 : fcode == ARM_BUILTIN_WMIATTN ? CODE_FOR_iwmmxt_wmiattn
3676 : fcode == ARM_BUILTIN_WMIAWBB ? CODE_FOR_iwmmxt_wmiawbb
3677 : fcode == ARM_BUILTIN_WMIAWBT ? CODE_FOR_iwmmxt_wmiawbt
3678 : fcode == ARM_BUILTIN_WMIAWTB ? CODE_FOR_iwmmxt_wmiawtb
3679 : fcode == ARM_BUILTIN_WMIAWTT ? CODE_FOR_iwmmxt_wmiawtt
3680 : fcode == ARM_BUILTIN_WMIAWBBN ? CODE_FOR_iwmmxt_wmiawbbn
3681 : fcode == ARM_BUILTIN_WMIAWBTN ? CODE_FOR_iwmmxt_wmiawbtn
3682 : fcode == ARM_BUILTIN_WMIAWTBN ? CODE_FOR_iwmmxt_wmiawtbn
3683 : fcode == ARM_BUILTIN_WMIAWTTN ? CODE_FOR_iwmmxt_wmiawttn
3684 : fcode == ARM_BUILTIN_WSADB ? CODE_FOR_iwmmxt_wsadb
3685 : CODE_FOR_iwmmxt_wsadh);
3686 arg0 = CALL_EXPR_ARG (exp, 0);
3687 arg1 = CALL_EXPR_ARG (exp, 1);
3688 arg2 = CALL_EXPR_ARG (exp, 2);
3689 op0 = expand_normal (arg0);
3690 op1 = expand_normal (arg1);
3691 op2 = expand_normal (arg2);
3692 tmode = insn_data[icode].operand[0].mode;
3693 mode0 = insn_data[icode].operand[1].mode;
3694 mode1 = insn_data[icode].operand[2].mode;
3695 mode2 = insn_data[icode].operand[3].mode;
3697 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3698 op0 = copy_to_mode_reg (mode0, op0);
3699 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3700 op1 = copy_to_mode_reg (mode1, op1);
3701 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3702 op2 = copy_to_mode_reg (mode2, op2);
3703 if (target == 0
3704 || GET_MODE (target) != tmode
3705 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3706 target = gen_reg_rtx (tmode);
3707 pat = GEN_FCN (icode) (target, op0, op1, op2);
3708 if (! pat)
3709 return 0;
3710 emit_insn (pat);
3711 return target;
3713 case ARM_BUILTIN_WZERO:
3714 target = gen_reg_rtx (DImode);
3715 emit_insn (gen_iwmmxt_clrdi (target));
3716 return target;
3718 case ARM_BUILTIN_WSRLHI:
3719 case ARM_BUILTIN_WSRLWI:
3720 case ARM_BUILTIN_WSRLDI:
3721 case ARM_BUILTIN_WSLLHI:
3722 case ARM_BUILTIN_WSLLWI:
3723 case ARM_BUILTIN_WSLLDI:
3724 case ARM_BUILTIN_WSRAHI:
3725 case ARM_BUILTIN_WSRAWI:
3726 case ARM_BUILTIN_WSRADI:
3727 case ARM_BUILTIN_WRORHI:
3728 case ARM_BUILTIN_WRORWI:
3729 case ARM_BUILTIN_WRORDI:
3730 case ARM_BUILTIN_WSRLH:
3731 case ARM_BUILTIN_WSRLW:
3732 case ARM_BUILTIN_WSRLD:
3733 case ARM_BUILTIN_WSLLH:
3734 case ARM_BUILTIN_WSLLW:
3735 case ARM_BUILTIN_WSLLD:
3736 case ARM_BUILTIN_WSRAH:
3737 case ARM_BUILTIN_WSRAW:
3738 case ARM_BUILTIN_WSRAD:
3739 case ARM_BUILTIN_WRORH:
3740 case ARM_BUILTIN_WRORW:
3741 case ARM_BUILTIN_WRORD:
3742 icode = (fcode == ARM_BUILTIN_WSRLHI ? CODE_FOR_lshrv4hi3_iwmmxt
3743 : fcode == ARM_BUILTIN_WSRLWI ? CODE_FOR_lshrv2si3_iwmmxt
3744 : fcode == ARM_BUILTIN_WSRLDI ? CODE_FOR_lshrdi3_iwmmxt
3745 : fcode == ARM_BUILTIN_WSLLHI ? CODE_FOR_ashlv4hi3_iwmmxt
3746 : fcode == ARM_BUILTIN_WSLLWI ? CODE_FOR_ashlv2si3_iwmmxt
3747 : fcode == ARM_BUILTIN_WSLLDI ? CODE_FOR_ashldi3_iwmmxt
3748 : fcode == ARM_BUILTIN_WSRAHI ? CODE_FOR_ashrv4hi3_iwmmxt
3749 : fcode == ARM_BUILTIN_WSRAWI ? CODE_FOR_ashrv2si3_iwmmxt
3750 : fcode == ARM_BUILTIN_WSRADI ? CODE_FOR_ashrdi3_iwmmxt
3751 : fcode == ARM_BUILTIN_WRORHI ? CODE_FOR_rorv4hi3
3752 : fcode == ARM_BUILTIN_WRORWI ? CODE_FOR_rorv2si3
3753 : fcode == ARM_BUILTIN_WRORDI ? CODE_FOR_rordi3
3754 : fcode == ARM_BUILTIN_WSRLH ? CODE_FOR_lshrv4hi3_di
3755 : fcode == ARM_BUILTIN_WSRLW ? CODE_FOR_lshrv2si3_di
3756 : fcode == ARM_BUILTIN_WSRLD ? CODE_FOR_lshrdi3_di
3757 : fcode == ARM_BUILTIN_WSLLH ? CODE_FOR_ashlv4hi3_di
3758 : fcode == ARM_BUILTIN_WSLLW ? CODE_FOR_ashlv2si3_di
3759 : fcode == ARM_BUILTIN_WSLLD ? CODE_FOR_ashldi3_di
3760 : fcode == ARM_BUILTIN_WSRAH ? CODE_FOR_ashrv4hi3_di
3761 : fcode == ARM_BUILTIN_WSRAW ? CODE_FOR_ashrv2si3_di
3762 : fcode == ARM_BUILTIN_WSRAD ? CODE_FOR_ashrdi3_di
3763 : fcode == ARM_BUILTIN_WRORH ? CODE_FOR_rorv4hi3_di
3764 : fcode == ARM_BUILTIN_WRORW ? CODE_FOR_rorv2si3_di
3765 : fcode == ARM_BUILTIN_WRORD ? CODE_FOR_rordi3_di
3766 : CODE_FOR_nothing);
3767 arg1 = CALL_EXPR_ARG (exp, 1);
3768 op1 = expand_normal (arg1);
3769 if (GET_MODE (op1) == VOIDmode)
3771 imm = INTVAL (op1);
3772 if ((fcode == ARM_BUILTIN_WRORWI || fcode == ARM_BUILTIN_WRORW)
3773 && (imm < 0 || imm > 32))
3775 const char *builtin = (fcode == ARM_BUILTIN_WRORWI
3776 ? "_mm_rori_pi32" : "_mm_ror_pi32");
3777 error ("the count must be in the range 0 to 32; "
3778 "please check the intrinsic %qs in your code", builtin);
3780 else if ((fcode == ARM_BUILTIN_WRORHI || fcode == ARM_BUILTIN_WRORH)
3781 && (imm < 0 || imm > 16))
3783 const char *builtin = (fcode == ARM_BUILTIN_WRORHI
3784 ? "_mm_rori_pi16" : "_mm_ror_pi16");
3785 error ("the count must be in the range 0 to 16; "
3786 "please check the intrinsic %qs in your code", builtin);
3788 else if ((fcode == ARM_BUILTIN_WRORDI || fcode == ARM_BUILTIN_WRORD)
3789 && (imm < 0 || imm > 64))
3791 const char *builtin = (fcode == ARM_BUILTIN_WRORDI
3792 ? "_mm_rori_si64" : "_mm_ror_si64");
3793 error ("the count must be in the range 0 to 64; "
3794 "please check the intrinsic %qs in your code", builtin);
3796 else if (imm < 0)
3798 const char *builtin;
3799 switch (fcode)
3801 case ARM_BUILTIN_WSRLHI:
3802 builtin = "_mm_srli_pi16";
3803 break;
3804 case ARM_BUILTIN_WSRLWI:
3805 builtin = "_mm_srli_pi32";
3806 break;
3807 case ARM_BUILTIN_WSRLDI:
3808 builtin = "_mm_srli_si64";
3809 break;
3810 case ARM_BUILTIN_WSLLHI:
3811 builtin = "_mm_slli_pi16";
3812 break;
3813 case ARM_BUILTIN_WSLLWI:
3814 builtin = "_mm_slli_pi32";
3815 break;
3816 case ARM_BUILTIN_WSLLDI:
3817 builtin = "_mm_slli_si64";
3818 break;
3819 case ARM_BUILTIN_WSRAHI:
3820 builtin = "_mm_srai_pi16";
3821 break;
3822 case ARM_BUILTIN_WSRAWI:
3823 builtin = "_mm_srai_pi32";
3824 break;
3825 case ARM_BUILTIN_WSRADI:
3826 builtin = "_mm_srai_si64";
3827 break;
3828 case ARM_BUILTIN_WSRLH:
3829 builtin = "_mm_srl_pi16";
3830 break;
3831 case ARM_BUILTIN_WSRLW:
3832 builtin = "_mm_srl_pi32";
3833 break;
3834 case ARM_BUILTIN_WSRLD:
3835 builtin = "_mm_srl_si64";
3836 break;
3837 case ARM_BUILTIN_WSLLH:
3838 builtin = "_mm_sll_pi16";
3839 break;
3840 case ARM_BUILTIN_WSLLW:
3841 builtin = "_mm_sll_pi32";
3842 break;
3843 case ARM_BUILTIN_WSLLD:
3844 builtin = "_mm_sll_si64";
3845 break;
3846 case ARM_BUILTIN_WSRAH:
3847 builtin = "_mm_sra_pi16";
3848 break;
3849 case ARM_BUILTIN_WSRAW:
3850 builtin = "_mm_sra_pi32";
3851 break;
3852 default:
3853 builtin = "_mm_sra_si64";
3854 break;
3856 error ("the count must not be negative; "
3857 "please check the intrinsic %qs in your code", builtin);
3860 return arm_expand_binop_builtin (icode, exp, target);
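 /* Hedged usage sketch, reusing the intrinsic names that appear in the
    diagnostics above (the iWMMXt mmintrin.h spellings):

      __m64 r1 = _mm_srli_pi16 (a, 3);
      __m64 r2 = _mm_rori_pi32 (a, 8);

    A constant shift count is rejected here if negative, and a constant
    rotate count is checked against its range (0 to 32 for _mm_rori_pi32);
    a count that does not fold to a constant skips these checks and goes
    straight to arm_expand_binop_builtin.  */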
3862 default:
3863 break;
3866 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
3867 if (d->code == (enum arm_builtins) fcode)
3868 return arm_expand_binop_builtin (d->icode, exp, target);
3870 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
3871 if (d->code == (enum arm_builtins) fcode)
3872 return arm_expand_unop_builtin (d->icode, exp, target, 0);
3874 for (i = 0, d = bdesc_3arg; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
3875 if (d->code == (enum arm_builtins) fcode)
3876 return arm_expand_ternop_builtin (d->icode, exp, target);
3878 /* @@@ Should really do something sensible here. */
3879 return NULL_RTX;
3882 /* Expand an expression EXP that calls a built-in function,
3883 with result going to TARGET if that's convenient
3884 (and in mode MODE if that's convenient).
3885 SUBTARGET may be used as the target for computing one of EXP's operands.
3886 IGNORE is nonzero if the value is to be ignored. */
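 /* Sketch of the function-code packing assumed by the hooks below:
    DECL_MD_FUNCTION_CODE stores the builtin class in the low
    ARM_BUILTIN_SHIFT bits and the per-class subcode above them, so

      class   = code & ARM_BUILTIN_CLASS;
      subcode = code >> ARM_BUILTIN_SHIFT;

    recovers both halves (the constants themselves are defined elsewhere in
    the backend headers).  */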
3888 rtx
3889 arm_expand_builtin (tree exp,
3890 rtx target,
3891 rtx subtarget ATTRIBUTE_UNUSED,
3892 machine_mode mode ATTRIBUTE_UNUSED,
3893 int ignore ATTRIBUTE_UNUSED)
3895 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3896 unsigned int code = DECL_MD_FUNCTION_CODE (fndecl);
3897 unsigned int subcode = code >> ARM_BUILTIN_SHIFT;
3898 switch (code & ARM_BUILTIN_CLASS)
3900 case ARM_BUILTIN_GENERAL:
3901 return arm_general_expand_builtin (subcode, exp, target, ignore);
3902 case ARM_BUILTIN_MVE:
3903 return arm_mve::expand_builtin (subcode, exp, target);
3904 default:
3905 gcc_unreachable ();
3909 void
3910 arm_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
3912 const unsigned ARM_FE_INVALID = 1;
3913 const unsigned ARM_FE_DIVBYZERO = 2;
3914 const unsigned ARM_FE_OVERFLOW = 4;
3915 const unsigned ARM_FE_UNDERFLOW = 8;
3916 const unsigned ARM_FE_INEXACT = 16;
3917 const unsigned HOST_WIDE_INT ARM_FE_ALL_EXCEPT = (ARM_FE_INVALID
3918 | ARM_FE_DIVBYZERO
3919 | ARM_FE_OVERFLOW
3920 | ARM_FE_UNDERFLOW
3921 | ARM_FE_INEXACT);
3922 const unsigned HOST_WIDE_INT ARM_FE_EXCEPT_SHIFT = 8;
3923 tree fenv_var, get_fpscr, set_fpscr, mask, ld_fenv, masked_fenv;
3924 tree new_fenv_var, reload_fenv, restore_fnenv;
3925 tree update_call, atomic_feraiseexcept, hold_fnclex;
3927 if (!TARGET_HARD_FLOAT)
3928 return;
3930 /* Generate the equivalent of:
3931 unsigned int fenv_var;
3932 fenv_var = __builtin_arm_get_fpscr ();
3934 unsigned int masked_fenv;
3935 masked_fenv = fenv_var & mask;
3937 __builtin_arm_set_fpscr (masked_fenv); */
3939 fenv_var = create_tmp_var_raw (unsigned_type_node);
3940 get_fpscr = arm_builtin_decls[ARM_BUILTIN_GET_FPSCR];
3941 set_fpscr = arm_builtin_decls[ARM_BUILTIN_SET_FPSCR];
3942 mask = build_int_cst (unsigned_type_node,
3943 ~((ARM_FE_ALL_EXCEPT << ARM_FE_EXCEPT_SHIFT)
3944 | ARM_FE_ALL_EXCEPT));
3945 ld_fenv = build4 (TARGET_EXPR, unsigned_type_node,
3946 fenv_var, build_call_expr (get_fpscr, 0),
3947 NULL_TREE, NULL_TREE);
3948 masked_fenv = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_var, mask);
3949 hold_fnclex = build_call_expr (set_fpscr, 1, masked_fenv);
3950 *hold = build2 (COMPOUND_EXPR, void_type_node,
3951 build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
3952 hold_fnclex);
3954 /* Store the value of masked_fenv to clear the exceptions:
3955 __builtin_arm_set_fpscr (masked_fenv); */
3957 *clear = build_call_expr (set_fpscr, 1, masked_fenv);
3959 /* Generate the equivalent of:
3960 unsigned int new_fenv_var;
3961 new_fenv_var = __builtin_arm_get_fpscr ();
3963 __builtin_arm_set_fpscr (fenv_var);
3965 __atomic_feraiseexcept (new_fenv_var); */
3967 new_fenv_var = create_tmp_var_raw (unsigned_type_node);
3968 reload_fenv = build4 (TARGET_EXPR, unsigned_type_node, new_fenv_var,
3969 build_call_expr (get_fpscr, 0), NULL_TREE, NULL_TREE);
3970 restore_fnenv = build_call_expr (set_fpscr, 1, fenv_var);
3971 atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
3972 update_call = build_call_expr (atomic_feraiseexcept, 1,
3973 fold_convert (integer_type_node, new_fenv_var));
3974 *update = build2 (COMPOUND_EXPR, void_type_node,
3975 build2 (COMPOUND_EXPR, void_type_node,
3976 reload_fenv, restore_fnenv), update_call);
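 /* Hedged usage sketch: this hook (TARGET_ATOMIC_ASSIGN_EXPAND_FENV) lets
    the middle end expand a C11 atomic floating-point compound assignment
    such as

      _Atomic float x;
      ...
      x += 1.0f;

    as a compare-exchange loop wrapped in the *HOLD, *CLEAR and *UPDATE
    sequences built above: FP exception state is saved and masked, cleared
    before each retry, and re-raised once the update finally succeeds.  */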
3979 /* Implement TARGET_CHECK_BUILTIN_CALL for general built-in functions. Record
3980 a read of the Q bit (or of the GE bits) through intrinsics as an attribute
3981 on the current function's declaration. */
3982 bool
3983 arm_general_check_builtin_call (unsigned int code)
3985 if (code == ARM_BUILTIN_saturation_occurred
3986 || code == ARM_BUILTIN_set_saturation)
3988 if (cfun && cfun->decl)
3989 DECL_ATTRIBUTES (cfun->decl)
3990 = tree_cons (get_identifier ("acle qbit"), NULL_TREE,
3991 DECL_ATTRIBUTES (cfun->decl));
3993 else if (code == ARM_BUILTIN_sel)
3995 if (cfun && cfun->decl)
3996 DECL_ATTRIBUTES (cfun->decl)
3997 = tree_cons (get_identifier ("acle gebits"), NULL_TREE,
3998 DECL_ATTRIBUTES (cfun->decl));
4000 return true;
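 /* Editorial note (hedged): the "acle qbit" and "acle gebits" attributes
    recorded above are bookkeeping on the current function's declaration;
    later backend logic uses them to know that FPSCR.Q (saturation
    intrinsics) or the APSR.GE bits (the __sel-style intrinsics) may be
    read, so code that sets those bits is not optimised as if the bits were
    never observed.  */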
4003 /* Implement TARGET_CHECK_BUILTIN_CALL. */
4004 bool
4005 arm_check_builtin_call (location_t loc, vec<location_t> arg_loc, tree fndecl,
4006 tree orig_fndecl, unsigned int nargs, tree *args, bool)
4008 unsigned int code = DECL_MD_FUNCTION_CODE (fndecl);
4009 unsigned int subcode = code >> ARM_BUILTIN_SHIFT;
4010 switch (code & ARM_BUILTIN_CLASS)
4012 case ARM_BUILTIN_GENERAL:
4013 return arm_general_check_builtin_call (subcode);
4014 case ARM_BUILTIN_MVE:
4015 return arm_mve::check_builtin_call (loc, arg_loc, subcode,
4016 orig_fndecl, nargs, args);
4017 default:
4018 gcc_unreachable ();
4023 enum resolver_ident
4024 arm_describe_resolver (tree fndecl)
4026 unsigned int code = DECL_MD_FUNCTION_CODE (fndecl);
4027 unsigned int subcode = code >> ARM_BUILTIN_SHIFT;
4028 switch (code & ARM_BUILTIN_CLASS)
4030 case ARM_BUILTIN_GENERAL:
4031 if (subcode >= ARM_BUILTIN_vcx1qv16qi
4032 && subcode < ARM_BUILTIN_MVE_BASE)
4033 return arm_cde_resolver;
4034 return arm_no_resolver;
4035 case ARM_BUILTIN_MVE:
4036 return arm_mve_resolver;
4037 default:
4038 gcc_unreachable ();
4042 unsigned
4043 arm_cde_end_args (tree fndecl)
4045 unsigned int code = DECL_MD_FUNCTION_CODE (fndecl);
4046 unsigned int subcode = code >> ARM_BUILTIN_SHIFT;
4047 switch (code & ARM_BUILTIN_CLASS)
4049 case ARM_BUILTIN_GENERAL:
4050 return subcode >= ARM_BUILTIN_vcx1q_p_v16qi ? 2 : 1;
4051 default:
4052 gcc_unreachable ();
4056 #include "gt-arm-builtins.h"