official-gcc.git: gcc/builtins.cc
/* Expand builtin functions.
   Copyright (C) 1988-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.cc instead.  */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-iterator.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
73 #include "file-prefix-map.h" /* remap_macro_filename() */
74 #include "ipa-strub.h" /* strub_watermark_parm() */
75 #include "gomp-constants.h"
76 #include "omp-general.h"
77 #include "tree-dfa.h"
78 #include "gimple-ssa.h"
79 #include "tree-ssa-live.h"
80 #include "tree-outof-ssa.h"
81 #include "attr-fnspec.h"
82 #include "demangle.h"
83 #include "gimple-range.h"
84 #include "pointer-query.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type; make sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
                                                  optab);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_issignaling (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
                                            rtx target, tree exp,
                                            memop_ret retmode,
                                            bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_stack_address ();
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_iseqsig (location_t, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree, enum built_in_function);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  return (startswith (name, "__builtin_")
          || startswith (name, "__sync_")
          || startswith (name, "__atomic_"));
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
          ptr_bitmask *= BITS_PER_UNIT;
          align = least_bit_hwi (ptr_bitmask);
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
          && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
          && talign > align)
        align = talign;
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
     with it.  */
  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    exp = TREE_OPERAND (exp, 0);
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
                                          &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
        {
          unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
          if (trailing_zeros < HOST_BITS_PER_INT)
            {
              unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
              if (inner)
                align = MIN (align, inner);
            }
        }
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* Make sure to return a sensible alignment when the multiplication
             by BITS_PER_UNIT overflowed.  */
          if (*alignp == 0)
            *alignp = 1u << (HOST_BITS_PER_INT - 1);
          /* We cannot really tell whether this result is an approximation.  */
          return false;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
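
/* Illustrative example (not part of the original source): for an expression
   such as &buf[3], where BUF is a char array known to be 16-byte aligned,
   get_pointer_alignment_1 computes ALIGN == 128 bits and BITPOS == 24 bits,
   i.e. the pointer satisfies ptr & 15 == 3 at byte granularity; since BITPOS
   is nonzero, get_pointer_alignment then reports only least_bit_hwi (24) == 8
   bits, the largest power of two guaranteed to divide the address.  */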
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
        {
          const char *elt = (const char*) ptr + n;
          if (!*elt)
            break;
        }
    }
  else
    {
      for (n = 0; n < maxelts; n++)
        {
          const char *elt = (const char*) ptr + n * eltsize;
          if (!memcmp (elt, "\0\0\0\0", eltsize))
            break;
        }
    }
  return n;
}
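
/* Illustrative example (not part of the original source): with ELTSIZE == 1,
   string_length ("ab\0cd", 1, 5) returns 2 because the third element is the
   first zero byte, while string_length ("abcde", 1, 3) returns 3 because the
   scan stops after MAXELTS elements even though no terminator was seen.  */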
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
        return NULL_TREE;

      /* If the string has an internal NUL character followed by any
         non-NUL characters (e.g., "foo\0bar"), we can't compute
         the offset to the following NUL if we don't know where to
         start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
         In the latter case, set the DECL/LEN field in the DATA structure
         so that callers may examine them.  */
      if (len + 1 < strelts)
        return NULL_TREE;
      else if (len >= maxelts)
        {
          data->decl = decl;
          data->off = byteoff;
          data->minlen = ssize_int (len);
          return NULL_TREE;
        }

      /* For empty strings the result should be zero.  */
      if (len == 0)
        return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  If the offset falls within the bounds
         of the string subtract the offset from the length of the string,
         and return that.  Otherwise the length is zero.  Take care to
         use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
                                                 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
                                      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
                                     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
                              build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !warning_suppressed_p (arg, OPT_Warray_bounds_)
          && warning_at (loc, OPT_Warray_bounds_,
                         "offset %qwi outside bounds of constant string",
                         eltoff))
        {
          if (decl)
            inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
          suppress_warning (arg, OPT_Warray_bounds_);
        }
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
                                strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
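
/* Illustrative example (not part of the original source): for a constant
   argument like "hello" with no offset, c_strlen simply returns
   ssize_int (5).  For "hello" + i with I unknown, BYTEOFF is non-constant
   and, because the literal has no embedded NUL, the length folds to the
   conditional expression (size_t) i <= 5 ? 5 - (size_t) i : 0.  For
   "foo\0bar" + i the embedded NUL makes the result unknowable and
   NULL_TREE is returned.  */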
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, fixed_size_mode mode,
           bool null_terminated_p/*=true*/)
{
  auto_vec<target_unit, MAX_BITSIZE_MODE_ANY_INT / BITS_PER_UNIT> bytes;

  bytes.reserve (GET_MODE_SIZE (mode));

  target_unit ch = 1;
  for (unsigned int i = 0; i < GET_MODE_SIZE (mode); ++i)
    {
      if (ch || !null_terminated_p)
        ch = (unsigned char) str[i];
      bytes.quick_push (ch);
    }

  return native_decode_rtx (mode, bytes, 0);
}
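
/* Illustrative example (not part of the original source), assuming a
   little-endian 32-bit target: c_readstr ("ab", SImode) collects the bytes
   'a', 'b', '\0', '\0' (once the terminator is seen the remaining positions
   are padded with zeros) and decodes them to the RTL constant 0x6261.  */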
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
         frame address we return, because target-specific definitions will
         override us.  Therefore frame pointer elimination is OK, and using
         the soft frame pointer is OK.

         For a nonzero count, or a zero count with __builtin_frame_address,
         we require a stable offset from the current frame pointer to the
         previous one, so we must use the hard frame pointer, and
         we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
        tem = frame_pointer_rtx;
      else
        {
          tem = hard_frame_pointer_rtx;

          /* Tell reload not to eliminate the frame pointer.  */
          crtl->accesses_prior_frames = 1;
        }
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
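
/* Illustrative example (not part of the original source): a source-level
   call such as __builtin_return_address (0) reaches this function with
   FNDECL_CODE == BUILT_IN_RETURN_ADDRESS and COUNT == 0, while
   __builtin_frame_address (2) arrives with
   FNDECL_CODE == BUILT_IN_FRAME_ADDRESS and COUNT == 2 and walks the
   dynamic chain twice before returning the frame address.  */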
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.cc; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
        {
          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          lab = copy_to_reg (lab);

          /* Restore the frame pointer and stack pointer.  We must use a
             temporary since the setjmp buffer may be a local.  */
          fp = copy_to_reg (fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          /* Ensure the frame pointer move is not optimized.  */
          emit_insn (gen_blockage ());
          emit_clobber (hard_frame_pointer_rtx);
          emit_clobber (frame_pointer_rtx);
          emit_move_insn (hard_frame_pointer_rtx, fp);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
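
/* Illustrative usage sketch (not part of the original source): the pair of
   builtins expanded above is typically used like

     void *buf[5];                     // five words, as described above
     if (__builtin_setjmp (buf) == 0)
       do_work ();                     // direct path
     else
       handle_unwind ();               // reached via __builtin_longjmp
     ...
     __builtin_longjmp (buf, 1);       // second argument must be 1

   where buf, do_work and handle_unwind are hypothetical names; the
   gcc_assert above enforces the constant-1 second argument.  */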
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        case POINTER_TYPE:
          /* The actual argument must be nonnull when either the whole
             called function has been declared nonnull, or when the formal
             argument corresponding to the actual argument has been.  */
          if (argmap
              && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
            {
              arg = next_const_call_expr_arg (&iter);
              if (!validate_arg (arg, code) || integer_zerop (arg))
                goto end;
              break;
            }
          /* FALLTHRU */
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
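
/* Illustrative example (not part of the original source): a checker for a
   memcpy-like builtin would be written as
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE),
   which succeeds only if EXP has exactly three arguments with those type
   codes and no nonnull-attributed pointer argument is a literal zero; the
   trailing 0 form instead accepts any further arguments, as described in
   the comment above.  */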
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
         temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  arg1 = nargs > 1 ? CALL_EXPR_ARG (exp, 1) : NULL_TREE;
  arg2 = nargs > 2 ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (arg1 == NULL_TREE)
    op1 = const0_rtx;
  else if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      op1 = const0_rtx;
    }
  else
    op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (arg2 == NULL_TREE)
    op2 = GEN_INT (3);
  else if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      op2 = const0_rtx;
    }
  else
    op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
        return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
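
/* Illustrative example (not part of the original source): a call such as
   __builtin_prefetch (&a[i + 8], 0, 3) arrives here with ARG0 = &a[i + 8],
   ARG1 = 0 (prefetch for read) and ARG2 = 3 (high temporal locality);
   calling it with only the address, __builtin_prefetch (p), uses the
   defaults noted above (read, locality 3).  */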
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp, base;
  rtx addr, mem;

  gcc_checking_assert
    (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))));

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                      0))))
    {
      unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
      exp = build_fold_addr_expr (base);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
      /* Since we stripped parts make sure the offset is unknown and the
         alignment is computed from the original address.  */
      clear_mem_offset (mem);
      set_mem_align (mem, align);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

/* Wrapper that implicitly applies a delta when getting or setting the
   enclosed value.  */
template <typename T>
class delta_type
{
  T &value; T const delta;
public:
  delta_type (T &val, T dlt) : value (val), delta (dlt) {}
  operator T () const { return value + delta; }
  T operator = (T val) const { value = val - delta; return val; }
};

#define saved_apply_args_size \
  (delta_type<int> (this_target_builtins->x_apply_args_size_plus_one, -1))
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define saved_apply_result_size \
  (delta_type<int> (this_target_builtins->x_apply_result_size_plus_one, -1))
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  int size = saved_apply_args_size;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

            if (mode != VOIDmode)
              {
                align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
                if (size % align != 0)
                  size = CEIL (size, align) * align;
                size += GET_MODE_SIZE (mode);
                apply_args_mode[regno] = mode;
              }
            else
              apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
          }
        else
          apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      saved_apply_args_size = size;
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  int size = saved_apply_result_size;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

            if (mode != VOIDmode)
              {
                align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
                if (size % align != 0)
                  size = CEIL (size, align) * align;
                size += GET_MODE_SIZE (mode);
                apply_result_mode[regno] = mode;
              }
            else
              apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
          }
        else
          apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif

      saved_apply_result_size = size;
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (mem, reg)
                            : gen_rtx_SET (reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
                                      crtl->args.pretend_args_size),
                       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
                    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
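
/* Illustrative usage sketch (not part of the original source): these
   builtins are typically combined to forward a call whose arguments are
   not known statically, e.g.

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) target_fn, args, 128);
     __builtin_return (result);

   where target_fn and the 128-byte argument-block size are hypothetical;
   expand_builtin_apply below implements the second of these calls.  */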
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                         incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
        dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
        dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1762 result_vector (1, result));
1763 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1764 if (CALL_P (insn))
1765 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1766 emit_insn (seq);
1768 else if (targetm.have_call_value ())
1770 rtx valreg = 0;
1772 /* Locate the unique return register. It is not possible to
1773 express a call that sets more than one return register using
1774 call_value; use untyped_call for that. In fact, untyped_call
1775 only needs to save the return registers in the given block. */
1776 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1777 if ((mode = apply_result_mode[regno]) != VOIDmode)
1779 gcc_assert (!valreg); /* have_untyped_call required. */
1781 valreg = gen_rtx_REG (mode, regno);
1784 emit_insn (targetm.gen_call_value (valreg,
1785 gen_rtx_MEM (FUNCTION_MODE, function),
1786 const0_rtx, NULL_RTX, const0_rtx));
1788 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1790 else
1791 gcc_unreachable ();
1793 /* Find the CALL insn we just emitted, and attach the register usage
1794 information. */
1795 call_insn = last_call_insn ();
1796 add_function_usage_to (call_insn, call_fusage);
1798 /* Restore the stack. */
1799 if (targetm.have_save_stack_nonlocal ())
1800 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1801 else
1802 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1803 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1805 OK_DEFER_POP;
1807 /* Return the address of the result block. */
1808 result = copy_addr_to_reg (XEXP (result, 0));
1809 return convert_memory_address (ptr_mode, result);
1812 /* Perform an untyped return. */
1814 static void
1815 expand_builtin_return (rtx result)
1817 int size, align, regno;
1818 fixed_size_mode mode;
1819 rtx reg;
1820 rtx_insn *call_fusage = 0;
1822 result = convert_memory_address (Pmode, result);
1824 apply_result_size ();
1825 result = gen_rtx_MEM (BLKmode, result);
1827 if (targetm.have_untyped_return ())
1829 rtx vector = result_vector (0, result);
1830 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1831 emit_barrier ();
1832 return;
1835 /* Restore the return value and note that each value is used. */
1836 size = 0;
1837 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1838 if ((mode = apply_result_mode[regno]) != VOIDmode)
1840 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1841 if (size % align != 0)
1842 size = CEIL (size, align) * align;
1843 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1844 emit_move_insn (reg, adjust_address (result, mode, size));
1846 push_to_sequence (call_fusage);
1847 emit_use (reg);
1848 call_fusage = get_insns ();
1849 end_sequence ();
1850 size += GET_MODE_SIZE (mode);
1853 /* Put the USE insns before the return. */
1854 emit_insn (call_fusage);
1856 /* Return whatever values were restored by jumping directly to the end
1857 of the function. */
1858 expand_naked_return ();
1861 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1864 type_to_class (tree type)
1866 switch (TREE_CODE (type))
1868 case VOID_TYPE: return void_type_class;
1869 case INTEGER_TYPE: return integer_type_class;
1870 case ENUMERAL_TYPE: return enumeral_type_class;
1871 case BOOLEAN_TYPE: return boolean_type_class;
1872 case POINTER_TYPE: return pointer_type_class;
1873 case REFERENCE_TYPE: return reference_type_class;
1874 case OFFSET_TYPE: return offset_type_class;
1875 case REAL_TYPE: return real_type_class;
1876 case COMPLEX_TYPE: return complex_type_class;
1877 case FUNCTION_TYPE: return function_type_class;
1878 case METHOD_TYPE: return method_type_class;
1879 case RECORD_TYPE: return record_type_class;
1880 case UNION_TYPE:
1881 case QUAL_UNION_TYPE: return union_type_class;
1882 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1883 ? string_type_class : array_type_class);
1884 case LANG_TYPE: return lang_type_class;
1885 case OPAQUE_TYPE: return opaque_type_class;
1886 case BITINT_TYPE: return bitint_type_class;
1887 case VECTOR_TYPE: return vector_type_class;
1888 default: return no_type_class;
1892 /* Expand a call EXP to __builtin_classify_type. */
1894 static rtx
1895 expand_builtin_classify_type (tree exp)
1897 if (call_expr_nargs (exp))
1898 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1899 return GEN_INT (no_type_class);
1902 /* This helper macro, meant to be used in mathfn_built_in below, determines
1903 which among a set of builtin math functions is appropriate for a given type
1904 mode. The `F' (float) and `L' (long double) variants are generated automatically
1905 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1906 types, there are additional types that are considered with 'F32', 'F64',
1907 'F128', etc. suffixes. */
1908 #define CASE_MATHFN(MATHFN) \
1909 CASE_CFN_##MATHFN: \
1910 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1911 fcodel = BUILT_IN_##MATHFN##L ; break;
1912 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1913 types. */
1914 #define CASE_MATHFN_FLOATN(MATHFN) \
1915 CASE_CFN_##MATHFN: \
1916 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1917 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1918 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1919 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1920 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1921 break;
1922 /* Similar to above, but appends _R after any F/L suffix. */
1923 #define CASE_MATHFN_REENT(MATHFN) \
1924 case CFN_BUILT_IN_##MATHFN##_R: \
1925 case CFN_BUILT_IN_##MATHFN##F_R: \
1926 case CFN_BUILT_IN_##MATHFN##L_R: \
1927 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1928 fcodel = BUILT_IN_##MATHFN##L_R ; break;
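/* As a concrete illustration (an expansion sketch, not text from this file),
   CASE_MATHFN (EXP10) produces roughly:
     CASE_CFN_EXP10:
       fcode = BUILT_IN_EXP10; fcodef = BUILT_IN_EXP10F;
       fcodel = BUILT_IN_EXP10L; break;
   while the _FLOATN variant additionally fills in fcodef16 through
   fcodef128x, and the _REENT variant uses the *_R function codes.  */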
1930 /* Return a function equivalent to FN but operating on floating-point
1931 values of type TYPE, or END_BUILTINS if no such function exists.
1932 This is purely an operation on function codes; it does not guarantee
1933 that the target actually has an implementation of the function. */
1935 static built_in_function
1936 mathfn_built_in_2 (tree type, combined_fn fn)
1938 tree mtype;
1939 built_in_function fcode, fcodef, fcodel;
1940 built_in_function fcodef16 = END_BUILTINS;
1941 built_in_function fcodef32 = END_BUILTINS;
1942 built_in_function fcodef64 = END_BUILTINS;
1943 built_in_function fcodef128 = END_BUILTINS;
1944 built_in_function fcodef32x = END_BUILTINS;
1945 built_in_function fcodef64x = END_BUILTINS;
1946 built_in_function fcodef128x = END_BUILTINS;
1948 /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
1949 break the uses below. */
1950 #undef HUGE_VAL
1951 #undef NAN
1953 switch (fn)
1955 #define SEQ_OF_CASE_MATHFN \
1956 CASE_MATHFN_FLOATN (ACOS) \
1957 CASE_MATHFN_FLOATN (ACOSH) \
1958 CASE_MATHFN_FLOATN (ASIN) \
1959 CASE_MATHFN_FLOATN (ASINH) \
1960 CASE_MATHFN_FLOATN (ATAN) \
1961 CASE_MATHFN_FLOATN (ATAN2) \
1962 CASE_MATHFN_FLOATN (ATANH) \
1963 CASE_MATHFN_FLOATN (CBRT) \
1964 CASE_MATHFN_FLOATN (CEIL) \
1965 CASE_MATHFN (CEXPI) \
1966 CASE_MATHFN_FLOATN (COPYSIGN) \
1967 CASE_MATHFN_FLOATN (COS) \
1968 CASE_MATHFN_FLOATN (COSH) \
1969 CASE_MATHFN (DREM) \
1970 CASE_MATHFN_FLOATN (ERF) \
1971 CASE_MATHFN_FLOATN (ERFC) \
1972 CASE_MATHFN_FLOATN (EXP) \
1973 CASE_MATHFN (EXP10) \
1974 CASE_MATHFN_FLOATN (EXP2) \
1975 CASE_MATHFN_FLOATN (EXPM1) \
1976 CASE_MATHFN_FLOATN (FABS) \
1977 CASE_MATHFN_FLOATN (FDIM) \
1978 CASE_MATHFN_FLOATN (FLOOR) \
1979 CASE_MATHFN_FLOATN (FMA) \
1980 CASE_MATHFN_FLOATN (FMAX) \
1981 CASE_MATHFN_FLOATN (FMIN) \
1982 CASE_MATHFN_FLOATN (FMOD) \
1983 CASE_MATHFN_FLOATN (FREXP) \
1984 CASE_MATHFN (GAMMA) \
1985 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1986 CASE_MATHFN_FLOATN (HUGE_VAL) \
1987 CASE_MATHFN_FLOATN (HYPOT) \
1988 CASE_MATHFN_FLOATN (ILOGB) \
1989 CASE_MATHFN (ICEIL) \
1990 CASE_MATHFN (IFLOOR) \
1991 CASE_MATHFN_FLOATN (INF) \
1992 CASE_MATHFN (IRINT) \
1993 CASE_MATHFN (IROUND) \
1994 CASE_MATHFN (ISINF) \
1995 CASE_MATHFN (J0) \
1996 CASE_MATHFN (J1) \
1997 CASE_MATHFN (JN) \
1998 CASE_MATHFN (LCEIL) \
1999 CASE_MATHFN_FLOATN (LDEXP) \
2000 CASE_MATHFN (LFLOOR) \
2001 CASE_MATHFN_FLOATN (LGAMMA) \
2002 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2003 CASE_MATHFN (LLCEIL) \
2004 CASE_MATHFN (LLFLOOR) \
2005 CASE_MATHFN_FLOATN (LLRINT) \
2006 CASE_MATHFN_FLOATN (LLROUND) \
2007 CASE_MATHFN_FLOATN (LOG) \
2008 CASE_MATHFN_FLOATN (LOG10) \
2009 CASE_MATHFN_FLOATN (LOG1P) \
2010 CASE_MATHFN_FLOATN (LOG2) \
2011 CASE_MATHFN_FLOATN (LOGB) \
2012 CASE_MATHFN_FLOATN (LRINT) \
2013 CASE_MATHFN_FLOATN (LROUND) \
2014 CASE_MATHFN_FLOATN (MODF) \
2015 CASE_MATHFN_FLOATN (NAN) \
2016 CASE_MATHFN_FLOATN (NANS) \
2017 CASE_MATHFN_FLOATN (NEARBYINT) \
2018 CASE_MATHFN_FLOATN (NEXTAFTER) \
2019 CASE_MATHFN (NEXTTOWARD) \
2020 CASE_MATHFN_FLOATN (POW) \
2021 CASE_MATHFN (POWI) \
2022 CASE_MATHFN (POW10) \
2023 CASE_MATHFN_FLOATN (REMAINDER) \
2024 CASE_MATHFN_FLOATN (REMQUO) \
2025 CASE_MATHFN_FLOATN (RINT) \
2026 CASE_MATHFN_FLOATN (ROUND) \
2027 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2028 CASE_MATHFN (SCALB) \
2029 CASE_MATHFN_FLOATN (SCALBLN) \
2030 CASE_MATHFN_FLOATN (SCALBN) \
2031 CASE_MATHFN (SIGNBIT) \
2032 CASE_MATHFN (SIGNIFICAND) \
2033 CASE_MATHFN_FLOATN (SIN) \
2034 CASE_MATHFN (SINCOS) \
2035 CASE_MATHFN_FLOATN (SINH) \
2036 CASE_MATHFN_FLOATN (SQRT) \
2037 CASE_MATHFN_FLOATN (TAN) \
2038 CASE_MATHFN_FLOATN (TANH) \
2039 CASE_MATHFN_FLOATN (TGAMMA) \
2040 CASE_MATHFN_FLOATN (TRUNC) \
2041 CASE_MATHFN (Y0) \
2042 CASE_MATHFN (Y1) \
2043 CASE_MATHFN (YN)
2045 SEQ_OF_CASE_MATHFN
2047 default:
2048 return END_BUILTINS;
2051 mtype = TYPE_MAIN_VARIANT (type);
2052 if (mtype == double_type_node)
2053 return fcode;
2054 else if (mtype == float_type_node)
2055 return fcodef;
2056 else if (mtype == long_double_type_node)
2057 return fcodel;
2058 else if (mtype == float16_type_node)
2059 return fcodef16;
2060 else if (mtype == float32_type_node)
2061 return fcodef32;
2062 else if (mtype == float64_type_node)
2063 return fcodef64;
2064 else if (mtype == float128_type_node)
2065 return fcodef128;
2066 else if (mtype == float32x_type_node)
2067 return fcodef32x;
2068 else if (mtype == float64x_type_node)
2069 return fcodef64x;
2070 else if (mtype == float128x_type_node)
2071 return fcodef128x;
2072 else
2073 return END_BUILTINS;
2076 #undef CASE_MATHFN
2077 #undef CASE_MATHFN_FLOATN
2078 #undef CASE_MATHFN_REENT
2080 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2081 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2082 otherwise use the explicit declaration. If we can't do the conversion,
2083 return null. */
2085 static tree
2086 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2088 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2089 if (fcode2 == END_BUILTINS)
2090 return NULL_TREE;
2092 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2093 return NULL_TREE;
2095 return builtin_decl_explicit (fcode2);
2098 /* Like mathfn_built_in_1, but always use the implicit array. */
2100 tree
2101 mathfn_built_in (tree type, combined_fn fn)
2103 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2106 /* Like mathfn_built_in_1, but always use the explicit array. */
2108 tree
2109 mathfn_built_in_explicit (tree type, combined_fn fn)
2111 return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);
2114 /* Like mathfn_built_in_1, but take a built_in_function and
2115 always use the implicit array. */
2117 tree
2118 mathfn_built_in (tree type, enum built_in_function fn)
2120 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
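/* Typical use (a sketch of the expected behaviour, not a guarantee for every
   configuration): mathfn_built_in (float_type_node, BUILT_IN_SQRT) should
   yield the decl for sqrtf when that builtin is implicitly available, and
   NULL_TREE otherwise.  */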
2123 /* Return the type associated with a built-in function, i.e., the one
2124 to be passed to mathfn_built_in to get the type-specific
2125 function. */
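/* For example, given the CASE_MATHFN expansions below,
   mathfn_built_in_type (CFN_BUILT_IN_SINF) is expected to return
   float_type_node and mathfn_built_in_type (CFN_BUILT_IN_SINL)
   long_double_type_node.  */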
2127 tree
2128 mathfn_built_in_type (combined_fn fn)
2130 #define CASE_MATHFN(MATHFN) \
2131 case CFN_BUILT_IN_##MATHFN: \
2132 return double_type_node; \
2133 case CFN_BUILT_IN_##MATHFN##F: \
2134 return float_type_node; \
2135 case CFN_BUILT_IN_##MATHFN##L: \
2136 return long_double_type_node;
2138 #define CASE_MATHFN_FLOATN(MATHFN) \
2139 CASE_MATHFN(MATHFN) \
2140 case CFN_BUILT_IN_##MATHFN##F16: \
2141 return float16_type_node; \
2142 case CFN_BUILT_IN_##MATHFN##F32: \
2143 return float32_type_node; \
2144 case CFN_BUILT_IN_##MATHFN##F64: \
2145 return float64_type_node; \
2146 case CFN_BUILT_IN_##MATHFN##F128: \
2147 return float128_type_node; \
2148 case CFN_BUILT_IN_##MATHFN##F32X: \
2149 return float32x_type_node; \
2150 case CFN_BUILT_IN_##MATHFN##F64X: \
2151 return float64x_type_node; \
2152 case CFN_BUILT_IN_##MATHFN##F128X: \
2153 return float128x_type_node;
2155 /* Similar to above, but appends _R after any F/L suffix. */
2156 #define CASE_MATHFN_REENT(MATHFN) \
2157 case CFN_BUILT_IN_##MATHFN##_R: \
2158 return double_type_node; \
2159 case CFN_BUILT_IN_##MATHFN##F_R: \
2160 return float_type_node; \
2161 case CFN_BUILT_IN_##MATHFN##L_R: \
2162 return long_double_type_node;
2164 switch (fn)
2166 SEQ_OF_CASE_MATHFN
2168 default:
2169 return NULL_TREE;
2172 #undef CASE_MATHFN
2173 #undef CASE_MATHFN_FLOATN
2174 #undef CASE_MATHFN_REENT
2175 #undef SEQ_OF_CASE_MATHFN
2178 /* Check whether there is an internal function associated with function FN
2179 and return type RETURN_TYPE. Return the function if so, otherwise return
2180 IFN_LAST.
2182 Note that this function only tests whether the function is defined in
2183 internal-fn.def, not whether it is actually available on the target. */
2185 static internal_fn
2186 associated_internal_fn (built_in_function fn, tree return_type)
2188 switch (fn)
2190 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2191 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2192 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2193 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2194 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2195 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2196 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2197 #include "internal-fn.def"
2199 CASE_FLT_FN (BUILT_IN_POW10):
2200 return IFN_EXP10;
2202 CASE_FLT_FN (BUILT_IN_DREM):
2203 return IFN_REMAINDER;
2205 CASE_FLT_FN (BUILT_IN_SCALBN):
2206 CASE_FLT_FN (BUILT_IN_SCALBLN):
2207 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2208 return IFN_LDEXP;
2209 return IFN_LAST;
2211 default:
2212 return IFN_LAST;
2216 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2217 return its code, otherwise return IFN_LAST. Note that this function
2218 only tests whether the function is defined in internal-fn.def, not whether
2219 it is actually available on the target. */
2221 internal_fn
2222 associated_internal_fn (tree fndecl)
2224 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2225 return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2226 TREE_TYPE (TREE_TYPE (fndecl)));
2229 /* Check whether there is an internal function associated with function CFN
2230 and return type RETURN_TYPE. Return the function if so, otherwise return
2231 IFN_LAST.
2233 Note that this function only tests whether the function is defined in
2234 internal-fn.def, not whether it is actually available on the target. */
2236 internal_fn
2237 associated_internal_fn (combined_fn cfn, tree return_type)
2239 if (internal_fn_p (cfn))
2240 return as_internal_fn (cfn);
2241 return associated_internal_fn (as_builtin_fn (cfn), return_type);
2244 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2245 on the current target by a call to an internal function, return the
2246 code of that internal function, otherwise return IFN_LAST. The caller
2247 is responsible for ensuring that any side-effects of the built-in
2248 call are dealt with correctly. E.g. if CALL sets errno, the caller
2249 must decide that the errno result isn't needed or make it available
2250 in some other way. */
2252 internal_fn
2253 replacement_internal_fn (gcall *call)
2255 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2257 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2258 if (ifn != IFN_LAST)
2260 tree_pair types = direct_internal_fn_types (ifn, call);
2261 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2262 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2263 return ifn;
2266 return IFN_LAST;
2269 /* Expand a call to the builtin ternary math functions (fma).
2270 Return NULL_RTX if a normal call should be emitted rather than expanding the
2271 function in-line. EXP is the expression that is a call to the builtin
2272 function; if convenient, the result should be placed in TARGET.
2273 SUBTARGET may be used as the target for computing one of EXP's
2274 operands. */
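/* As a reminder of the semantics being expanded: fma (a, b, c) computes
   a * b + c with a single rounding step, which is what the fma_optab
   pattern is expected to provide.  */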
2276 static rtx
2277 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2279 optab builtin_optab;
2280 rtx op0, op1, op2, result;
2281 rtx_insn *insns;
2282 tree fndecl = get_callee_fndecl (exp);
2283 tree arg0, arg1, arg2;
2284 machine_mode mode;
2286 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2287 return NULL_RTX;
2289 arg0 = CALL_EXPR_ARG (exp, 0);
2290 arg1 = CALL_EXPR_ARG (exp, 1);
2291 arg2 = CALL_EXPR_ARG (exp, 2);
2293 switch (DECL_FUNCTION_CODE (fndecl))
2295 CASE_FLT_FN (BUILT_IN_FMA):
2296 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2297 builtin_optab = fma_optab; break;
2298 default:
2299 gcc_unreachable ();
2302 /* Make a suitable register to place result in. */
2303 mode = TYPE_MODE (TREE_TYPE (exp));
2305 /* Before working hard, check whether the instruction is available. */
2306 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2307 return NULL_RTX;
2309 result = gen_reg_rtx (mode);
2311 /* Always stabilize the argument list. */
2312 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2313 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2314 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2316 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2317 op1 = expand_normal (arg1);
2318 op2 = expand_normal (arg2);
2320 start_sequence ();
2322 /* Compute into RESULT.
2323 Set RESULT to wherever the result comes back. */
2324 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2325 result, 0);
2327 /* If we were unable to expand via the builtin, stop the sequence
2328 (without outputting the insns) and call the library function
2329 with the stabilized argument list. */
2330 if (result == 0)
2332 end_sequence ();
2333 return expand_call (exp, target, target == const0_rtx);
2336 /* Output the entire sequence. */
2337 insns = get_insns ();
2338 end_sequence ();
2339 emit_insn (insns);
2341 return result;
2344 /* Expand a call to the builtin sin and cos math functions.
2345 Return NULL_RTX if a normal call should be emitted rather than expanding the
2346 function in-line. EXP is the expression that is a call to the builtin
2347 function; if convenient, the result should be placed in TARGET.
2348 SUBTARGET may be used as the target for computing one of EXP's
2349 operands. */
2351 static rtx
2352 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2354 optab builtin_optab;
2355 rtx op0;
2356 rtx_insn *insns;
2357 tree fndecl = get_callee_fndecl (exp);
2358 machine_mode mode;
2359 tree arg;
2361 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2362 return NULL_RTX;
2364 arg = CALL_EXPR_ARG (exp, 0);
2366 switch (DECL_FUNCTION_CODE (fndecl))
2368 CASE_FLT_FN (BUILT_IN_SIN):
2369 CASE_FLT_FN (BUILT_IN_COS):
2370 builtin_optab = sincos_optab; break;
2371 default:
2372 gcc_unreachable ();
2375 /* Make a suitable register to place result in. */
2376 mode = TYPE_MODE (TREE_TYPE (exp));
2378 /* Check if the sincos insn is available; otherwise fall back
2379 to the sin or cos insn. */
2380 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2381 switch (DECL_FUNCTION_CODE (fndecl))
2383 CASE_FLT_FN (BUILT_IN_SIN):
2384 builtin_optab = sin_optab; break;
2385 CASE_FLT_FN (BUILT_IN_COS):
2386 builtin_optab = cos_optab; break;
2387 default:
2388 gcc_unreachable ();
2391 /* Before working hard, check whether the instruction is available. */
2392 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2394 rtx result = gen_reg_rtx (mode);
2396 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2397 need to expand the argument again. This way, we will not perform
2398 side-effects more than once. */
2399 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2401 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2403 start_sequence ();
2405 /* Compute into RESULT.
2406 Set RESULT to wherever the result comes back. */
2407 if (builtin_optab == sincos_optab)
2409 int ok;
2411 switch (DECL_FUNCTION_CODE (fndecl))
2413 CASE_FLT_FN (BUILT_IN_SIN):
2414 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2415 break;
2416 CASE_FLT_FN (BUILT_IN_COS):
2417 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2418 break;
2419 default:
2420 gcc_unreachable ();
2422 gcc_assert (ok);
2424 else
2425 result = expand_unop (mode, builtin_optab, op0, result, 0);
2427 if (result != 0)
2429 /* Output the entire sequence. */
2430 insns = get_insns ();
2431 end_sequence ();
2432 emit_insn (insns);
2433 return result;
2436 /* If we were unable to expand via the builtin, stop the sequence
2437 (without outputting the insns) and call the library function
2438 with the stabilized argument list. */
2439 end_sequence ();
2442 return expand_call (exp, target, target == const0_rtx);
2445 /* Given an interclass math builtin decl FNDECL and its argument ARG
2446 return an RTL instruction code that implements the functionality.
2447 If that isn't possible or available return CODE_FOR_nothing. */
2449 static enum insn_code
2450 interclass_mathfn_icode (tree arg, tree fndecl)
2452 bool errno_set = false;
2453 optab builtin_optab = unknown_optab;
2454 machine_mode mode;
2456 switch (DECL_FUNCTION_CODE (fndecl))
2458 CASE_FLT_FN (BUILT_IN_ILOGB):
2459 errno_set = true; builtin_optab = ilogb_optab; break;
2460 CASE_FLT_FN (BUILT_IN_ISINF):
2461 builtin_optab = isinf_optab; break;
2462 case BUILT_IN_ISFINITE:
2463 builtin_optab = isfinite_optab;
2464 break;
2465 case BUILT_IN_ISNORMAL:
2466 builtin_optab = isnormal_optab;
2467 break;
2468 CASE_FLT_FN (BUILT_IN_FINITE):
2469 case BUILT_IN_FINITED32:
2470 case BUILT_IN_FINITED64:
2471 case BUILT_IN_FINITED128:
2472 case BUILT_IN_ISINFD32:
2473 case BUILT_IN_ISINFD64:
2474 case BUILT_IN_ISINFD128:
2475 /* These builtins have no optabs (yet). */
2476 break;
2477 default:
2478 gcc_unreachable ();
2481 /* There's no easy way to detect the case we need to set EDOM. */
2482 if (flag_errno_math && errno_set)
2483 return CODE_FOR_nothing;
2485 /* Optab mode depends on the mode of the input argument. */
2486 mode = TYPE_MODE (TREE_TYPE (arg));
2488 if (builtin_optab)
2489 return optab_handler (builtin_optab, mode);
2490 return CODE_FOR_nothing;
2493 /* Expand a call to one of the builtin math functions that operate on
2494 a floating-point argument and output an integer result (ilogb, isinf,
2495 isnan, etc.).
2496 Return 0 if a normal call should be emitted rather than expanding the
2497 function in-line. EXP is the expression that is a call to the builtin
2498 function; if convenient, the result should be placed in TARGET. */
2500 static rtx
2501 expand_builtin_interclass_mathfn (tree exp, rtx target)
2503 enum insn_code icode = CODE_FOR_nothing;
2504 rtx op0;
2505 tree fndecl = get_callee_fndecl (exp);
2506 machine_mode mode;
2507 tree arg;
2509 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2510 return NULL_RTX;
2512 arg = CALL_EXPR_ARG (exp, 0);
2513 icode = interclass_mathfn_icode (arg, fndecl);
2514 mode = TYPE_MODE (TREE_TYPE (arg));
2516 if (icode != CODE_FOR_nothing)
2518 class expand_operand ops[1];
2519 rtx_insn *last = get_last_insn ();
2520 tree orig_arg = arg;
2522 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2523 need to expand the argument again. This way, we will not perform
2524 side-effects more than once. */
2525 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2527 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2529 if (mode != GET_MODE (op0))
2530 op0 = convert_to_mode (mode, op0, 0);
2532 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2533 if (maybe_legitimize_operands (icode, 0, 1, ops)
2534 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2535 return ops[0].value;
2537 delete_insns_since (last);
2538 CALL_EXPR_ARG (exp, 0) = orig_arg;
2541 return NULL_RTX;
2544 /* Expand a call to the builtin sincos math function.
2545 Return NULL_RTX if a normal call should be emitted rather than expanding the
2546 function in-line. EXP is the expression that is a call to the builtin
2547 function. */
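/* For reference, the GNU interface being expanded is
     sincos (x, &s, &c);
   which stores sin (x) in *s and cos (x) in *c; the code below mirrors that
   by writing the two sincos_optab results through the SINP and COSP pointer
   arguments.  */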
2549 static rtx
2550 expand_builtin_sincos (tree exp)
2552 rtx op0, op1, op2, target1, target2;
2553 machine_mode mode;
2554 tree arg, sinp, cosp;
2555 int result;
2556 location_t loc = EXPR_LOCATION (exp);
2557 tree alias_type, alias_off;
2559 if (!validate_arglist (exp, REAL_TYPE,
2560 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2561 return NULL_RTX;
2563 arg = CALL_EXPR_ARG (exp, 0);
2564 sinp = CALL_EXPR_ARG (exp, 1);
2565 cosp = CALL_EXPR_ARG (exp, 2);
2567 /* Make a suitable register to place result in. */
2568 mode = TYPE_MODE (TREE_TYPE (arg));
2570 /* Check if sincos insn is available, otherwise emit the call. */
2571 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2572 return NULL_RTX;
2574 target1 = gen_reg_rtx (mode);
2575 target2 = gen_reg_rtx (mode);
2577 op0 = expand_normal (arg);
2578 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2579 alias_off = build_int_cst (alias_type, 0);
2580 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2581 sinp, alias_off));
2582 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2583 cosp, alias_off));
2585 /* Compute into target1 and target2.
2586 Set TARGET to wherever the result comes back. */
2587 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2588 gcc_assert (result);
2590 /* Move target1 and target2 to the memory locations indicated
2591 by op1 and op2. */
2592 emit_move_insn (op1, target1);
2593 emit_move_insn (op2, target2);
2595 return const0_rtx;
2598 /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2599 result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2600 static rtx
2601 expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2603 if (!validate_arglist (exp, VOID_TYPE))
2604 return NULL_RTX;
2606 insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2607 if (icode == CODE_FOR_nothing)
2608 return NULL_RTX;
2610 if (target == 0
2611 || GET_MODE (target) != target_mode
2612 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2613 target = gen_reg_rtx (target_mode);
2615 rtx pat = GEN_FCN (icode) (target);
2616 if (!pat)
2617 return NULL_RTX;
2618 emit_insn (pat);
2620 return target;
2623 /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2624 fenv.h), returning the result and setting it in TARGET. Otherwise return
2625 NULL_RTX on failure. */
2626 static rtx
2627 expand_builtin_feclear_feraise_except (tree exp, rtx target,
2628 machine_mode target_mode, optab op_optab)
2630 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2631 return NULL_RTX;
2632 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2634 insn_code icode = direct_optab_handler (op_optab, SImode);
2635 if (icode == CODE_FOR_nothing)
2636 return NULL_RTX;
2638 if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2639 return NULL_RTX;
2641 if (target == 0
2642 || GET_MODE (target) != target_mode
2643 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2644 target = gen_reg_rtx (target_mode);
2646 rtx pat = GEN_FCN (icode) (target, op0);
2647 if (!pat)
2648 return NULL_RTX;
2649 emit_insn (pat);
2651 return target;
2654 /* Expand a call to the internal cexpi builtin to the sincos math function.
2655 EXP is the expression that is a call to the builtin function; if convenient,
2656 the result should be placed in TARGET. */
2658 static rtx
2659 expand_builtin_cexpi (tree exp, rtx target)
2661 tree fndecl = get_callee_fndecl (exp);
2662 tree arg, type;
2663 machine_mode mode;
2664 rtx op0, op1, op2;
2665 location_t loc = EXPR_LOCATION (exp);
2667 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2668 return NULL_RTX;
2670 arg = CALL_EXPR_ARG (exp, 0);
2671 type = TREE_TYPE (arg);
2672 mode = TYPE_MODE (TREE_TYPE (arg));
2674 /* Try expanding via a sincos optab; fall back to emitting a libcall
2675 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2676 is only generated from sincos or cexp, or when either of them is available. */
2677 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2679 op1 = gen_reg_rtx (mode);
2680 op2 = gen_reg_rtx (mode);
2682 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2684 /* Compute into op1 and op2. */
2685 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2687 else if (targetm.libc_has_function (function_sincos, type))
2689 tree call, fn = NULL_TREE;
2690 tree top1, top2;
2691 rtx op1a, op2a;
2693 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2694 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2695 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2696 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2697 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2698 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2699 else
2700 gcc_unreachable ();
2702 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2703 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2704 op1a = copy_addr_to_reg (XEXP (op1, 0));
2705 op2a = copy_addr_to_reg (XEXP (op2, 0));
2706 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2707 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2709 /* Make sure not to fold the sincos call again. */
2710 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2711 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2712 call, 3, arg, top1, top2));
2714 else
2716 tree call, fn = NULL_TREE, narg;
2717 tree ctype = build_complex_type (type);
2719 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2720 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2721 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2722 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2723 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2724 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2725 else
2726 gcc_unreachable ();
2728 /* If we don't have a decl for cexp create one. This is the
2729 friendliest fallback if the user calls __builtin_cexpi
2730 without full target C99 function support. */
2731 if (fn == NULL_TREE)
2733 tree fntype;
2734 const char *name = NULL;
2736 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2737 name = "cexpf";
2738 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2739 name = "cexp";
2740 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2741 name = "cexpl";
2743 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2744 fn = build_fn_decl (name, fntype);
2747 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2748 build_real (type, dconst0), arg);
2750 /* Make sure not to fold the cexp call again. */
2751 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2752 return expand_expr (build_call_nary (ctype, call, 1, narg),
2753 target, VOIDmode, EXPAND_NORMAL);
2756 /* Now build the proper return type. */
2757 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2758 make_tree (TREE_TYPE (arg), op2),
2759 make_tree (TREE_TYPE (arg), op1)),
2760 target, VOIDmode, EXPAND_NORMAL);
2763 /* Conveniently construct a function call expression. FNDECL names the
2764 function to be called, N is the number of arguments, and the "..."
2765 parameters are the argument expressions. Unlike build_call_expr
2766 this doesn't fold the call, hence it will always return a CALL_EXPR. */
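/* Typical use, as seen later in this file:
     exp = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);
   which always yields a CALL_EXPR calling FALLBACK_FNDECL with ARG, never a
   folded form.  */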
2768 static tree
2769 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2771 va_list ap;
2772 tree fntype = TREE_TYPE (fndecl);
2773 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2775 va_start (ap, n);
2776 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2777 va_end (ap);
2778 SET_EXPR_LOCATION (fn, loc);
2779 return fn;
2782 /* Expand the __builtin_issignaling builtin. This needs to handle
2783 all floating point formats that do support NaNs (for those that
2784 don't it just sets target to 0). */
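/* For example, __builtin_issignaling (x) is documented to return a nonzero
   value iff X is a signaling NaN; for formats that do not support NaNs the
   expansion below simply yields 0.  */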
2786 static rtx
2787 expand_builtin_issignaling (tree exp, rtx target)
2789 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2790 return NULL_RTX;
2792 tree arg = CALL_EXPR_ARG (exp, 0);
2793 scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
2794 const struct real_format *fmt = REAL_MODE_FORMAT (fmode);
2796 /* Expand the argument, yielding an RTX expression. */
2797 rtx temp = expand_normal (arg);
2799 /* If mode doesn't support NaN, always return 0.
2800 Don't use !HONOR_SNANS (fmode) here, so there is some possibility of
2801 __builtin_issignaling working without -fsignaling-nans. Especially
2802 when -fno-signaling-nans is the default.
2803 On the other hand, MODE_HAS_NANS (fmode) is unnecessary; with
2804 -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
2805 fold to 0 or non-NaN/Inf classification. */
2806 if (!HONOR_NANS (fmode))
2808 emit_move_insn (target, const0_rtx);
2809 return target;
2812 /* Check if the back end provides an insn that handles issignaling for the
2813 argument's mode. */
2814 enum insn_code icode = optab_handler (issignaling_optab, fmode);
2815 if (icode != CODE_FOR_nothing)
2817 rtx_insn *last = get_last_insn ();
2818 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2819 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
2820 return this_target;
2821 delete_insns_since (last);
2824 if (DECIMAL_FLOAT_MODE_P (fmode))
2826 scalar_int_mode imode;
2827 rtx hi;
2828 switch (fmt->ieee_bits)
2830 case 32:
2831 case 64:
2832 imode = int_mode_for_mode (fmode).require ();
2833 temp = gen_lowpart (imode, temp);
2834 break;
2835 case 128:
2836 imode = int_mode_for_size (64, 1).require ();
2837 hi = NULL_RTX;
2838 /* For decimal128, TImode support isn't always there and even when
2839 it is, working on the DImode high part is usually better. */
2840 if (!MEM_P (temp))
2842 if (rtx t = force_highpart_subreg (imode, temp, fmode))
2843 hi = t;
2844 else
2846 scalar_int_mode imode2;
2847 if (int_mode_for_mode (fmode).exists (&imode2))
2849 rtx temp2 = gen_lowpart (imode2, temp);
2850 if (rtx t = force_highpart_subreg (imode, temp2, imode2))
2851 hi = t;
2854 if (!hi)
2856 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2857 emit_move_insn (mem, temp);
2858 temp = mem;
2861 if (!hi)
2863 poly_int64 offset
2864 = subreg_highpart_offset (imode, GET_MODE (temp));
2865 hi = adjust_address (temp, imode, offset);
2867 temp = hi;
2868 break;
2869 default:
2870 gcc_unreachable ();
2872 /* In all of decimal{32,64,128}, there is an MSB sign bit, and sNaNs
2873 have the 6 bits below it all set. */
2874 rtx val
2875 = GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
2876 temp = expand_binop (imode, and_optab, temp, val,
2877 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2878 temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
2879 return temp;
2882 /* Only PDP11 has these defined differently but doesn't support NaNs. */
2883 gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
2884 gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
2885 gcc_assert (MODE_COMPOSITE_P (fmode)
2886 || (fmt->pnan == fmt->p
2887 && fmt->signbit_ro == fmt->signbit_rw));
2889 switch (fmt->p)
2891 case 106: /* IBM double double */
2892 /* For IBM double double, recurse on the most significant double. */
2893 gcc_assert (MODE_COMPOSITE_P (fmode));
2894 temp = convert_modes (DFmode, fmode, temp, 0);
2895 fmode = DFmode;
2896 fmt = REAL_MODE_FORMAT (DFmode);
2897 /* FALLTHRU */
2898 case 8: /* bfloat */
2899 case 11: /* IEEE half */
2900 case 24: /* IEEE single */
2901 case 53: /* IEEE double or Intel extended with rounding to double */
2902 if (fmt->p == 53 && fmt->signbit_ro == 79)
2903 goto extended;
2905 scalar_int_mode imode = int_mode_for_mode (fmode).require ();
2906 temp = gen_lowpart (imode, temp);
2907 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
2908 & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2909 if (fmt->qnan_msb_set)
2911 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2912 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
2913 /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
2914 ((temp ^ bit) & mask) > val. */
2915 temp = expand_binop (imode, xor_optab, temp, bit,
2916 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2917 temp = expand_binop (imode, and_optab, temp, mask,
2918 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2919 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2920 1, 1);
2922 else
2924 /* For MIPS/PA IEEE single/double, expand to:
2925 (temp & val) == val. */
2926 temp = expand_binop (imode, and_optab, temp, val,
2927 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2928 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2929 1, 1);
2932 break;
2933 case 113: /* IEEE quad */
2935 rtx hi = NULL_RTX, lo = NULL_RTX;
2936 scalar_int_mode imode = int_mode_for_size (64, 1).require ();
2937 /* For IEEE quad, TImode support isn't always there and even when
2938 it is, working on DImode parts is usually better. */
2939 if (!MEM_P (temp))
2941 hi = force_highpart_subreg (imode, temp, fmode);
2942 lo = force_lowpart_subreg (imode, temp, fmode);
2943 if (!hi || !lo)
2945 scalar_int_mode imode2;
2946 if (int_mode_for_mode (fmode).exists (&imode2))
2948 rtx temp2 = gen_lowpart (imode2, temp);
2949 hi = force_highpart_subreg (imode, temp2, imode2);
2950 lo = force_lowpart_subreg (imode, temp2, imode2);
2953 if (!hi || !lo)
2955 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2956 emit_move_insn (mem, temp);
2957 temp = mem;
2960 if (!hi || !lo)
2962 poly_int64 offset
2963 = subreg_highpart_offset (imode, GET_MODE (temp));
2964 hi = adjust_address (temp, imode, offset);
2965 offset = subreg_lowpart_offset (imode, GET_MODE (temp));
2966 lo = adjust_address (temp, imode, offset);
2968 rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
2969 & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
2970 if (fmt->qnan_msb_set)
2972 rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
2973 - 64)));
2974 rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
2975 /* For non-MIPS/PA IEEE quad, expand to:
2976 (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val. */
2977 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
2978 lo = expand_binop (imode, ior_optab, lo, nlo,
2979 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2980 lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
2981 temp = expand_binop (imode, xor_optab, hi, bit,
2982 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2983 temp = expand_binop (imode, ior_optab, temp, lo,
2984 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2985 temp = expand_binop (imode, and_optab, temp, mask,
2986 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2987 temp = emit_store_flag_force (target, GTU, temp, val, imode,
2988 1, 1);
2990 else
2992 /* For MIPS/PA IEEE quad, expand to:
2993 (hi & val) == val. */
2994 temp = expand_binop (imode, and_optab, hi, val,
2995 NULL_RTX, 1, OPTAB_LIB_WIDEN);
2996 temp = emit_store_flag_force (target, EQ, temp, val, imode,
2997 1, 1);
3000 break;
3001 case 64: /* Intel or Motorola extended */
3002 extended:
3004 rtx ex, hi, lo;
3005 scalar_int_mode imode = int_mode_for_size (32, 1).require ();
3006 scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
3007 if (!MEM_P (temp))
3009 rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
3010 emit_move_insn (mem, temp);
3011 temp = mem;
3013 if (fmt->signbit_ro == 95)
3015 /* Motorola, always big endian, with 16-bit gap in between
3016 16-bit sign+exponent and 64-bit mantissa. */
3017 ex = adjust_address (temp, iemode, 0);
3018 hi = adjust_address (temp, imode, 4);
3019 lo = adjust_address (temp, imode, 8);
3021 else if (!WORDS_BIG_ENDIAN)
3023 /* Intel little endian, 64-bit mantissa followed by 16-bit
3024 sign+exponent and then either 16 or 48 bits of gap. */
3025 ex = adjust_address (temp, iemode, 8);
3026 hi = adjust_address (temp, imode, 4);
3027 lo = adjust_address (temp, imode, 0);
3029 else
3031 /* Big endian Itanium. */
3032 ex = adjust_address (temp, iemode, 0);
3033 hi = adjust_address (temp, imode, 2);
3034 lo = adjust_address (temp, imode, 6);
3036 rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
3037 gcc_assert (fmt->qnan_msb_set);
3038 rtx mask = GEN_INT (0x7fff);
3039 rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
3040 /* For Intel/Motorola extended format, expand to:
3041 (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val. */
3042 rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3043 lo = expand_binop (imode, ior_optab, lo, nlo,
3044 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3045 lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
3046 temp = expand_binop (imode, xor_optab, hi, bit,
3047 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3048 temp = expand_binop (imode, ior_optab, temp, lo,
3049 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3050 temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
3051 ex = expand_binop (iemode, and_optab, ex, mask,
3052 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3053 ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
3054 ex, mask, iemode, 1, 1);
3055 temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
3056 NULL_RTX, 1, OPTAB_LIB_WIDEN);
3058 break;
3059 default:
3060 gcc_unreachable ();
3063 return temp;
3066 /* Expand a call to one of the builtin rounding functions gcc defines
3067 as an extension (lfloor and lceil). As these are gcc extensions we
3068 do not need to worry about setting errno to EDOM.
3069 If expanding via the optab fails, lower the expression to (int)(floor(x)).
3070 EXP is the expression that is a call to the builtin function;
3071 if convenient, the result should be placed in TARGET. */
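/* E.g. a call to __builtin_lfloor (x) is expanded through lfloor_optab when
   available; otherwise it is lowered to roughly (long) floor (x) using the
   fallback decl built below (an illustration of the scheme, not a literal
   transcript of the generated RTL).  */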
3073 static rtx
3074 expand_builtin_int_roundingfn (tree exp, rtx target)
3076 convert_optab builtin_optab;
3077 rtx op0, tmp;
3078 rtx_insn *insns;
3079 tree fndecl = get_callee_fndecl (exp);
3080 enum built_in_function fallback_fn;
3081 tree fallback_fndecl;
3082 machine_mode mode;
3083 tree arg;
3085 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3086 return NULL_RTX;
3088 arg = CALL_EXPR_ARG (exp, 0);
3090 switch (DECL_FUNCTION_CODE (fndecl))
3092 CASE_FLT_FN (BUILT_IN_ICEIL):
3093 CASE_FLT_FN (BUILT_IN_LCEIL):
3094 CASE_FLT_FN (BUILT_IN_LLCEIL):
3095 builtin_optab = lceil_optab;
3096 fallback_fn = BUILT_IN_CEIL;
3097 break;
3099 CASE_FLT_FN (BUILT_IN_IFLOOR):
3100 CASE_FLT_FN (BUILT_IN_LFLOOR):
3101 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3102 builtin_optab = lfloor_optab;
3103 fallback_fn = BUILT_IN_FLOOR;
3104 break;
3106 default:
3107 gcc_unreachable ();
3110 /* Make a suitable register to place result in. */
3111 mode = TYPE_MODE (TREE_TYPE (exp));
3113 target = gen_reg_rtx (mode);
3115 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3116 need to expand the argument again. This way, we will not perform
3117 side-effects more than once. */
3118 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3120 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3122 start_sequence ();
3124 /* Compute into TARGET. */
3125 if (expand_sfix_optab (target, op0, builtin_optab))
3127 /* Output the entire sequence. */
3128 insns = get_insns ();
3129 end_sequence ();
3130 emit_insn (insns);
3131 return target;
3134 /* If we were unable to expand via the builtin, stop the sequence
3135 (without outputting the insns). */
3136 end_sequence ();
3138 /* Fall back to floating point rounding optab. */
3139 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3141 /* For non-C99 targets we may end up without a fallback fndecl here
3142 if the user called __builtin_lfloor directly. In this case emit
3143 a call to the floor/ceil variants nevertheless. This should result
3144 in the best user experience for targets that are not fully C99. */
3145 if (fallback_fndecl == NULL_TREE)
3147 tree fntype;
3148 const char *name = NULL;
3150 switch (DECL_FUNCTION_CODE (fndecl))
3152 case BUILT_IN_ICEIL:
3153 case BUILT_IN_LCEIL:
3154 case BUILT_IN_LLCEIL:
3155 name = "ceil";
3156 break;
3157 case BUILT_IN_ICEILF:
3158 case BUILT_IN_LCEILF:
3159 case BUILT_IN_LLCEILF:
3160 name = "ceilf";
3161 break;
3162 case BUILT_IN_ICEILL:
3163 case BUILT_IN_LCEILL:
3164 case BUILT_IN_LLCEILL:
3165 name = "ceill";
3166 break;
3167 case BUILT_IN_IFLOOR:
3168 case BUILT_IN_LFLOOR:
3169 case BUILT_IN_LLFLOOR:
3170 name = "floor";
3171 break;
3172 case BUILT_IN_IFLOORF:
3173 case BUILT_IN_LFLOORF:
3174 case BUILT_IN_LLFLOORF:
3175 name = "floorf";
3176 break;
3177 case BUILT_IN_IFLOORL:
3178 case BUILT_IN_LFLOORL:
3179 case BUILT_IN_LLFLOORL:
3180 name = "floorl";
3181 break;
3182 default:
3183 gcc_unreachable ();
3186 fntype = build_function_type_list (TREE_TYPE (arg),
3187 TREE_TYPE (arg), NULL_TREE);
3188 fallback_fndecl = build_fn_decl (name, fntype);
3191 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3193 tmp = expand_normal (exp);
3194 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3196 /* Truncate the result of the floating point optab to an integer
3197 via expand_fix (). */
3198 target = gen_reg_rtx (mode);
3199 expand_fix (target, tmp, 0);
3201 return target;
3204 /* Expand a call to one of the builtin math functions doing integer
3205 conversion (lrint).
3206 Return 0 if a normal call should be emitted rather than expanding the
3207 function in-line. EXP is the expression that is a call to the builtin
3208 function; if convenient, the result should be placed in TARGET. */
3210 static rtx
3211 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3213 convert_optab builtin_optab;
3214 rtx op0;
3215 rtx_insn *insns;
3216 tree fndecl = get_callee_fndecl (exp);
3217 tree arg;
3218 machine_mode mode;
3219 enum built_in_function fallback_fn = BUILT_IN_NONE;
3221 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3222 return NULL_RTX;
3224 arg = CALL_EXPR_ARG (exp, 0);
3226 switch (DECL_FUNCTION_CODE (fndecl))
3228 CASE_FLT_FN (BUILT_IN_IRINT):
3229 fallback_fn = BUILT_IN_LRINT;
3230 gcc_fallthrough ();
3231 CASE_FLT_FN (BUILT_IN_LRINT):
3232 CASE_FLT_FN (BUILT_IN_LLRINT):
3233 builtin_optab = lrint_optab;
3234 break;
3236 CASE_FLT_FN (BUILT_IN_IROUND):
3237 fallback_fn = BUILT_IN_LROUND;
3238 gcc_fallthrough ();
3239 CASE_FLT_FN (BUILT_IN_LROUND):
3240 CASE_FLT_FN (BUILT_IN_LLROUND):
3241 builtin_optab = lround_optab;
3242 break;
3244 default:
3245 gcc_unreachable ();
3248 /* There's no easy way to detect the case we need to set EDOM. */
3249 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3250 return NULL_RTX;
3252 /* Make a suitable register to place result in. */
3253 mode = TYPE_MODE (TREE_TYPE (exp));
3255 /* There's no easy way to detect the case we need to set EDOM. */
3256 if (!flag_errno_math)
3258 rtx result = gen_reg_rtx (mode);
3260 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3261 need to expand the argument again. This way, we will not perform
3262 side-effects more than once. */
3263 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3265 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3267 start_sequence ();
3269 if (expand_sfix_optab (result, op0, builtin_optab))
3271 /* Output the entire sequence. */
3272 insns = get_insns ();
3273 end_sequence ();
3274 emit_insn (insns);
3275 return result;
3278 /* If we were unable to expand via the builtin, stop the sequence
3279 (without outputting the insns) and call the library function
3280 with the stabilized argument list. */
3281 end_sequence ();
3284 if (fallback_fn != BUILT_IN_NONE)
3286 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
3287 targets, (int) round (x) should never be transformed into
3288 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3289 a call to lround in the hope that the target provides at least some
3290 C99 functions. This should result in the best user experience for
3291 targets that are not fully C99.
3292 As scalar float conversions with the same mode are useless in GIMPLE,
3293 we can end up e.g. with a _Float32 argument passed to a float builtin;
3294 try to get the type from the builtin prototype first. */
3295 tree fallback_fndecl = NULL_TREE;
3296 if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3297 fallback_fndecl
3298 = mathfn_built_in_1 (TREE_VALUE (argtypes),
3299 as_combined_fn (fallback_fn), 0);
3300 if (fallback_fndecl == NULL_TREE)
3301 fallback_fndecl
3302 = mathfn_built_in_1 (TREE_TYPE (arg),
3303 as_combined_fn (fallback_fn), 0);
3304 if (fallback_fndecl)
3306 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3307 fallback_fndecl, 1, arg);
3309 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3310 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3311 return convert_to_mode (mode, target, 0);
3315 return expand_call (exp, target, target == const0_rtx);
3318 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3319 a normal call should be emitted rather than expanding the function
3320 in-line. EXP is the expression that is a call to the builtin
3321 function; if convenient, the result should be placed in TARGET. */
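/* For example, __builtin_powi (x, n) with a double X is expected to become a
   libcall through optab_libfunc (powi_optab, DFmode), typically libgcc's
   __powidf2 (assuming the usual libgcc naming).  */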
3323 static rtx
3324 expand_builtin_powi (tree exp, rtx target)
3326 tree arg0, arg1;
3327 rtx op0, op1;
3328 machine_mode mode;
3329 machine_mode mode2;
3331 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3332 return NULL_RTX;
3334 arg0 = CALL_EXPR_ARG (exp, 0);
3335 arg1 = CALL_EXPR_ARG (exp, 1);
3336 mode = TYPE_MODE (TREE_TYPE (exp));
3338 /* Emit a libcall to libgcc. */
3340 /* Mode of the 2nd argument must match that of an int. */
3341 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3343 if (target == NULL_RTX)
3344 target = gen_reg_rtx (mode);
3346 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3347 if (GET_MODE (op0) != mode)
3348 op0 = convert_to_mode (mode, op0, 0);
3349 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3350 if (GET_MODE (op1) != mode2)
3351 op1 = convert_to_mode (mode2, op1, 0);
3353 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3354 target, LCT_CONST, mode,
3355 op0, mode, op1, mode2);
3357 return target;
3360 /* Expand expression EXP which is a call to the strlen builtin. Return
3361 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3362 try to get the result in TARGET, if convenient. */
3364 static rtx
3365 expand_builtin_strlen (tree exp, rtx target,
3366 machine_mode target_mode)
3368 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3369 return NULL_RTX;
3371 tree src = CALL_EXPR_ARG (exp, 0);
3373 /* If the length can be computed at compile-time, return it. */
3374 if (tree len = c_strlen (src, 0))
3375 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3377 /* If the length can be computed at compile-time and is a constant
3378 integer, but there are side-effects in src, evaluate
3379 src for side-effects, then return len.
3380 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3381 can be optimized into: i++; x = 3; */
3382 tree len = c_strlen (src, 1);
3383 if (len && TREE_CODE (len) == INTEGER_CST)
3385 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3386 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3389 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3391 /* If SRC is not a pointer type, don't do this operation inline. */
3392 if (align == 0)
3393 return NULL_RTX;
3395 /* Bail out if we can't compute strlen in the right mode. */
3396 machine_mode insn_mode;
3397 enum insn_code icode = CODE_FOR_nothing;
3398 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3400 icode = optab_handler (strlen_optab, insn_mode);
3401 if (icode != CODE_FOR_nothing)
3402 break;
3404 if (insn_mode == VOIDmode)
3405 return NULL_RTX;
3407 /* Make a place to hold the source address. We will not expand
3408 the actual source until we are sure that the expansion will
3409 not fail -- there are trees that cannot be expanded twice. */
3410 rtx src_reg = gen_reg_rtx (Pmode);
3412 /* Mark the beginning of the strlen sequence so we can emit the
3413 source operand later. */
3414 rtx_insn *before_strlen = get_last_insn ();
3416 class expand_operand ops[4];
3417 create_output_operand (&ops[0], target, insn_mode);
3418 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3419 create_integer_operand (&ops[2], 0);
3420 create_integer_operand (&ops[3], align);
3421 if (!maybe_expand_insn (icode, 4, ops))
3422 return NULL_RTX;
3424 /* Check to see if the argument was declared attribute nonstring
3425 and if so, issue a warning since at this point it's not known
3426 to be nul-terminated. */
3427 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3429 /* Now that we are assured of success, expand the source. */
3430 start_sequence ();
3431 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3432 if (pat != src_reg)
3434 #ifdef POINTERS_EXTEND_UNSIGNED
3435 if (GET_MODE (pat) != Pmode)
3436 pat = convert_to_mode (Pmode, pat,
3437 POINTERS_EXTEND_UNSIGNED);
3438 #endif
3439 emit_move_insn (src_reg, pat);
3441 pat = get_insns ();
3442 end_sequence ();
3444 if (before_strlen)
3445 emit_insn_after (pat, before_strlen);
3446 else
3447 emit_insn_before (pat, get_insns ());
3449 /* Return the value in the proper mode for this function. */
3450 if (GET_MODE (ops[0].value) == target_mode)
3451 target = ops[0].value;
3452 else if (target != 0)
3453 convert_move (target, ops[0].value, 0);
3454 else
3455 target = convert_to_mode (target_mode, ops[0].value, 0);
3457 return target;
3460 /* Expand call EXP to the strnlen built-in, returning the result
3461 in TARGET if convenient, or NULL_RTX on failure. */
3463 static rtx
3464 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3466 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3467 return NULL_RTX;
3469 tree src = CALL_EXPR_ARG (exp, 0);
3470 tree bound = CALL_EXPR_ARG (exp, 1);
3472 if (!bound)
3473 return NULL_RTX;
3475 location_t loc = UNKNOWN_LOCATION;
3476 if (EXPR_HAS_LOCATION (exp))
3477 loc = EXPR_LOCATION (exp);
3479 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3480 so these conversions aren't necessary. */
3481 c_strlen_data lendata = { };
3482 tree len = c_strlen (src, 0, &lendata, 1);
3483 if (len)
3484 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3486 if (TREE_CODE (bound) == INTEGER_CST)
3488 if (!len)
3489 return NULL_RTX;
3491 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3492 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3495 if (TREE_CODE (bound) != SSA_NAME)
3496 return NULL_RTX;
3498 wide_int min, max;
3499 int_range_max r;
3500 get_global_range_query ()->range_of_expr (r, bound);
3501 if (r.varying_p () || r.undefined_p ())
3502 return NULL_RTX;
3503 min = r.lower_bound ();
3504 max = r.upper_bound ();
3506 if (!len || TREE_CODE (len) != INTEGER_CST)
3508 bool exact;
3509 lendata.decl = unterminated_array (src, &len, &exact);
3510 if (!lendata.decl)
3511 return NULL_RTX;
3514 if (lendata.decl)
3515 return NULL_RTX;
3517 if (wi::gtu_p (min, wi::to_wide (len)))
3518 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3520 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3521 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
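/* For instance, with a constant bound and a known string the strnlen
   expansion above folds to a constant:
     __builtin_strnlen ("hello", 3)  -> 3   (MIN (5, 3))
     __builtin_strnlen ("hello", 32) -> 5
   This is a sketch of the net effect; the expansion goes through the
   MIN_EXPR built on the computed length and the bound.  */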
3524 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3525 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3526 a target constant. */
3528 static rtx
3529 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3530 fixed_size_mode mode)
3532 /* The REPresentation pointed to by DATA need not be a nul-terminated
3533 string but the caller guarantees it's large enough for MODE. */
3534 const char *rep = (const char *) data;
3536 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3539 /* LEN specify length of the block of memcpy/memset operation.
3540 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3541 In some cases we can make very likely guess on max size, then we
3542 set it into PROBABLE_MAX_SIZE. */
3544 static void
3545 determine_block_size (tree len, rtx len_rtx,
3546 unsigned HOST_WIDE_INT *min_size,
3547 unsigned HOST_WIDE_INT *max_size,
3548 unsigned HOST_WIDE_INT *probable_max_size)
3550 if (CONST_INT_P (len_rtx))
3552 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3553 return;
3555 else
3557 wide_int min, max;
3558 enum value_range_kind range_type = VR_UNDEFINED;
3560 /* Determine bounds from the type. */
3561 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3562 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3563 else
3564 *min_size = 0;
3565 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3566 *probable_max_size = *max_size
3567 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3568 else
3569 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3571 if (TREE_CODE (len) == SSA_NAME)
3573 int_range_max r;
3574 tree tmin, tmax;
3575 get_global_range_query ()->range_of_expr (r, len);
3576 range_type = get_legacy_range (r, tmin, tmax);
3577 if (range_type != VR_UNDEFINED)
3579 min = wi::to_wide (tmin);
3580 max = wi::to_wide (tmax);
3583 if (range_type == VR_RANGE)
3585 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3586 *min_size = min.to_uhwi ();
3587 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3588 *probable_max_size = *max_size = max.to_uhwi ();
3590 else if (range_type == VR_ANTI_RANGE)
3592 /* Code like
3594 int n;
3595 if (n < 100)
3596 memcpy (a, b, n)
3598 produces an anti-range allowing negative values of N. We can still
3599 use the information and guess that N is not negative. */
3601 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3602 *probable_max_size = min.to_uhwi () - 1;
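/* Continuing the example above: `n < 100' gives no useful hard upper
   bound on the unsigned length (so *max_size stays at the mode mask),
   but the anti-range's lower bound of 100 lets us guess
   *probable_max_size = 99, which the block-move expanders can use to
   pick a cheaper strategy.  */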
3605 gcc_checking_assert (*max_size <=
3606 (unsigned HOST_WIDE_INT)
3607 GET_MODE_MASK (GET_MODE (len_rtx)));
3610 /* Expand a call EXP to the memcpy builtin.
3611 Return NULL_RTX if we failed; the caller should emit a normal call,
3612 otherwise try to get the result in TARGET, if convenient (and in
3613 mode MODE if that's convenient). */
3615 static rtx
3616 expand_builtin_memcpy (tree exp, rtx target)
3618 if (!validate_arglist (exp,
3619 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3620 return NULL_RTX;
3622 tree dest = CALL_EXPR_ARG (exp, 0);
3623 tree src = CALL_EXPR_ARG (exp, 1);
3624 tree len = CALL_EXPR_ARG (exp, 2);
3626 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3627 /*retmode=*/ RETURN_BEGIN, false);
3630 /* Expand a call EXP to the memmove built-in. Return NULL_RTX if we
3631 failed; the caller should emit a normal call. */
3633 static rtx
3634 expand_builtin_memmove (tree exp, rtx target)
3636 if (!validate_arglist (exp,
3637 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3638 return NULL_RTX;
3640 tree dest = CALL_EXPR_ARG (exp, 0);
3641 tree src = CALL_EXPR_ARG (exp, 1);
3642 tree len = CALL_EXPR_ARG (exp, 2);
3644 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3645 /*retmode=*/ RETURN_BEGIN, true);
3648 /* Expand a call EXP to the mempcpy builtin.
3649 Return NULL_RTX if we failed; the caller should emit a normal call,
3650 otherwise try to get the result in TARGET, if convenient (and in
3651 mode MODE if that's convenient). */
3653 static rtx
3654 expand_builtin_mempcpy (tree exp, rtx target)
3656 if (!validate_arglist (exp,
3657 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3658 return NULL_RTX;
3660 tree dest = CALL_EXPR_ARG (exp, 0);
3661 tree src = CALL_EXPR_ARG (exp, 1);
3662 tree len = CALL_EXPR_ARG (exp, 2);
3664 /* Policy does not generally allow using compute_objsize (which
3665 is used internally by check_memop_size) to change code generation
3666 or drive optimization decisions.
3668 In this instance it is safe because the code we generate has
3669 the same semantics regardless of the return value of
3670 check_memop_sizes. Exactly the same amount of data is copied
3671 and the return value is exactly the same in both cases.
3673 Furthermore, check_memop_size always uses mode 0 for the call to
3674 compute_objsize, so the imprecise nature of compute_objsize is
3675 avoided. */
3677 /* Avoid expanding mempcpy into memcpy when the call is determined
3678 to overflow the buffer. This also prevents the same overflow
3679 from being diagnosed again when expanding memcpy. */
3681 return expand_builtin_mempcpy_args (dest, src, len,
3682 target, exp, /*retmode=*/ RETURN_END);
3685 /* Helper function to do the actual work for expand of memory copy family
3686 functions (memcpy, mempcpy, stpcpy). The expansion should copy LEN bytes
3687 of memory from SRC to DEST and assign the result to TARGET if convenient. Return
3688 value is based on RETMODE argument. */
3690 static rtx
3691 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3692 rtx target, tree exp, memop_ret retmode,
3693 bool might_overlap)
3695 unsigned int src_align = get_pointer_alignment (src);
3696 unsigned int dest_align = get_pointer_alignment (dest);
3697 rtx dest_mem, src_mem, dest_addr, len_rtx;
3698 HOST_WIDE_INT expected_size = -1;
3699 unsigned int expected_align = 0;
3700 unsigned HOST_WIDE_INT min_size;
3701 unsigned HOST_WIDE_INT max_size;
3702 unsigned HOST_WIDE_INT probable_max_size;
3704 bool is_move_done;
3706 /* If DEST is not a pointer type, call the normal function. */
3707 if (dest_align == 0)
3708 return NULL_RTX;
3710 /* If SRC is not a pointer type, don't do this
3711 operation in-line. */
3712 if (src_align == 0)
3713 return NULL_RTX;
3715 if (currently_expanding_gimple_stmt)
3716 stringop_block_profile (currently_expanding_gimple_stmt,
3717 &expected_align, &expected_size);
3719 if (expected_align < dest_align)
3720 expected_align = dest_align;
3721 dest_mem = get_memory_rtx (dest, len);
3722 set_mem_align (dest_mem, dest_align);
3723 len_rtx = expand_normal (len);
3724 determine_block_size (len, len_rtx, &min_size, &max_size,
3725 &probable_max_size);
3727 /* Try to get the byte representation of the constant SRC points to,
3728 with its byte size in NBYTES. */
3729 unsigned HOST_WIDE_INT nbytes;
3730 const char *rep = getbyterep (src, &nbytes);
3732 /* If the function's constant bound LEN_RTX is less than or equal
3733 to the byte size of the representation of the constant argument,
3734 and if block move would be done by pieces, we can avoid loading
3735 the bytes from memory and only store the computed constant.
3736 This works in the overlap (memmove) case as well because
3737 store_by_pieces just generates a series of stores of constants
3738 from the representation returned by getbyterep(). */
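/* For example, with
     char buf[8];
     memcpy (buf, "abcdef", 4);
   the constant length 4 does not exceed the size of the string's
   representation, so the copy can become a single constant store --
   roughly `*(uint32_t *) buf = 0x64636261' on a little-endian target
   -- without ever loading from the string literal.  */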
3739 if (rep
3740 && CONST_INT_P (len_rtx)
3741 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3742 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3743 CONST_CAST (char *, rep),
3744 dest_align, false))
3746 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3747 builtin_memcpy_read_str,
3748 CONST_CAST (char *, rep),
3749 dest_align, false, retmode);
3750 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3751 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3752 return dest_mem;
3755 src_mem = get_memory_rtx (src, len);
3756 set_mem_align (src_mem, src_align);
3758 /* Copy word part most expediently. */
3759 enum block_op_methods method = BLOCK_OP_NORMAL;
3760 if (CALL_EXPR_TAILCALL (exp)
3761 && (retmode == RETURN_BEGIN || target == const0_rtx))
3762 method = BLOCK_OP_TAILCALL;
3763 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3764 && retmode == RETURN_END
3765 && !might_overlap
3766 && target != const0_rtx);
3767 if (use_mempcpy_call)
3768 method = BLOCK_OP_NO_LIBCALL_RET;
3769 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3770 expected_align, expected_size,
3771 min_size, max_size, probable_max_size,
3772 use_mempcpy_call, &is_move_done,
3773 might_overlap, tree_ctz (len));
3775 /* Bail out when a mempcpy call would be expanded as a libcall and
3776 the target provides a fast implementation of the mempcpy
3777 routine. */
3778 if (!is_move_done)
3779 return NULL_RTX;
3781 if (dest_addr == pc_rtx)
3782 return NULL_RTX;
3784 if (dest_addr == 0)
3786 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3787 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3790 if (retmode != RETURN_BEGIN && target != const0_rtx)
3792 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3793 /* stpcpy returns a pointer to the last byte (the terminating nul). */
3794 if (retmode == RETURN_END_MINUS_ONE)
3795 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3798 return dest_addr;
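/* The RETMODE values are what distinguish the user-visible results of
   the entry points sharing this helper:
     memcpy (d, s, n)  returns d         (RETURN_BEGIN)
     mempcpy (d, s, n) returns d + n     (RETURN_END)
     stpcpy (d, s)     returns d + n - 1 (RETURN_END_MINUS_ONE, with
                       n = strlen (s) + 1, i.e. a pointer to the
                       terminating nul).  */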
3801 static rtx
3802 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3803 rtx target, tree orig_exp, memop_ret retmode)
3805 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3806 retmode, false);
3809 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3810 if we failed; the caller should emit a normal call. Otherwise try to
3811 get the result in TARGET, if convenient.
3812 Return value is based on RETMODE argument. */
3814 static rtx
3815 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3817 class expand_operand ops[3];
3818 rtx dest_mem;
3819 rtx src_mem;
3821 if (!targetm.have_movstr ())
3822 return NULL_RTX;
3824 dest_mem = get_memory_rtx (dest, NULL);
3825 src_mem = get_memory_rtx (src, NULL);
3826 if (retmode == RETURN_BEGIN)
3828 target = force_reg (Pmode, XEXP (dest_mem, 0));
3829 dest_mem = replace_equiv_address (dest_mem, target);
3832 create_output_operand (&ops[0],
3833 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3834 create_fixed_operand (&ops[1], dest_mem);
3835 create_fixed_operand (&ops[2], src_mem);
3836 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3837 return NULL_RTX;
3839 if (retmode != RETURN_BEGIN && target != const0_rtx)
3841 target = ops[0].value;
3842 /* movstr is supposed to set end to the address of the NUL
3843 terminator. If the caller requested a mempcpy-like return value,
3844 adjust it. */
3845 if (retmode == RETURN_END)
3847 rtx tem = plus_constant (GET_MODE (target),
3848 gen_lowpart (GET_MODE (target), target), 1);
3849 emit_move_insn (target, force_operand (tem, NULL_RTX));
3852 return target;
3855 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3856 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3857 try to get the result in TARGET, if convenient (and in mode MODE if that's
3858 convenient). */
3860 static rtx
3861 expand_builtin_strcpy (tree exp, rtx target)
3863 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3864 return NULL_RTX;
3866 tree dest = CALL_EXPR_ARG (exp, 0);
3867 tree src = CALL_EXPR_ARG (exp, 1);
3869 return expand_builtin_strcpy_args (exp, dest, src, target);
3872 /* Helper function to do the actual work for expand_builtin_strcpy. The
3873 arguments to the builtin_strcpy call DEST and SRC are broken out
3874 so that this can also be called without constructing an actual CALL_EXPR.
3875 The other arguments and return value are the same as for
3876 expand_builtin_strcpy. */
3878 static rtx
3879 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3881 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3884 /* Expand a call EXP to the stpcpy builtin.
3885 Return NULL_RTX if we failed; the caller should emit a normal call,
3886 otherwise try to get the result in TARGET, if convenient (and in
3887 mode MODE if that's convenient). */
3889 static rtx
3890 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3892 tree dst, src;
3893 location_t loc = EXPR_LOCATION (exp);
3895 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3896 return NULL_RTX;
3898 dst = CALL_EXPR_ARG (exp, 0);
3899 src = CALL_EXPR_ARG (exp, 1);
3901 /* If return value is ignored, transform stpcpy into strcpy. */
3902 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3904 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3905 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3906 return expand_expr (result, target, mode, EXPAND_NORMAL);
3908 else
3910 tree len, lenp1;
3911 rtx ret;
3913 /* Ensure we get an actual string whose length can be evaluated at
3914 compile-time, not an expression containing a string. This is
3915 because the latter will potentially produce pessimized code
3916 when used to produce the return value. */
3917 c_strlen_data lendata = { };
3918 if (!c_getstr (src)
3919 || !(len = c_strlen (src, 0, &lendata, 1)))
3920 return expand_movstr (dst, src, target,
3921 /*retmode=*/ RETURN_END_MINUS_ONE);
3923 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3924 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3925 target, exp,
3926 /*retmode=*/ RETURN_END_MINUS_ONE);
3928 if (ret)
3929 return ret;
3931 if (TREE_CODE (len) == INTEGER_CST)
3933 rtx len_rtx = expand_normal (len);
3935 if (CONST_INT_P (len_rtx))
3937 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3939 if (ret)
3941 if (! target)
3943 if (mode != VOIDmode)
3944 target = gen_reg_rtx (mode);
3945 else
3946 target = gen_reg_rtx (GET_MODE (ret));
3948 if (GET_MODE (target) != GET_MODE (ret))
3949 ret = gen_lowpart (GET_MODE (target), ret);
3951 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3952 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3953 gcc_assert (ret);
3955 return target;
3960 return expand_movstr (dst, src, target,
3961 /*retmode=*/ RETURN_END_MINUS_ONE);
3965 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3966 arguments while being careful to avoid duplicate warnings (which could
3967 be issued if the expander were to expand the call, resulting in it
3968 being emitted in expand_call()). */
3970 static rtx
3971 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3973 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3975 /* The call has been successfully expanded. Check for nonstring
3976 arguments and issue warnings as appropriate. */
3977 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3978 return ret;
3981 return NULL_RTX;
3984 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3985 bytes from constant string DATA + OFFSET and return it as target
3986 constant. */
3989 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3990 fixed_size_mode mode)
3992 const char *str = (const char *) data;
3994 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3995 return const0_rtx;
3997 return c_readstr (str + offset, mode);
4000 /* Helper to check the sizes of sequences and the destination of calls
4001 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4002 success (no overflow or invalid sizes), false otherwise. */
4004 static bool
4005 check_strncat_sizes (tree exp, tree objsize)
4007 tree dest = CALL_EXPR_ARG (exp, 0);
4008 tree src = CALL_EXPR_ARG (exp, 1);
4009 tree maxread = CALL_EXPR_ARG (exp, 2);
4011 /* Try to determine the range of lengths that the source expression
4012 refers to. */
4013 c_strlen_data lendata = { };
4014 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4016 /* Try to verify that the destination is big enough for the shortest
4017 string. */
4019 access_data data (nullptr, exp, access_read_write, maxread, true);
4020 if (!objsize && warn_stringop_overflow)
4022 /* If it hasn't been provided by __strncat_chk, try to determine
4023 the size of the destination object into which the source is
4024 being copied. */
4025 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4028 /* Add one for the terminating nul. */
4029 tree srclen = (lendata.minlen
4030 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4031 size_one_node)
4032 : NULL_TREE);
4034 /* The strncat function copies at most MAXREAD bytes and always appends
4035 the terminating nul, so the specified upper bound should never be equal
4036 to (or greater than) the size of the destination. */
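/* For example
     char d[8];
     strncat (d, s, sizeof d);
   is diagnosed here: even when S fits, a bound equal to the
   destination size leaves no room for the nul that strncat always
   appends; the idiomatic bound is sizeof d - strlen (d) - 1.  */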
4037 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4038 && tree_int_cst_equal (objsize, maxread))
4040 location_t loc = EXPR_LOCATION (exp);
4041 warning_at (loc, OPT_Wstringop_overflow_,
4042 "%qD specified bound %E equals destination size",
4043 get_callee_fndecl (exp), maxread);
4045 return false;
4048 if (!srclen
4049 || (maxread && tree_fits_uhwi_p (maxread)
4050 && tree_fits_uhwi_p (srclen)
4051 && tree_int_cst_lt (maxread, srclen)))
4052 srclen = maxread;
4054 /* The number of bytes to write is LEN but check_access will also
4055 check SRCLEN if LEN's value isn't known. */
4056 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
4057 objsize, data.mode, &data);
4060 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4061 NULL_RTX if we failed; the caller should emit a normal call. */
4063 static rtx
4064 expand_builtin_strncpy (tree exp, rtx target)
4066 location_t loc = EXPR_LOCATION (exp);
4068 if (!validate_arglist (exp,
4069 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4070 return NULL_RTX;
4071 tree dest = CALL_EXPR_ARG (exp, 0);
4072 tree src = CALL_EXPR_ARG (exp, 1);
4073 /* The number of bytes to write (not the maximum). */
4074 tree len = CALL_EXPR_ARG (exp, 2);
4076 /* The length of the source sequence. */
4077 tree slen = c_strlen (src, 1);
4079 /* We must be passed a constant len and src parameter. */
4080 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4081 return NULL_RTX;
4083 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4085 /* We're required to pad with trailing zeros if the requested
4086 len is greater than strlen(s2)+1. In that case try to
4087 use store_by_pieces; if that fails, punt. */
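/* For example
     char d[6];
     strncpy (d, "ab", 6);
   must store 'a', 'b' and then four nul bytes of padding; with a
   constant length and source as above, store_by_pieces can emit those
   six bytes directly instead of calling strncpy.  */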
4088 if (tree_int_cst_lt (slen, len))
4090 unsigned int dest_align = get_pointer_alignment (dest);
4091 const char *p = c_getstr (src);
4092 rtx dest_mem;
4094 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4095 || !can_store_by_pieces (tree_to_uhwi (len),
4096 builtin_strncpy_read_str,
4097 CONST_CAST (char *, p),
4098 dest_align, false))
4099 return NULL_RTX;
4101 dest_mem = get_memory_rtx (dest, len);
4102 store_by_pieces (dest_mem, tree_to_uhwi (len),
4103 builtin_strncpy_read_str,
4104 CONST_CAST (char *, p), dest_align, false,
4105 RETURN_BEGIN);
4106 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4107 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4108 return dest_mem;
4111 return NULL_RTX;
4114 /* Return the RTL of a register in MODE generated from PREV in the
4115 previous iteration. */
4117 static rtx
4118 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
4120 rtx target = nullptr;
4121 if (prev != nullptr && prev->data != nullptr)
4123 /* Use the previous data in the same mode. */
4124 if (prev->mode == mode)
4125 return prev->data;
4127 fixed_size_mode prev_mode = prev->mode;
4129 /* Don't use the previous data to write QImode if it is in a
4130 vector mode. */
4131 if (VECTOR_MODE_P (prev_mode) && mode == QImode)
4132 return target;
4134 rtx prev_rtx = prev->data;
4136 if (REG_P (prev_rtx)
4137 && HARD_REGISTER_P (prev_rtx)
4138 && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
4140 /* This case occurs when PREV_MODE is a vector and when
4141 MODE is too small to store using vector operations.
4142 After register allocation, the code will need to move the
4143 lowpart of the vector register into a non-vector register.
4145 Also, the target has chosen to use a hard register
4146 instead of going with the default choice of using a
4147 pseudo register. We should respect that choice and try to
4148 avoid creating a pseudo register with the same mode as the
4149 current hard register.
4151 In principle, we could just use a lowpart MODE subreg of
4152 the vector register. However, the vector register mode might
4153 be too wide for non-vector registers, and we already know
4154 that the non-vector mode is too small for vector registers.
4155 It's therefore likely that we'd need to spill to memory in
4156 the vector mode and reload the non-vector value from there.
4158 Try to avoid that by reducing the vector register to the
4159 smallest size that it can hold. This should increase the
4160 chances that non-vector registers can hold both the inner
4161 and outer modes of the subreg that we generate later. */
4162 machine_mode m;
4163 fixed_size_mode candidate;
4164 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
4165 if (is_a<fixed_size_mode> (m, &candidate))
4167 if (GET_MODE_SIZE (candidate)
4168 >= GET_MODE_SIZE (prev_mode))
4169 break;
4170 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
4171 && lowpart_subreg_regno (REGNO (prev_rtx),
4172 prev_mode, candidate) >= 0)
4174 target = lowpart_subreg (candidate, prev_rtx,
4175 prev_mode);
4176 prev_rtx = target;
4177 prev_mode = candidate;
4178 break;
4181 if (target == nullptr)
4182 prev_rtx = copy_to_reg (prev_rtx);
4185 target = lowpart_subreg (mode, prev_rtx, prev_mode);
4187 return target;
4190 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4191 bytes from constant string DATA + OFFSET and return it as target
4192 constant. If PREV isn't nullptr, it has the RTL info from the
4193 previous iteration. */
4196 builtin_memset_read_str (void *data, void *prev,
4197 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4198 fixed_size_mode mode)
4200 const char *c = (const char *) data;
4201 unsigned int size = GET_MODE_SIZE (mode);
4203 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
4204 mode);
4205 if (target != nullptr)
4206 return target;
4207 rtx src = gen_int_mode (*c, QImode);
4209 if (VECTOR_MODE_P (mode))
4211 gcc_assert (GET_MODE_INNER (mode) == QImode);
4213 rtx const_vec = gen_const_vec_duplicate (mode, src);
4214 if (prev == NULL)
4215 /* Return CONST_VECTOR when called by a query function. */
4216 return const_vec;
4218 /* Use the move expander with CONST_VECTOR. */
4219 target = gen_reg_rtx (mode);
4220 emit_move_insn (target, const_vec);
4221 return target;
4224 char *p = XALLOCAVEC (char, size);
4226 memset (p, *c, size);
4228 return c_readstr (p, mode);
4231 /* Callback routine for store_by_pieces. Return the RTL of a register
4232 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4233 char value given in the RTL register data. For example, if mode is
4234 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
4235 nullptr, it has the RTL info from the previous iteration. */
4237 static rtx
4238 builtin_memset_gen_str (void *data, void *prev,
4239 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4240 fixed_size_mode mode)
4242 rtx target, coeff;
4243 size_t size;
4244 char *p;
4246 size = GET_MODE_SIZE (mode);
4247 if (size == 1)
4248 return (rtx) data;
4250 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
4251 if (target != nullptr)
4252 return target;
4254 if (VECTOR_MODE_P (mode))
4256 gcc_assert (GET_MODE_INNER (mode) == QImode);
4258 /* vec_duplicate_optab is a precondition to pick a vector mode for
4259 the memset expander. */
4260 insn_code icode = optab_handler (vec_duplicate_optab, mode);
4262 target = gen_reg_rtx (mode);
4263 class expand_operand ops[2];
4264 create_output_operand (&ops[0], target, mode);
4265 create_input_operand (&ops[1], (rtx) data, QImode);
4266 expand_insn (icode, 2, ops);
4267 if (!rtx_equal_p (target, ops[0].value))
4268 emit_move_insn (target, ops[0].value);
4270 return target;
4273 p = XALLOCAVEC (char, size);
4274 memset (p, 1, size);
4275 coeff = c_readstr (p, mode);
4277 target = convert_to_mode (mode, (rtx) data, 1);
4278 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4279 return force_reg (mode, target);
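/* A C-level sketch of the multiplication trick used above for scalar
   modes: replicating an arbitrary byte value into every byte of a
   4-byte word (the helper name is illustrative, not part of the
   expander).  */
static inline unsigned int
memset_coeff_sketch (unsigned char c)
{
  /* 0x01010101 * c puts C into each of the four bytes.  */
  return 0x01010101u * (unsigned int) c;
}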
4282 /* Expand expression EXP, which is a call to the memset builtin. Return
4283 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4284 try to get the result in TARGET, if convenient (and in mode MODE if that's
4285 convenient). */
4288 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4290 if (!validate_arglist (exp,
4291 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4292 return NULL_RTX;
4294 tree dest = CALL_EXPR_ARG (exp, 0);
4295 tree val = CALL_EXPR_ARG (exp, 1);
4296 tree len = CALL_EXPR_ARG (exp, 2);
4298 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4301 /* Check that store_by_pieces allows BITS + LEN (so that we don't
4302 expand something unreasonably long), and every power of 2 in
4303 BITS. It is assumed that LEN has already been tested by
4304 itself. */
4305 static bool
4306 can_store_by_multiple_pieces (unsigned HOST_WIDE_INT bits,
4307 by_pieces_constfn constfun,
4308 void *constfundata, unsigned int align,
4309 bool memsetp,
4310 unsigned HOST_WIDE_INT len)
4312 if (bits
4313 && !can_store_by_pieces (bits + len, constfun, constfundata,
4314 align, memsetp))
4315 return false;
4317 /* BITS set are expected to be generally in the low range and
4318 contiguous. We do NOT want to repeat the test above in case BITS
4319 has a single bit set, so we terminate the loop when BITS == BIT.
4320 In the unlikely case that BITS has the MSB set, also terminate in
4321 case BIT gets shifted out. */
4322 for (unsigned HOST_WIDE_INT bit = 1; bit < bits && bit; bit <<= 1)
4324 if ((bits & bit) == 0)
4325 continue;
4327 if (!can_store_by_pieces (bit, constfun, constfundata,
4328 align, memsetp))
4329 return false;
4332 return true;
4335 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
4336 Return TRUE if successful, FALSE otherwise. TO is assumed to be
4337 aligned at an ALIGN-bits boundary. LEN must be a multiple of
4338 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
4340 The strategy is to issue one store_by_pieces for each power of two,
4341 from most to least significant, guarded by a test on whether there
4342 are at least that many bytes left to copy in LEN.
4344 ??? Should we skip some powers of two in favor of loops? Maybe start
4345 at the max of TO/LEN/word alignment, at least when optimizing for
4346 size, instead of ensuring O(log len) dynamic compares? */
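/* As a concrete example of the strategy: for a length known to lie in
   [1, 15] with CTZ_LEN == 0 and no profitable fixed prefix, the
   expansion is roughly

     if (len >= 8) { store 8 bytes; ptr += 8; len -= 8; }
     if (len >= 4) { store 4 bytes; ptr += 4; len -= 4; }
     if (len >= 2) { store 2 bytes; ptr += 2; len -= 2; }
     if (len >= 1) { store 1 byte; }

   i.e. one guarded store_by_pieces per power of two from MAX_BITS
   down to CTZ_LEN.  */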
4348 bool
4349 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4350 unsigned HOST_WIDE_INT min_len,
4351 unsigned HOST_WIDE_INT max_len,
4352 rtx val, char valc, unsigned int align)
4354 int max_bits = floor_log2 (max_len);
4355 int min_bits = floor_log2 (min_len);
4356 int sctz_len = ctz_len;
4358 gcc_checking_assert (sctz_len >= 0);
4360 if (val)
4361 valc = 1;
4363 /* Bits more significant than TST_BITS are part of the shared prefix
4364 in the binary representation of both min_len and max_len. Since
4365 they're identical, we don't need to test them in the loop. */
4366 int tst_bits = (max_bits != min_bits ? max_bits
4367 : floor_log2 (max_len ^ min_len));
4369 /* Save the pre-blksize values. */
4370 int orig_max_bits = max_bits;
4371 int orig_tst_bits = tst_bits;
4373 /* Check whether it's profitable to start by storing a fixed BLKSIZE
4374 bytes, to lower max_bits. In the unlikely case of a constant LEN
4375 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4376 single store_by_pieces, but otherwise, select the minimum multiple
4377 of the ALIGN (in bytes) and of the GCD of the possible LENs that
4378 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
4379 unsigned HOST_WIDE_INT blksize;
4380 if (max_len > min_len)
4382 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4383 align / BITS_PER_UNIT);
4384 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4385 blksize &= ~(alrng - 1);
4387 else if (max_len == min_len)
4388 blksize = max_len;
4389 else
4390 /* Huh, max_len < min_len? Punt. See pr100843.c. */
4391 return false;
4392 if (min_len >= blksize
4393 /* ??? Maybe try smaller fixed-prefix blksizes before
4394 punting? */
4395 && can_store_by_pieces (blksize, builtin_memset_read_str,
4396 &valc, align, true))
4398 min_len -= blksize;
4399 min_bits = floor_log2 (min_len);
4400 max_len -= blksize;
4401 max_bits = floor_log2 (max_len);
4403 tst_bits = (max_bits != min_bits ? max_bits
4404 : floor_log2 (max_len ^ min_len));
4406 else
4407 blksize = 0;
4409 /* Check that we can use store by pieces for the maximum store count
4410 we may issue (initial fixed-size block, plus conditional
4411 power-of-two-sized blocks from max_bits down to ctz_len). */
4412 unsigned HOST_WIDE_INT xlenest = blksize;
4413 if (max_bits >= 0)
4414 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4415 - (HOST_WIDE_INT_1U << ctz_len));
4416 bool max_loop = false;
4417 bool use_store_by_pieces = true;
4418 /* Skip the test in case of overflow in xlenest. It shouldn't
4419 happen because of the way max_bits and blksize are related, but
4420 it doesn't hurt to test. */
4421 if (blksize > xlenest
4422 || !can_store_by_multiple_pieces (xlenest - blksize,
4423 builtin_memset_read_str,
4424 &valc, align, true, blksize))
4426 if (!(flag_inline_stringops & ILSOP_MEMSET))
4427 return false;
4429 for (max_bits = orig_max_bits;
4430 max_bits >= sctz_len;
4431 --max_bits)
4433 xlenest = ((HOST_WIDE_INT_1U << max_bits) * 2
4434 - (HOST_WIDE_INT_1U << ctz_len));
4435 /* Check that blksize plus the bits to be stored as blocks
4436 sized at powers of two can be stored by pieces. This is
4437 like the test above, but with smaller max_bits. Skip
4438 orig_max_bits (it would be redundant). Also skip in case
4439 of overflow. */
4440 if (max_bits < orig_max_bits
4441 && xlenest + blksize >= xlenest
4442 && can_store_by_multiple_pieces (xlenest,
4443 builtin_memset_read_str,
4444 &valc, align, true, blksize))
4446 max_loop = true;
4447 break;
4449 if (blksize
4450 && can_store_by_multiple_pieces (xlenest,
4451 builtin_memset_read_str,
4452 &valc, align, true, 0))
4454 max_len += blksize;
4455 min_len += blksize;
4456 tst_bits = orig_tst_bits;
4457 blksize = 0;
4458 max_loop = true;
4459 break;
4461 if (max_bits == sctz_len)
4463 /* We'll get here if can_store_by_pieces refuses to
4464 store even a single QImode. We'll fall back to
4465 QImode stores then. */
4466 if (!sctz_len)
4468 blksize = 0;
4469 max_loop = true;
4470 use_store_by_pieces = false;
4471 break;
4473 --sctz_len;
4474 --ctz_len;
4477 if (!max_loop)
4478 return false;
4479 /* If the boundaries are such that min and max may run a
4480 different number of trips in the initial loop, the remainder
4481 need not be between the moduli, so set tst_bits to cover all
4482 bits. Otherwise, if the trip counts are the same, max_len
4483 has the common prefix, and the previously-computed tst_bits
4484 is usable. */
4485 if (max_len >> max_bits > min_len >> max_bits)
4486 tst_bits = max_bits;
4489 by_pieces_constfn constfun;
4490 void *constfundata;
4491 if (val)
4493 constfun = builtin_memset_gen_str;
4494 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4495 val);
4497 else
4499 constfun = builtin_memset_read_str;
4500 constfundata = &valc;
4503 rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4504 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4505 to = replace_equiv_address (to, ptr);
4506 set_mem_align (to, align);
4508 if (blksize)
4510 to = store_by_pieces (to, blksize,
4511 constfun, constfundata,
4512 align, true,
4513 max_len != 0 ? RETURN_END : RETURN_BEGIN);
4514 if (max_len == 0)
4515 return true;
4517 /* Adjust PTR, TO and REM. Since TO's address is likely
4518 PTR+offset, we have to replace it. */
4519 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4520 to = replace_equiv_address (to, ptr);
4521 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4522 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4525 /* Iterate over power-of-two block sizes from the maximum length to
4526 the least significant bit possibly set in the length. */
4527 for (int i = max_bits; i >= sctz_len; i--)
4529 rtx_code_label *loop_label = NULL;
4530 rtx_code_label *label = NULL;
4532 blksize = HOST_WIDE_INT_1U << i;
4534 /* If we're past the bits shared between min_ and max_len, expand
4535 a test on the dynamic length, comparing it with the
4536 BLKSIZE. */
4537 if (i <= tst_bits)
4539 label = gen_label_rtx ();
4540 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4541 ptr_mode, 1, label,
4542 profile_probability::even ());
4544 /* If we are at a bit that is in the prefix shared by min_ and
4545 max_len, skip the current BLKSIZE if the bit is clear, but do
4546 not skip the loop, even if it doesn't require
4547 prechecking. */
4548 else if ((max_len & blksize) == 0
4549 && !(max_loop && i == max_bits))
4550 continue;
4552 if (max_loop && i == max_bits)
4554 loop_label = gen_label_rtx ();
4555 emit_label (loop_label);
4556 /* Since we may run this multiple times, don't assume we
4557 know anything about the offset. */
4558 clear_mem_offset (to);
4561 bool update_needed = i != sctz_len || loop_label;
4562 rtx next_ptr = NULL_RTX;
4563 if (!use_store_by_pieces)
4565 gcc_checking_assert (blksize == 1);
4566 if (!val)
4567 val = gen_int_mode (valc, QImode);
4568 to = change_address (to, QImode, 0);
4569 emit_move_insn (to, val);
4570 if (update_needed)
4571 next_ptr = plus_constant (GET_MODE (ptr), ptr, blksize);
4573 else
4575 /* Issue a store of BLKSIZE bytes. */
4576 to = store_by_pieces (to, blksize,
4577 constfun, constfundata,
4578 align, true,
4579 update_needed ? RETURN_END : RETURN_BEGIN);
4580 next_ptr = XEXP (to, 0);
4582 /* Adjust REM and PTR, unless this is the last iteration. */
4583 if (update_needed)
4585 emit_move_insn (ptr, force_operand (next_ptr, NULL_RTX));
4586 to = replace_equiv_address (to, ptr);
4587 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4588 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4591 if (loop_label)
4592 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), GE, NULL,
4593 ptr_mode, 1, loop_label,
4594 profile_probability::likely ());
4596 if (label)
4598 emit_label (label);
4600 /* Given conditional stores, the offset can no longer be
4601 known, so clear it. */
4602 clear_mem_offset (to);
4606 return true;
4609 /* Helper function to do the actual work for expand_builtin_memset. The
4610 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4611 so that this can also be called without constructing an actual CALL_EXPR.
4612 The other arguments and return value are the same as for
4613 expand_builtin_memset. */
4615 static rtx
4616 expand_builtin_memset_args (tree dest, tree val, tree len,
4617 rtx target, machine_mode mode, tree orig_exp)
4619 tree fndecl, fn;
4620 enum built_in_function fcode;
4621 machine_mode val_mode;
4622 char c;
4623 unsigned int dest_align;
4624 rtx dest_mem, dest_addr, len_rtx;
4625 HOST_WIDE_INT expected_size = -1;
4626 unsigned int expected_align = 0;
4627 unsigned HOST_WIDE_INT min_size;
4628 unsigned HOST_WIDE_INT max_size;
4629 unsigned HOST_WIDE_INT probable_max_size;
4631 dest_align = get_pointer_alignment (dest);
4633 /* If DEST is not a pointer type, don't do this operation in-line. */
4634 if (dest_align == 0)
4635 return NULL_RTX;
4637 if (currently_expanding_gimple_stmt)
4638 stringop_block_profile (currently_expanding_gimple_stmt,
4639 &expected_align, &expected_size);
4641 if (expected_align < dest_align)
4642 expected_align = dest_align;
4644 /* If the LEN parameter is zero, return DEST. */
4645 if (integer_zerop (len))
4647 /* Evaluate and ignore VAL in case it has side-effects. */
4648 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4649 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4652 /* Stabilize the arguments in case we fail. */
4653 dest = builtin_save_expr (dest);
4654 val = builtin_save_expr (val);
4655 len = builtin_save_expr (len);
4657 len_rtx = expand_normal (len);
4658 determine_block_size (len, len_rtx, &min_size, &max_size,
4659 &probable_max_size);
4660 dest_mem = get_memory_rtx (dest, len);
4661 val_mode = TYPE_MODE (unsigned_char_type_node);
4663 if (TREE_CODE (val) != INTEGER_CST
4664 || target_char_cast (val, &c))
4666 rtx val_rtx;
4668 val_rtx = expand_normal (val);
4669 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4671 /* Assume that we can memset by pieces if we can store
4672 the coefficients by pieces (in the required modes).
4673 We can't pass builtin_memset_gen_str as that emits RTL. */
4674 c = 1;
4675 if (tree_fits_uhwi_p (len)
4676 && can_store_by_pieces (tree_to_uhwi (len),
4677 builtin_memset_read_str, &c, dest_align,
4678 true))
4680 val_rtx = force_reg (val_mode, val_rtx);
4681 store_by_pieces (dest_mem, tree_to_uhwi (len),
4682 builtin_memset_gen_str, val_rtx, dest_align,
4683 true, RETURN_BEGIN);
4685 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4686 dest_align, expected_align,
4687 expected_size, min_size, max_size,
4688 probable_max_size)
4689 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4690 tree_ctz (len),
4691 min_size, max_size,
4692 val_rtx, 0,
4693 dest_align))
4694 goto do_libcall;
4696 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4697 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4698 return dest_mem;
4701 if (c)
4703 if (tree_fits_uhwi_p (len)
4704 && can_store_by_pieces (tree_to_uhwi (len),
4705 builtin_memset_read_str, &c, dest_align,
4706 true))
4707 store_by_pieces (dest_mem, tree_to_uhwi (len),
4708 builtin_memset_read_str, &c, dest_align, true,
4709 RETURN_BEGIN);
4710 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4711 gen_int_mode (c, val_mode),
4712 dest_align, expected_align,
4713 expected_size, min_size, max_size,
4714 probable_max_size)
4715 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4716 tree_ctz (len),
4717 min_size, max_size,
4718 NULL_RTX, c,
4719 dest_align))
4720 goto do_libcall;
4722 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4723 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4724 return dest_mem;
4727 set_mem_align (dest_mem, dest_align);
4728 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4729 CALL_EXPR_TAILCALL (orig_exp)
4730 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4731 expected_align, expected_size,
4732 min_size, max_size,
4733 probable_max_size, tree_ctz (len));
4735 if (dest_addr == 0)
4737 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4738 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4741 return dest_addr;
4743 do_libcall:
4744 fndecl = get_callee_fndecl (orig_exp);
4745 fcode = DECL_FUNCTION_CODE (fndecl);
4746 if (fcode == BUILT_IN_MEMSET)
4747 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4748 dest, val, len);
4749 else if (fcode == BUILT_IN_BZERO)
4750 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4751 dest, len);
4752 else
4753 gcc_unreachable ();
4754 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4755 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4756 return expand_call (fn, target, target == const0_rtx);
4759 /* Expand expression EXP, which is a call to the bzero builtin. Return
4760 NULL_RTX if we failed; the caller should emit a normal call. */
4762 static rtx
4763 expand_builtin_bzero (tree exp)
4765 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4766 return NULL_RTX;
4768 tree dest = CALL_EXPR_ARG (exp, 0);
4769 tree size = CALL_EXPR_ARG (exp, 1);
4771 /* New argument list transforming bzero(ptr x, int y) to
4772 memset(ptr x, int 0, size_t y). This is done this way
4773 so that if it isn't expanded inline, we fall back to
4774 calling bzero instead of memset. */
4776 location_t loc = EXPR_LOCATION (exp);
4778 return expand_builtin_memset_args (dest, integer_zero_node,
4779 fold_convert_loc (loc,
4780 size_type_node, size),
4781 const0_rtx, VOIDmode, exp);
4784 /* Try to expand cmpstr operation ICODE with the given operands.
4785 Return the result rtx on success, otherwise return null. */
4787 static rtx
4788 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4789 HOST_WIDE_INT align)
4791 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4793 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4794 target = NULL_RTX;
4796 class expand_operand ops[4];
4797 create_output_operand (&ops[0], target, insn_mode);
4798 create_fixed_operand (&ops[1], arg1_rtx);
4799 create_fixed_operand (&ops[2], arg2_rtx);
4800 create_integer_operand (&ops[3], align);
4801 if (maybe_expand_insn (icode, 4, ops))
4802 return ops[0].value;
4803 return NULL_RTX;
4806 /* Expand expression EXP, which is a call to the memcmp built-in function.
4807 Return NULL_RTX if we failed and the caller should emit a normal call,
4808 otherwise try to get the result in TARGET, if convenient.
4809 RESULT_EQ is true if we can relax the returned value to be either zero
4810 or nonzero, without caring about the sign. */
4812 static rtx
4813 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4815 if (!validate_arglist (exp,
4816 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4817 return NULL_RTX;
4819 tree arg1 = CALL_EXPR_ARG (exp, 0);
4820 tree arg2 = CALL_EXPR_ARG (exp, 1);
4821 tree len = CALL_EXPR_ARG (exp, 2);
4823 /* Due to the performance benefit, always inline the calls first
4824 when result_eq is false. */
4825 rtx result = NULL_RTX;
4826 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4827 if (!result_eq && fcode != BUILT_IN_BCMP)
4829 result = inline_expand_builtin_bytecmp (exp, target);
4830 if (result)
4831 return result;
4834 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4835 location_t loc = EXPR_LOCATION (exp);
4837 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4838 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4840 /* If we don't have POINTER_TYPE, call the function. */
4841 if (arg1_align == 0 || arg2_align == 0)
4842 return NULL_RTX;
4844 rtx arg1_rtx = get_memory_rtx (arg1, len);
4845 rtx arg2_rtx = get_memory_rtx (arg2, len);
4846 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4848 /* Set MEM_SIZE as appropriate. */
4849 if (CONST_INT_P (len_rtx))
4851 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4852 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4855 by_pieces_constfn constfn = NULL;
4857 /* Try to get the byte representation of the constant ARG2 (or, only
4858 when the function's result is used for equality to zero, ARG1)
4859 points to, with its byte size in NBYTES. */
4860 unsigned HOST_WIDE_INT nbytes;
4861 const char *rep = getbyterep (arg2, &nbytes);
4862 if (result_eq && rep == NULL)
4864 /* For equality to zero the arguments are interchangeable. */
4865 rep = getbyterep (arg1, &nbytes);
4866 if (rep != NULL)
4867 std::swap (arg1_rtx, arg2_rtx);
4870 /* If the function's constant bound LEN_RTX is less than or equal
4871 to the byte size of the representation of the constant argument,
4872 and if block move would be done by pieces, we can avoid loading
4873 the bytes from memory and only store the computed constant result. */
4874 if (rep
4875 && CONST_INT_P (len_rtx)
4876 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4877 constfn = builtin_memcpy_read_str;
4879 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4880 TREE_TYPE (len), target,
4881 result_eq, constfn,
4882 CONST_CAST (char *, rep),
4883 tree_ctz (len));
4885 if (result)
4887 /* Return the value in the proper mode for this function. */
4888 if (GET_MODE (result) == mode)
4889 return result;
4891 if (target != 0)
4893 convert_move (target, result, 0);
4894 return target;
4897 return convert_to_mode (mode, result, 0);
4900 return NULL_RTX;
4903 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4904 if we failed; the caller should emit a normal call, otherwise try to get
4905 the result in TARGET, if convenient. */
4907 static rtx
4908 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4910 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4911 return NULL_RTX;
4913 tree arg1 = CALL_EXPR_ARG (exp, 0);
4914 tree arg2 = CALL_EXPR_ARG (exp, 1);
4916 /* Due to the performance benefit, always inline the calls first. */
4917 rtx result = NULL_RTX;
4918 result = inline_expand_builtin_bytecmp (exp, target);
4919 if (result)
4920 return result;
4922 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4923 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4924 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4925 return NULL_RTX;
4927 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4928 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4930 /* If we don't have POINTER_TYPE, call the function. */
4931 if (arg1_align == 0 || arg2_align == 0)
4932 return NULL_RTX;
4934 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4935 arg1 = builtin_save_expr (arg1);
4936 arg2 = builtin_save_expr (arg2);
4938 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4939 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4941 /* Try to call cmpstrsi. */
4942 if (cmpstr_icode != CODE_FOR_nothing)
4943 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4944 MIN (arg1_align, arg2_align));
4946 /* Try to determine at least one length and call cmpstrnsi. */
4947 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4949 tree len;
4950 rtx arg3_rtx;
4952 tree len1 = c_strlen (arg1, 1);
4953 tree len2 = c_strlen (arg2, 1);
4955 if (len1)
4956 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4957 if (len2)
4958 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4960 /* If we don't have a constant length for the first, use the length
4961 of the second, if we know it. We don't require a constant for
4962 this case; some cost analysis could be done if both are available
4963 but neither is constant. For now, assume they're equally cheap,
4964 unless one has side effects. If both strings have constant lengths,
4965 use the smaller. */
4967 if (!len1)
4968 len = len2;
4969 else if (!len2)
4970 len = len1;
4971 else if (TREE_SIDE_EFFECTS (len1))
4972 len = len2;
4973 else if (TREE_SIDE_EFFECTS (len2))
4974 len = len1;
4975 else if (TREE_CODE (len1) != INTEGER_CST)
4976 len = len2;
4977 else if (TREE_CODE (len2) != INTEGER_CST)
4978 len = len1;
4979 else if (tree_int_cst_lt (len1, len2))
4980 len = len1;
4981 else
4982 len = len2;
4984 /* If both arguments have side effects, we cannot optimize. */
4985 if (len && !TREE_SIDE_EFFECTS (len))
4987 arg3_rtx = expand_normal (len);
4988 result = expand_cmpstrn_or_cmpmem
4989 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4990 arg3_rtx, MIN (arg1_align, arg2_align));
4994 tree fndecl = get_callee_fndecl (exp);
4995 if (result)
4997 /* Return the value in the proper mode for this function. */
4998 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4999 if (GET_MODE (result) == mode)
5000 return result;
5001 if (target == 0)
5002 return convert_to_mode (mode, result, 0);
5003 convert_move (target, result, 0);
5004 return target;
5007 /* Expand the library call ourselves using a stabilized argument
5008 list to avoid re-evaluating the function's arguments twice. */
5009 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5010 copy_warning (fn, exp);
5011 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5012 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5013 return expand_call (fn, target, target == const0_rtx);
5016 /* Expand expression EXP, which is a call to the strncmp builtin. Return
5017 NULL_RTX if we failed; the caller should emit a normal call, otherwise
5018 try to get the result in TARGET, if convenient. */
5020 static rtx
5021 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5022 ATTRIBUTE_UNUSED machine_mode mode)
5024 if (!validate_arglist (exp,
5025 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5026 return NULL_RTX;
5028 tree arg1 = CALL_EXPR_ARG (exp, 0);
5029 tree arg2 = CALL_EXPR_ARG (exp, 1);
5030 tree arg3 = CALL_EXPR_ARG (exp, 2);
5032 location_t loc = EXPR_LOCATION (exp);
5033 tree len1 = c_strlen (arg1, 1);
5034 tree len2 = c_strlen (arg2, 1);
5036 /* Due to the performance benefit, always inline the calls first. */
5037 rtx result = NULL_RTX;
5038 result = inline_expand_builtin_bytecmp (exp, target);
5039 if (result)
5040 return result;
5042 /* If c_strlen can determine an expression for one of the string
5043 lengths, and it doesn't have side effects, then emit cmpstrnsi
5044 using length MIN(strlen(string)+1, arg3). */
5045 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5046 if (cmpstrn_icode == CODE_FOR_nothing)
5047 return NULL_RTX;
5049 tree len;
5051 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5052 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5054 if (len1)
5055 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5056 if (len2)
5057 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5059 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5061 /* If we don't have a constant length for the first, use the length
5062 of the second, if we know it. If neither string is constant length,
5063 use the given length argument. We don't require a constant for
5064 this case; some cost analysis could be done if both are available
5065 but neither is constant. For now, assume they're equally cheap,
5066 unless one has side effects. If both strings have constant lengths,
5067 use the smaller. */
5069 if (!len1 && !len2)
5070 len = len3;
5071 else if (!len1)
5072 len = len2;
5073 else if (!len2)
5074 len = len1;
5075 else if (TREE_SIDE_EFFECTS (len1))
5076 len = len2;
5077 else if (TREE_SIDE_EFFECTS (len2))
5078 len = len1;
5079 else if (TREE_CODE (len1) != INTEGER_CST)
5080 len = len2;
5081 else if (TREE_CODE (len2) != INTEGER_CST)
5082 len = len1;
5083 else if (tree_int_cst_lt (len1, len2))
5084 len = len1;
5085 else
5086 len = len2;
5088 /* If we are not using the given length, we must incorporate it here.
5089 The actual new length parameter will be MIN(len,arg3) in this case. */
5090 if (len != len3)
5092 len = fold_convert_loc (loc, sizetype, len);
5093 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
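/* E.g. for strncmp (s, "ab", 100) the comparison length becomes
   MIN (3, 100) == 3: the result is already decided within "ab" plus
   its terminating nul, so the larger caller-supplied bound is
   irrelevant.  */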
5095 rtx arg1_rtx = get_memory_rtx (arg1, len);
5096 rtx arg2_rtx = get_memory_rtx (arg2, len);
5097 rtx arg3_rtx = expand_normal (len);
5098 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5099 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5100 MIN (arg1_align, arg2_align));
5102 tree fndecl = get_callee_fndecl (exp);
5103 if (result)
5105 /* Return the value in the proper mode for this function. */
5106 mode = TYPE_MODE (TREE_TYPE (exp));
5107 if (GET_MODE (result) == mode)
5108 return result;
5109 if (target == 0)
5110 return convert_to_mode (mode, result, 0);
5111 convert_move (target, result, 0);
5112 return target;
5115 /* Expand the library call ourselves using a stabilized argument
5116 list to avoid re-evaluating the function's arguments twice. */
5117 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5118 copy_warning (call, exp);
5119 gcc_assert (TREE_CODE (call) == CALL_EXPR);
5120 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
5121 return expand_call (call, target, target == const0_rtx);
5124 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5125 if that's convenient. */
5128 expand_builtin_saveregs (void)
5130 rtx val;
5131 rtx_insn *seq;
5133 /* Don't do __builtin_saveregs more than once in a function.
5134 Save the result of the first call and reuse it. */
5135 if (saveregs_value != 0)
5136 return saveregs_value;
5138 /* When this function is called, it means that registers must be
5139 saved on entry to this function. So we migrate the call to the
5140 first insn of this function. */
5142 start_sequence ();
5144 /* Do whatever the machine needs done in this case. */
5145 val = targetm.calls.expand_builtin_saveregs ();
5147 seq = get_insns ();
5148 end_sequence ();
5150 saveregs_value = val;
5152 /* Put the insns after the NOTE that starts the function. If this
5153 is inside a start_sequence, make the outer-level insn chain current, so
5154 the code is placed at the start of the function. */
5155 push_topmost_sequence ();
5156 emit_insn_after (seq, entry_of_function ());
5157 pop_topmost_sequence ();
5159 return val;
5162 /* Expand a call to __builtin_next_arg. */
5164 static rtx
5165 expand_builtin_next_arg (void)
5167 /* Checking arguments is already done in fold_builtin_next_arg
5168 that must be called before this function. */
5169 return expand_binop (ptr_mode, add_optab,
5170 crtl->args.internal_arg_pointer,
5171 crtl->args.arg_offset_rtx,
5172 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5175 /* Make it easier for the backends by protecting the valist argument
5176 from multiple evaluations. */
5178 static tree
5179 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5181 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5183 /* The current way of determining the type of valist is completely
5184 bogus. We should have the information on the va builtin instead. */
5185 if (!vatype)
5186 vatype = targetm.fn_abi_va_list (cfun->decl);
5188 if (TREE_CODE (vatype) == ARRAY_TYPE)
5190 if (TREE_SIDE_EFFECTS (valist))
5191 valist = save_expr (valist);
5193 /* For this case, the backends will be expecting a pointer to
5194 vatype, but it's possible we've actually been given an array
5195 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5196 So fix it. */
5197 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5199 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5200 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5203 else
5205 tree pt = build_pointer_type (vatype);
5207 if (! needs_lvalue)
5209 if (! TREE_SIDE_EFFECTS (valist))
5210 return valist;
5212 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5213 TREE_SIDE_EFFECTS (valist) = 1;
5216 if (TREE_SIDE_EFFECTS (valist))
5217 valist = save_expr (valist);
5218 valist = fold_build2_loc (loc, MEM_REF,
5219 vatype, valist, build_int_cst (pt, 0));
5222 return valist;
5225 /* The "standard" definition of va_list is void*. */
5227 tree
5228 std_build_builtin_va_list (void)
5230 return ptr_type_node;
5233 /* The "standard" abi va_list is va_list_type_node. */
5235 tree
5236 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5238 return va_list_type_node;
5241 /* The "standard" type of va_list is va_list_type_node. */
5243 tree
5244 std_canonical_va_list_type (tree type)
5246 tree wtype, htype;
5248 wtype = va_list_type_node;
5249 htype = type;
5251 if (TREE_CODE (wtype) == ARRAY_TYPE)
5253 /* If va_list is an array type, the argument may have decayed
5254 to a pointer type, e.g. by being passed to another function.
5255 In that case, unwrap both types so that we can compare the
5256 underlying records. */
5257 if (TREE_CODE (htype) == ARRAY_TYPE
5258 || POINTER_TYPE_P (htype))
5260 wtype = TREE_TYPE (wtype);
5261 htype = TREE_TYPE (htype);
5264 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5265 return va_list_type_node;
5267 return NULL_TREE;
5270 /* The "standard" implementation of va_start: just assign `nextarg' to
5271 the variable. */
5273 void
5274 std_expand_builtin_va_start (tree valist, rtx nextarg)
5276 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5277 convert_move (va_r, nextarg, 0);
5280 /* Expand EXP, a call to __builtin_va_start. */
5282 static rtx
5283 expand_builtin_va_start (tree exp)
5285 rtx nextarg;
5286 tree valist;
5287 location_t loc = EXPR_LOCATION (exp);
5289 if (call_expr_nargs (exp) < 2)
5291 error_at (loc, "too few arguments to function %<va_start%>");
5292 return const0_rtx;
5295 if (fold_builtin_next_arg (exp, true))
5296 return const0_rtx;
5298 nextarg = expand_builtin_next_arg ();
5299 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5301 if (targetm.expand_builtin_va_start)
5302 targetm.expand_builtin_va_start (valist, nextarg);
5303 else
5304 std_expand_builtin_va_start (valist, nextarg);
5306 return const0_rtx;
5309 /* Expand EXP, a call to __builtin_va_end. */
5311 static rtx
5312 expand_builtin_va_end (tree exp)
5314 tree valist = CALL_EXPR_ARG (exp, 0);
5316 /* Evaluate for side effects, if needed. I hate macros that don't
5317 do that. */
5318 if (TREE_SIDE_EFFECTS (valist))
5319 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5321 return const0_rtx;
5324 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5325 builtin rather than just as an assignment in stdarg.h because of the
5326 nastiness of array-type va_list types. */
5328 static rtx
5329 expand_builtin_va_copy (tree exp)
5331 tree dst, src, t;
5332 location_t loc = EXPR_LOCATION (exp);
5334 dst = CALL_EXPR_ARG (exp, 0);
5335 src = CALL_EXPR_ARG (exp, 1);
5337 dst = stabilize_va_list_loc (loc, dst, 1);
5338 src = stabilize_va_list_loc (loc, src, 0);
5340 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5342 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5344 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5345 TREE_SIDE_EFFECTS (t) = 1;
5346 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5348 else
5350 rtx dstb, srcb, size;
5352 /* Evaluate to pointers. */
5353 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5354 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5355 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5356 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5358 dstb = convert_memory_address (Pmode, dstb);
5359 srcb = convert_memory_address (Pmode, srcb);
5361 /* "Dereference" to BLKmode memories. */
5362 dstb = gen_rtx_MEM (BLKmode, dstb);
5363 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5364 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5365 srcb = gen_rtx_MEM (BLKmode, srcb);
5366 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5367 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5369 /* Copy. */
5370 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5373 return const0_rtx;
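/* For illustration: on targets where va_list is an array type, e.g.
   x86-64's

     typedef struct __va_list_tag va_list[1];

   a plain `dst = src;' in stdarg.h would not be a valid assignment and the
   two objects decay to pointers when passed around, which is why the
   ARRAY_TYPE branch above copies the underlying record as a block instead
   of emitting a simple MODIFY_EXPR.  */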
5376 /* Expand a call to one of the builtin functions __builtin_frame_address or
5377 __builtin_return_address. */
5379 static rtx
5380 expand_builtin_frame_address (tree fndecl, tree exp)
5382 /* The argument must be a nonnegative integer constant.
5383 It counts the number of frames to scan up the stack.
5384 The value is either the frame pointer value or the return
5385 address saved in that frame. */
5386 if (call_expr_nargs (exp) == 0)
5387 /* Warning about missing arg was already issued. */
5388 return const0_rtx;
5389 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5391 error ("invalid argument to %qD", fndecl);
5392 return const0_rtx;
5394 else
5396 /* Number of frames to scan up the stack. */
5397 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5399 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5401 /* Some ports cannot access arbitrary stack frames. */
5402 if (tem == NULL)
5404 warning (0, "unsupported argument to %qD", fndecl);
5405 return const0_rtx;
5408 if (count)
5410 /* Warn since no effort is made to ensure that any frame
5411 beyond the current one exists or can be safely reached. */
5412 warning (OPT_Wframe_address, "calling %qD with "
5413 "a nonzero argument is unsafe", fndecl);
5416 /* For __builtin_frame_address, return what we've got. */
5417 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5418 return tem;
5420 if (!REG_P (tem)
5421 && ! CONSTANT_P (tem))
5422 tem = copy_addr_to_reg (tem);
5423 return tem;
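/* Typical uses, for illustration:

     void *ra = __builtin_return_address (0);   // always supported
     void *fp = __builtin_frame_address (1);    // may trigger -Wframe-address

   A zero count is handled by every port; any nonzero count reaches the
   warning above because nothing guarantees that an outer frame exists or
   can be walked safely.  */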
5427 #if ! STACK_GROWS_DOWNWARD
5428 # define STACK_TOPS GT
5429 #else
5430 # define STACK_TOPS LT
5431 #endif
5433 #ifdef POINTERS_EXTEND_UNSIGNED
5434 # define STACK_UNSIGNED POINTERS_EXTEND_UNSIGNED
5435 #else
5436 # define STACK_UNSIGNED true
5437 #endif
5439 /* Expand a call to builtin function __builtin_stack_address. */
5441 static rtx
5442 expand_builtin_stack_address ()
5444 rtx ret = convert_to_mode (ptr_mode, copy_to_reg (stack_pointer_rtx),
5445 STACK_UNSIGNED);
5447 #ifdef STACK_ADDRESS_OFFSET
5448 /* Unbias the stack pointer, bringing it to the boundary between the
5449 stack area claimed by the active function calling this builtin,
5450 and stack ranges that could get clobbered if it called another
5451 function. It should NOT encompass any stack red zone, that is
5452 used in leaf functions.
5454 On SPARC, the register save area is *not* considered active or
5455 used by the active function, but rather as akin to the area in
5456 which call-preserved registers are saved by callees. This
5457 enables __strub_leave to clear what would otherwise overlap with
5458 its own register save area.
5460 If the address is computed too high or too low, parts of a stack
5461 range that should be scrubbed may be left unscrubbed, scrubbing
5462 may corrupt active portions of the stack frame, and stack ranges
5463 may be doubly-scrubbed by caller and callee.
5465 In order for it to be just right, the area delimited by
5466 @code{__builtin_stack_address} and @code{__builtin_frame_address
5467 (0)} should encompass caller's registers saved by the function,
5468 local on-stack variables and @code{alloca} stack areas.
5469 Accumulated outgoing on-stack arguments, preallocated as part of
5470 a function's own prologue, are to be regarded as part of the
5471 (caller) function's active area as well, whereas those pushed or
5472 allocated temporarily for a call are regarded as part of the
5473 callee's stack range, rather than the caller's. */
5474 ret = plus_constant (ptr_mode, ret, STACK_ADDRESS_OFFSET);
5475 #endif
5477 return force_reg (ptr_mode, ret);
5480 /* Expand a call to builtin function __builtin_strub_enter. */
5482 static rtx
5483 expand_builtin_strub_enter (tree exp)
5485 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5486 return NULL_RTX;
5488 if (optimize < 1 || flag_no_inline)
5489 return NULL_RTX;
5491 rtx stktop = expand_builtin_stack_address ();
5493 tree wmptr = CALL_EXPR_ARG (exp, 0);
5494 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5495 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5496 build_int_cst (TREE_TYPE (wmptr), 0));
5497 rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5499 emit_move_insn (wmark, stktop);
5501 return const0_rtx;
5504 /* Expand a call to builtin function __builtin_strub_update. */
5506 static rtx
5507 expand_builtin_strub_update (tree exp)
5509 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5510 return NULL_RTX;
5512 if (optimize < 2 || flag_no_inline)
5513 return NULL_RTX;
5515 rtx stktop = expand_builtin_stack_address ();
5517 #ifdef RED_ZONE_SIZE
5518 /* Here's how the strub enter, update and leave functions deal with red zones.
5520 If it weren't for red zones, update, called from within a strub context,
5521 would bump the watermark to the top of the stack. Enter and leave, running
5522 in the caller, would use the caller's top of stack address both to
5523 initialize the watermark passed to the callee, and to start strubbing the
5524 stack afterwards.
5526 Ideally, we'd update the watermark so as to cover the used amount of red
5527 zone, and strub starting at the caller's other end of the (presumably
5528 unused) red zone. Normally, only leaf functions use the red zone, but at
5529 this point we can't tell whether a function is a leaf, nor can we tell how
5530 much of the red zone it uses. Furthermore, some strub contexts may have
5531 been inlined so that update and leave are called from the same stack frame,
5532 and the strub builtins may all have been inlined, turning a strub function
5533 into a leaf.
5535 So cleaning the range from the caller's stack pointer (one end of the red
5536 zone) to the (potentially inlined) callee's (other end of the) red zone
5537 could scribble over the caller's own red zone.
5539 We avoid this possibility by arranging for callers that are strub contexts
5540 to use their own watermark as the strub starting point. So, if A calls B,
5541 and B calls C, B will tell A to strub up to the end of B's red zone, and
5542 will strub itself only the part of C's stack frame and red zone that
5543 doesn't overlap with B's. With that, we don't need to know who's leaf and
5544 who isn't: inlined calls will shrink their strub window to zero, each
5545 remaining call will strub some portion of the stack, and eventually the
5546 strub context will return to a caller that isn't a strub context itself,
5547 that will therefore use its own stack pointer as the strub starting point.
5548 It's not a leaf, because strub contexts can't be inlined into non-strub
5549 contexts, so it doesn't use the red zone, and it will therefore correctly
5550 strub up the callee's stack frame up to the end of the callee's red zone.
5551 Neat! */
5552 if (true /* (flags_from_decl_or_type (current_function_decl) & ECF_LEAF) */)
5554 poly_int64 red_zone_size = RED_ZONE_SIZE;
5555 #if STACK_GROWS_DOWNWARD
5556 red_zone_size = -red_zone_size;
5557 #endif
5558 stktop = plus_constant (ptr_mode, stktop, red_zone_size);
5559 stktop = force_reg (ptr_mode, stktop);
5561 #endif
5563 tree wmptr = CALL_EXPR_ARG (exp, 0);
5564 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5565 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5566 build_int_cst (TREE_TYPE (wmptr), 0));
5567 rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5569 rtx wmarkr = force_reg (ptr_mode, wmark);
5571 rtx_code_label *lab = gen_label_rtx ();
5572 do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5573 ptr_mode, NULL_RTX, lab, NULL,
5574 profile_probability::very_likely ());
5575 emit_move_insn (wmark, stktop);
5577 /* If this is an inlined strub function, also bump the watermark for the
5578 enclosing function. This avoids a problem with the following scenario: A
5579 calls B and B calls C, and both B and C get inlined into A. B allocates
5580 temporary stack space before calling C. If we don't update A's watermark,
5581 we may use an outdated baseline for the post-C strub_leave, erasing B's
5582 temporary stack allocation. We only need this if we're fully expanding
5583 strub_leave inline. */
5584 tree xwmptr = (optimize > 2
5585 ? strub_watermark_parm (current_function_decl)
5586 : wmptr);
5587 if (wmptr != xwmptr)
5589 wmptr = xwmptr;
5590 wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5591 wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5592 build_int_cst (TREE_TYPE (wmptr), 0));
5593 wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5594 wmarkr = force_reg (ptr_mode, wmark);
5596 do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5597 ptr_mode, NULL_RTX, lab, NULL,
5598 profile_probability::very_likely ());
5599 emit_move_insn (wmark, stktop);
5602 emit_label (lab);
5604 return const0_rtx;
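/* Roughly, for a downward-growing stack, each compare-and-move sequence
   above amounts to

     if (sp_adjusted_for_red_zone < *wmptr)
       *wmptr = sp_adjusted_for_red_zone;

   (sp_adjusted_for_red_zone is a name used only in this sketch): the
   watermark only ever moves towards the in-use end of the stack, and a
   later __builtin_strub_leave uses it as the limit of the area to scrub.  */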
5608 /* Expand a call to builtin function __builtin_strub_leave. */
5610 static rtx
5611 expand_builtin_strub_leave (tree exp)
5613 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5614 return NULL_RTX;
5616 if (optimize < 2 || optimize_size || flag_no_inline)
5617 return NULL_RTX;
5619 rtx stktop = NULL_RTX;
5621 if (tree wmptr = (optimize
5622 ? strub_watermark_parm (current_function_decl)
5623 : NULL_TREE))
5625 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5626 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5627 build_int_cst (TREE_TYPE (wmptr), 0));
5628 rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5629 stktop = force_reg (ptr_mode, wmark);
5632 if (!stktop)
5633 stktop = expand_builtin_stack_address ();
5635 tree wmptr = CALL_EXPR_ARG (exp, 0);
5636 tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5637 tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5638 build_int_cst (TREE_TYPE (wmptr), 0));
5639 rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5641 rtx wmarkr = force_reg (ptr_mode, wmark);
5643 #if ! STACK_GROWS_DOWNWARD
5644 rtx base = stktop;
5645 rtx end = wmarkr;
5646 #else
5647 rtx base = wmarkr;
5648 rtx end = stktop;
5649 #endif
5651 /* We're going to modify it, so make sure it's not e.g. the stack pointer. */
5652 base = copy_to_reg (base);
5654 rtx_code_label *done = gen_label_rtx ();
5655 do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5656 ptr_mode, NULL_RTX, done, NULL,
5657 profile_probability::very_likely ());
5659 if (optimize < 3)
5660 expand_call (exp, NULL_RTX, true);
5661 else
5663 /* Ok, now we've determined we want to copy the block, so convert the
5664 addresses to Pmode, as needed to dereference them to access ptr_mode
5665 memory locations, so that we don't have to convert anything within the
5666 loop. */
5667 base = memory_address (ptr_mode, base);
5668 end = memory_address (ptr_mode, end);
5670 rtx zero = force_operand (const0_rtx, NULL_RTX);
5671 int ulen = GET_MODE_SIZE (ptr_mode);
5673 /* ??? It would be nice to use setmem or similar patterns here,
5674 but they do not necessarily obey the stack growth direction,
5675 which has security implications. We also have to avoid calls
5676 (memset, bzero or any machine-specific ones), which are
5677 likely unsafe here (see TARGET_STRUB_MAY_USE_MEMSET). */
5678 #if ! STACK_GROWS_DOWNWARD
5679 rtx incr = plus_constant (Pmode, base, ulen);
5680 rtx dstm = gen_rtx_MEM (ptr_mode, base);
5682 rtx_code_label *loop = gen_label_rtx ();
5683 emit_label (loop);
5684 emit_move_insn (dstm, zero);
5685 emit_move_insn (base, force_operand (incr, NULL_RTX));
5686 #else
5687 rtx decr = plus_constant (Pmode, end, -ulen);
5688 rtx dstm = gen_rtx_MEM (ptr_mode, end);
5690 rtx_code_label *loop = gen_label_rtx ();
5691 emit_label (loop);
5692 emit_move_insn (end, force_operand (decr, NULL_RTX));
5693 emit_move_insn (dstm, zero);
5694 #endif
5695 do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5696 Pmode, NULL_RTX, NULL, loop,
5697 profile_probability::very_likely ());
5700 emit_label (done);
5702 return const0_rtx;
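/* The inline clearing loop emitted above for optimize >= 3 is roughly, for
   a downward-growing stack where BASE is the watermark (lowest address)
   and END the enclosing context's stack top:

     if (base < end)
       do
         {
           end -= sizeof (void *);
           *(void **) end = 0;
         }
       while (base < end);

   walking in the direction of stack growth rather than using setmem-like
   patterns, for the reasons given in the comment above.  */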
5705 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5706 failed and the caller should emit a normal call. */
5708 static rtx
5709 expand_builtin_alloca (tree exp)
5711 rtx op0;
5712 rtx result;
5713 unsigned int align;
5714 tree fndecl = get_callee_fndecl (exp);
5715 HOST_WIDE_INT max_size;
5716 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5717 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5718 bool valid_arglist
5719 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5720 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5721 VOID_TYPE)
5722 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5723 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5724 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5726 if (!valid_arglist)
5727 return NULL_RTX;
5729 /* Compute the argument. */
5730 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5732 /* Compute the alignment. */
5733 align = (fcode == BUILT_IN_ALLOCA
5734 ? BIGGEST_ALIGNMENT
5735 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5737 /* Compute the maximum size. */
5738 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5739 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5740 : -1);
5742 /* Allocate the desired space. If the allocation stems from the declaration
5743 of a variable-sized object, it cannot accumulate. */
5744 result
5745 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5746 result = convert_memory_address (ptr_mode, result);
5748 /* Dynamic allocations for variables are recorded during gimplification. */
5749 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5750 record_dynamic_alloc (exp);
5752 return result;
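/* For illustration, the alignment argument of __builtin_alloca_with_align
   is expressed in bits, which is why ALIGN above is read directly from the
   second call argument:

     void *p = __builtin_alloca_with_align (n, 128);   // 16-byte aligned

   whereas plain __builtin_alloca falls back to BIGGEST_ALIGNMENT.  */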
5755 /* Emit the __asan_allocas_unpoison call in EXP. Add to the second argument
5756 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5757 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment to
5758 handle_builtin_stack_restore function. */
5760 static rtx
5761 expand_asan_emit_allocas_unpoison (tree exp)
5763 tree arg0 = CALL_EXPR_ARG (exp, 0);
5764 tree arg1 = CALL_EXPR_ARG (exp, 1);
5765 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5766 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5767 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5768 stack_pointer_rtx, NULL_RTX, 0,
5769 OPTAB_LIB_WIDEN);
5770 off = convert_modes (ptr_mode, Pmode, off, 0);
5771 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5772 OPTAB_LIB_WIDEN);
5773 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5774 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5775 top, ptr_mode, bot, ptr_mode);
5776 return ret;
5779 /* Expand a call to bswap builtin in EXP.
5780 Return NULL_RTX if a normal call should be emitted rather than expanding the
5781 function in-line. If convenient, the result should be placed in TARGET.
5782 SUBTARGET may be used as the target for computing one of EXP's operands. */
5784 static rtx
5785 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5786 rtx subtarget)
5788 tree arg;
5789 rtx op0;
5791 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5792 return NULL_RTX;
5794 arg = CALL_EXPR_ARG (exp, 0);
5795 op0 = expand_expr (arg,
5796 subtarget && GET_MODE (subtarget) == target_mode
5797 ? subtarget : NULL_RTX,
5798 target_mode, EXPAND_NORMAL);
5799 if (GET_MODE (op0) != target_mode)
5800 op0 = convert_to_mode (target_mode, op0, 1);
5802 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5804 gcc_assert (target);
5806 return convert_to_mode (target_mode, target, 1);
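/* For illustration, the expansion above computes a plain byte reversal:

     __builtin_bswap32 (0x11223344) == 0x44332211
     __builtin_bswap16 (0xabcd)     == 0xcdab

   i.e. the optab reverses the byte order of the operand in TARGET_MODE.  */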
5809 /* Expand a call to a unary builtin in EXP.
5810 Return NULL_RTX if a normal call should be emitted rather than expanding the
5811 function in-line. If convenient, the result should be placed in TARGET.
5812 SUBTARGET may be used as the target for computing one of EXP's operands. */
5814 static rtx
5815 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5816 rtx subtarget, optab op_optab)
5818 rtx op0;
5820 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5821 return NULL_RTX;
5823 /* Compute the argument. */
5824 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5825 (subtarget
5826 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5827 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5828 VOIDmode, EXPAND_NORMAL);
5829 /* Compute op, into TARGET if possible.
5830 Set TARGET to wherever the result comes back. */
5831 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5832 op_optab, op0, target, op_optab != clrsb_optab);
5833 gcc_assert (target);
5835 return convert_to_mode (target_mode, target, 0);
5838 /* Expand a call to __builtin_expect. We just return our argument
5839 as the builtin_expect semantics should already have been applied by
5840 the tree branch prediction pass. */
5842 static rtx
5843 expand_builtin_expect (tree exp, rtx target)
5845 tree arg;
5847 if (call_expr_nargs (exp) < 2)
5848 return const0_rtx;
5849 arg = CALL_EXPR_ARG (exp, 0);
5851 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5852 /* When guessing was done, the hints should be already stripped away. */
5853 gcc_assert (!flag_guess_branch_prob
5854 || optimize == 0 || seen_error ());
5855 return target;
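/* For illustration, by the time a call such as

     if (__builtin_expect (p == NULL, 0))
       slow_path ();

   reaches expansion, the probability hint has already been consumed by the
   tree branch prediction pass, so only the first argument is evaluated
   here.  */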
5858 /* Expand a call to __builtin_expect_with_probability. We just return our
5859 argument as the builtin_expect semantics should already have been applied
5860 by the tree branch prediction pass. */
5862 static rtx
5863 expand_builtin_expect_with_probability (tree exp, rtx target)
5865 tree arg;
5867 if (call_expr_nargs (exp) < 3)
5868 return const0_rtx;
5869 arg = CALL_EXPR_ARG (exp, 0);
5871 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5872 /* When guessing was done, the hints should be already stripped away. */
5873 gcc_assert (!flag_guess_branch_prob
5874 || optimize == 0 || seen_error ());
5875 return target;
5879 /* Expand a call to __builtin_assume_aligned. We just return our first
5880 argument as the builtin_assume_aligned semantics should already have been
5881 applied by CCP. */
5883 static rtx
5884 expand_builtin_assume_aligned (tree exp, rtx target)
5886 if (call_expr_nargs (exp) < 2)
5887 return const0_rtx;
5888 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5889 EXPAND_NORMAL);
5890 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5891 && (call_expr_nargs (exp) < 3
5892 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5893 return target;
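/* For illustration:

     double *q = __builtin_assume_aligned (p, 64);      // 64-byte aligned
     double *r = __builtin_assume_aligned (p, 64, 8);   // 64 * n + 8 bytes

   The alignment facts were already consumed by CCP, so expansion merely
   returns the first argument; the assertion above only checks that the
   discarded arguments cannot have side effects.  */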
5896 void
5897 expand_builtin_trap (void)
5899 if (targetm.have_trap ())
5901 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5902 /* For trap insns when not accumulating outgoing args force
5903 REG_ARGS_SIZE note to prevent crossjumping of calls with
5904 different args sizes. */
5905 if (!ACCUMULATE_OUTGOING_ARGS)
5906 add_args_size_note (insn, stack_pointer_delta);
5908 else
5910 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5911 tree call_expr = build_call_expr (fn, 0);
5912 expand_call (call_expr, NULL_RTX, false);
5915 emit_barrier ();
5918 /* Expand a call to __builtin_unreachable. We do nothing except emit
5919 a barrier saying that control flow will not pass here.
5921 It is the responsibility of the program being compiled to ensure
5922 that control flow never reaches __builtin_unreachable. */
5923 static void
5924 expand_builtin_unreachable (void)
5926 /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
5927 to avoid this. */
5928 gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
5929 emit_barrier ();
5932 /* Expand EXP, a call to fabs, fabsf or fabsl.
5933 Return NULL_RTX if a normal call should be emitted rather than expanding
5934 the function inline. If convenient, the result should be placed
5935 in TARGET. SUBTARGET may be used as the target for computing
5936 the operand. */
5938 static rtx
5939 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5941 machine_mode mode;
5942 tree arg;
5943 rtx op0;
5945 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5946 return NULL_RTX;
5948 arg = CALL_EXPR_ARG (exp, 0);
5949 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5950 mode = TYPE_MODE (TREE_TYPE (arg));
5951 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5952 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5955 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5956 Return NULL if a normal call should be emitted rather than expanding the
5957 function inline. If convenient, the result should be placed in TARGET.
5958 SUBTARGET may be used as the target for computing the operand. */
5960 static rtx
5961 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5963 rtx op0, op1;
5964 tree arg;
5966 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5967 return NULL_RTX;
5969 arg = CALL_EXPR_ARG (exp, 0);
5970 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5972 arg = CALL_EXPR_ARG (exp, 1);
5973 op1 = expand_normal (arg);
5975 return expand_copysign (op0, op1, target);
5978 /* Emit a call to __builtin___clear_cache. */
5980 void
5981 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5983 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5984 BUILTIN_ASM_NAME_PTR
5985 (BUILT_IN_CLEAR_CACHE));
5987 emit_library_call (callee,
5988 LCT_NORMAL, VOIDmode,
5989 convert_memory_address (ptr_mode, begin), ptr_mode,
5990 convert_memory_address (ptr_mode, end), ptr_mode);
5993 /* Emit a call to __builtin___clear_cache, unless the target specifies
5994 it as do-nothing. This function can be used by trampoline
5995 finalizers to duplicate the effects of expanding a call to the
5996 clear_cache builtin. */
5998 void
5999 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
6001 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
6002 || CONST_INT_P (begin))
6003 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
6004 || CONST_INT_P (end)));
6006 if (targetm.have_clear_cache ())
6008 /* We have a "clear_cache" insn, and it will handle everything. */
6009 class expand_operand ops[2];
6011 create_address_operand (&ops[0], begin);
6012 create_address_operand (&ops[1], end);
6014 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6015 return;
6017 else
6019 #ifndef CLEAR_INSN_CACHE
6020 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6021 does nothing. There is no need to call it. Do nothing. */
6022 return;
6023 #endif /* CLEAR_INSN_CACHE */
6026 targetm.calls.emit_call_builtin___clear_cache (begin, end);
6029 /* Expand a call to __builtin___clear_cache. */
6031 static void
6032 expand_builtin___clear_cache (tree exp)
6034 tree begin, end;
6035 rtx begin_rtx, end_rtx;
6037 /* We must not expand to a library call. If we did, any
6038 fallback library function in libgcc that might contain a call to
6039 __builtin___clear_cache() would recurse infinitely. */
6040 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6042 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6043 return;
6046 begin = CALL_EXPR_ARG (exp, 0);
6047 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6049 end = CALL_EXPR_ARG (exp, 1);
6050 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6052 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
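/* For illustration, a JIT that writes instructions into a buffer must
   flush the instruction cache before jumping to the code:

     memcpy (buf, code, len);
     __builtin___clear_cache (buf, (char *) buf + len);
     ((void (*) (void)) buf) ();

   On targets with coherent instruction caches this may expand to nothing
   at all, as described above.  */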
6055 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6057 static rtx
6058 round_trampoline_addr (rtx tramp)
6060 rtx temp, addend, mask;
6062 /* If we don't need too much alignment, we'll have been guaranteed
6063 proper alignment by get_trampoline_type. */
6064 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6065 return tramp;
6067 /* Round address up to desired boundary. */
6068 temp = gen_reg_rtx (Pmode);
6069 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6070 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6072 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6073 temp, 0, OPTAB_LIB_WIDEN);
6074 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6075 temp, 0, OPTAB_LIB_WIDEN);
6077 return tramp;
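/* Worked example: with a TRAMPOLINE_ALIGNMENT of 64 bits the code above
   computes

     addend = 64 / 8 - 1 = 7,   mask = -(64 / 8) = -8
     tramp  = (tramp + 7) & -8

   so an address such as 0x1005 is rounded up to 0x1008, the next 8-byte
   boundary.  */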
6080 static rtx
6081 expand_builtin_init_trampoline (tree exp, bool onstack)
6083 tree t_tramp, t_func, t_chain;
6084 rtx m_tramp, r_tramp, r_chain, tmp;
6086 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6087 POINTER_TYPE, VOID_TYPE))
6088 return NULL_RTX;
6090 t_tramp = CALL_EXPR_ARG (exp, 0);
6091 t_func = CALL_EXPR_ARG (exp, 1);
6092 t_chain = CALL_EXPR_ARG (exp, 2);
6094 r_tramp = expand_normal (t_tramp);
6095 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6096 MEM_NOTRAP_P (m_tramp) = 1;
6098 /* If ONSTACK, the TRAMP argument should be the address of a field
6099 within the local function's FRAME decl. Either way, let's see if
6100 we can fill in the MEM_ATTRs for this memory. */
6101 if (TREE_CODE (t_tramp) == ADDR_EXPR)
6102 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6104 /* Creator of a heap trampoline is responsible for making sure the
6105 address is aligned to at least STACK_BOUNDARY. Normally malloc
6106 will ensure this anyhow. */
6107 tmp = round_trampoline_addr (r_tramp);
6108 if (tmp != r_tramp)
6110 m_tramp = change_address (m_tramp, BLKmode, tmp);
6111 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6112 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6115 /* The FUNC argument should be the address of the nested function.
6116 Extract the actual function decl to pass to the hook. */
6117 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6118 t_func = TREE_OPERAND (t_func, 0);
6119 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6121 r_chain = expand_normal (t_chain);
6123 /* Generate insns to initialize the trampoline. */
6124 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6126 if (onstack)
6128 trampolines_created = 1;
6130 if (targetm.calls.custom_function_descriptors != 0)
6131 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6132 "trampoline generated for nested function %qD", t_func);
6135 return const0_rtx;
6138 static rtx
6139 expand_builtin_adjust_trampoline (tree exp)
6141 rtx tramp;
6143 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6144 return NULL_RTX;
6146 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6147 tramp = round_trampoline_addr (tramp);
6148 if (targetm.calls.trampoline_adjust_address)
6149 tramp = targetm.calls.trampoline_adjust_address (tramp);
6151 return tramp;
6154 /* Expand a call to the builtin descriptor initialization routine.
6155 A descriptor is made up of a pair of pointers: the static
6156 chain and the code entry, in that order. */
6158 static rtx
6159 expand_builtin_init_descriptor (tree exp)
6161 tree t_descr, t_func, t_chain;
6162 rtx m_descr, r_descr, r_func, r_chain;
6164 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6165 VOID_TYPE))
6166 return NULL_RTX;
6168 t_descr = CALL_EXPR_ARG (exp, 0);
6169 t_func = CALL_EXPR_ARG (exp, 1);
6170 t_chain = CALL_EXPR_ARG (exp, 2);
6172 r_descr = expand_normal (t_descr);
6173 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6174 MEM_NOTRAP_P (m_descr) = 1;
6175 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6177 r_func = expand_normal (t_func);
6178 r_chain = expand_normal (t_chain);
6180 /* Generate insns to initialize the descriptor. */
6181 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6182 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6183 POINTER_SIZE / BITS_PER_UNIT), r_func);
6185 return const0_rtx;
6188 /* Expand a call to the builtin descriptor adjustment routine. */
6190 static rtx
6191 expand_builtin_adjust_descriptor (tree exp)
6193 rtx tramp;
6195 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6196 return NULL_RTX;
6198 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6200 /* Unalign the descriptor to allow runtime identification. */
6201 tramp = plus_constant (ptr_mode, tramp,
6202 targetm.calls.custom_function_descriptors);
6204 return force_operand (tramp, NULL_RTX);
6207 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6208 function. The function first checks whether the back end provides
6209 an insn to implement signbit for the respective mode. If not, it
6210 checks whether the floating point format of the value is such that
6211 the sign bit can be extracted. If that is not the case, error out.
6212 EXP is the expression that is a call to the builtin function; if
6213 convenient, the result should be placed in TARGET. */
6214 static rtx
6215 expand_builtin_signbit (tree exp, rtx target)
6217 const struct real_format *fmt;
6218 scalar_float_mode fmode;
6219 scalar_int_mode rmode, imode;
6220 tree arg;
6221 int word, bitpos;
6222 enum insn_code icode;
6223 rtx temp;
6224 location_t loc = EXPR_LOCATION (exp);
6226 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6227 return NULL_RTX;
6229 arg = CALL_EXPR_ARG (exp, 0);
6230 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6231 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6232 fmt = REAL_MODE_FORMAT (fmode);
6234 arg = builtin_save_expr (arg);
6236 /* Expand the argument yielding a RTX expression. */
6237 temp = expand_normal (arg);
6239 /* Check if the back end provides an insn that handles signbit for the
6240 argument's mode. */
6241 icode = optab_handler (signbit_optab, fmode);
6242 if (icode != CODE_FOR_nothing)
6244 rtx_insn *last = get_last_insn ();
6245 rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6246 if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
6247 return this_target;
6248 delete_insns_since (last);
6251 /* For floating point formats without a sign bit, implement signbit
6252 as "ARG < 0.0". */
6253 bitpos = fmt->signbit_ro;
6254 if (bitpos < 0)
6256 /* But we can't do this if the format supports signed zero. */
6257 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6259 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6260 build_real (TREE_TYPE (arg), dconst0));
6261 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6264 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6266 imode = int_mode_for_mode (fmode).require ();
6267 temp = gen_lowpart (imode, temp);
6269 else
6271 imode = word_mode;
6272 /* Handle targets with different FP word orders. */
6273 if (FLOAT_WORDS_BIG_ENDIAN)
6274 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6275 else
6276 word = bitpos / BITS_PER_WORD;
6277 temp = operand_subword_force (temp, word, fmode);
6278 bitpos = bitpos % BITS_PER_WORD;
6281 /* Force the intermediate word_mode (or narrower) result into a
6282 register. This avoids attempting to create paradoxical SUBREGs
6283 of floating point modes below. */
6284 temp = force_reg (imode, temp);
6286 /* If the bitpos is within the "result mode" lowpart, the operation
6287 can be implemented with a single bitwise AND. Otherwise, we need
6288 a right shift and an AND. */
6290 if (bitpos < GET_MODE_BITSIZE (rmode))
6292 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6294 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6295 temp = gen_lowpart (rmode, temp);
6296 temp = expand_binop (rmode, and_optab, temp,
6297 immed_wide_int_const (mask, rmode),
6298 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6300 else
6302 /* Perform a logical right shift to place the signbit in the least
6303 significant bit, then truncate the result to the desired mode
6304 and mask just this bit. */
6305 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6306 temp = gen_lowpart (rmode, temp);
6307 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6308 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6311 return temp;
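/* Worked examples (bits () below stands for reinterpreting the value's
   representation as an unsigned integer; it is not a real function):

   For IEEE single precision with a 32-bit result mode, signbit_ro is 31,
   which fits in the result mode, so the first branch applies:

     bits (x) & 0x80000000       // nonzero iff the sign bit is set

   For IEEE double on a 64-bit target the sign bit is bit 63, which does
   not fit in a 32-bit result mode, so the second branch shifts first:

     (int) (bits (x) >> 63) & 1.  */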
6314 /* Expand fork or exec calls. TARGET is the desired target of the
6315 call. EXP is the call. FN is the
6316 identifier of the actual function. IGNORE is nonzero if the
6317 value is to be ignored. */
6319 static rtx
6320 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6322 tree id, decl;
6323 tree call;
6325 /* If we are not profiling, just call the function. */
6326 if (!profile_arc_flag && !condition_coverage_flag)
6327 return NULL_RTX;
6329 /* Otherwise call the wrapper. This should be equivalent for the rest of
6330 compiler, so the code does not diverge, and the wrapper may run the
6331 code necessary for keeping the profiling sane. */
6333 switch (DECL_FUNCTION_CODE (fn))
6335 case BUILT_IN_FORK:
6336 id = get_identifier ("__gcov_fork");
6337 break;
6339 case BUILT_IN_EXECL:
6340 id = get_identifier ("__gcov_execl");
6341 break;
6343 case BUILT_IN_EXECV:
6344 id = get_identifier ("__gcov_execv");
6345 break;
6347 case BUILT_IN_EXECLP:
6348 id = get_identifier ("__gcov_execlp");
6349 break;
6351 case BUILT_IN_EXECLE:
6352 id = get_identifier ("__gcov_execle");
6353 break;
6355 case BUILT_IN_EXECVP:
6356 id = get_identifier ("__gcov_execvp");
6357 break;
6359 case BUILT_IN_EXECVE:
6360 id = get_identifier ("__gcov_execve");
6361 break;
6363 default:
6364 gcc_unreachable ();
6367 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6368 FUNCTION_DECL, id, TREE_TYPE (fn));
6369 DECL_EXTERNAL (decl) = 1;
6370 TREE_PUBLIC (decl) = 1;
6371 DECL_ARTIFICIAL (decl) = 1;
6372 TREE_NOTHROW (decl) = 1;
6373 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6374 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6375 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6376 return expand_call (call, target, ignore);
6381 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6382 the pointer in these functions is void*, the tree optimizers may remove
6383 casts. The mode computed in expand_builtin isn't reliable either, due
6384 to __sync_bool_compare_and_swap.
6386 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6387 group of builtins. This gives us log2 of the mode size. */
6389 static inline machine_mode
6390 get_builtin_sync_mode (int fcode_diff)
6392 /* The size is not negotiable, so ask not to get BLKmode in return
6393 if the target indicates that a smaller size would be better. */
6394 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
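/* For illustration, FCODE_DIFF comes from the _N suffix of the builtin, so

     __sync_..._1  -> 8-bit integer mode (QImode)
     __sync_..._2  -> 16-bit integer mode (HImode)
     __sync_..._4  -> 32-bit integer mode (SImode)
     __sync_..._8  -> 64-bit integer mode (DImode)
     __sync_..._16 -> 128-bit integer mode (TImode)

   since BITS_PER_UNIT << fcode_diff yields 8 << log2 (N).  */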
6397 /* Expand the memory expression LOC and return the appropriate memory operand
6398 for the builtin_sync operations. */
6400 static rtx
6401 get_builtin_sync_mem (tree loc, machine_mode mode)
6403 rtx addr, mem;
6404 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6405 ? TREE_TYPE (TREE_TYPE (loc))
6406 : TREE_TYPE (loc));
6407 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6409 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6410 addr = convert_memory_address (addr_mode, addr);
6412 /* Note that we explicitly do not want any alias information for this
6413 memory, so that we kill all other live memories. Otherwise we don't
6414 satisfy the full barrier semantics of the intrinsic. */
6415 mem = gen_rtx_MEM (mode, addr);
6417 set_mem_addr_space (mem, addr_space);
6419 mem = validize_mem (mem);
6421 /* The alignment needs to be at least that of the mode. */
6422 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6423 get_pointer_alignment (loc)));
6424 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6425 MEM_VOLATILE_P (mem) = 1;
6427 return mem;
6430 /* Make sure an argument is in the right mode.
6431 EXP is the tree argument.
6432 MODE is the mode it should be in. */
6434 static rtx
6435 expand_expr_force_mode (tree exp, machine_mode mode)
6437 rtx val;
6438 machine_mode old_mode;
6440 if (TREE_CODE (exp) == SSA_NAME
6441 && TYPE_MODE (TREE_TYPE (exp)) != mode)
6443 /* Undo argument promotion if possible, as combine might not
6444 be able to do it later due to MEM_VOLATILE_P uses in the
6445 patterns. */
6446 gimple *g = get_gimple_for_ssa_name (exp);
6447 if (g && gimple_assign_cast_p (g))
6449 tree rhs = gimple_assign_rhs1 (g);
6450 tree_code code = gimple_assign_rhs_code (g);
6451 if (CONVERT_EXPR_CODE_P (code)
6452 && TYPE_MODE (TREE_TYPE (rhs)) == mode
6453 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6454 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6455 && (TYPE_PRECISION (TREE_TYPE (exp))
6456 > TYPE_PRECISION (TREE_TYPE (rhs))))
6457 exp = rhs;
6461 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6462 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6463 of CONST_INTs, where we know the old_mode only from the call argument. */
6465 old_mode = GET_MODE (val);
6466 if (old_mode == VOIDmode)
6467 old_mode = TYPE_MODE (TREE_TYPE (exp));
6468 val = convert_modes (mode, old_mode, val, 1);
6469 return val;
6473 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6474 EXP is the CALL_EXPR. CODE is the rtx code
6475 that corresponds to the arithmetic or logical operation from the name;
6476 an exception here is that NOT actually means NAND. TARGET is an optional
6477 place for us to store the results; AFTER is true if this is the
6478 fetch_and_xxx form. */
6480 static rtx
6481 expand_builtin_sync_operation (machine_mode mode, tree exp,
6482 enum rtx_code code, bool after,
6483 rtx target)
6485 rtx val, mem;
6486 location_t loc = EXPR_LOCATION (exp);
6488 if (code == NOT && warn_sync_nand)
6490 tree fndecl = get_callee_fndecl (exp);
6491 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6493 static bool warned_f_a_n, warned_n_a_f;
6495 switch (fcode)
6497 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6498 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6499 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6500 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6501 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6502 if (warned_f_a_n)
6503 break;
6505 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6506 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6507 warned_f_a_n = true;
6508 break;
6510 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6511 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6512 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6513 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6514 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6515 if (warned_n_a_f)
6516 break;
6518 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6519 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6520 warned_n_a_f = true;
6521 break;
6523 default:
6524 gcc_unreachable ();
6528 /* Expand the operands. */
6529 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6530 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6532 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6533 after);
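/* For reference, the semantic change that the one-time inform () calls
   above point out:

     // before GCC 4.4:             // GCC 4.4 and later:
     tmp = *ptr;                    tmp = *ptr;
     *ptr = ~tmp & val;             *ptr = ~(tmp & val);

   with __sync_fetch_and_nand returning TMP and __sync_nand_and_fetch
   returning the new value of *PTR.  */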
6536 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6537 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6538 true if this is the boolean form. TARGET is a place for us to store the
6539 results; this is NOT optional if IS_BOOL is true. */
6541 static rtx
6542 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6543 bool is_bool, rtx target)
6545 rtx old_val, new_val, mem;
6546 rtx *pbool, *poval;
6548 /* Expand the operands. */
6549 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6550 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6551 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6553 pbool = poval = NULL;
6554 if (target != const0_rtx)
6556 if (is_bool)
6557 pbool = &target;
6558 else
6559 poval = &target;
6561 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6562 false, MEMMODEL_SYNC_SEQ_CST,
6563 MEMMODEL_SYNC_SEQ_CST))
6564 return NULL_RTX;
6566 return target;
6569 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6570 general form is actually an atomic exchange, and some targets only
6571 support a reduced form with the second argument being a constant 1.
6572 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6573 the results. */
6575 static rtx
6576 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6577 rtx target)
6579 rtx val, mem;
6581 /* Expand the operands. */
6582 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6583 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6585 return expand_sync_lock_test_and_set (target, mem, val);
6588 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6590 static void
6591 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6593 rtx mem;
6595 /* Expand the operands. */
6596 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6598 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6601 /* Given an integer representing an ``enum memmodel'', verify its
6602 correctness and return the memory model enum. */
6604 static enum memmodel
6605 get_memmodel (tree exp)
6607 /* If the parameter is not a constant, it's a run time value so we'll just
6608 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6609 if (TREE_CODE (exp) != INTEGER_CST)
6610 return MEMMODEL_SEQ_CST;
6612 rtx op = expand_normal (exp);
6614 unsigned HOST_WIDE_INT val = INTVAL (op);
6615 if (targetm.memmodel_check)
6616 val = targetm.memmodel_check (val);
6617 else if (val & ~MEMMODEL_MASK)
6618 return MEMMODEL_SEQ_CST;
6620 /* We should never see an explicit user SYNC memmodel, so >= LAST works. */
6621 if (memmodel_base (val) >= MEMMODEL_LAST)
6622 return MEMMODEL_SEQ_CST;
6624 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6625 be conservative and promote consume to acquire. */
6626 if (val == MEMMODEL_CONSUME)
6627 val = MEMMODEL_ACQUIRE;
6629 return (enum memmodel) val;
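/* For reference, the language-level constants map onto the memmodel enum
   as

     __ATOMIC_RELAXED 0, __ATOMIC_CONSUME 1, __ATOMIC_ACQUIRE 2,
     __ATOMIC_RELEASE 3, __ATOMIC_ACQ_REL 4, __ATOMIC_SEQ_CST 5

   Non-constant or out-of-range arguments are silently treated as
   MEMMODEL_SEQ_CST above, and consume is promoted to acquire as a
   workaround for Bugzilla 59448.  */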
6632 /* Expand the __atomic_exchange intrinsic:
6633 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6634 EXP is the CALL_EXPR.
6635 TARGET is an optional place for us to store the results. */
6637 static rtx
6638 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6640 rtx val, mem;
6641 enum memmodel model;
6643 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6645 if (!flag_inline_atomics)
6646 return NULL_RTX;
6648 /* Expand the operands. */
6649 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6650 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6652 return expand_atomic_exchange (target, mem, val, model);
6655 /* Expand the __atomic_compare_exchange intrinsic:
6656 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6657 TYPE desired, BOOL weak,
6658 enum memmodel success,
6659 enum memmodel failure)
6660 EXP is the CALL_EXPR.
6661 TARGET is an optional place for us to store the results. */
6663 static rtx
6664 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6665 rtx target)
6667 rtx expect, desired, mem, oldval;
6668 rtx_code_label *label;
6669 tree weak;
6670 bool is_weak;
6672 memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6673 memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6675 if (failure > success)
6676 success = MEMMODEL_SEQ_CST;
6678 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6680 failure = MEMMODEL_SEQ_CST;
6681 success = MEMMODEL_SEQ_CST;
6685 if (!flag_inline_atomics)
6686 return NULL_RTX;
6688 /* Expand the operands. */
6689 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6691 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6692 expect = convert_memory_address (Pmode, expect);
6693 expect = gen_rtx_MEM (mode, expect);
6694 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6696 weak = CALL_EXPR_ARG (exp, 3);
6697 is_weak = false;
6698 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6699 is_weak = true;
6701 if (target == const0_rtx)
6702 target = NULL;
6704 /* Lest the rtl backend create a race condition with an improper store
6705 to memory, always create a new pseudo for OLDVAL. */
6706 oldval = NULL;
6708 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6709 is_weak, success, failure))
6710 return NULL_RTX;
6712 /* Conditionally store back to EXPECT, lest we create a race condition
6713 with an improper store to memory. */
6714 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6715 the normal case where EXPECT is totally private, i.e. a register. At
6716 which point the store can be unconditional. */
6717 label = gen_label_rtx ();
6718 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6719 GET_MODE (target), 1, label);
6720 emit_move_insn (expect, oldval);
6721 emit_label (label);
6723 return target;
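/* Roughly, the sequence emitted above behaves like

     bool ok = CAS (mem, *expect, desired);
     if (!ok)
       *expect = oldval;   // publish the value observed on failure
     return ok;

   making the store back to EXPECT conditional so that a successful
   compare-and-swap never races with other accesses to EXPECT.  */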
6726 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6727 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6728 call. The weak parameter must be dropped to match the expected parameter
6729 list and the expected argument changed from value to pointer to memory
6730 slot. */
6732 static void
6733 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6735 unsigned int z;
6736 vec<tree, va_gc> *vec;
6738 vec_alloc (vec, 5);
6739 vec->quick_push (gimple_call_arg (call, 0));
6740 tree expected = gimple_call_arg (call, 1);
6741 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6742 TREE_TYPE (expected));
6743 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6744 if (expd != x)
6745 emit_move_insn (x, expd);
6746 tree v = make_tree (TREE_TYPE (expected), x);
6747 vec->quick_push (build1 (ADDR_EXPR,
6748 build_pointer_type (TREE_TYPE (expected)), v));
6749 vec->quick_push (gimple_call_arg (call, 2));
6750 /* Skip the boolean weak parameter. */
6751 for (z = 4; z < 6; z++)
6752 vec->quick_push (gimple_call_arg (call, z));
6753 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6754 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6755 gcc_assert (bytes_log2 < 5);
6756 built_in_function fncode
6757 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6758 + bytes_log2);
6759 tree fndecl = builtin_decl_explicit (fncode);
6760 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6761 fndecl);
6762 tree exp = build_call_vec (boolean_type_node, fn, vec);
6763 tree lhs = gimple_call_lhs (call);
6764 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6765 if (lhs)
6767 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6768 if (GET_MODE (boolret) != mode)
6769 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6770 x = force_reg (mode, x);
6771 write_complex_part (target, boolret, true, true);
6772 write_complex_part (target, x, false, false);
6776 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6778 void
6779 expand_ifn_atomic_compare_exchange (gcall *call)
6781 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6782 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6783 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6785 memmodel success = get_memmodel (gimple_call_arg (call, 4));
6786 memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6788 if (failure > success)
6789 success = MEMMODEL_SEQ_CST;
6791 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6793 failure = MEMMODEL_SEQ_CST;
6794 success = MEMMODEL_SEQ_CST;
6797 if (!flag_inline_atomics)
6799 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6800 return;
6803 /* Expand the operands. */
6804 rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6806 rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6807 rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6809 bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6811 rtx boolret = NULL;
6812 rtx oldval = NULL;
6814 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6815 is_weak, success, failure))
6817 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6818 return;
6821 tree lhs = gimple_call_lhs (call);
6822 if (lhs)
6824 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6825 if (GET_MODE (boolret) != mode)
6826 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6827 write_complex_part (target, boolret, true, true);
6828 write_complex_part (target, oldval, false, false);
6832 /* Expand the __atomic_load intrinsic:
6833 TYPE __atomic_load (TYPE *object, enum memmodel)
6834 EXP is the CALL_EXPR.
6835 TARGET is an optional place for us to store the results. */
6837 static rtx
6838 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6840 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6841 if (is_mm_release (model) || is_mm_acq_rel (model))
6842 model = MEMMODEL_SEQ_CST;
6844 if (!flag_inline_atomics)
6845 return NULL_RTX;
6847 /* Expand the operand. */
6848 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6850 return expand_atomic_load (target, mem, model);
6854 /* Expand the __atomic_store intrinsic:
6855 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6856 EXP is the CALL_EXPR.
6857 TARGET is an optional place for us to store the results. */
6859 static rtx
6860 expand_builtin_atomic_store (machine_mode mode, tree exp)
6862 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6863 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6864 || is_mm_release (model)))
6865 model = MEMMODEL_SEQ_CST;
6867 if (!flag_inline_atomics)
6868 return NULL_RTX;
6870 /* Expand the operands. */
6871 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6872 rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6874 return expand_atomic_store (mem, val, model, false);
6877 /* Expand the __atomic_fetch_XXX intrinsic:
6878 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6879 EXP is the CALL_EXPR.
6880 TARGET is an optional place for us to store the results.
6881 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6882 FETCH_AFTER is true if returning the result of the operation.
6883 FETCH_AFTER is false if returning the value before the operation.
6884 IGNORE is true if the result is not used.
6885 EXT_CALL is the correct builtin for an external call if this cannot be
6886 resolved to an instruction sequence. */
6888 static rtx
6889 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6890 enum rtx_code code, bool fetch_after,
6891 bool ignore, enum built_in_function ext_call)
6893 rtx val, mem, ret;
6894 enum memmodel model;
6895 tree fndecl;
6896 tree addr;
6898 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6900 /* Expand the operands. */
6901 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6902 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6904 /* Only try generating instructions if inlining is turned on. */
6905 if (flag_inline_atomics)
6907 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6908 if (ret)
6909 return ret;
6912 /* Return if a different routine isn't needed for the library call. */
6913 if (ext_call == BUILT_IN_NONE)
6914 return NULL_RTX;
6916 /* Change the call to the specified function. */
6917 fndecl = get_callee_fndecl (exp);
6918 addr = CALL_EXPR_FN (exp);
6919 STRIP_NOPS (addr);
6921 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6922 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6924 /* If we will emit code after the call, the call cannot be a tail call.
6925 If it is emitted as a tail call, a barrier is emitted after it, and
6926 then all trailing code is removed. */
6927 if (!ignore)
6928 CALL_EXPR_TAILCALL (exp) = 0;
6930 /* Expand the call here so we can emit trailing code. */
6931 ret = expand_call (exp, target, ignore);
6933 /* Replace the original function just in case it matters. */
6934 TREE_OPERAND (addr, 0) = fndecl;
6936 /* Then issue the arithmetic correction to return the right result. */
6937 if (!ignore)
6939 if (code == NOT)
6941 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6942 OPTAB_LIB_WIDEN);
6943 ret = expand_simple_unop (mode, NOT, ret, target, true);
6945 else
6946 ret = expand_simple_binop (mode, code, ret, val, target, true,
6947 OPTAB_LIB_WIDEN);
6949 return ret;
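/* Worked example: if __atomic_add_fetch_4 cannot be expanded inline,
   EXT_CALL redirects it to __atomic_fetch_add_4, which returns the value
   before the addition, and the correction above re-applies the operation:

     old = __atomic_fetch_add_4 (ptr, val, model);
     return old + val;      // the add_fetch result

   For NAND the correction is ~(old & val), matching the GCC >= 4.4
   semantics.  */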
6952 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6954 void
6955 expand_ifn_atomic_bit_test_and (gcall *call)
6957 tree ptr = gimple_call_arg (call, 0);
6958 tree bit = gimple_call_arg (call, 1);
6959 tree flag = gimple_call_arg (call, 2);
6960 tree lhs = gimple_call_lhs (call);
6961 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6962 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6963 enum rtx_code code;
6964 optab optab;
6965 class expand_operand ops[5];
6967 gcc_assert (flag_inline_atomics);
6969 if (gimple_call_num_args (call) == 5)
6970 model = get_memmodel (gimple_call_arg (call, 3));
6972 rtx mem = get_builtin_sync_mem (ptr, mode);
6973 rtx val = expand_expr_force_mode (bit, mode);
6975 switch (gimple_call_internal_fn (call))
6977 case IFN_ATOMIC_BIT_TEST_AND_SET:
6978 code = IOR;
6979 optab = atomic_bit_test_and_set_optab;
6980 break;
6981 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6982 code = XOR;
6983 optab = atomic_bit_test_and_complement_optab;
6984 break;
6985 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6986 code = AND;
6987 optab = atomic_bit_test_and_reset_optab;
6988 break;
6989 default:
6990 gcc_unreachable ();
6993 if (lhs == NULL_TREE)
6995 rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
6996 val, NULL_RTX, true, OPTAB_DIRECT);
6997 if (code == AND)
6998 val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
6999 if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
7000 return;
7003 rtx target;
7004 if (lhs)
7005 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7006 else
7007 target = gen_reg_rtx (mode);
7008 enum insn_code icode = direct_optab_handler (optab, mode);
7009 gcc_assert (icode != CODE_FOR_nothing);
7010 create_output_operand (&ops[0], target, mode);
7011 create_fixed_operand (&ops[1], mem);
7012 create_convert_operand_to (&ops[2], val, mode, true);
7013 create_integer_operand (&ops[3], model);
7014 create_integer_operand (&ops[4], integer_onep (flag));
7015 if (maybe_expand_insn (icode, 5, ops))
7016 return;
7018 rtx bitval = val;
7019 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7020 val, NULL_RTX, true, OPTAB_DIRECT);
7021 rtx maskval = val;
7022 if (code == AND)
7023 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7024 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7025 code, model, false);
7026 if (!result)
7028 bool is_atomic = gimple_call_num_args (call) == 5;
7029 tree tcall = gimple_call_arg (call, 3 + is_atomic);
7030 tree fndecl = gimple_call_addr_fndecl (tcall);
7031 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7032 tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
7033 make_tree (type, val),
7034 is_atomic
7035 ? gimple_call_arg (call, 3)
7036 : integer_zero_node);
7037 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7038 mode, !lhs);
7040 if (!lhs)
7041 return;
7042 if (integer_onep (flag))
7044 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7045 NULL_RTX, true, OPTAB_DIRECT);
7046 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7047 true, OPTAB_DIRECT);
7049 else
7050 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7051 OPTAB_DIRECT);
7052 if (result != target)
7053 emit_move_insn (target, result);
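/* Illustrative sketch (editorial addition): IFN_ATOMIC_BIT_TEST_AND_* is
   matched from source patterns like the hypothetical one below.  The
   expander above first tries the direct optab and otherwise falls back to
   the fetch_or/fetch_and plus shift-and-mask sequence it emits.  */
static _Bool
example_bit_test_and_set (unsigned int *word, unsigned int bit)
{
  return (__atomic_fetch_or (word, 1u << bit, __ATOMIC_SEQ_CST) >> bit) & 1;
}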
7056 /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
7058 void
7059 expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
7061 tree cmp = gimple_call_arg (call, 0);
7062 tree ptr = gimple_call_arg (call, 1);
7063 tree arg = gimple_call_arg (call, 2);
7064 tree lhs = gimple_call_lhs (call);
7065 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7066 machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
7067 optab optab;
7068 rtx_code code;
7069 class expand_operand ops[5];
7071 gcc_assert (flag_inline_atomics);
7073 if (gimple_call_num_args (call) == 5)
7074 model = get_memmodel (gimple_call_arg (call, 3));
7076 rtx mem = get_builtin_sync_mem (ptr, mode);
7077 rtx op = expand_expr_force_mode (arg, mode);
7079 switch (gimple_call_internal_fn (call))
7081 case IFN_ATOMIC_ADD_FETCH_CMP_0:
7082 code = PLUS;
7083 optab = atomic_add_fetch_cmp_0_optab;
7084 break;
7085 case IFN_ATOMIC_SUB_FETCH_CMP_0:
7086 code = MINUS;
7087 optab = atomic_sub_fetch_cmp_0_optab;
7088 break;
7089 case IFN_ATOMIC_AND_FETCH_CMP_0:
7090 code = AND;
7091 optab = atomic_and_fetch_cmp_0_optab;
7092 break;
7093 case IFN_ATOMIC_OR_FETCH_CMP_0:
7094 code = IOR;
7095 optab = atomic_or_fetch_cmp_0_optab;
7096 break;
7097 case IFN_ATOMIC_XOR_FETCH_CMP_0:
7098 code = XOR;
7099 optab = atomic_xor_fetch_cmp_0_optab;
7100 break;
7101 default:
7102 gcc_unreachable ();
7105 enum rtx_code comp = UNKNOWN;
7106 switch (tree_to_uhwi (cmp))
7108 case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
7109 case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
7110 case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
7111 case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
7112 case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
7113 case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
7114 default: gcc_unreachable ();
7117 rtx target;
7118 if (lhs == NULL_TREE)
7119 target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
7120 else
7121 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7122 enum insn_code icode = direct_optab_handler (optab, mode);
7123 gcc_assert (icode != CODE_FOR_nothing);
7124 create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
7125 create_fixed_operand (&ops[1], mem);
7126 create_convert_operand_to (&ops[2], op, mode, true);
7127 create_integer_operand (&ops[3], model);
7128 create_integer_operand (&ops[4], comp);
7129 if (maybe_expand_insn (icode, 5, ops))
7130 return;
7132 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
7133 code, model, true);
7134 if (!result)
7136 bool is_atomic = gimple_call_num_args (call) == 5;
7137 tree tcall = gimple_call_arg (call, 3 + is_atomic);
7138 tree fndecl = gimple_call_addr_fndecl (tcall);
7139 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7140 tree exp = build_call_nary (type, tcall,
7141 2 + is_atomic, ptr, arg,
7142 is_atomic
7143 ? gimple_call_arg (call, 3)
7144 : integer_zero_node);
7145 result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7146 mode, !lhs);
7149 if (lhs)
7151 result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
7152 0, 1);
7153 if (result != target)
7154 emit_move_insn (target, result);
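/* Illustrative sketch (editorial addition): IFN_ATOMIC_*_FETCH_CMP_0
   represents op-and-test-against-zero idioms such as the hypothetical one
   below.  The expander above first tries the combined optab and otherwise
   reconstructs the flag with emit_store_flag_force on the result of the
   plain fetch operation.  */
static _Bool
example_sub_fetch_cmp_0 (unsigned int *refcount)
{
  return __atomic_sub_fetch (refcount, 1, __ATOMIC_SEQ_CST) == 0;
}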
7158 /* Expand an atomic clear operation.
7159 void __atomic_clear (BOOL *obj, enum memmodel)
7160 EXP is the call expression. */
7162 static rtx
7163 expand_builtin_atomic_clear (tree exp)
7165 machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7166 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7167 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7169 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7170 model = MEMMODEL_SEQ_CST;
7172 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7173 Failing that, a store is issued by __atomic_store. The only way this can
7174 fail is if the bool type is larger than a word size. Unlikely, but
7175 handle it anyway for completeness. Assume a single threaded model since
7176 there is no atomic support in this case, and no barriers are required. */
7177 rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
7178 if (!ret)
7179 emit_move_insn (mem, const0_rtx);
7180 return const0_rtx;
7183 /* Expand an atomic test_and_set operation.
7184 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
7185 EXP is the call expression. */
7187 static rtx
7188 expand_builtin_atomic_test_and_set (tree exp, rtx target)
7190 rtx mem;
7191 enum memmodel model;
7192 machine_mode mode;
7194 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7195 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7196 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7198 return expand_atomic_test_and_set (target, mem, model);
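/* Illustrative sketch (editorial addition): the two expanders above back a
   minimal spin lock written with the documented builtins.  The names below
   are hypothetical.  */
static void
example_spin_lock (unsigned char *lock)
{
  while (__atomic_test_and_set (lock, __ATOMIC_ACQUIRE))
    ;				/* Spin until the previous value was clear.  */
}
static void
example_spin_unlock (unsigned char *lock)
{
  /* Release is a valid order for a clear; acquire, consume and acq_rel
     would be promoted to seq_cst by expand_builtin_atomic_clear above.  */
  __atomic_clear (lock, __ATOMIC_RELEASE);
}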
7202 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7203 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7205 static tree
7206 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7208 int size;
7209 machine_mode mode;
7210 unsigned int mode_align, type_align;
7212 if (TREE_CODE (arg0) != INTEGER_CST)
7213 return NULL_TREE;
7215 /* We need a corresponding integer mode for the access to be lock-free. */
7216 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7217 if (!int_mode_for_size (size, 0).exists (&mode))
7218 return boolean_false_node;
7220 mode_align = GET_MODE_ALIGNMENT (mode);
7222 if (TREE_CODE (arg1) == INTEGER_CST)
7224 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7226 /* Either this argument is null, or it's a fake pointer encoding
7227 the alignment of the object. */
7228 val = least_bit_hwi (val);
7229 val *= BITS_PER_UNIT;
7231 if (val == 0 || mode_align < val)
7232 type_align = mode_align;
7233 else
7234 type_align = val;
7236 else
7238 tree ttype = TREE_TYPE (arg1);
7240 /* This function is usually invoked and folded immediately by the front
7241 end before anything else has a chance to look at it. The pointer
7242 parameter at this point is usually cast to a void *, so check for that
7243 and look past the cast. */
7244 if (CONVERT_EXPR_P (arg1)
7245 && POINTER_TYPE_P (ttype)
7246 && VOID_TYPE_P (TREE_TYPE (ttype))
7247 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7248 arg1 = TREE_OPERAND (arg1, 0);
7250 ttype = TREE_TYPE (arg1);
7251 gcc_assert (POINTER_TYPE_P (ttype));
7253 /* Get the underlying type of the object. */
7254 ttype = TREE_TYPE (ttype);
7255 type_align = TYPE_ALIGN (ttype);
7258 /* If the object has smaller alignment, the lock free routines cannot
7259 be used. */
7260 if (type_align < mode_align)
7261 return boolean_false_node;
7263 /* Check if a compare_and_swap pattern exists for the mode which represents
7264 the required size. The pattern is not allowed to fail, so the existence
7265 of the pattern indicates support is present. Also require that an
7266 atomic load exists for the required size. */
7267 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7268 return boolean_true_node;
7269 else
7270 return boolean_false_node;
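/* Illustrative sketch (editorial addition): with a constant size and a null
   object pointer, the folder above uses the natural alignment of the
   corresponding integer mode, so a call like the hypothetical one below
   folds to a compile-time constant.  */
static _Bool
example_always_lock_free (void)
{
  return __atomic_always_lock_free (sizeof (long), 0);
}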
7273 /* Return true if the parameters to call EXP represent an object which will
7274 always generate lock free instructions. The first argument represents the
7275 size of the object, and the second parameter is a pointer to the object
7276 itself. If NULL is passed for the object, then the result is based on
7277 typical alignment for an object of the specified size. Otherwise return
7278 false. */
7280 static rtx
7281 expand_builtin_atomic_always_lock_free (tree exp)
7283 tree size;
7284 tree arg0 = CALL_EXPR_ARG (exp, 0);
7285 tree arg1 = CALL_EXPR_ARG (exp, 1);
7287 if (TREE_CODE (arg0) != INTEGER_CST)
7289 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7290 return const0_rtx;
7293 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7294 if (size == boolean_true_node)
7295 return const1_rtx;
7296 return const0_rtx;
7299 /* Return one or zero if it can be determined that object ARG1 of size ARG0
7300 is lock free on this architecture. */
7302 static tree
7303 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7305 if (!flag_inline_atomics)
7306 return NULL_TREE;
7308 /* If it isn't always lock free, don't generate a result. */
7309 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7310 return boolean_true_node;
7312 return NULL_TREE;
7315 /* Return true if the parameters to call EXP represent an object which will
7316 always generate lock free instructions. The first argument represents the
7317 size of the object, and the second parameter is a pointer to the object
7318 itself. If NULL is passed for the object, then the result is based on
7319 typical alignment for an object of the specified size. Otherwise return
7320 NULL. */
7322 static rtx
7323 expand_builtin_atomic_is_lock_free (tree exp)
7325 tree size;
7326 tree arg0 = CALL_EXPR_ARG (exp, 0);
7327 tree arg1 = CALL_EXPR_ARG (exp, 1);
7329 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7331 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7332 return NULL_RTX;
7335 if (!flag_inline_atomics)
7336 return NULL_RTX;
7338 /* If the value is known at compile time, return the RTX for it. */
7339 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7340 if (size == boolean_true_node)
7341 return const1_rtx;
7343 return NULL_RTX;
7346 /* Expand the __atomic_thread_fence intrinsic:
7347 void __atomic_thread_fence (enum memmodel)
7348 EXP is the CALL_EXPR. */
7350 static void
7351 expand_builtin_atomic_thread_fence (tree exp)
7353 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7354 expand_mem_thread_fence (model);
7357 /* Expand the __atomic_signal_fence intrinsic:
7358 void __atomic_signal_fence (enum memmodel)
7359 EXP is the CALL_EXPR. */
7361 static void
7362 expand_builtin_atomic_signal_fence (tree exp)
7364 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7365 expand_mem_signal_fence (model);
7368 /* Expand the __sync_synchronize intrinsic. */
7370 static void
7371 expand_builtin_sync_synchronize (void)
7373 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7376 static rtx
7377 expand_builtin_thread_pointer (tree exp, rtx target)
7379 enum insn_code icode;
7380 if (!validate_arglist (exp, VOID_TYPE))
7381 return const0_rtx;
7382 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7383 if (icode != CODE_FOR_nothing)
7385 class expand_operand op;
7386 /* If the target is not suitable then create a new target. */
7387 if (target == NULL_RTX
7388 || !REG_P (target)
7389 || GET_MODE (target) != Pmode)
7390 target = gen_reg_rtx (Pmode);
7391 create_output_operand (&op, target, Pmode);
7392 expand_insn (icode, 1, &op);
7393 return target;
7395 error ("%<__builtin_thread_pointer%> is not supported on this target");
7396 return const0_rtx;
7399 static void
7400 expand_builtin_set_thread_pointer (tree exp)
7402 enum insn_code icode;
7403 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7404 return;
7405 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7406 if (icode != CODE_FOR_nothing)
7408 class expand_operand op;
7409 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7410 Pmode, EXPAND_NORMAL);
7411 create_input_operand (&op, val, Pmode);
7412 expand_insn (icode, 1, &op);
7413 return;
7415 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
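/* Illustrative sketch (editorial addition): on targets that provide
   get_thread_pointer_optab / set_thread_pointer_optab, calls like the
   hypothetical one below expand to single register accesses; otherwise the
   expanders above emit the "not supported on this target" errors.  */
static void *
example_read_thread_pointer (void)
{
  return __builtin_thread_pointer ();
}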
7419 /* Emit code to restore the current value of stack. */
7421 static void
7422 expand_stack_restore (tree var)
7424 rtx_insn *prev;
7425 rtx sa = expand_normal (var);
7427 sa = convert_memory_address (Pmode, sa);
7429 prev = get_last_insn ();
7430 emit_stack_restore (SAVE_BLOCK, sa);
7432 record_new_stack_level ();
7434 fixup_args_size_notes (prev, get_last_insn (), 0);
7437 /* Emit code to save the current value of stack. */
7439 static rtx
7440 expand_stack_save (void)
7442 rtx ret = NULL_RTX;
7444 emit_stack_save (SAVE_BLOCK, &ret);
7445 return ret;
7448 /* Emit code to get the openacc gang, worker or vector id or size. */
7450 static rtx
7451 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7453 const char *name;
7454 rtx fallback_retval;
7455 rtx_insn *(*gen_fn) (rtx, rtx);
7456 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7458 case BUILT_IN_GOACC_PARLEVEL_ID:
7459 name = "__builtin_goacc_parlevel_id";
7460 fallback_retval = const0_rtx;
7461 gen_fn = targetm.gen_oacc_dim_pos;
7462 break;
7463 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7464 name = "__builtin_goacc_parlevel_size";
7465 fallback_retval = const1_rtx;
7466 gen_fn = targetm.gen_oacc_dim_size;
7467 break;
7468 default:
7469 gcc_unreachable ();
7472 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7474 error ("%qs only supported in OpenACC code", name);
7475 return const0_rtx;
7478 tree arg = CALL_EXPR_ARG (exp, 0);
7479 if (TREE_CODE (arg) != INTEGER_CST)
7481 error ("non-constant argument 0 to %qs", name);
7482 return const0_rtx;
7485 int dim = TREE_INT_CST_LOW (arg);
7486 switch (dim)
7488 case GOMP_DIM_GANG:
7489 case GOMP_DIM_WORKER:
7490 case GOMP_DIM_VECTOR:
7491 break;
7492 default:
7493 error ("illegal argument 0 to %qs", name);
7494 return const0_rtx;
7497 if (ignore)
7498 return target;
7500 if (target == NULL_RTX)
7501 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7503 if (!targetm.have_oacc_dim_size ())
7505 emit_move_insn (target, fallback_retval);
7506 return target;
7509 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7510 emit_insn (gen_fn (reg, GEN_INT (dim)));
7511 if (reg != target)
7512 emit_move_insn (target, reg);
7514 return target;
7517 /* Expand a string compare operation using a sequence of char comparisons
7518 to get rid of the calling overhead, with result going to TARGET if
7519 that's convenient.
7521 VAR_STR is the variable string source;
7522 CONST_STR is the constant string source;
7523 LENGTH is the number of chars to compare;
7524 CONST_STR_N indicates which source string is the constant string;
7525 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7527 The call is expanded to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7529 target = (int) (unsigned char) var_str[0]
7530 - (int) (unsigned char) const_str[0];
7531 if (target != 0)
7532 goto ne_label;
7534 target = (int) (unsigned char) var_str[length - 2]
7535 - (int) (unsigned char) const_str[length - 2];
7536 if (target != 0)
7537 goto ne_label;
7538 target = (int) (unsigned char) var_str[length - 1]
7539 - (int) (unsigned char) const_str[length - 1];
7540 ne_label:
7543 static rtx
7544 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7545 unsigned HOST_WIDE_INT length,
7546 int const_str_n, machine_mode mode)
7548 HOST_WIDE_INT offset = 0;
7549 rtx var_rtx_array
7550 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7551 rtx var_rtx = NULL_RTX;
7552 rtx const_rtx = NULL_RTX;
7553 rtx result = target ? target : gen_reg_rtx (mode);
7554 rtx_code_label *ne_label = gen_label_rtx ();
7555 tree unit_type_node = unsigned_char_type_node;
7556 scalar_int_mode unit_mode
7557 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7559 start_sequence ();
7561 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7563 var_rtx
7564 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7565 const_rtx = c_readstr (const_str + offset, unit_mode);
7566 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7567 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7569 op0 = convert_modes (mode, unit_mode, op0, 1);
7570 op1 = convert_modes (mode, unit_mode, op1, 1);
7571 rtx diff = expand_simple_binop (mode, MINUS, op0, op1,
7572 result, 1, OPTAB_WIDEN);
7574 /* Force the difference into result register. We cannot reassign
7575 result here ("result = diff") or we may end up returning
7576 uninitialized result when expand_simple_binop allocates a new
7577 pseudo-register for returning. */
7578 if (diff != result)
7579 emit_move_insn (result, diff);
7581 if (i < length - 1)
7582 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7583 mode, true, ne_label);
7584 offset += GET_MODE_SIZE (unit_mode);
7587 emit_label (ne_label);
7588 rtx_insn *insns = get_insns ();
7589 end_sequence ();
7590 emit_insn (insns);
7592 return result;
7595 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
7596 to TARGET if that's convenient.
7597 If the call cannot be inlined, return NULL_RTX. */
7599 static rtx
7600 inline_expand_builtin_bytecmp (tree exp, rtx target)
7602 tree fndecl = get_callee_fndecl (exp);
7603 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7604 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7606 /* Do NOT apply this inlining expansion when optimizing for size or
7607 optimization level below 2 or if unused *cmp hasn't been DCEd. */
7608 if (optimize < 2 || optimize_insn_for_size_p () || target == const0_rtx)
7609 return NULL_RTX;
7611 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7612 || fcode == BUILT_IN_STRNCMP
7613 || fcode == BUILT_IN_MEMCMP);
7615 /* On a target where the type of the call (int) has the same or narrower precision
7616 than unsigned char, give up the inlining expansion. */
7617 if (TYPE_PRECISION (unsigned_char_type_node)
7618 >= TYPE_PRECISION (TREE_TYPE (exp)))
7619 return NULL_RTX;
7621 tree arg1 = CALL_EXPR_ARG (exp, 0);
7622 tree arg2 = CALL_EXPR_ARG (exp, 1);
7623 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7625 unsigned HOST_WIDE_INT len1 = 0;
7626 unsigned HOST_WIDE_INT len2 = 0;
7627 unsigned HOST_WIDE_INT len3 = 0;
7629 /* Get the object representation of the initializers of ARG1 and ARG2
7630 as strings, provided they refer to constant objects, with their byte
7631 sizes in LEN1 and LEN2, respectively. */
7632 const char *bytes1 = getbyterep (arg1, &len1);
7633 const char *bytes2 = getbyterep (arg2, &len2);
7635 /* Fail if neither argument refers to an initialized constant. */
7636 if (!bytes1 && !bytes2)
7637 return NULL_RTX;
7639 if (is_ncmp)
7641 /* Fail if the memcmp/strncmp bound is not a constant. */
7642 if (!tree_fits_uhwi_p (len3_tree))
7643 return NULL_RTX;
7645 len3 = tree_to_uhwi (len3_tree);
7647 if (fcode == BUILT_IN_MEMCMP)
7649 /* Fail if the memcmp bound is greater than the size of either
7650 of the two constant objects. */
7651 if ((bytes1 && len1 < len3)
7652 || (bytes2 && len2 < len3))
7653 return NULL_RTX;
7657 if (fcode != BUILT_IN_MEMCMP)
7659 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7660 and LEN2 to the length of the nul-terminated string stored
7661 in each. */
7662 if (bytes1 != NULL)
7663 len1 = strnlen (bytes1, len1) + 1;
7664 if (bytes2 != NULL)
7665 len2 = strnlen (bytes2, len2) + 1;
7668 /* See inline_string_cmp. */
7669 int const_str_n;
7670 if (!len1)
7671 const_str_n = 2;
7672 else if (!len2)
7673 const_str_n = 1;
7674 else if (len2 > len1)
7675 const_str_n = 1;
7676 else
7677 const_str_n = 2;
7679 /* For strncmp only, compute the new bound as the smallest of
7680 the lengths of the two strings (plus 1) and the bound provided
7681 to the function. */
7682 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7683 if (is_ncmp && len3 < bound)
7684 bound = len3;
7686 /* If the bound of the comparison is larger than the threshold,
7687 do nothing. */
7688 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7689 return NULL_RTX;
7691 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7693 /* Now, start inline expansion of the call. */
7694 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7695 (const_str_n == 1) ? bytes1 : bytes2, bound,
7696 const_str_n, mode);
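/* Illustrative sketch (editorial addition): with -O2, a constant string
   argument and a resulting bound no larger than
   param_builtin_string_cmp_inline_length, a call like the hypothetical one
   below is expanded by inline_string_cmp above into byte-wise subtractions
   and early-exit jumps instead of a library call.  */
static int
example_inline_strcmp (const char *s)
{
  return __builtin_strcmp (s, "ab");
}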
7699 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7700 represents the size of the first argument to that call, or VOIDmode
7701 if the argument is a pointer. IGNORE will be true if the result
7702 isn't used. */
7703 static rtx
7704 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7705 bool ignore)
7707 rtx val, failsafe;
7708 unsigned nargs = call_expr_nargs (exp);
7710 tree arg0 = CALL_EXPR_ARG (exp, 0);
7712 if (mode == VOIDmode)
7714 mode = TYPE_MODE (TREE_TYPE (arg0));
7715 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7718 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7720 /* An optional second argument can be used as a failsafe value on
7721 some machines. If it isn't present, then the failsafe value is
7722 assumed to be 0. */
7723 if (nargs > 1)
7725 tree arg1 = CALL_EXPR_ARG (exp, 1);
7726 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7728 else
7729 failsafe = const0_rtx;
7731 /* If the result isn't used, the behavior is undefined. It would be
7732 nice to emit a warning here, but path splitting means this might
7733 happen with legitimate code. So simply drop the builtin
7734 expansion in that case; we've handled any side-effects above. */
7735 if (ignore)
7736 return const0_rtx;
7738 /* If we don't have a suitable target, create one to hold the result. */
7739 if (target == NULL || GET_MODE (target) != mode)
7740 target = gen_reg_rtx (mode);
7742 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7743 val = convert_modes (mode, VOIDmode, val, false);
7745 return targetm.speculation_safe_value (mode, target, val, failsafe);
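/* Illustrative sketch (editorial addition): a bounds-checked load hardened
   against speculative execution, as in the hypothetical helper below, is the
   intended use.  The optional second argument is the failsafe value and
   defaults to 0 when omitted, as handled above.  */
static int
example_speculation_safe_load (const int *array, unsigned long idx,
			       unsigned long bound)
{
  if (idx < bound)
    return array[__builtin_speculation_safe_value (idx, 0ul)];
  return 0;
}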
7748 /* Expand an expression EXP that calls a built-in function,
7749 with result going to TARGET if that's convenient
7750 (and in mode MODE if that's convenient).
7751 SUBTARGET may be used as the target for computing one of EXP's operands.
7752 IGNORE is nonzero if the value is to be ignored. */
7754 rtx
7755 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7756 int ignore)
7758 tree fndecl = get_callee_fndecl (exp);
7759 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7760 int flags;
7762 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7763 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7765 /* When ASan is enabled, we don't want to expand some memory/string
7766 builtins and rely on libsanitizer's hooks. This allows us to avoid
7767 redundant checks and be sure that possible overflow will be detected
7768 by ASan. */
7770 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7771 if (param_asan_kernel_mem_intrinsic_prefix
7772 && sanitize_flags_p (SANITIZE_KERNEL_ADDRESS
7773 | SANITIZE_KERNEL_HWADDRESS))
7774 switch (fcode)
7776 rtx save_decl_rtl, ret;
7777 case BUILT_IN_MEMCPY:
7778 case BUILT_IN_MEMMOVE:
7779 case BUILT_IN_MEMSET:
7780 save_decl_rtl = DECL_RTL (fndecl);
7781 DECL_RTL (fndecl) = asan_memfn_rtl (fndecl);
7782 ret = expand_call (exp, target, ignore);
7783 DECL_RTL (fndecl) = save_decl_rtl;
7784 return ret;
7785 default:
7786 break;
7788 if (sanitize_flags_p (SANITIZE_ADDRESS | SANITIZE_HWADDRESS)
7789 && asan_intercepted_p (fcode))
7790 return expand_call (exp, target, ignore);
7792 /* When not optimizing, generate calls to library functions for a certain
7793 set of builtins. */
7794 if (!optimize
7795 && !called_as_built_in (fndecl)
7796 && fcode != BUILT_IN_FORK
7797 && fcode != BUILT_IN_EXECL
7798 && fcode != BUILT_IN_EXECV
7799 && fcode != BUILT_IN_EXECLP
7800 && fcode != BUILT_IN_EXECLE
7801 && fcode != BUILT_IN_EXECVP
7802 && fcode != BUILT_IN_EXECVE
7803 && fcode != BUILT_IN_CLEAR_CACHE
7804 && !ALLOCA_FUNCTION_CODE_P (fcode)
7805 && fcode != BUILT_IN_FREE
7806 && (fcode != BUILT_IN_MEMSET
7807 || !(flag_inline_stringops & ILSOP_MEMSET))
7808 && (fcode != BUILT_IN_MEMCPY
7809 || !(flag_inline_stringops & ILSOP_MEMCPY))
7810 && (fcode != BUILT_IN_MEMMOVE
7811 || !(flag_inline_stringops & ILSOP_MEMMOVE))
7812 && (fcode != BUILT_IN_MEMCMP
7813 || !(flag_inline_stringops & ILSOP_MEMCMP)))
7814 return expand_call (exp, target, ignore);
7816 /* The built-in function expanders test for target == const0_rtx
7817 to determine whether the function's result will be ignored. */
7818 if (ignore)
7819 target = const0_rtx;
7821 /* If the result of a pure or const built-in function is ignored, and
7822 none of its arguments are volatile, we can avoid expanding the
7823 built-in call and just evaluate the arguments for side-effects. */
7824 if (target == const0_rtx
7825 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7826 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7828 bool volatilep = false;
7829 tree arg;
7830 call_expr_arg_iterator iter;
7832 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7833 if (TREE_THIS_VOLATILE (arg))
7835 volatilep = true;
7836 break;
7839 if (! volatilep)
7841 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7842 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7843 return const0_rtx;
7847 switch (fcode)
7849 CASE_FLT_FN (BUILT_IN_FABS):
7850 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7851 case BUILT_IN_FABSD32:
7852 case BUILT_IN_FABSD64:
7853 case BUILT_IN_FABSD128:
7854 target = expand_builtin_fabs (exp, target, subtarget);
7855 if (target)
7856 return target;
7857 break;
7859 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7860 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7861 target = expand_builtin_copysign (exp, target, subtarget);
7862 if (target)
7863 return target;
7864 break;
7866 /* Just do a normal library call if we were unable to fold
7867 the values. */
7868 CASE_FLT_FN (BUILT_IN_CABS):
7869 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CABS):
7870 break;
7872 CASE_FLT_FN (BUILT_IN_FMA):
7873 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7874 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7875 if (target)
7876 return target;
7877 break;
7879 CASE_FLT_FN (BUILT_IN_ILOGB):
7880 if (! flag_unsafe_math_optimizations)
7881 break;
7882 gcc_fallthrough ();
7883 CASE_FLT_FN (BUILT_IN_ISINF):
7884 CASE_FLT_FN (BUILT_IN_FINITE):
7885 case BUILT_IN_ISFINITE:
7886 case BUILT_IN_ISNORMAL:
7887 target = expand_builtin_interclass_mathfn (exp, target);
7888 if (target)
7889 return target;
7890 break;
7892 case BUILT_IN_ISSIGNALING:
7893 target = expand_builtin_issignaling (exp, target);
7894 if (target)
7895 return target;
7896 break;
7898 CASE_FLT_FN (BUILT_IN_ICEIL):
7899 CASE_FLT_FN (BUILT_IN_LCEIL):
7900 CASE_FLT_FN (BUILT_IN_LLCEIL):
7901 CASE_FLT_FN (BUILT_IN_LFLOOR):
7902 CASE_FLT_FN (BUILT_IN_IFLOOR):
7903 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7904 target = expand_builtin_int_roundingfn (exp, target);
7905 if (target)
7906 return target;
7907 break;
7909 CASE_FLT_FN (BUILT_IN_IRINT):
7910 CASE_FLT_FN (BUILT_IN_LRINT):
7911 CASE_FLT_FN (BUILT_IN_LLRINT):
7912 CASE_FLT_FN (BUILT_IN_IROUND):
7913 CASE_FLT_FN (BUILT_IN_LROUND):
7914 CASE_FLT_FN (BUILT_IN_LLROUND):
7915 target = expand_builtin_int_roundingfn_2 (exp, target);
7916 if (target)
7917 return target;
7918 break;
7920 CASE_FLT_FN (BUILT_IN_POWI):
7921 target = expand_builtin_powi (exp, target);
7922 if (target)
7923 return target;
7924 break;
7926 CASE_FLT_FN (BUILT_IN_CEXPI):
7927 target = expand_builtin_cexpi (exp, target);
7928 gcc_assert (target);
7929 return target;
7931 CASE_FLT_FN (BUILT_IN_SIN):
7932 CASE_FLT_FN (BUILT_IN_COS):
7933 if (! flag_unsafe_math_optimizations)
7934 break;
7935 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7936 if (target)
7937 return target;
7938 break;
7940 CASE_FLT_FN (BUILT_IN_SINCOS):
7941 if (! flag_unsafe_math_optimizations)
7942 break;
7943 target = expand_builtin_sincos (exp);
7944 if (target)
7945 return target;
7946 break;
7948 case BUILT_IN_FEGETROUND:
7949 target = expand_builtin_fegetround (exp, target, target_mode);
7950 if (target)
7951 return target;
7952 break;
7954 case BUILT_IN_FECLEAREXCEPT:
7955 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7956 feclearexcept_optab);
7957 if (target)
7958 return target;
7959 break;
7961 case BUILT_IN_FERAISEEXCEPT:
7962 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7963 feraiseexcept_optab);
7964 if (target)
7965 return target;
7966 break;
7968 case BUILT_IN_APPLY_ARGS:
7969 return expand_builtin_apply_args ();
7971 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7972 FUNCTION with a copy of the parameters described by
7973 ARGUMENTS, and ARGSIZE. It returns a block of memory
7974 allocated on the stack into which is stored all the registers
7975 that might possibly be used for returning the result of a
7976 function. ARGUMENTS is the value returned by
7977 __builtin_apply_args. ARGSIZE is the number of bytes of
7978 arguments that must be copied. ??? How should this value be
7979 computed? We'll also need a safe worst case value for varargs
7980 functions. */
7981 case BUILT_IN_APPLY:
7982 if (!validate_arglist (exp, POINTER_TYPE,
7983 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7984 && !validate_arglist (exp, REFERENCE_TYPE,
7985 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7986 return const0_rtx;
7987 else
7989 rtx ops[3];
7991 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7992 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7993 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7995 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7998 /* __builtin_return (RESULT) causes the function to return the
7999 value described by RESULT. RESULT is address of the block of
8000 memory returned by __builtin_apply. */
8001 case BUILT_IN_RETURN:
8002 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8003 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
8004 return const0_rtx;
8006 case BUILT_IN_SAVEREGS:
8007 return expand_builtin_saveregs ();
8009 case BUILT_IN_VA_ARG_PACK:
8010 /* All valid uses of __builtin_va_arg_pack () are removed during
8011 inlining. */
8012 error ("invalid use of %<__builtin_va_arg_pack ()%>");
8013 return const0_rtx;
8015 case BUILT_IN_VA_ARG_PACK_LEN:
8016 /* All valid uses of __builtin_va_arg_pack_len () are removed during
8017 inlining. */
8018 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
8019 return const0_rtx;
8021 /* Return the address of the first anonymous stack arg. */
8022 case BUILT_IN_NEXT_ARG:
8023 if (fold_builtin_next_arg (exp, false))
8024 return const0_rtx;
8025 return expand_builtin_next_arg ();
8027 case BUILT_IN_CLEAR_CACHE:
8028 expand_builtin___clear_cache (exp);
8029 return const0_rtx;
8031 case BUILT_IN_CLASSIFY_TYPE:
8032 return expand_builtin_classify_type (exp);
8034 case BUILT_IN_CONSTANT_P:
8035 return const0_rtx;
8037 case BUILT_IN_FRAME_ADDRESS:
8038 case BUILT_IN_RETURN_ADDRESS:
8039 return expand_builtin_frame_address (fndecl, exp);
8041 case BUILT_IN_STACK_ADDRESS:
8042 return expand_builtin_stack_address ();
8044 case BUILT_IN___STRUB_ENTER:
8045 target = expand_builtin_strub_enter (exp);
8046 if (target)
8047 return target;
8048 break;
8050 case BUILT_IN___STRUB_UPDATE:
8051 target = expand_builtin_strub_update (exp);
8052 if (target)
8053 return target;
8054 break;
8056 case BUILT_IN___STRUB_LEAVE:
8057 target = expand_builtin_strub_leave (exp);
8058 if (target)
8059 return target;
8060 break;
8062 /* Returns the address of the area where the structure is returned.
8063 0 otherwise. */
8064 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8065 if (call_expr_nargs (exp) != 0
8066 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8067 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
8068 return const0_rtx;
8069 else
8070 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8072 CASE_BUILT_IN_ALLOCA:
8073 target = expand_builtin_alloca (exp);
8074 if (target)
8075 return target;
8076 break;
8078 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8079 return expand_asan_emit_allocas_unpoison (exp);
8081 case BUILT_IN_STACK_SAVE:
8082 return expand_stack_save ();
8084 case BUILT_IN_STACK_RESTORE:
8085 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8086 return const0_rtx;
8088 case BUILT_IN_BSWAP16:
8089 case BUILT_IN_BSWAP32:
8090 case BUILT_IN_BSWAP64:
8091 case BUILT_IN_BSWAP128:
8092 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8093 if (target)
8094 return target;
8095 break;
8097 CASE_INT_FN (BUILT_IN_FFS):
8098 target = expand_builtin_unop (target_mode, exp, target,
8099 subtarget, ffs_optab);
8100 if (target)
8101 return target;
8102 break;
8104 CASE_INT_FN (BUILT_IN_CLZ):
8105 target = expand_builtin_unop (target_mode, exp, target,
8106 subtarget, clz_optab);
8107 if (target)
8108 return target;
8109 break;
8111 CASE_INT_FN (BUILT_IN_CTZ):
8112 target = expand_builtin_unop (target_mode, exp, target,
8113 subtarget, ctz_optab);
8114 if (target)
8115 return target;
8116 break;
8118 CASE_INT_FN (BUILT_IN_CLRSB):
8119 target = expand_builtin_unop (target_mode, exp, target,
8120 subtarget, clrsb_optab);
8121 if (target)
8122 return target;
8123 break;
8125 CASE_INT_FN (BUILT_IN_POPCOUNT):
8126 target = expand_builtin_unop (target_mode, exp, target,
8127 subtarget, popcount_optab);
8128 if (target)
8129 return target;
8130 break;
8132 CASE_INT_FN (BUILT_IN_PARITY):
8133 target = expand_builtin_unop (target_mode, exp, target,
8134 subtarget, parity_optab);
8135 if (target)
8136 return target;
8137 break;
8139 case BUILT_IN_STRLEN:
8140 target = expand_builtin_strlen (exp, target, target_mode);
8141 if (target)
8142 return target;
8143 break;
8145 case BUILT_IN_STRNLEN:
8146 target = expand_builtin_strnlen (exp, target, target_mode);
8147 if (target)
8148 return target;
8149 break;
8151 case BUILT_IN_STRCPY:
8152 target = expand_builtin_strcpy (exp, target);
8153 if (target)
8154 return target;
8155 break;
8157 case BUILT_IN_STRNCPY:
8158 target = expand_builtin_strncpy (exp, target);
8159 if (target)
8160 return target;
8161 break;
8163 case BUILT_IN_STPCPY:
8164 target = expand_builtin_stpcpy (exp, target, mode);
8165 if (target)
8166 return target;
8167 break;
8169 case BUILT_IN_MEMCPY:
8170 target = expand_builtin_memcpy (exp, target);
8171 if (target)
8172 return target;
8173 break;
8175 case BUILT_IN_MEMMOVE:
8176 target = expand_builtin_memmove (exp, target);
8177 if (target)
8178 return target;
8179 break;
8181 case BUILT_IN_MEMPCPY:
8182 target = expand_builtin_mempcpy (exp, target);
8183 if (target)
8184 return target;
8185 break;
8187 case BUILT_IN_MEMSET:
8188 target = expand_builtin_memset (exp, target, mode);
8189 if (target)
8190 return target;
8191 break;
8193 case BUILT_IN_BZERO:
8194 target = expand_builtin_bzero (exp);
8195 if (target)
8196 return target;
8197 break;
8199 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8200 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8201 when changing it to a strcmp call. */
8202 case BUILT_IN_STRCMP_EQ:
8203 target = expand_builtin_memcmp (exp, target, true);
8204 if (target)
8205 return target;
8207 /* Change this call back to a BUILT_IN_STRCMP. */
8208 TREE_OPERAND (exp, 1)
8209 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8211 /* Delete the last parameter. */
8212 unsigned int i;
8213 vec<tree, va_gc> *arg_vec;
8214 vec_alloc (arg_vec, 2);
8215 for (i = 0; i < 2; i++)
8216 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8217 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8218 /* FALLTHROUGH */
8220 case BUILT_IN_STRCMP:
8221 target = expand_builtin_strcmp (exp, target);
8222 if (target)
8223 return target;
8224 break;
8226 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8227 back to a BUILT_IN_STRNCMP. */
8228 case BUILT_IN_STRNCMP_EQ:
8229 target = expand_builtin_memcmp (exp, target, true);
8230 if (target)
8231 return target;
8233 /* Change it back to a BUILT_IN_STRNCMP. */
8234 TREE_OPERAND (exp, 1)
8235 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8236 /* FALLTHROUGH */
8238 case BUILT_IN_STRNCMP:
8239 target = expand_builtin_strncmp (exp, target, mode);
8240 if (target)
8241 return target;
8242 break;
8244 case BUILT_IN_BCMP:
8245 case BUILT_IN_MEMCMP:
8246 case BUILT_IN_MEMCMP_EQ:
8247 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8248 if (target)
8249 return target;
8250 if (fcode == BUILT_IN_MEMCMP_EQ)
8252 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8253 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8255 break;
8257 case BUILT_IN_SETJMP:
8258 /* This should have been lowered to the builtins below. */
8259 gcc_unreachable ();
8261 case BUILT_IN_SETJMP_SETUP:
8262 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8263 and the receiver label. */
8264 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8266 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8267 VOIDmode, EXPAND_NORMAL);
8268 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8269 rtx_insn *label_r = label_rtx (label);
8271 expand_builtin_setjmp_setup (buf_addr, label_r);
8272 return const0_rtx;
8274 break;
8276 case BUILT_IN_SETJMP_RECEIVER:
8277 /* __builtin_setjmp_receiver is passed the receiver label. */
8278 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8280 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8281 rtx_insn *label_r = label_rtx (label);
8283 expand_builtin_setjmp_receiver (label_r);
8284 nonlocal_goto_handler_labels
8285 = gen_rtx_INSN_LIST (VOIDmode, label_r,
8286 nonlocal_goto_handler_labels);
8287 /* ??? Do not let expand_label treat us as such since we would
8288 not want to be both on the list of non-local labels and on
8289 the list of forced labels. */
8290 FORCED_LABEL (label) = 0;
8291 return const0_rtx;
8293 break;
8295 /* __builtin_longjmp is passed a pointer to an array of five words.
8296 It's similar to the C library longjmp function but works with
8297 __builtin_setjmp above. */
8298 case BUILT_IN_LONGJMP:
8299 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8301 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8302 VOIDmode, EXPAND_NORMAL);
8303 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8305 if (value != const1_rtx)
8307 error ("%<__builtin_longjmp%> second argument must be 1");
8308 return const0_rtx;
8311 expand_builtin_longjmp (buf_addr, value);
8312 return const0_rtx;
8314 break;
8316 case BUILT_IN_NONLOCAL_GOTO:
8317 target = expand_builtin_nonlocal_goto (exp);
8318 if (target)
8319 return target;
8320 break;
8322 /* This updates the setjmp buffer that is its argument with the value
8323 of the current stack pointer. */
8324 case BUILT_IN_UPDATE_SETJMP_BUF:
8325 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8327 rtx buf_addr
8328 = expand_normal (CALL_EXPR_ARG (exp, 0));
8330 expand_builtin_update_setjmp_buf (buf_addr);
8331 return const0_rtx;
8333 break;
8335 case BUILT_IN_TRAP:
8336 case BUILT_IN_UNREACHABLE_TRAP:
8337 expand_builtin_trap ();
8338 return const0_rtx;
8340 case BUILT_IN_UNREACHABLE:
8341 expand_builtin_unreachable ();
8342 return const0_rtx;
8344 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8345 case BUILT_IN_SIGNBITD32:
8346 case BUILT_IN_SIGNBITD64:
8347 case BUILT_IN_SIGNBITD128:
8348 target = expand_builtin_signbit (exp, target);
8349 if (target)
8350 return target;
8351 break;
8353 /* Various hooks for the DWARF 2 __throw routine. */
8354 case BUILT_IN_UNWIND_INIT:
8355 expand_builtin_unwind_init ();
8356 return const0_rtx;
8357 case BUILT_IN_DWARF_CFA:
8358 return virtual_cfa_rtx;
8359 #ifdef DWARF2_UNWIND_INFO
8360 case BUILT_IN_DWARF_SP_COLUMN:
8361 return expand_builtin_dwarf_sp_column ();
8362 case BUILT_IN_INIT_DWARF_REG_SIZES:
8363 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8364 return const0_rtx;
8365 #endif
8366 case BUILT_IN_FROB_RETURN_ADDR:
8367 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8368 case BUILT_IN_EXTRACT_RETURN_ADDR:
8369 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8370 case BUILT_IN_EH_RETURN:
8371 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8372 CALL_EXPR_ARG (exp, 1));
8373 return const0_rtx;
8374 case BUILT_IN_EH_RETURN_DATA_REGNO:
8375 return expand_builtin_eh_return_data_regno (exp);
8376 case BUILT_IN_EXTEND_POINTER:
8377 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8378 case BUILT_IN_EH_POINTER:
8379 return expand_builtin_eh_pointer (exp);
8380 case BUILT_IN_EH_FILTER:
8381 return expand_builtin_eh_filter (exp);
8382 case BUILT_IN_EH_COPY_VALUES:
8383 return expand_builtin_eh_copy_values (exp);
8385 case BUILT_IN_VA_START:
8386 return expand_builtin_va_start (exp);
8387 case BUILT_IN_VA_END:
8388 return expand_builtin_va_end (exp);
8389 case BUILT_IN_VA_COPY:
8390 return expand_builtin_va_copy (exp);
8391 case BUILT_IN_EXPECT:
8392 return expand_builtin_expect (exp, target);
8393 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8394 return expand_builtin_expect_with_probability (exp, target);
8395 case BUILT_IN_ASSUME_ALIGNED:
8396 return expand_builtin_assume_aligned (exp, target);
8397 case BUILT_IN_PREFETCH:
8398 expand_builtin_prefetch (exp);
8399 return const0_rtx;
8401 case BUILT_IN_INIT_TRAMPOLINE:
8402 return expand_builtin_init_trampoline (exp, true);
8403 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8404 return expand_builtin_init_trampoline (exp, false);
8405 case BUILT_IN_ADJUST_TRAMPOLINE:
8406 return expand_builtin_adjust_trampoline (exp);
8408 case BUILT_IN_INIT_DESCRIPTOR:
8409 return expand_builtin_init_descriptor (exp);
8410 case BUILT_IN_ADJUST_DESCRIPTOR:
8411 return expand_builtin_adjust_descriptor (exp);
8413 case BUILT_IN_GCC_NESTED_PTR_CREATED:
8414 case BUILT_IN_GCC_NESTED_PTR_DELETED:
8415 break; /* At present, no expansion, just call the function. */
8417 case BUILT_IN_FORK:
8418 case BUILT_IN_EXECL:
8419 case BUILT_IN_EXECV:
8420 case BUILT_IN_EXECLP:
8421 case BUILT_IN_EXECLE:
8422 case BUILT_IN_EXECVP:
8423 case BUILT_IN_EXECVE:
8424 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8425 if (target)
8426 return target;
8427 break;
8429 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8430 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8431 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8432 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8433 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8434 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8435 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8436 if (target)
8437 return target;
8438 break;
8440 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8441 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8442 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8443 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8444 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8445 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8446 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8447 if (target)
8448 return target;
8449 break;
8451 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8452 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8453 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8454 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8455 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8456 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8457 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8458 if (target)
8459 return target;
8460 break;
8462 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8463 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8464 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8465 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8466 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8467 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8468 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8469 if (target)
8470 return target;
8471 break;
8473 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8474 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8475 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8476 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8477 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8478 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8479 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8480 if (target)
8481 return target;
8482 break;
8484 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8485 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8486 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8487 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8488 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8489 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8490 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8491 if (target)
8492 return target;
8493 break;
8495 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8496 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8497 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8498 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8499 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8500 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8501 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8502 if (target)
8503 return target;
8504 break;
8506 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8507 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8508 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8509 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8510 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8511 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8512 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8513 if (target)
8514 return target;
8515 break;
8517 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8518 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8519 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8520 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8521 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8522 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8523 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8524 if (target)
8525 return target;
8526 break;
8528 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8529 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8530 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8531 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8532 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8533 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8534 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8535 if (target)
8536 return target;
8537 break;
8539 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8540 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8541 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8542 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8543 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8544 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8545 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8546 if (target)
8547 return target;
8548 break;
8550 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8551 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8552 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8553 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8554 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8555 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8556 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8557 if (target)
8558 return target;
8559 break;
8561 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8562 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8563 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8564 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8565 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8566 if (mode == VOIDmode)
8567 mode = TYPE_MODE (boolean_type_node);
8568 if (!target || !register_operand (target, mode))
8569 target = gen_reg_rtx (mode);
8571 mode = get_builtin_sync_mode
8572 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8573 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8574 if (target)
8575 return target;
8576 break;
8578 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8579 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8580 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8581 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8582 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8583 mode = get_builtin_sync_mode
8584 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8585 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8586 if (target)
8587 return target;
8588 break;
8590 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8591 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8592 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8593 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8594 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8595 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8596 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8597 if (target)
8598 return target;
8599 break;
8601 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8602 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8603 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8604 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8605 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8606 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8607 expand_builtin_sync_lock_release (mode, exp);
8608 return const0_rtx;
8610 case BUILT_IN_SYNC_SYNCHRONIZE:
8611 expand_builtin_sync_synchronize ();
8612 return const0_rtx;
8614 case BUILT_IN_ATOMIC_EXCHANGE_1:
8615 case BUILT_IN_ATOMIC_EXCHANGE_2:
8616 case BUILT_IN_ATOMIC_EXCHANGE_4:
8617 case BUILT_IN_ATOMIC_EXCHANGE_8:
8618 case BUILT_IN_ATOMIC_EXCHANGE_16:
8619 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8620 target = expand_builtin_atomic_exchange (mode, exp, target);
8621 if (target)
8622 return target;
8623 break;
8625 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8626 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8627 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8628 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8629 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8631 unsigned int nargs, z;
8632 vec<tree, va_gc> *vec;
8634 mode =
8635 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8636 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8637 if (target)
8638 return target;
8640 /* If this is turned into an external library call, the weak parameter
8641 must be dropped to match the expected parameter list. */
8642 nargs = call_expr_nargs (exp);
8643 vec_alloc (vec, nargs - 1);
8644 for (z = 0; z < 3; z++)
8645 vec->quick_push (CALL_EXPR_ARG (exp, z));
8646 /* Skip the boolean weak parameter. */
8647 for (z = 4; z < 6; z++)
8648 vec->quick_push (CALL_EXPR_ARG (exp, z));
8649 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8650 break;
8653 case BUILT_IN_ATOMIC_LOAD_1:
8654 case BUILT_IN_ATOMIC_LOAD_2:
8655 case BUILT_IN_ATOMIC_LOAD_4:
8656 case BUILT_IN_ATOMIC_LOAD_8:
8657 case BUILT_IN_ATOMIC_LOAD_16:
8658 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8659 target = expand_builtin_atomic_load (mode, exp, target);
8660 if (target)
8661 return target;
8662 break;
8664 case BUILT_IN_ATOMIC_STORE_1:
8665 case BUILT_IN_ATOMIC_STORE_2:
8666 case BUILT_IN_ATOMIC_STORE_4:
8667 case BUILT_IN_ATOMIC_STORE_8:
8668 case BUILT_IN_ATOMIC_STORE_16:
8669 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8670 target = expand_builtin_atomic_store (mode, exp);
8671 if (target)
8672 return const0_rtx;
8673 break;
8675 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8676 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8677 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8678 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8679 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8681 enum built_in_function lib;
8682 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8683 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8684 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8685 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8686 ignore, lib);
8687 if (target)
8688 return target;
8689 break;
8691 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8692 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8693 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8694 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8695 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8697 enum built_in_function lib;
8698 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8699 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8700 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8701 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8702 ignore, lib);
8703 if (target)
8704 return target;
8705 break;
8707 case BUILT_IN_ATOMIC_AND_FETCH_1:
8708 case BUILT_IN_ATOMIC_AND_FETCH_2:
8709 case BUILT_IN_ATOMIC_AND_FETCH_4:
8710 case BUILT_IN_ATOMIC_AND_FETCH_8:
8711 case BUILT_IN_ATOMIC_AND_FETCH_16:
8713 enum built_in_function lib;
8714 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8715 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8716 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8717 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8718 ignore, lib);
8719 if (target)
8720 return target;
8721 break;
8723 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8724 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8725 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8726 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8727 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8729 enum built_in_function lib;
8730 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8731 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8732 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8733 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8734 ignore, lib);
8735 if (target)
8736 return target;
8737 break;
8739 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8740 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8741 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8742 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8743 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8745 enum built_in_function lib;
8746 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8747 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8748 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8749 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8750 ignore, lib);
8751 if (target)
8752 return target;
8753 break;
8755 case BUILT_IN_ATOMIC_OR_FETCH_1:
8756 case BUILT_IN_ATOMIC_OR_FETCH_2:
8757 case BUILT_IN_ATOMIC_OR_FETCH_4:
8758 case BUILT_IN_ATOMIC_OR_FETCH_8:
8759 case BUILT_IN_ATOMIC_OR_FETCH_16:
8761 enum built_in_function lib;
8762 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8763 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8764 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8765 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8766 ignore, lib);
8767 if (target)
8768 return target;
8769 break;
8771 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8772 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8773 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8774 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8775 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8776 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8777 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8778 ignore, BUILT_IN_NONE);
8779 if (target)
8780 return target;
8781 break;
8783 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8784 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8785 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8786 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8787 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8788 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8789 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8790 ignore, BUILT_IN_NONE);
8791 if (target)
8792 return target;
8793 break;
8795 case BUILT_IN_ATOMIC_FETCH_AND_1:
8796 case BUILT_IN_ATOMIC_FETCH_AND_2:
8797 case BUILT_IN_ATOMIC_FETCH_AND_4:
8798 case BUILT_IN_ATOMIC_FETCH_AND_8:
8799 case BUILT_IN_ATOMIC_FETCH_AND_16:
8800 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8801 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8802 ignore, BUILT_IN_NONE);
8803 if (target)
8804 return target;
8805 break;
8807 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8808 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8809 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8810 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8811 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8812 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8813 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8814 ignore, BUILT_IN_NONE);
8815 if (target)
8816 return target;
8817 break;
8819 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8820 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8821 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8822 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8823 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8824 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8825 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8826 ignore, BUILT_IN_NONE);
8827 if (target)
8828 return target;
8829 break;
8831 case BUILT_IN_ATOMIC_FETCH_OR_1:
8832 case BUILT_IN_ATOMIC_FETCH_OR_2:
8833 case BUILT_IN_ATOMIC_FETCH_OR_4:
8834 case BUILT_IN_ATOMIC_FETCH_OR_8:
8835 case BUILT_IN_ATOMIC_FETCH_OR_16:
8836 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8837 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8838 ignore, BUILT_IN_NONE);
8839 if (target)
8840 return target;
8841 break;
8843 case BUILT_IN_ATOMIC_TEST_AND_SET:
8844 target = expand_builtin_atomic_test_and_set (exp, target);
8845 if (target)
8846 return target;
8847 break;
8849 case BUILT_IN_ATOMIC_CLEAR:
8850 return expand_builtin_atomic_clear (exp);
8852 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8853 return expand_builtin_atomic_always_lock_free (exp);
8855 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8856 target = expand_builtin_atomic_is_lock_free (exp);
8857 if (target)
8858 return target;
8859 break;
8861 case BUILT_IN_ATOMIC_THREAD_FENCE:
8862 expand_builtin_atomic_thread_fence (exp);
8863 return const0_rtx;
8865 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8866 expand_builtin_atomic_signal_fence (exp);
8867 return const0_rtx;
8869 case BUILT_IN_OBJECT_SIZE:
8870 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8871 return expand_builtin_object_size (exp);
8873 case BUILT_IN_MEMCPY_CHK:
8874 case BUILT_IN_MEMPCPY_CHK:
8875 case BUILT_IN_MEMMOVE_CHK:
8876 case BUILT_IN_MEMSET_CHK:
8877 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8878 if (target)
8879 return target;
8880 break;
8882 case BUILT_IN_STRCPY_CHK:
8883 case BUILT_IN_STPCPY_CHK:
8884 case BUILT_IN_STRNCPY_CHK:
8885 case BUILT_IN_STPNCPY_CHK:
8886 case BUILT_IN_STRCAT_CHK:
8887 case BUILT_IN_STRNCAT_CHK:
8888 case BUILT_IN_SNPRINTF_CHK:
8889 case BUILT_IN_VSNPRINTF_CHK:
8890 maybe_emit_chk_warning (exp, fcode);
8891 break;
8893 case BUILT_IN_SPRINTF_CHK:
8894 case BUILT_IN_VSPRINTF_CHK:
8895 maybe_emit_sprintf_chk_warning (exp, fcode);
8896 break;
8898 case BUILT_IN_THREAD_POINTER:
8899 return expand_builtin_thread_pointer (exp, target);
8901 case BUILT_IN_SET_THREAD_POINTER:
8902 expand_builtin_set_thread_pointer (exp);
8903 return const0_rtx;
8905 case BUILT_IN_ACC_ON_DEVICE:
8906 /* Do a library call if we failed to expand the builtin when
8907 folding. */
8908 break;
8910 case BUILT_IN_GOACC_PARLEVEL_ID:
8911 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8912 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8914 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8915 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8917 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8918 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8919 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8920 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8921 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8922 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8923 return expand_speculation_safe_value (mode, exp, target, ignore);
8925 default: /* just do a library call for an unknown builtin */
8926 break;
8929 /* The switch statement above can drop through to cause the function
8930 to be called normally. */
8931 return expand_call (exp, target, ignore);
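/* Illustrative sketch, not part of the GCC sources: the OP_FETCH atomic
   builtins expanded above can fall back to the corresponding FETCH_OP
   library function because the "new value" form is recoverable from the
   "old value" form by reapplying the operation.  The helper name below
   is hypothetical.  */
static int
add_fetch_via_fetch_add_example (int *p, int v)
{
  /* __atomic_add_fetch (p, v, m) is equivalent to
     __atomic_fetch_add (p, v, m) + v.  */
  return __atomic_fetch_add (p, v, __ATOMIC_SEQ_CST) + v;
}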
8934 /* Determine whether a tree node represents a call to a built-in
8935 function. If the tree T is a call to a built-in function with
8936 the right number of arguments of the appropriate types, return
8937 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8938 Otherwise the return value is END_BUILTINS. */
8940 enum built_in_function
8941 builtin_mathfn_code (const_tree t)
8943 const_tree fndecl, arg, parmlist;
8944 const_tree argtype, parmtype;
8945 const_call_expr_arg_iterator iter;
8947 if (TREE_CODE (t) != CALL_EXPR)
8948 return END_BUILTINS;
8950 fndecl = get_callee_fndecl (t);
8951 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8952 return END_BUILTINS;
8954 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8955 init_const_call_expr_arg_iterator (t, &iter);
8956 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8958 /* If a function doesn't take a variable number of arguments,
8959 the last element in the list will have type `void'. */
8960 parmtype = TREE_VALUE (parmlist);
8961 if (VOID_TYPE_P (parmtype))
8963 if (more_const_call_expr_args_p (&iter))
8964 return END_BUILTINS;
8965 return DECL_FUNCTION_CODE (fndecl);
8968 if (! more_const_call_expr_args_p (&iter))
8969 return END_BUILTINS;
8971 arg = next_const_call_expr_arg (&iter);
8972 argtype = TREE_TYPE (arg);
8974 if (SCALAR_FLOAT_TYPE_P (parmtype))
8976 if (! SCALAR_FLOAT_TYPE_P (argtype))
8977 return END_BUILTINS;
8979 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8981 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8982 return END_BUILTINS;
8984 else if (POINTER_TYPE_P (parmtype))
8986 if (! POINTER_TYPE_P (argtype))
8987 return END_BUILTINS;
8989 else if (INTEGRAL_TYPE_P (parmtype))
8991 if (! INTEGRAL_TYPE_P (argtype))
8992 return END_BUILTINS;
8994 else
8995 return END_BUILTINS;
8998 /* Variable-length argument list. */
8999 return DECL_FUNCTION_CODE (fndecl);
9002 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9003 evaluate to a constant. */
9005 static tree
9006 fold_builtin_constant_p (tree arg)
9008 /* We return 1 for a numeric type that's known to be a constant
9009 value at compile-time or for an aggregate type that's a
9010 literal constant. */
9011 STRIP_NOPS (arg);
9013 /* If we know this is a constant, return the constant one. */
9014 if (CONSTANT_CLASS_P (arg)
9015 || (TREE_CODE (arg) == CONSTRUCTOR
9016 && TREE_CONSTANT (arg)))
9017 return integer_one_node;
9018 if (TREE_CODE (arg) == ADDR_EXPR)
9020 tree op = TREE_OPERAND (arg, 0);
9021 if (TREE_CODE (op) == STRING_CST
9022 || (TREE_CODE (op) == ARRAY_REF
9023 && integer_zerop (TREE_OPERAND (op, 1))
9024 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9025 return integer_one_node;
9028 /* If this expression has side effects, show we don't know it to be a
9029 constant. Likewise if it's a pointer or aggregate type since in
9030 those cases we only want literals, since those are only optimized
9031 when generating RTL, not later.
9032 And finally, if we are compiling an initializer, not code, we
9033 need to return a definite result now; there's not going to be any
9034 more optimization done. */
9035 if (TREE_SIDE_EFFECTS (arg)
9036 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9037 || POINTER_TYPE_P (TREE_TYPE (arg))
9038 || cfun == 0
9039 || folding_initializer
9040 || force_folding_builtin_constant_p)
9041 return integer_zero_node;
9043 return NULL_TREE;
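/* Illustrative usage sketch, not part of the GCC sources: how the folding
   above typically behaves at the source level.  When exactly the deferred
   case resolves depends on optimization.  */
static int
constant_p_examples (int x, volatile int *p)
{
  int a = __builtin_constant_p (42);     /* constant class: folds to 1 */
  int b = __builtin_constant_p ("abc");  /* address of STRING_CST: folds to 1 */
  int c = __builtin_constant_p (*p);     /* has side effects: folds to 0 */
  int d = __builtin_constant_p (x);      /* deferred; 0 unless later proven
                                            constant */
  return a + b + c + d;
}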
9046 /* Create builtin_expect or builtin_expect_with_probability
9047 with PRED and EXPECTED as its arguments and return it as a truthvalue.
9048 The Fortran FE can also produce builtin_expect with PREDICTOR as the third argument.
9049 builtin_expect_with_probability instead uses the third argument as the PROBABILITY
9050 value. */
9052 static tree
9053 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9054 tree predictor, tree probability)
9056 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9058 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9059 : BUILT_IN_EXPECT_WITH_PROBABILITY);
9060 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9061 ret_type = TREE_TYPE (TREE_TYPE (fn));
9062 pred_type = TREE_VALUE (arg_types);
9063 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9065 pred = fold_convert_loc (loc, pred_type, pred);
9066 expected = fold_convert_loc (loc, expected_type, expected);
9068 if (probability)
9069 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9070 else
9071 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9072 predictor);
9074 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9075 build_int_cst (ret_type, 0));
9078 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9079 NULL_TREE if no simplification is possible. */
9081 tree
9082 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9083 tree arg3)
9085 tree inner, fndecl, inner_arg0;
9086 enum tree_code code;
9088 /* Distribute the expected value over short-circuiting operators.
9089 See through the cast from truthvalue_type_node to long. */
9090 inner_arg0 = arg0;
9091 while (CONVERT_EXPR_P (inner_arg0)
9092 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9093 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9094 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9096 /* If this is a builtin_expect within a builtin_expect keep the
9097 inner one. See through a comparison against a constant. It
9098 might have been added to create a truthvalue. */
9099 inner = inner_arg0;
9101 if (COMPARISON_CLASS_P (inner)
9102 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9103 inner = TREE_OPERAND (inner, 0);
9105 if (TREE_CODE (inner) == CALL_EXPR
9106 && (fndecl = get_callee_fndecl (inner))
9107 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT,
9108 BUILT_IN_EXPECT_WITH_PROBABILITY))
9109 return arg0;
9111 inner = inner_arg0;
9112 code = TREE_CODE (inner);
9113 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9115 tree op0 = TREE_OPERAND (inner, 0);
9116 tree op1 = TREE_OPERAND (inner, 1);
9117 arg1 = save_expr (arg1);
9119 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9120 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9121 inner = build2 (code, TREE_TYPE (inner), op0, op1);
9123 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9126 /* If the argument isn't invariant then there's nothing else we can do. */
9127 if (!TREE_CONSTANT (inner_arg0))
9128 return NULL_TREE;
9130 /* If we expect that a comparison against the argument will fold to
9131 a constant return the constant. In practice, this means a true
9132 constant or the address of a non-weak symbol. */
9133 inner = inner_arg0;
9134 STRIP_NOPS (inner);
9135 if (TREE_CODE (inner) == ADDR_EXPR)
9139 inner = TREE_OPERAND (inner, 0);
9141 while (TREE_CODE (inner) == COMPONENT_REF
9142 || TREE_CODE (inner) == ARRAY_REF);
9143 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9144 return NULL_TREE;
9147 /* Otherwise, ARG0 already has the proper type for the return value. */
9148 return arg0;
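/* Illustrative sketch, not part of the GCC sources: the distribution
   performed above, written at the source level.  __builtin_expect with a
   TRUTH_ANDIF/TRUTH_ORIF argument is split so that each short-circuited
   operand carries the hint.  */
static int
expect_distribution_example (int a, int b)
{
  /* Roughly what __builtin_expect (a && b, 1) is folded into.  */
  return __builtin_expect (a != 0, 1) && __builtin_expect (b != 0, 1);
}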
9151 /* Fold a call to __builtin_classify_type with argument ARG. */
9153 static tree
9154 fold_builtin_classify_type (tree arg)
9156 if (arg == 0)
9157 return build_int_cst (integer_type_node, no_type_class);
9159 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9162 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
9163 ARG. */
9165 static tree
9166 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
9168 if (!validate_arg (arg, POINTER_TYPE))
9169 return NULL_TREE;
9170 else
9172 c_strlen_data lendata = { };
9173 tree len = c_strlen (arg, 0, &lendata);
9175 if (len)
9176 return fold_convert_loc (loc, type, len);
9178 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
9179 also early enough to detect invalid reads in multidimensional
9180 arrays and struct members. */
9181 if (!lendata.decl)
9182 c_strlen (arg, 1, &lendata);
9184 if (lendata.decl)
9186 if (EXPR_HAS_LOCATION (arg))
9187 loc = EXPR_LOCATION (arg);
9188 else if (loc == UNKNOWN_LOCATION)
9189 loc = input_location;
9190 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
9193 return NULL_TREE;
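/* Illustrative sketch, not part of the GCC sources: when c_strlen can
   compute the length (e.g. for a string literal), the call above folds
   to a constant of the requested TYPE.  */
static __SIZE_TYPE__
strlen_fold_example (void)
{
  return __builtin_strlen ("hello");  /* folds to 5 */
}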
9197 /* Fold a call to __builtin_inf or __builtin_huge_val. */
9199 static tree
9200 fold_builtin_inf (location_t loc, tree type, int warn)
9202 /* __builtin_inff is intended to be usable to define INFINITY on all
9203 targets. If an infinity is not available, INFINITY expands "to a
9204 positive constant of type float that overflows at translation
9205 time", footnote "In this case, using INFINITY will violate the
9206 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9207 Thus we pedwarn to ensure this constraint violation is
9208 diagnosed. */
9209 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9210 pedwarn (loc, 0, "target format does not support infinity");
9212 return build_real (type, dconstinf);
9215 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9216 NULL_TREE if no simplification can be made. */
9218 static tree
9219 fold_builtin_sincos (location_t loc,
9220 tree arg0, tree arg1, tree arg2)
9222 tree type;
9223 tree fndecl, call = NULL_TREE;
9225 if (!validate_arg (arg0, REAL_TYPE)
9226 || !validate_arg (arg1, POINTER_TYPE)
9227 || !validate_arg (arg2, POINTER_TYPE))
9228 return NULL_TREE;
9230 type = TREE_TYPE (arg0);
9232 /* Calculate the result when the argument is a constant. */
9233 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9234 if (fn == END_BUILTINS)
9235 return NULL_TREE;
9237 /* Canonicalize sincos to cexpi. */
9238 if (TREE_CODE (arg0) == REAL_CST)
9240 tree complex_type = build_complex_type (type);
9241 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9243 if (!call)
9245 if (!targetm.libc_has_function (function_c99_math_complex, type)
9246 || !builtin_decl_implicit_p (fn))
9247 return NULL_TREE;
9248 fndecl = builtin_decl_explicit (fn);
9249 call = build_call_expr_loc (loc, fndecl, 1, arg0);
9250 call = builtin_save_expr (call);
9253 tree ptype = build_pointer_type (type);
9254 arg1 = fold_convert (ptype, arg1);
9255 arg2 = fold_convert (ptype, arg2);
9256 return build2 (COMPOUND_EXPR, void_type_node,
9257 build2 (MODIFY_EXPR, void_type_node,
9258 build_fold_indirect_ref_loc (loc, arg1),
9259 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9260 build2 (MODIFY_EXPR, void_type_node,
9261 build_fold_indirect_ref_loc (loc, arg2),
9262 fold_build1_loc (loc, REALPART_EXPR, type, call)));
9265 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9266 Return NULL_TREE if no simplification can be made. */
9268 static tree
9269 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9271 if (!validate_arg (arg1, POINTER_TYPE)
9272 || !validate_arg (arg2, POINTER_TYPE)
9273 || !validate_arg (len, INTEGER_TYPE))
9274 return NULL_TREE;
9276 /* If the LEN parameter is zero, return zero. */
9277 if (integer_zerop (len))
9278 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9279 arg1, arg2);
9281 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9282 if (operand_equal_p (arg1, arg2, 0))
9283 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9285 /* If len parameter is one, return an expression corresponding to
9286 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9287 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9289 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9290 tree cst_uchar_ptr_node
9291 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9293 tree ind1
9294 = fold_convert_loc (loc, integer_type_node,
9295 build1 (INDIRECT_REF, cst_uchar_node,
9296 fold_convert_loc (loc,
9297 cst_uchar_ptr_node,
9298 arg1)));
9299 tree ind2
9300 = fold_convert_loc (loc, integer_type_node,
9301 build1 (INDIRECT_REF, cst_uchar_node,
9302 fold_convert_loc (loc,
9303 cst_uchar_ptr_node,
9304 arg2)));
9305 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9308 return NULL_TREE;
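/* Illustrative sketch, not part of the GCC sources: the length-one
   memcmp fold above at the source level.  */
static int
memcmp1_example (const void *a, const void *b)
{
  /* memcmp (a, b, 1) becomes the difference of the first bytes.  */
  return *(const unsigned char *) a - *(const unsigned char *) b;
}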
9311 /* Fold a call to builtin isascii with argument ARG. */
9313 static tree
9314 fold_builtin_isascii (location_t loc, tree arg)
9316 if (!validate_arg (arg, INTEGER_TYPE))
9317 return NULL_TREE;
9318 else
9320 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9321 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9322 build_int_cst (integer_type_node,
9323 ~ HOST_WIDE_INT_UC (0x7f)));
9324 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9325 arg, integer_zero_node);
9329 /* Fold a call to builtin toascii with argument ARG. */
9331 static tree
9332 fold_builtin_toascii (location_t loc, tree arg)
9334 if (!validate_arg (arg, INTEGER_TYPE))
9335 return NULL_TREE;
9337 /* Transform toascii(c) -> (c & 0x7f). */
9338 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9339 build_int_cst (integer_type_node, 0x7f));
9342 /* Fold a call to builtin isdigit with argument ARG. */
9344 static tree
9345 fold_builtin_isdigit (location_t loc, tree arg)
9347 if (!validate_arg (arg, INTEGER_TYPE))
9348 return NULL_TREE;
9349 else
9351 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9352 /* According to the C standard, isdigit is unaffected by locale.
9353 However, it definitely is affected by the target character set. */
9354 unsigned HOST_WIDE_INT target_digit0
9355 = lang_hooks.to_target_charset ('0');
9357 if (target_digit0 == 0)
9358 return NULL_TREE;
9360 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9361 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9362 build_int_cst (unsigned_type_node, target_digit0));
9363 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9364 build_int_cst (unsigned_type_node, 9));
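/* Illustrative sketch, not part of the GCC sources: the three ctype
   folds above at the source level, assuming an execution character set
   where the digits '0'..'9' are contiguous and increasing, as the C
   standard requires.  */
static int isascii_folded (int c) { return (c & ~0x7f) == 0; }
static int toascii_folded (int c) { return c & 0x7f; }
static int isdigit_folded (int c) { return (unsigned) c - '0' <= 9; }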
9368 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9370 static tree
9371 fold_builtin_fabs (location_t loc, tree arg, tree type)
9373 if (!validate_arg (arg, REAL_TYPE))
9374 return NULL_TREE;
9376 arg = fold_convert_loc (loc, type, arg);
9377 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9380 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9382 static tree
9383 fold_builtin_abs (location_t loc, tree arg, tree type)
9385 if (!validate_arg (arg, INTEGER_TYPE))
9386 return NULL_TREE;
9388 arg = fold_convert_loc (loc, type, arg);
9389 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9392 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9394 static tree
9395 fold_builtin_carg (location_t loc, tree arg, tree type)
9397 if (validate_arg (arg, COMPLEX_TYPE)
9398 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg))))
9400 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9402 if (atan2_fn)
9404 tree new_arg = builtin_save_expr (arg);
9405 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9406 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9407 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9411 return NULL_TREE;
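/* Illustrative sketch, not part of the GCC sources: the carg fold above
   at the source level; atan2 is declared in <math.h>, and the GNU
   __real__ / __imag__ operators stand in for creal/cimag.  */
static double
carg_folded (double _Complex z)
{
  return atan2 (__imag__ z, __real__ z);
}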
9414 /* Fold a call to builtin frexp, we can assume the base is 2. */
9416 static tree
9417 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9419 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9420 return NULL_TREE;
9422 STRIP_NOPS (arg0);
9424 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9425 return NULL_TREE;
9427 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9429 /* Proceed if a valid pointer type was passed in. */
9430 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9432 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9433 tree frac, exp, res;
9435 switch (value->cl)
9437 case rvc_zero:
9438 /* For +-0, return (*exp = 0, +-0). */
9439 exp = integer_zero_node;
9440 frac = arg0;
9441 break;
9442 case rvc_nan:
9443 case rvc_inf:
9444 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9445 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9446 case rvc_normal:
9448 /* Since the frexp function always expects base 2, and in
9449 GCC normalized significands are already in the range
9450 [0.5, 1.0), we have exactly what frexp wants. */
9451 REAL_VALUE_TYPE frac_rvt = *value;
9452 SET_REAL_EXP (&frac_rvt, 0);
9453 frac = build_real (rettype, frac_rvt);
9454 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9456 break;
9457 default:
9458 gcc_unreachable ();
9461 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9462 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9463 TREE_SIDE_EFFECTS (arg1) = 1;
9464 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9465 suppress_warning (res, OPT_Wunused_value);
9466 return res;
9469 return NULL_TREE;
9472 /* Fold a call to builtin modf. */
9474 static tree
9475 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9477 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9478 return NULL_TREE;
9480 STRIP_NOPS (arg0);
9482 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9483 return NULL_TREE;
9485 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9487 /* Proceed if a valid pointer type was passed in. */
9488 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9490 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9491 REAL_VALUE_TYPE trunc, frac;
9492 tree res;
9494 switch (value->cl)
9496 case rvc_nan:
9497 case rvc_zero:
9498 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9499 trunc = frac = *value;
9500 break;
9501 case rvc_inf:
9502 /* For +-Inf, return (*arg1 = arg0, +-0). */
9503 frac = dconst0;
9504 frac.sign = value->sign;
9505 trunc = *value;
9506 break;
9507 case rvc_normal:
9508 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9509 real_trunc (&trunc, VOIDmode, value);
9510 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9511 /* If the original number was negative and already
9512 integral, then the fractional part is -0.0. */
9513 if (value->sign && frac.cl == rvc_zero)
9514 frac.sign = value->sign;
9515 break;
9518 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9519 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9520 build_real (rettype, trunc));
9521 TREE_SIDE_EFFECTS (arg1) = 1;
9522 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9523 build_real (rettype, frac));
9524 suppress_warning (res, OPT_Wunused_value);
9525 return res;
9528 return NULL_TREE;
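/* Illustrative sketch, not part of the GCC sources: what the constant
   folds above produce for concrete arguments (frexp and modf are
   declared in <math.h>).  */
static void
frexp_modf_examples (int *e, double *ip, double *frac, double *sig)
{
  *sig = frexp (8.0, e);   /* folds to 0.5, with *e set to 4 */
  *frac = modf (2.5, ip);  /* folds to 0.5, with *ip set to 2.0 */
}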
9531 /* Given a location LOC, an interclass builtin function decl FNDECL
9532 and its single argument ARG, return a folded expression computing
9533 the same, or NULL_TREE if we either couldn't or didn't want to fold
9534 (the latter happens if there's an RTL instruction available). */
9536 static tree
9537 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9539 machine_mode mode;
9541 if (!validate_arg (arg, REAL_TYPE))
9542 return NULL_TREE;
9544 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9545 return NULL_TREE;
9547 mode = TYPE_MODE (TREE_TYPE (arg));
9549 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9551 /* If there is no optab, try generic code. */
9552 switch (DECL_FUNCTION_CODE (fndecl))
9554 tree result;
9556 CASE_FLT_FN (BUILT_IN_ISINF):
9558 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9559 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9560 tree type = TREE_TYPE (arg);
9561 REAL_VALUE_TYPE r;
9562 char buf[128];
9564 if (is_ibm_extended)
9566 /* NaN and Inf are encoded in the high-order double value
9567 only. The low-order value is not significant. */
9568 type = double_type_node;
9569 mode = DFmode;
9570 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9572 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9573 real_from_string (&r, buf);
9574 result = build_call_expr (isgr_fn, 2,
9575 fold_build1_loc (loc, ABS_EXPR, type, arg),
9576 build_real (type, r));
9577 return result;
9579 CASE_FLT_FN (BUILT_IN_FINITE):
9580 case BUILT_IN_ISFINITE:
9582 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9583 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9584 tree type = TREE_TYPE (arg);
9585 REAL_VALUE_TYPE r;
9586 char buf[128];
9588 if (is_ibm_extended)
9590 /* NaN and Inf are encoded in the high-order double value
9591 only. The low-order value is not significant. */
9592 type = double_type_node;
9593 mode = DFmode;
9594 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9596 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9597 real_from_string (&r, buf);
9598 result = build_call_expr (isle_fn, 2,
9599 fold_build1_loc (loc, ABS_EXPR, type, arg),
9600 build_real (type, r));
9601 /*result = fold_build2_loc (loc, UNGT_EXPR,
9602 TREE_TYPE (TREE_TYPE (fndecl)),
9603 fold_build1_loc (loc, ABS_EXPR, type, arg),
9604 build_real (type, r));
9605 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9606 TREE_TYPE (TREE_TYPE (fndecl)),
9607 result);*/
9608 return result;
9610 case BUILT_IN_ISNORMAL:
9612 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9613 islessequal(fabs(x),DBL_MAX). */
9614 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9615 tree type = TREE_TYPE (arg);
9616 tree orig_arg, max_exp, min_exp;
9617 machine_mode orig_mode = mode;
9618 REAL_VALUE_TYPE rmax, rmin;
9619 char buf[128];
9621 orig_arg = arg = builtin_save_expr (arg);
9622 if (is_ibm_extended)
9624 /* Use double to test the normal range of IBM extended
9625 precision. Emin for IBM extended precision is
9626 different to emin for IEEE double, being 53 higher
9627 since the low double exponent is at least 53 lower
9628 than the high double exponent. */
9629 type = double_type_node;
9630 mode = DFmode;
9631 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9633 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9635 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9636 real_from_string (&rmax, buf);
9637 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9638 real_from_string (&rmin, buf);
9639 max_exp = build_real (type, rmax);
9640 min_exp = build_real (type, rmin);
9642 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9643 if (is_ibm_extended)
9645 /* Testing the high end of the range is done just using
9646 the high double, using the same test as isfinite().
9647 For the subnormal end of the range we first test the
9648 high double, then if its magnitude is equal to the
9649 limit of 0x1p-969, we test whether the low double is
9650 non-zero and opposite sign to the high double. */
9651 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9652 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9653 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9654 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9655 arg, min_exp);
9656 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9657 complex_double_type_node, orig_arg);
9658 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9659 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9660 tree zero = build_real (type, dconst0);
9661 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9662 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9663 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9664 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9665 fold_build3 (COND_EXPR,
9666 integer_type_node,
9667 hilt, logt, lolt));
9668 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9669 eq_min, ok_lo);
9670 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9671 gt_min, eq_min);
9673 else
9675 tree const isge_fn
9676 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9677 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9679 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9680 max_exp, min_exp);
9681 return result;
9683 default:
9684 break;
9687 return NULL_TREE;
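/* Illustrative sketch, not part of the GCC sources: the generic
   expansions chosen above when no instruction pattern exists, written
   for double (isgreater and friends are the <math.h> macros; DBL_MAX
   and DBL_MIN come from <float.h>).  */
static int isinf_folded (double x)    { return isgreater (fabs (x), DBL_MAX); }
static int isfinite_folded (double x) { return islessequal (fabs (x), DBL_MAX); }
static int isnormal_folded (double x)
{
  return isgreaterequal (fabs (x), DBL_MIN) & islessequal (fabs (x), DBL_MAX);
}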
9690 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9691 ARG is the argument for the call. */
9693 static tree
9694 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9696 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9698 if (!validate_arg (arg, REAL_TYPE))
9699 return NULL_TREE;
9701 switch (builtin_index)
9703 case BUILT_IN_ISINF:
9704 if (tree_expr_infinite_p (arg))
9705 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9706 if (!tree_expr_maybe_infinite_p (arg))
9707 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9708 return NULL_TREE;
9710 case BUILT_IN_ISINF_SIGN:
9712 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9713 /* In a boolean context, GCC will fold the inner COND_EXPR to
9714 1. So e.g. "if (isinf_sign(x))" would be folded to just
9715 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9716 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9717 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9718 tree tmp = NULL_TREE;
9720 arg = builtin_save_expr (arg);
9722 if (signbit_fn && isinf_fn)
9724 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9725 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9727 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9728 signbit_call, integer_zero_node);
9729 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9730 isinf_call, integer_zero_node);
9732 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9733 integer_minus_one_node, integer_one_node);
9734 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9735 isinf_call, tmp,
9736 integer_zero_node);
9739 return tmp;
9742 case BUILT_IN_ISFINITE:
9743 if (tree_expr_finite_p (arg))
9744 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9745 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
9746 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9747 return NULL_TREE;
9749 case BUILT_IN_ISNAN:
9750 if (tree_expr_nan_p (arg))
9751 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9752 if (!tree_expr_maybe_nan_p (arg))
9753 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9756 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9757 if (is_ibm_extended)
9759 /* NaN and Inf are encoded in the high-order double value
9760 only. The low-order value is not significant. */
9761 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9764 arg = builtin_save_expr (arg);
9765 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9767 case BUILT_IN_ISSIGNALING:
9768 /* Folding to true for REAL_CST is done in fold_const_call_ss.
9769 Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
9770 and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
9771 here, so there is some possibility of __builtin_issignaling working
9772 without -fsignaling-nans. Especially when -fno-signaling-nans is
9773 the default. */
9774 if (!tree_expr_maybe_nan_p (arg))
9775 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9776 return NULL_TREE;
9778 default:
9779 gcc_unreachable ();
9783 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9784 This builtin will generate code to return the appropriate floating
9785 point classification depending on the value of the floating point
9786 number passed in. The possible return values must be supplied as
9787 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9788 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9789 one floating point argument which is "type generic". */
9791 static tree
9792 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9794 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9795 arg, type, res, tmp;
9796 machine_mode mode;
9797 REAL_VALUE_TYPE r;
9798 char buf[128];
9800 /* Verify the required arguments in the original call. */
9801 if (nargs != 6
9802 || !validate_arg (args[0], INTEGER_TYPE)
9803 || !validate_arg (args[1], INTEGER_TYPE)
9804 || !validate_arg (args[2], INTEGER_TYPE)
9805 || !validate_arg (args[3], INTEGER_TYPE)
9806 || !validate_arg (args[4], INTEGER_TYPE)
9807 || !validate_arg (args[5], REAL_TYPE))
9808 return NULL_TREE;
9810 fp_nan = args[0];
9811 fp_infinite = args[1];
9812 fp_normal = args[2];
9813 fp_subnormal = args[3];
9814 fp_zero = args[4];
9815 arg = args[5];
9816 type = TREE_TYPE (arg);
9817 mode = TYPE_MODE (type);
9818 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9820 /* fpclassify(x) ->
9821 isnan(x) ? FP_NAN :
9822 (fabs(x) == Inf ? FP_INFINITE :
9823 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9824 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9826 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9827 build_real (type, dconst0));
9828 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9829 tmp, fp_zero, fp_subnormal);
9831 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9832 real_from_string (&r, buf);
9833 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9834 arg, build_real (type, r));
9835 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9837 if (tree_expr_maybe_infinite_p (arg))
9839 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9840 build_real (type, dconstinf));
9841 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9842 fp_infinite, res);
9845 if (tree_expr_maybe_nan_p (arg))
9847 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9848 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9851 return res;
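/* Illustrative sketch, not part of the GCC sources: the nested
   conditional built above, written for double with the FP_* values and
   classification macros taken from <math.h> and DBL_MIN from <float.h>.  */
static int
fpclassify_folded (double x)
{
  double a = fabs (x);
  return isnan (x) ? FP_NAN
         : a == INFINITY ? FP_INFINITE
         : a >= DBL_MIN ? FP_NORMAL
         : a == 0.0 ? FP_ZERO : FP_SUBNORMAL;
}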
9854 /* Fold a call to an unordered comparison function such as
9855 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9856 being called and ARG0 and ARG1 are the arguments for the call.
9857 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9858 the opposite of the desired result. UNORDERED_CODE is used
9859 for modes that can hold NaNs and ORDERED_CODE is used for
9860 the rest. */
9862 static tree
9863 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9864 enum tree_code unordered_code,
9865 enum tree_code ordered_code)
9867 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9868 enum tree_code code;
9869 tree type0, type1;
9870 enum tree_code code0, code1;
9871 tree cmp_type = NULL_TREE;
9873 type0 = TREE_TYPE (arg0);
9874 type1 = TREE_TYPE (arg1);
9876 code0 = TREE_CODE (type0);
9877 code1 = TREE_CODE (type1);
9879 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9880 /* Choose the wider of two real types. */
9881 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9882 ? type0 : type1;
9883 else if (code0 == REAL_TYPE
9884 && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
9885 cmp_type = type0;
9886 else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
9887 && code1 == REAL_TYPE)
9888 cmp_type = type1;
9890 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9891 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9893 if (unordered_code == UNORDERED_EXPR)
9895 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
9896 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
9897 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
9898 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9899 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9902 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
9903 ? unordered_code : ordered_code;
9904 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9905 fold_build2_loc (loc, code, type, arg0, arg1));
9908 /* Fold a call to __builtin_iseqsig(). ARG0 and ARG1 are the arguments.
9909 After choosing the wider floating-point type for the comparison,
9910 the code is folded to:
9911 SAVE_EXPR<ARG0> >= SAVE_EXPR<ARG1> && SAVE_EXPR<ARG0> <= SAVE_EXPR<ARG1> */
9913 static tree
9914 fold_builtin_iseqsig (location_t loc, tree arg0, tree arg1)
9916 tree type0, type1;
9917 enum tree_code code0, code1;
9918 tree cmp1, cmp2, cmp_type = NULL_TREE;
9920 type0 = TREE_TYPE (arg0);
9921 type1 = TREE_TYPE (arg1);
9923 code0 = TREE_CODE (type0);
9924 code1 = TREE_CODE (type1);
9926 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9927 /* Choose the wider of two real types. */
9928 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9929 ? type0 : type1;
9930 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9931 cmp_type = type0;
9932 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9933 cmp_type = type1;
9935 arg0 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg0));
9936 arg1 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg1));
9938 cmp1 = fold_build2_loc (loc, GE_EXPR, integer_type_node, arg0, arg1);
9939 cmp2 = fold_build2_loc (loc, LE_EXPR, integer_type_node, arg0, arg1);
9941 return fold_build2_loc (loc, TRUTH_AND_EXPR, integer_type_node, cmp1, cmp2);
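/* Illustrative sketch, not part of the GCC sources: the fold above at
   the source level.  Unlike ==, each ordered comparison raises the
   "invalid" exception for a NaN operand, which is what iseqsig
   requires.  */
static int
iseqsig_folded (double x, double y)
{
  return x >= y && x <= y;
}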
9944 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9945 arithmetic if it can never overflow, or into internal functions that
9946 return both the result of the arithmetic and an overflowed boolean flag in
9947 a complex integer result, or some other check for overflow.
9948 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9949 checking part of that. */
9951 static tree
9952 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9953 tree arg0, tree arg1, tree arg2)
9955 enum internal_fn ifn = IFN_LAST;
9956 /* The code of the expression corresponding to the built-in. */
9957 enum tree_code opcode = ERROR_MARK;
9958 bool ovf_only = false;
9960 switch (fcode)
9962 case BUILT_IN_ADD_OVERFLOW_P:
9963 ovf_only = true;
9964 /* FALLTHRU */
9965 case BUILT_IN_ADD_OVERFLOW:
9966 case BUILT_IN_SADD_OVERFLOW:
9967 case BUILT_IN_SADDL_OVERFLOW:
9968 case BUILT_IN_SADDLL_OVERFLOW:
9969 case BUILT_IN_UADD_OVERFLOW:
9970 case BUILT_IN_UADDL_OVERFLOW:
9971 case BUILT_IN_UADDLL_OVERFLOW:
9972 opcode = PLUS_EXPR;
9973 ifn = IFN_ADD_OVERFLOW;
9974 break;
9975 case BUILT_IN_SUB_OVERFLOW_P:
9976 ovf_only = true;
9977 /* FALLTHRU */
9978 case BUILT_IN_SUB_OVERFLOW:
9979 case BUILT_IN_SSUB_OVERFLOW:
9980 case BUILT_IN_SSUBL_OVERFLOW:
9981 case BUILT_IN_SSUBLL_OVERFLOW:
9982 case BUILT_IN_USUB_OVERFLOW:
9983 case BUILT_IN_USUBL_OVERFLOW:
9984 case BUILT_IN_USUBLL_OVERFLOW:
9985 opcode = MINUS_EXPR;
9986 ifn = IFN_SUB_OVERFLOW;
9987 break;
9988 case BUILT_IN_MUL_OVERFLOW_P:
9989 ovf_only = true;
9990 /* FALLTHRU */
9991 case BUILT_IN_MUL_OVERFLOW:
9992 case BUILT_IN_SMUL_OVERFLOW:
9993 case BUILT_IN_SMULL_OVERFLOW:
9994 case BUILT_IN_SMULLL_OVERFLOW:
9995 case BUILT_IN_UMUL_OVERFLOW:
9996 case BUILT_IN_UMULL_OVERFLOW:
9997 case BUILT_IN_UMULLL_OVERFLOW:
9998 opcode = MULT_EXPR;
9999 ifn = IFN_MUL_OVERFLOW;
10000 break;
10001 default:
10002 gcc_unreachable ();
10005 /* For the "generic" overloads, the first two arguments can have different
10006 types and the last argument determines the target type to use to check
10007 for overflow. The arguments of the other overloads all have the same
10008 type. */
10009 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
10011 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10012 arguments are constant, attempt to fold the built-in call into a constant
10013 expression indicating whether or not it detected an overflow. */
10014 if (ovf_only
10015 && TREE_CODE (arg0) == INTEGER_CST
10016 && TREE_CODE (arg1) == INTEGER_CST)
10017 /* Perform the computation in the target type and check for overflow. */
10018 return omit_one_operand_loc (loc, boolean_type_node,
10019 arith_overflowed_p (opcode, type, arg0, arg1)
10020 ? boolean_true_node : boolean_false_node,
10021 arg2);
10023 tree intres, ovfres;
10024 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10026 intres = fold_binary_loc (loc, opcode, type,
10027 fold_convert_loc (loc, type, arg0),
10028 fold_convert_loc (loc, type, arg1));
10029 if (TREE_OVERFLOW (intres))
10030 intres = drop_tree_overflow (intres);
10031 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
10032 ? boolean_true_node : boolean_false_node);
10034 else
10036 tree ctype = build_complex_type (type);
10037 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10038 arg0, arg1);
10039 tree tgt;
10040 if (ovf_only)
10042 tgt = call;
10043 intres = NULL_TREE;
10045 else
10047 /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
10048 as while the call itself is const, the REALPART_EXPR store is
10049 certainly not. And in any case, we want just one call,
10050 not multiple calls that we would then try to CSE later. */
10051 TREE_SIDE_EFFECTS (call) = 1;
10052 tgt = save_expr (call);
10054 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10055 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10056 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
10059 if (ovf_only)
10060 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
10062 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
10063 tree store
10064 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
10065 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
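/* Illustrative usage sketch, not part of the GCC sources: with constant
   operands the _p form above folds to a constant, while the storing
   form becomes an internal IFN_*_OVERFLOW call whose real part is the
   result and whose imaginary part is the overflow flag (INT_MAX from
   <limits.h>).  */
static int
add_overflow_examples (int a, int b)
{
  int res;
  _Bool c1 = __builtin_add_overflow_p (INT_MAX, 1, (int) 0);  /* folds to 1 */
  _Bool c2 = __builtin_add_overflow (a, b, &res);
  return c1 + c2 + res;
}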
10068 /* Fold __builtin_{clz,ctz,clrsb,ffs,parity,popcount}g into corresponding
10069 internal function. */
10071 static tree
10072 fold_builtin_bit_query (location_t loc, enum built_in_function fcode,
10073 tree arg0, tree arg1)
10075 enum internal_fn ifn;
10076 enum built_in_function fcodei, fcodel, fcodell;
10077 tree arg0_type = TREE_TYPE (arg0);
10078 tree cast_type = NULL_TREE;
10079 int addend = 0;
10081 switch (fcode)
10083 case BUILT_IN_CLZG:
10084 if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10085 return NULL_TREE;
10086 ifn = IFN_CLZ;
10087 fcodei = BUILT_IN_CLZ;
10088 fcodel = BUILT_IN_CLZL;
10089 fcodell = BUILT_IN_CLZLL;
10090 break;
10091 case BUILT_IN_CTZG:
10092 if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10093 return NULL_TREE;
10094 ifn = IFN_CTZ;
10095 fcodei = BUILT_IN_CTZ;
10096 fcodel = BUILT_IN_CTZL;
10097 fcodell = BUILT_IN_CTZLL;
10098 break;
10099 case BUILT_IN_CLRSBG:
10100 ifn = IFN_CLRSB;
10101 fcodei = BUILT_IN_CLRSB;
10102 fcodel = BUILT_IN_CLRSBL;
10103 fcodell = BUILT_IN_CLRSBLL;
10104 break;
10105 case BUILT_IN_FFSG:
10106 ifn = IFN_FFS;
10107 fcodei = BUILT_IN_FFS;
10108 fcodel = BUILT_IN_FFSL;
10109 fcodell = BUILT_IN_FFSLL;
10110 break;
10111 case BUILT_IN_PARITYG:
10112 ifn = IFN_PARITY;
10113 fcodei = BUILT_IN_PARITY;
10114 fcodel = BUILT_IN_PARITYL;
10115 fcodell = BUILT_IN_PARITYLL;
10116 break;
10117 case BUILT_IN_POPCOUNTG:
10118 ifn = IFN_POPCOUNT;
10119 fcodei = BUILT_IN_POPCOUNT;
10120 fcodel = BUILT_IN_POPCOUNTL;
10121 fcodell = BUILT_IN_POPCOUNTLL;
10122 break;
10123 default:
10124 gcc_unreachable ();
10127 if (TYPE_PRECISION (arg0_type)
10128 <= TYPE_PRECISION (long_long_unsigned_type_node))
10130 if (TYPE_PRECISION (arg0_type) <= TYPE_PRECISION (unsigned_type_node))
10132 cast_type = (TYPE_UNSIGNED (arg0_type)
10133 ? unsigned_type_node : integer_type_node);
10134 else if (TYPE_PRECISION (arg0_type)
10135 <= TYPE_PRECISION (long_unsigned_type_node))
10137 cast_type = (TYPE_UNSIGNED (arg0_type)
10138 ? long_unsigned_type_node : long_integer_type_node);
10139 fcodei = fcodel;
10141 else
10143 cast_type = (TYPE_UNSIGNED (arg0_type)
10144 ? long_long_unsigned_type_node
10145 : long_long_integer_type_node);
10146 fcodei = fcodell;
10149 else if (TYPE_PRECISION (arg0_type) <= MAX_FIXED_MODE_SIZE)
10151 cast_type
10152 = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
10153 TYPE_UNSIGNED (arg0_type));
10154 gcc_assert (TYPE_PRECISION (cast_type)
10155 == 2 * TYPE_PRECISION (long_long_unsigned_type_node));
10156 fcodei = END_BUILTINS;
10158 else
10159 fcodei = END_BUILTINS;
10160 if (cast_type)
10162 switch (fcode)
10164 case BUILT_IN_CLZG:
10165 case BUILT_IN_CLRSBG:
10166 addend = TYPE_PRECISION (arg0_type) - TYPE_PRECISION (cast_type);
10167 break;
10168 default:
10169 break;
10171 arg0 = fold_convert (cast_type, arg0);
10172 arg0_type = cast_type;
10175 if (arg1)
10176 arg1 = fold_convert (integer_type_node, arg1);
10178 tree arg2 = arg1;
10179 if (fcode == BUILT_IN_CLZG && addend)
10181 if (arg1)
10182 arg0 = save_expr (arg0);
10183 arg2 = NULL_TREE;
10185 tree call = NULL_TREE, tem;
10186 if (TYPE_PRECISION (arg0_type) == MAX_FIXED_MODE_SIZE
10187 && (TYPE_PRECISION (arg0_type)
10188 == 2 * TYPE_PRECISION (long_long_unsigned_type_node))
10189 /* If the target supports the optab, then don't do the expansion. */
10190 && !direct_internal_fn_supported_p (ifn, arg0_type, OPTIMIZE_FOR_BOTH))
10192 /* __int128 expansions using up to 2 long long builtins. */
10193 arg0 = save_expr (arg0);
10194 tree type = (TYPE_UNSIGNED (arg0_type)
10195 ? long_long_unsigned_type_node
10196 : long_long_integer_type_node);
10197 tree hi = fold_build2 (RSHIFT_EXPR, arg0_type, arg0,
10198 build_int_cst (integer_type_node,
10199 MAX_FIXED_MODE_SIZE / 2));
10200 hi = fold_convert (type, hi);
10201 tree lo = fold_convert (type, arg0);
10202 switch (fcode)
10204 case BUILT_IN_CLZG:
10205 call = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
10206 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10207 build_int_cst (integer_type_node,
10208 MAX_FIXED_MODE_SIZE / 2));
10209 if (arg2)
10210 call = fold_build3 (COND_EXPR, integer_type_node,
10211 fold_build2 (NE_EXPR, boolean_type_node,
10212 lo, build_zero_cst (type)),
10213 call, arg2);
10214 call = fold_build3 (COND_EXPR, integer_type_node,
10215 fold_build2 (NE_EXPR, boolean_type_node,
10216 hi, build_zero_cst (type)),
10217 fold_builtin_bit_query (loc, fcode, hi,
10218 NULL_TREE),
10219 call);
10220 break;
10221 case BUILT_IN_CTZG:
10222 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10223 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10224 build_int_cst (integer_type_node,
10225 MAX_FIXED_MODE_SIZE / 2));
10226 if (arg2)
10227 call = fold_build3 (COND_EXPR, integer_type_node,
10228 fold_build2 (NE_EXPR, boolean_type_node,
10229 hi, build_zero_cst (type)),
10230 call, arg2);
10231 call = fold_build3 (COND_EXPR, integer_type_node,
10232 fold_build2 (NE_EXPR, boolean_type_node,
10233 lo, build_zero_cst (type)),
10234 fold_builtin_bit_query (loc, fcode, lo,
10235 NULL_TREE),
10236 call);
10237 break;
10238 case BUILT_IN_CLRSBG:
10239 tem = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
10240 tem = fold_build2 (PLUS_EXPR, integer_type_node, tem,
10241 build_int_cst (integer_type_node,
10242 MAX_FIXED_MODE_SIZE / 2));
10243 tem = fold_build3 (COND_EXPR, integer_type_node,
10244 fold_build2 (LT_EXPR, boolean_type_node,
10245 fold_build2 (BIT_XOR_EXPR, type,
10246 lo, hi),
10247 build_zero_cst (type)),
10248 build_int_cst (integer_type_node,
10249 MAX_FIXED_MODE_SIZE / 2 - 1),
10250 tem);
10251 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10252 call = save_expr (call);
10253 call = fold_build3 (COND_EXPR, integer_type_node,
10254 fold_build2 (NE_EXPR, boolean_type_node,
10255 call,
10256 build_int_cst (integer_type_node,
10257 MAX_FIXED_MODE_SIZE
10258 / 2 - 1)),
10259 call, tem);
10260 break;
10261 case BUILT_IN_FFSG:
10262 call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10263 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10264 build_int_cst (integer_type_node,
10265 MAX_FIXED_MODE_SIZE / 2));
10266 call = fold_build3 (COND_EXPR, integer_type_node,
10267 fold_build2 (NE_EXPR, boolean_type_node,
10268 hi, build_zero_cst (type)),
10269 call, integer_zero_node);
10270 call = fold_build3 (COND_EXPR, integer_type_node,
10271 fold_build2 (NE_EXPR, boolean_type_node,
10272 lo, build_zero_cst (type)),
10273 fold_builtin_bit_query (loc, fcode, lo,
10274 NULL_TREE),
10275 call);
10276 break;
10277 case BUILT_IN_PARITYG:
10278 call = fold_builtin_bit_query (loc, fcode,
10279 fold_build2 (BIT_XOR_EXPR, type,
10280 lo, hi), NULL_TREE);
10281 break;
10282 case BUILT_IN_POPCOUNTG:
10283 call = fold_build2 (PLUS_EXPR, integer_type_node,
10284 fold_builtin_bit_query (loc, fcode, hi,
10285 NULL_TREE),
10286 fold_builtin_bit_query (loc, fcode, lo,
10287 NULL_TREE));
10288 break;
10289 default:
10290 gcc_unreachable ();
10293 else
10295 /* Only keep second argument to IFN_CLZ/IFN_CTZ if it is the
10296 value defined at zero during GIMPLE, or for large/huge _BitInt
10297 (which are then lowered during bitint lowering). */
10298 if (arg2 && TREE_CODE (TREE_TYPE (arg0)) != BITINT_TYPE)
10300 int val;
10301 if (fcode == BUILT_IN_CLZG)
10303 if (CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10304 val) != 2
10305 || wi::to_widest (arg2) != val)
10306 arg2 = NULL_TREE;
10308 else if (CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10309 val) != 2
10310 || wi::to_widest (arg2) != val)
10311 arg2 = NULL_TREE;
10312 if (!direct_internal_fn_supported_p (ifn, arg0_type,
10313 OPTIMIZE_FOR_BOTH))
10314 arg2 = NULL_TREE;
10315 if (arg2 == NULL_TREE)
10316 arg0 = save_expr (arg0);
10318 if (fcodei == END_BUILTINS || arg2)
10319 call = build_call_expr_internal_loc (loc, ifn, integer_type_node,
10320 arg2 ? 2 : 1, arg0, arg2);
10321 else
10322 call = build_call_expr_loc (loc, builtin_decl_explicit (fcodei), 1,
10323 arg0);
10325 if (addend)
10326 call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10327 build_int_cst (integer_type_node, addend));
10328 if (arg1 && arg2 == NULL_TREE)
10329 call = fold_build3 (COND_EXPR, integer_type_node,
10330 fold_build2 (NE_EXPR, boolean_type_node,
10331 arg0, build_zero_cst (arg0_type)),
10332 call, arg1);
10334 return call;
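/* Illustrative sketch, not part of the GCC sources: the double-word
   expansion above for __builtin_clzg on a 128-bit operand, using two
   64-bit counts.  Assumes unsigned long long is 64 bits and X is
   nonzero (matching the single-argument form, which is undefined at
   zero).  */
static int
clz_u128_example (unsigned __int128 x)
{
  unsigned long long hi = (unsigned long long) (x >> 64);
  unsigned long long lo = (unsigned long long) x;
  return hi != 0 ? __builtin_clzll (hi) : 64 + __builtin_clzll (lo);
}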
10337 /* Fold __builtin_{add,sub}c{,l,ll} into a pair of internal functions
10338 that return both the result of the arithmetic and an overflowed boolean
10339 flag in a complex integer result. */
10341 static tree
10342 fold_builtin_addc_subc (location_t loc, enum built_in_function fcode,
10343 tree *args)
10345 enum internal_fn ifn;
10347 switch (fcode)
10349 case BUILT_IN_ADDC:
10350 case BUILT_IN_ADDCL:
10351 case BUILT_IN_ADDCLL:
10352 ifn = IFN_ADD_OVERFLOW;
10353 break;
10354 case BUILT_IN_SUBC:
10355 case BUILT_IN_SUBCL:
10356 case BUILT_IN_SUBCLL:
10357 ifn = IFN_SUB_OVERFLOW;
10358 break;
10359 default:
10360 gcc_unreachable ();
10363 tree type = TREE_TYPE (args[0]);
10364 tree ctype = build_complex_type (type);
10365 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10366 args[0], args[1]);
10367 /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
10368 as while the call itself is const, the REALPART_EXPR store is
10369 certainly not. And in any case, we want just one call,
10370 not multiple calls that we would then try to CSE later. */
10371 TREE_SIDE_EFFECTS (call) = 1;
10372 tree tgt = save_expr (call);
10373 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10374 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10375 call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10376 intres, args[2]);
10377 TREE_SIDE_EFFECTS (call) = 1;
10378 tgt = save_expr (call);
10379 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10380 tree ovfres2 = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10381 ovfres = build2_loc (loc, BIT_IOR_EXPR, type, ovfres, ovfres2);
10382 tree mem_arg3 = build_fold_indirect_ref_loc (loc, args[3]);
10383 tree store
10384 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg3, ovfres);
10385 return build2_loc (loc, COMPOUND_EXPR, type, store, intres);
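/* Illustrative sketch, not part of the GCC sources: the pair of
   overflow additions built above for __builtin_addc, written at the
   source level.  */
static unsigned int
addc_example (unsigned int a, unsigned int b, unsigned int carry_in,
              unsigned int *carry_out)
{
  unsigned int t, r;
  _Bool c1 = __builtin_add_overflow (a, b, &t);
  _Bool c2 = __builtin_add_overflow (t, carry_in, &r);
  *carry_out = c1 | c2;
  return r;
}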
10388 /* Fold a call to __builtin_FILE to a constant string. */
10390 static inline tree
10391 fold_builtin_FILE (location_t loc)
10393 if (const char *fname = LOCATION_FILE (loc))
10395 /* The documentation says this builtin is equivalent to the preprocessor
10396 __FILE__ macro so it appears appropriate to use the same file prefix
10397 mappings. */
10398 fname = remap_macro_filename (fname);
10399 return build_string_literal (fname);
10402 return build_string_literal ("");
10405 /* Fold a call to __builtin_FUNCTION to a constant string. */
10407 static inline tree
10408 fold_builtin_FUNCTION ()
10410 const char *name = "";
10412 if (current_function_decl)
10413 name = lang_hooks.decl_printable_name (current_function_decl, 0);
10415 return build_string_literal (name);
10418 /* Fold a call to __builtin_LINE to an integer constant. */
10420 static inline tree
10421 fold_builtin_LINE (location_t loc, tree type)
10423 return build_int_cst (type, LOCATION_LINE (loc));
10426 /* Fold a call to built-in function FNDECL with 0 arguments.
10427 This function returns NULL_TREE if no simplification was possible. */
10429 static tree
10430 fold_builtin_0 (location_t loc, tree fndecl)
10432 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10433 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10434 switch (fcode)
10436 case BUILT_IN_FILE:
10437 return fold_builtin_FILE (loc);
10439 case BUILT_IN_FUNCTION:
10440 return fold_builtin_FUNCTION ();
10442 case BUILT_IN_LINE:
10443 return fold_builtin_LINE (loc, type);
10445 CASE_FLT_FN (BUILT_IN_INF):
10446 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10447 case BUILT_IN_INFD32:
10448 case BUILT_IN_INFD64:
10449 case BUILT_IN_INFD128:
10450 return fold_builtin_inf (loc, type, true);
10452 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10453 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10454 return fold_builtin_inf (loc, type, false);
10456 case BUILT_IN_CLASSIFY_TYPE:
10457 return fold_builtin_classify_type (NULL_TREE);
10459 case BUILT_IN_UNREACHABLE:
10460 /* Rewrite any explicit calls to __builtin_unreachable. */
10461 if (sanitize_flags_p (SANITIZE_UNREACHABLE))
10462 return build_builtin_unreachable (loc);
10463 break;
10465 default:
10466 break;
10468 return NULL_TREE;
10471 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10472 This function returns NULL_TREE if no simplification was possible. */
10474 static tree
10475 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
10477 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10478 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10480 if (error_operand_p (arg0))
10481 return NULL_TREE;
10483 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10484 return ret;
10486 switch (fcode)
10488 case BUILT_IN_CONSTANT_P:
10490 tree val = fold_builtin_constant_p (arg0);
10492 /* Gimplification will pull the CALL_EXPR for the builtin out of
10493 an if condition. When not optimizing, we'll not CSE it back.
10494 To avoid regressions such as link errors, return false now. */
10495 if (!val && !optimize)
10496 val = integer_zero_node;
10498 return val;
10501 case BUILT_IN_CLASSIFY_TYPE:
10502 return fold_builtin_classify_type (arg0);
10504 case BUILT_IN_STRLEN:
10505 return fold_builtin_strlen (loc, expr, type, arg0);
10507 CASE_FLT_FN (BUILT_IN_FABS):
10508 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10509 case BUILT_IN_FABSD32:
10510 case BUILT_IN_FABSD64:
10511 case BUILT_IN_FABSD128:
10512 return fold_builtin_fabs (loc, arg0, type);
10514 case BUILT_IN_ABS:
10515 case BUILT_IN_LABS:
10516 case BUILT_IN_LLABS:
10517 case BUILT_IN_IMAXABS:
10518 return fold_builtin_abs (loc, arg0, type);
10520 CASE_FLT_FN (BUILT_IN_CONJ):
10521 if (validate_arg (arg0, COMPLEX_TYPE)
10522 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10523 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10524 break;
10526 CASE_FLT_FN (BUILT_IN_CREAL):
10527 if (validate_arg (arg0, COMPLEX_TYPE)
10528 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10529 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10530 break;
10532 CASE_FLT_FN (BUILT_IN_CIMAG):
10533 if (validate_arg (arg0, COMPLEX_TYPE)
10534 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10535 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10536 break;
10538 CASE_FLT_FN (BUILT_IN_CARG):
10539 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CARG):
10540 return fold_builtin_carg (loc, arg0, type);
10542 case BUILT_IN_ISASCII:
10543 return fold_builtin_isascii (loc, arg0);
10545 case BUILT_IN_TOASCII:
10546 return fold_builtin_toascii (loc, arg0);
10548 case BUILT_IN_ISDIGIT:
10549 return fold_builtin_isdigit (loc, arg0);
10551 CASE_FLT_FN (BUILT_IN_FINITE):
10552 case BUILT_IN_FINITED32:
10553 case BUILT_IN_FINITED64:
10554 case BUILT_IN_FINITED128:
10555 case BUILT_IN_ISFINITE:
10557 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10558 if (ret)
10559 return ret;
10560 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10563 CASE_FLT_FN (BUILT_IN_ISINF):
10564 case BUILT_IN_ISINFD32:
10565 case BUILT_IN_ISINFD64:
10566 case BUILT_IN_ISINFD128:
10568 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10569 if (ret)
10570 return ret;
10571 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10574 case BUILT_IN_ISNORMAL:
10575 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10577 case BUILT_IN_ISINF_SIGN:
10578 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10580 CASE_FLT_FN (BUILT_IN_ISNAN):
10581 case BUILT_IN_ISNAND32:
10582 case BUILT_IN_ISNAND64:
10583 case BUILT_IN_ISNAND128:
10584 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10586 case BUILT_IN_ISSIGNALING:
10587 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISSIGNALING);
10589 case BUILT_IN_FREE:
10590 if (integer_zerop (arg0))
10591 return build_empty_stmt (loc);
10592 break;
10594 case BUILT_IN_CLZG:
10595 case BUILT_IN_CTZG:
10596 case BUILT_IN_CLRSBG:
10597 case BUILT_IN_FFSG:
10598 case BUILT_IN_PARITYG:
10599 case BUILT_IN_POPCOUNTG:
10600 return fold_builtin_bit_query (loc, fcode, arg0, NULL_TREE);
10602 default:
10603 break;
10606 return NULL_TREE;
10610 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10611 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10612 if no simplification was possible. */
10614 static tree
10615 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10617 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10618 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10620 if (error_operand_p (arg0)
10621 || error_operand_p (arg1))
10622 return NULL_TREE;
10624 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10625 return ret;
10627 switch (fcode)
10629 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10630 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10631 if (validate_arg (arg0, REAL_TYPE)
10632 && validate_arg (arg1, POINTER_TYPE))
10633 return do_mpfr_lgamma_r (arg0, arg1, type);
10634 break;
10636 CASE_FLT_FN (BUILT_IN_FREXP):
10637 return fold_builtin_frexp (loc, arg0, arg1, type);
10639 CASE_FLT_FN (BUILT_IN_MODF):
10640 return fold_builtin_modf (loc, arg0, arg1, type);
10642 case BUILT_IN_STRSPN:
10643 return fold_builtin_strspn (loc, expr, arg0, arg1);
10645 case BUILT_IN_STRCSPN:
10646 return fold_builtin_strcspn (loc, expr, arg0, arg1);
10648 case BUILT_IN_STRPBRK:
10649 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10651 case BUILT_IN_EXPECT:
10652 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10654 case BUILT_IN_ISGREATER:
10655 return fold_builtin_unordered_cmp (loc, fndecl,
10656 arg0, arg1, UNLE_EXPR, LE_EXPR);
10657 case BUILT_IN_ISGREATEREQUAL:
10658 return fold_builtin_unordered_cmp (loc, fndecl,
10659 arg0, arg1, UNLT_EXPR, LT_EXPR);
10660 case BUILT_IN_ISLESS:
10661 return fold_builtin_unordered_cmp (loc, fndecl,
10662 arg0, arg1, UNGE_EXPR, GE_EXPR);
10663 case BUILT_IN_ISLESSEQUAL:
10664 return fold_builtin_unordered_cmp (loc, fndecl,
10665 arg0, arg1, UNGT_EXPR, GT_EXPR);
10666 case BUILT_IN_ISLESSGREATER:
10667 return fold_builtin_unordered_cmp (loc, fndecl,
10668 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10669 case BUILT_IN_ISUNORDERED:
10670 return fold_builtin_unordered_cmp (loc, fndecl,
10671 arg0, arg1, UNORDERED_EXPR,
10672 NOP_EXPR);
10674 case BUILT_IN_ISEQSIG:
10675 return fold_builtin_iseqsig (loc, arg0, arg1);
10677 /* We do the folding for va_start in the expander. */
10678 case BUILT_IN_VA_START:
10679 break;
10681 case BUILT_IN_OBJECT_SIZE:
10682 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
10683 return fold_builtin_object_size (arg0, arg1, fcode);
10685 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10686 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10688 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10689 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10691 case BUILT_IN_CLZG:
10692 case BUILT_IN_CTZG:
10693 return fold_builtin_bit_query (loc, fcode, arg0, arg1);
10695 default:
10696 break;
10698 return NULL_TREE;
10701 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10702 and ARG2.
10703 This function returns NULL_TREE if no simplification was possible. */
10705 static tree
10706 fold_builtin_3 (location_t loc, tree fndecl,
10707 tree arg0, tree arg1, tree arg2)
10709 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10710 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10712 if (error_operand_p (arg0)
10713 || error_operand_p (arg1)
10714 || error_operand_p (arg2))
10715 return NULL_TREE;
10717 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10718 arg0, arg1, arg2))
10719 return ret;
10721 switch (fcode)
10724 CASE_FLT_FN (BUILT_IN_SINCOS):
10725 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10727 CASE_FLT_FN (BUILT_IN_REMQUO):
10728 if (validate_arg (arg0, REAL_TYPE)
10729 && validate_arg (arg1, REAL_TYPE)
10730 && validate_arg (arg2, POINTER_TYPE))
10731 return do_mpfr_remquo (arg0, arg1, arg2);
10732 break;
10734 case BUILT_IN_MEMCMP:
10735 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10737 case BUILT_IN_EXPECT:
10738 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10740 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10741 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10743 case BUILT_IN_ADD_OVERFLOW:
10744 case BUILT_IN_SUB_OVERFLOW:
10745 case BUILT_IN_MUL_OVERFLOW:
10746 case BUILT_IN_ADD_OVERFLOW_P:
10747 case BUILT_IN_SUB_OVERFLOW_P:
10748 case BUILT_IN_MUL_OVERFLOW_P:
10749 case BUILT_IN_SADD_OVERFLOW:
10750 case BUILT_IN_SADDL_OVERFLOW:
10751 case BUILT_IN_SADDLL_OVERFLOW:
10752 case BUILT_IN_SSUB_OVERFLOW:
10753 case BUILT_IN_SSUBL_OVERFLOW:
10754 case BUILT_IN_SSUBLL_OVERFLOW:
10755 case BUILT_IN_SMUL_OVERFLOW:
10756 case BUILT_IN_SMULL_OVERFLOW:
10757 case BUILT_IN_SMULLL_OVERFLOW:
10758 case BUILT_IN_UADD_OVERFLOW:
10759 case BUILT_IN_UADDL_OVERFLOW:
10760 case BUILT_IN_UADDLL_OVERFLOW:
10761 case BUILT_IN_USUB_OVERFLOW:
10762 case BUILT_IN_USUBL_OVERFLOW:
10763 case BUILT_IN_USUBLL_OVERFLOW:
10764 case BUILT_IN_UMUL_OVERFLOW:
10765 case BUILT_IN_UMULL_OVERFLOW:
10766 case BUILT_IN_UMULLL_OVERFLOW:
10767 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10769 default:
10770 break;
10772 return NULL_TREE;
10775 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10776 ARGS is an array of NARGS arguments. IGNORE is true if the result
10777 of the function call is ignored. This function returns NULL_TREE
10778 if no simplification was possible. */
10780 static tree
10781 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10782 int nargs, bool)
10784 tree ret = NULL_TREE;
10786 switch (nargs)
10788 case 0:
10789 ret = fold_builtin_0 (loc, fndecl);
10790 break;
10791 case 1:
10792 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
10793 break;
10794 case 2:
10795 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10796 break;
10797 case 3:
10798 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10799 break;
10800 default:
10801 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10802 break;
10804 if (ret)
10806 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10807 SET_EXPR_LOCATION (ret, loc);
10808 return ret;
10810 return NULL_TREE;
10813 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10814 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10815 of arguments in ARGS to be omitted. OLDNARGS is the number of
10816 elements in ARGS. */
10818 static tree
10819 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10820 int skip, tree fndecl, int n, va_list newargs)
10822 int nargs = oldnargs - skip + n;
10823 tree *buffer;
10825 if (n > 0)
10827 int i, j;
10829 buffer = XALLOCAVEC (tree, nargs);
10830 for (i = 0; i < n; i++)
10831 buffer[i] = va_arg (newargs, tree);
10832 for (j = skip; j < oldnargs; j++, i++)
10833 buffer[i] = args[j];
10835 else
10836 buffer = args + skip;
10838 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
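/* Illustrative sketch (not part of the original source): with OLDNARGS == 4,
   SKIP == 2 and N == 1, the rebuilt argument vector is

     buffer = { newarg0, args[2], args[3] }   /* nargs == 4 - 2 + 1 == 3 */

   i.e. the first SKIP old arguments are dropped and the N new arguments are
   prepended to the remaining tail.  */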
10841 /* Return true if FNDECL shouldn't be folded right now.
10842 If a built-in function has an always_inline wrapper, defer
10843 folding it until after always_inline functions have been
10844 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking might
10845 not be performed. */
10847 bool
10848 avoid_folding_inline_builtin (tree fndecl)
10850 return (DECL_DECLARED_INLINE_P (fndecl)
10851 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10852 && cfun
10853 && !cfun->always_inline_functions_inlined
10854 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
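/* Illustrative example (glibc-style fortification assumed; not part of this
   file): with -D_FORTIFY_SOURCE, <string.h> provides always_inline wrappers
   roughly of the form

     extern __inline __attribute__ ((__always_inline__, __gnu_inline__))
     void *
     memcpy (void *dest, const void *src, size_t n)
     {
       return __builtin___memcpy_chk (dest, src, n,
                                      __builtin_object_size (dest, 0));
     }

   Folding the memcpy builtin before such a wrapper has been inlined would
   bypass the _chk call, so avoid_folding_inline_builtin defers folding.  */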
10857 /* A wrapper function for builtin folding that prevents warnings for
10858 "statement without effect" and the like, caused by removing the
10859 call node earlier than the warning is generated. */
10861 tree
10862 fold_call_expr (location_t loc, tree exp, bool ignore)
10864 tree ret = NULL_TREE;
10865 tree fndecl = get_callee_fndecl (exp);
10866 if (fndecl && fndecl_built_in_p (fndecl)
10867 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10868 yet. Defer folding until we see all the arguments
10869 (after inlining). */
10870 && !CALL_EXPR_VA_ARG_PACK (exp))
10872 int nargs = call_expr_nargs (exp);
10874 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10875 instead last argument is __builtin_va_arg_pack (). Defer folding
10876 even in that case, until arguments are finalized. */
10877 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10879 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10880 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10881 return NULL_TREE;
10884 if (avoid_folding_inline_builtin (fndecl))
10885 return NULL_TREE;
10887 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10888 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10889 CALL_EXPR_ARGP (exp), ignore);
10890 else
10892 tree *args = CALL_EXPR_ARGP (exp);
10893 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10894 if (ret)
10895 return ret;
10898 return NULL_TREE;
10901 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10902 N arguments are passed in the array ARGARRAY. Return a folded
10903 expression or NULL_TREE if no simplification was possible. */
10905 tree
10906 fold_builtin_call_array (location_t loc, tree,
10907 tree fn,
10908 int n,
10909 tree *argarray)
10911 if (TREE_CODE (fn) != ADDR_EXPR)
10912 return NULL_TREE;
10914 tree fndecl = TREE_OPERAND (fn, 0);
10915 if (TREE_CODE (fndecl) == FUNCTION_DECL
10916 && fndecl_built_in_p (fndecl))
10918 /* If last argument is __builtin_va_arg_pack (), arguments to this
10919 function are not finalized yet. Defer folding until they are. */
10920 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10922 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10923 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10924 return NULL_TREE;
10926 if (avoid_folding_inline_builtin (fndecl))
10927 return NULL_TREE;
10928 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10929 return targetm.fold_builtin (fndecl, n, argarray, false);
10930 else
10931 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10934 return NULL_TREE;
10937 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10938 along with N new arguments specified as the "..." parameters. SKIP
10939 is the number of arguments in EXP to be omitted. This function is used
10940 to do varargs-to-varargs transformations. */
10942 static tree
10943 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10945 va_list ap;
10946 tree t;
10948 va_start (ap, n);
10949 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10950 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10951 va_end (ap);
10953 return t;
10956 /* Validate a single argument ARG against a tree code CODE representing
10957 a type. Return true when argument is valid. */
10959 static bool
10960 validate_arg (const_tree arg, enum tree_code code)
10962 if (!arg)
10963 return false;
10964 else if (code == POINTER_TYPE)
10965 return POINTER_TYPE_P (TREE_TYPE (arg));
10966 else if (code == INTEGER_TYPE)
10967 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10968 return code == TREE_CODE (TREE_TYPE (arg));
10971 /* This function validates the types of a function call argument list
10972 against a specified list of tree_codes. If the last specifier is a 0,
10973 that represents an ellipsis; otherwise the last specifier must be a
10974 VOID_TYPE.
10976 This is the GIMPLE version of validate_arglist. Eventually we want to
10977 completely convert builtins.cc to work from GIMPLEs and the tree based
10978 validate_arglist will then be removed. */
10980 bool
10981 validate_gimple_arglist (const gcall *call, ...)
10983 enum tree_code code;
10984 bool res = 0;
10985 va_list ap;
10986 const_tree arg;
10987 size_t i;
10989 va_start (ap, call);
10990 i = 0;
10994 code = (enum tree_code) va_arg (ap, int);
10995 switch (code)
10997 case 0:
10998 /* This signifies an ellipsis; any further arguments are all ok. */
10999 res = true;
11000 goto end;
11001 case VOID_TYPE:
11002 /* This signifies an endlink; if no arguments remain, return
11003 true, otherwise return false. */
11004 res = (i == gimple_call_num_args (call));
11005 goto end;
11006 default:
11007 /* If no parameters remain or the parameter's code does not
11008 match the specified code, return false. Otherwise continue
11009 checking any remaining arguments. */
11010 arg = gimple_call_arg (call, i++);
11011 if (!validate_arg (arg, code))
11012 goto end;
11013 break;
11016 while (1);
11018 /* We need gotos here since we can only have one VA_CLOSE in a
11019 function. */
11020 end: ;
11021 va_end (ap);
11023 return res;
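/* Illustrative use (hypothetical call, not from this file): for a call that
   must take exactly one pointer and one integral argument,

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
       return false;

   A trailing 0 instead of VOID_TYPE would accept any further arguments.  */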
11026 /* Default target-specific builtin expander that does nothing. */
11028 rtx
11029 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11030 rtx target ATTRIBUTE_UNUSED,
11031 rtx subtarget ATTRIBUTE_UNUSED,
11032 machine_mode mode ATTRIBUTE_UNUSED,
11033 int ignore ATTRIBUTE_UNUSED)
11035 return NULL_RTX;
11038 /* Returns true if EXP represents data that would potentially reside
11039 in a readonly section. */
11041 bool
11042 readonly_data_expr (tree exp)
11044 STRIP_NOPS (exp);
11046 if (TREE_CODE (exp) != ADDR_EXPR)
11047 return false;
11049 exp = get_base_address (TREE_OPERAND (exp, 0));
11050 if (!exp)
11051 return false;
11053 /* Make sure we call decl_readonly_section only for trees it
11054 can handle (since it returns true for everything it doesn't
11055 understand). */
11056 if (TREE_CODE (exp) == STRING_CST
11057 || TREE_CODE (exp) == CONSTRUCTOR
11058 || (VAR_P (exp) && TREE_STATIC (exp)))
11059 return decl_readonly_section (exp, 0);
11060 else
11061 return false;
11064 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11065 to the call, and TYPE is its return type.
11067 Return NULL_TREE if no simplification was possible, otherwise return the
11068 simplified form of the call as a tree.
11070 The simplified form may be a constant or other expression which
11071 computes the same value, but in a more efficient manner (including
11072 calls to other builtin functions).
11074 The call may contain arguments which need to be evaluated, but
11075 which are not useful to determine the result of the call. In
11076 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11077 COMPOUND_EXPR will be an argument which must be evaluated.
11078 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11079 COMPOUND_EXPR in the chain will contain the tree for the simplified
11080 form of the builtin function call. */
11082 static tree
11083 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
11085 if (!validate_arg (s1, POINTER_TYPE)
11086 || !validate_arg (s2, POINTER_TYPE))
11087 return NULL_TREE;
11089 tree fn;
11090 const char *p1, *p2;
11092 p2 = c_getstr (s2);
11093 if (p2 == NULL)
11094 return NULL_TREE;
11096 p1 = c_getstr (s1);
11097 if (p1 != NULL)
11099 const char *r = strpbrk (p1, p2);
11100 tree tem;
11102 if (r == NULL)
11103 return build_int_cst (TREE_TYPE (s1), 0);
11105 /* Return an offset into the constant string argument. */
11106 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11107 return fold_convert_loc (loc, type, tem);
11110 if (p2[0] == '\0')
11111 /* strpbrk(x, "") == NULL.
11112 Evaluate and ignore s1 in case it had side-effects. */
11113 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
11115 if (p2[1] != '\0')
11116 return NULL_TREE; /* Really call strpbrk. */
11118 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11119 if (!fn)
11120 return NULL_TREE;
11122 /* New argument list transforming strpbrk(s1, s2) to
11123 strchr(s1, s2[0]). */
11124 return build_call_expr_loc (loc, fn, 2, s1,
11125 build_int_cst (integer_type_node, p2[0]));
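/* Examples of the strpbrk folding above (illustrative, not part of the
   original source):

     strpbrk (s, "")         ->  evaluate s, result a null pointer
     strpbrk (s, "a")        ->  strchr (s, 'a')
     strpbrk ("hello", "lo") ->  "hello" + 2

   The last form applies only when both arguments are string literals.  */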
11128 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11129 to the call.
11131 Return NULL_TREE if no simplification was possible, otherwise return the
11132 simplified form of the call as a tree.
11134 The simplified form may be a constant or other expression which
11135 computes the same value, but in a more efficient manner (including
11136 calls to other builtin functions).
11138 The call may contain arguments which need to be evaluated, but
11139 which are not useful to determine the result of the call. In
11140 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11141 COMPOUND_EXPR will be an argument which must be evaluated.
11142 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11143 COMPOUND_EXPR in the chain will contain the tree for the simplified
11144 form of the builtin function call. */
11146 static tree
11147 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
11149 if (!validate_arg (s1, POINTER_TYPE)
11150 || !validate_arg (s2, POINTER_TYPE))
11151 return NULL_TREE;
11153 if (!check_nul_terminated_array (expr, s1)
11154 || !check_nul_terminated_array (expr, s2))
11155 return NULL_TREE;
11157 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11159 /* If either argument is "", return NULL_TREE. */
11160 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11161 /* Evaluate and ignore both arguments in case either one has
11162 side-effects. */
11163 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11164 s1, s2);
11165 return NULL_TREE;
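/* Examples of the strspn folding above (illustrative):

     strspn ("", s2)  ->  evaluate both arguments, result (size_t) 0
     strspn (s1, "")  ->  evaluate both arguments, result (size_t) 0

   All other cases are left for later passes or the library call.  */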
11168 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11169 to the call.
11171 Return NULL_TREE if no simplification was possible, otherwise return the
11172 simplified form of the call as a tree.
11174 The simplified form may be a constant or other expression which
11175 computes the same value, but in a more efficient manner (including
11176 calls to other builtin functions).
11178 The call may contain arguments which need to be evaluated, but
11179 which are not useful to determine the result of the call. In
11180 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11181 COMPOUND_EXPR will be an argument which must be evaluated.
11182 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11183 COMPOUND_EXPR in the chain will contain the tree for the simplified
11184 form of the builtin function call. */
11186 static tree
11187 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
11189 if (!validate_arg (s1, POINTER_TYPE)
11190 || !validate_arg (s2, POINTER_TYPE))
11191 return NULL_TREE;
11193 if (!check_nul_terminated_array (expr, s1)
11194 || !check_nul_terminated_array (expr, s2))
11195 return NULL_TREE;
11197 /* If the first argument is "", return NULL_TREE. */
11198 const char *p1 = c_getstr (s1);
11199 if (p1 && *p1 == '\0')
11201 /* Evaluate and ignore argument s2 in case it has
11202 side-effects. */
11203 return omit_one_operand_loc (loc, size_type_node,
11204 size_zero_node, s2);
11207 /* If the second argument is "", return __builtin_strlen(s1). */
11208 const char *p2 = c_getstr (s2);
11209 if (p2 && *p2 == '\0')
11211 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11213 /* If the replacement _DECL isn't initialized, don't do the
11214 transformation. */
11215 if (!fn)
11216 return NULL_TREE;
11218 return build_call_expr_loc (loc, fn, 1, s1);
11220 return NULL_TREE;
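/* Examples of the strcspn folding above (illustrative):

     strcspn ("", s2)  ->  evaluate s2, result (size_t) 0
     strcspn (s1, "")  ->  strlen (s1)

   The second form requires the implicit strlen decl to be available.  */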
11223 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11224 produced, false otherwise. This is done so that we don't output the error
11225 or warning twice or three times. */
11227 bool
11228 fold_builtin_next_arg (tree exp, bool va_start_p)
11230 tree fntype = TREE_TYPE (current_function_decl);
11231 int nargs = call_expr_nargs (exp);
11232 tree arg;
11233 /* There is a good chance the current input_location points inside the
11234 definition of the va_start macro (perhaps on the token for the
11235 builtin) in a system header, so warnings will not be emitted.
11236 Use the location in real source code. */
11237 location_t current_location =
11238 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11239 NULL);
11241 if (!stdarg_p (fntype))
11243 error ("%<va_start%> used in function with fixed arguments");
11244 return true;
11247 if (va_start_p)
11249 if (va_start_p && (nargs != 2))
11251 error ("wrong number of arguments to function %<va_start%>");
11252 return true;
11254 arg = CALL_EXPR_ARG (exp, 1);
11256 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11257 once we have checked the arguments and, if needed, issued a warning. */
11258 else
11260 if (nargs == 0)
11262 /* Evidently an out of date version of <stdarg.h>; can't validate
11263 va_start's second argument, but can still work as intended. */
11264 warning_at (current_location,
11265 OPT_Wvarargs,
11266 "%<__builtin_next_arg%> called without an argument");
11267 return true;
11269 else if (nargs > 1)
11271 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11272 return true;
11274 arg = CALL_EXPR_ARG (exp, 0);
11277 if (TREE_CODE (arg) == SSA_NAME
11278 && SSA_NAME_VAR (arg))
11279 arg = SSA_NAME_VAR (arg);
11281 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11282 or __builtin_next_arg (0) the first time we see it, after checking
11283 the arguments and if needed issuing a warning. */
11284 if (!integer_zerop (arg))
11286 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11288 /* Strip off all nops for the sake of the comparison. This
11289 is not quite the same as STRIP_NOPS. It does more.
11290 We must also strip off INDIRECT_EXPR for C++ reference
11291 parameters. */
11292 while (CONVERT_EXPR_P (arg)
11293 || INDIRECT_REF_P (arg))
11294 arg = TREE_OPERAND (arg, 0);
11295 if (arg != last_parm)
11297 /* FIXME: Sometimes with the tree optimizers we can end up with
11298 something other than the last argument even though the user
11299 passed the last argument. We just warn here; the resulting
11300 code may still be wrong because of
11301 it. */
11302 warning_at (current_location,
11303 OPT_Wvarargs,
11304 "second parameter of %<va_start%> not last named argument");
11307 /* Undefined by C99 7.15.1.4p4 (va_start):
11308 "If the parameter parmN is declared with the register storage
11309 class, with a function or array type, or with a type that is
11310 not compatible with the type that results after application of
11311 the default argument promotions, the behavior is undefined."
11313 else if (DECL_REGISTER (arg))
11315 warning_at (current_location,
11316 OPT_Wvarargs,
11317 "undefined behavior when second parameter of "
11318 "%<va_start%> is declared with %<register%> storage");
11321 /* We want to verify the second parameter just once before the tree
11322 optimizers are run and then avoid keeping it in the tree,
11323 as otherwise we could warn even for correct code like:
11324 void foo (int i, ...)
11325 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11326 if (va_start_p)
11327 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11328 else
11329 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11331 return false;
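/* Illustrative example (hypothetical user code, not from this file):

     void bar (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   /* warns: second parameter of va_start not last
                              named argument; should have been 'b' */
       va_end (ap);
     }

   After the check the call is rewritten so its second argument becomes 0,
   i.e. __builtin_va_start (ap, 0), and is not inspected again later.  */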
11335 /* Expand a call EXP to __builtin_object_size. */
11337 static rtx
11338 expand_builtin_object_size (tree exp)
11340 tree ost;
11341 int object_size_type;
11342 tree fndecl = get_callee_fndecl (exp);
11344 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11346 error ("first argument of %qD must be a pointer, second integer constant",
11347 fndecl);
11348 expand_builtin_trap ();
11349 return const0_rtx;
11352 ost = CALL_EXPR_ARG (exp, 1);
11353 STRIP_NOPS (ost);
11355 if (TREE_CODE (ost) != INTEGER_CST
11356 || tree_int_cst_sgn (ost) < 0
11357 || compare_tree_int (ost, 3) > 0)
11359 error ("last argument of %qD is not integer constant between 0 and 3",
11360 fndecl);
11361 expand_builtin_trap ();
11362 return const0_rtx;
11365 object_size_type = tree_to_shwi (ost);
11367 return object_size_type < 2 ? constm1_rtx : const0_rtx;
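/* Illustrative behaviour of the expander above (not part of the original
   source): if a __builtin_object_size call survives to expansion, i.e. the
   object size could not be determined earlier, it degrades to the documented
   "unknown" values:

     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0

   A non-constant or out-of-range second argument is diagnosed and expands
   to a trap.  */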
11370 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11371 FCODE is the BUILT_IN_* to use.
11372 Return NULL_RTX if we failed; the caller should emit a normal call,
11373 otherwise try to get the result in TARGET, if convenient (and in
11374 mode MODE if that's convenient). */
11376 static rtx
11377 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11378 enum built_in_function fcode)
11380 if (!validate_arglist (exp,
11381 POINTER_TYPE,
11382 fcode == BUILT_IN_MEMSET_CHK
11383 ? INTEGER_TYPE : POINTER_TYPE,
11384 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11385 return NULL_RTX;
11387 tree dest = CALL_EXPR_ARG (exp, 0);
11388 tree src = CALL_EXPR_ARG (exp, 1);
11389 tree len = CALL_EXPR_ARG (exp, 2);
11390 tree size = CALL_EXPR_ARG (exp, 3);
11392 /* FIXME: Set access mode to write only for memset et al. */
11393 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
11394 /*srcstr=*/NULL_TREE, size, access_read_write);
11396 if (!tree_fits_uhwi_p (size))
11397 return NULL_RTX;
11399 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11401 /* Avoid transforming the checking call to an ordinary one when
11402 an overflow has been detected or when the call couldn't be
11403 validated because the size is not constant. */
11404 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
11405 return NULL_RTX;
11407 tree fn = NULL_TREE;
11408 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11409 mem{cpy,pcpy,move,set} is available. */
11410 switch (fcode)
11412 case BUILT_IN_MEMCPY_CHK:
11413 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11414 break;
11415 case BUILT_IN_MEMPCPY_CHK:
11416 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11417 break;
11418 case BUILT_IN_MEMMOVE_CHK:
11419 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11420 break;
11421 case BUILT_IN_MEMSET_CHK:
11422 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11423 break;
11424 default:
11425 break;
11428 if (! fn)
11429 return NULL_RTX;
11431 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11432 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11433 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11434 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11436 else if (fcode == BUILT_IN_MEMSET_CHK)
11437 return NULL_RTX;
11438 else
11440 unsigned int dest_align = get_pointer_alignment (dest);
11442 /* If DEST is not a pointer type, call the normal function. */
11443 if (dest_align == 0)
11444 return NULL_RTX;
11446 /* If SRC and DEST are the same (and not volatile), do nothing. */
11447 if (operand_equal_p (src, dest, 0))
11449 tree expr;
11451 if (fcode != BUILT_IN_MEMPCPY_CHK)
11453 /* Evaluate and ignore LEN in case it has side-effects. */
11454 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11455 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11458 expr = fold_build_pointer_plus (dest, len);
11459 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11462 /* __memmove_chk special case. */
11463 if (fcode == BUILT_IN_MEMMOVE_CHK)
11465 unsigned int src_align = get_pointer_alignment (src);
11467 if (src_align == 0)
11468 return NULL_RTX;
11470 /* If src is categorized for a readonly section we can use
11471 normal __memcpy_chk. */
11472 if (readonly_data_expr (src))
11474 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11475 if (!fn)
11476 return NULL_RTX;
11477 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11478 dest, src, len, size);
11479 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11480 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11481 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11484 return NULL_RTX;
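/* Illustrative foldings performed above (not part of the original source),
   assuming the object-size argument has already been folded to 64:

     __memcpy_chk (d, s, 16, 64)      ->  memcpy (d, s, 16)
     __memmove_chk (d, "lit", n, 64)  ->  __memcpy_chk (d, "lit", n, 64)
                                          /* n not constant; "lit" is readonly
                                             data on typical targets */

   Calls whose length may exceed the object size are left as the checking
   variant so the runtime check still happens.  */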
11488 /* Emit warning if a buffer overflow is detected at compile time. */
11490 static void
11491 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11493 /* The source string. */
11494 tree srcstr = NULL_TREE;
11495 /* The size of the destination object returned by __builtin_object_size. */
11496 tree objsize = NULL_TREE;
11497 /* The string that is being concatenated with (as in __strcat_chk)
11498 or null if it isn't. */
11499 tree catstr = NULL_TREE;
11500 /* The maximum length of the source sequence in a bounded operation
11501 (such as __strncat_chk) or null if the operation isn't bounded
11502 (such as __strcat_chk). */
11503 tree maxread = NULL_TREE;
11504 /* The exact size of the access (such as in __strncpy_chk). */
11505 tree size = NULL_TREE;
11506 /* The access by the function that's checked. Except for snprintf
11507 both writing and reading are checked. */
11508 access_mode mode = access_read_write;
11510 switch (fcode)
11512 case BUILT_IN_STRCPY_CHK:
11513 case BUILT_IN_STPCPY_CHK:
11514 srcstr = CALL_EXPR_ARG (exp, 1);
11515 objsize = CALL_EXPR_ARG (exp, 2);
11516 break;
11518 case BUILT_IN_STRCAT_CHK:
11519 /* For __strcat_chk the warning will be emitted only if overflowing
11520 by at least strlen (dest) + 1 bytes. */
11521 catstr = CALL_EXPR_ARG (exp, 0);
11522 srcstr = CALL_EXPR_ARG (exp, 1);
11523 objsize = CALL_EXPR_ARG (exp, 2);
11524 break;
11526 case BUILT_IN_STRNCAT_CHK:
11527 catstr = CALL_EXPR_ARG (exp, 0);
11528 srcstr = CALL_EXPR_ARG (exp, 1);
11529 maxread = CALL_EXPR_ARG (exp, 2);
11530 objsize = CALL_EXPR_ARG (exp, 3);
11531 break;
11533 case BUILT_IN_STRNCPY_CHK:
11534 case BUILT_IN_STPNCPY_CHK:
11535 srcstr = CALL_EXPR_ARG (exp, 1);
11536 size = CALL_EXPR_ARG (exp, 2);
11537 objsize = CALL_EXPR_ARG (exp, 3);
11538 break;
11540 case BUILT_IN_SNPRINTF_CHK:
11541 case BUILT_IN_VSNPRINTF_CHK:
11542 maxread = CALL_EXPR_ARG (exp, 1);
11543 objsize = CALL_EXPR_ARG (exp, 3);
11544 /* The only checked access is the write to the destination. */
11545 mode = access_write_only;
11546 break;
11547 default:
11548 gcc_unreachable ();
11551 if (catstr && maxread)
11553 /* Check __strncat_chk. There is no way to determine the length
11554 of the string to which the source string is being appended so
11555 just warn when the length of the source string is not known. */
11556 check_strncat_sizes (exp, objsize);
11557 return;
11560 check_access (exp, size, maxread, srcstr, objsize, mode);
11563 /* Emit warning if a buffer overflow is detected at compile time
11564 in __sprintf_chk/__vsprintf_chk calls. */
11566 static void
11567 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11569 tree size, len, fmt;
11570 const char *fmt_str;
11571 int nargs = call_expr_nargs (exp);
11573 /* Verify the required arguments in the original call. */
11575 if (nargs < 4)
11576 return;
11577 size = CALL_EXPR_ARG (exp, 2);
11578 fmt = CALL_EXPR_ARG (exp, 3);
11580 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11581 return;
11583 /* Check whether the format is a literal string constant. */
11584 fmt_str = c_getstr (fmt);
11585 if (fmt_str == NULL)
11586 return;
11588 if (!init_target_chars ())
11589 return;
11591 /* If the format doesn't contain % args or %%, we know its size. */
11592 if (strchr (fmt_str, target_percent) == 0)
11593 len = build_int_cstu (size_type_node, strlen (fmt_str));
11594 /* If the format is "%s" and the first ... argument is a string literal,
11595 we know it too. */
11596 else if (fcode == BUILT_IN_SPRINTF_CHK
11597 && strcmp (fmt_str, target_percent_s) == 0)
11599 tree arg;
11601 if (nargs < 5)
11602 return;
11603 arg = CALL_EXPR_ARG (exp, 4);
11604 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11605 return;
11607 len = c_strlen (arg, 1);
11608 if (!len || ! tree_fits_uhwi_p (len))
11609 return;
11611 else
11612 return;
11614 /* Add one for the terminating nul. */
11615 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11617 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
11618 access_write_only);
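/* Illustrative case caught above (hypothetical user code):

     char buf[4];
     __builtin___sprintf_chk (buf, 0, 4, "abcde");
     /* 4 is the already-folded __builtin_object_size (buf, 0) */

   The format contains no '%', so the output length is known to be
   5 + 1 bytes including the terminating nul, which exceeds the 4-byte
   object size and is diagnosed at compile time.  */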
11621 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11622 if possible. */
11624 static tree
11625 fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
11627 tree bytes;
11628 int object_size_type;
11630 if (!validate_arg (ptr, POINTER_TYPE)
11631 || !validate_arg (ost, INTEGER_TYPE))
11632 return NULL_TREE;
11634 STRIP_NOPS (ost);
11636 if (TREE_CODE (ost) != INTEGER_CST
11637 || tree_int_cst_sgn (ost) < 0
11638 || compare_tree_int (ost, 3) > 0)
11639 return NULL_TREE;
11641 object_size_type = tree_to_shwi (ost);
11643 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11644 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11645 and (size_t) 0 for types 2 and 3. */
11646 if (TREE_SIDE_EFFECTS (ptr))
11647 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11649 if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
11650 object_size_type |= OST_DYNAMIC;
11652 if (TREE_CODE (ptr) == ADDR_EXPR)
11654 compute_builtin_object_size (ptr, object_size_type, &bytes);
11655 if ((object_size_type & OST_DYNAMIC)
11656 || int_fits_type_p (bytes, size_type_node))
11657 return fold_convert (size_type_node, bytes);
11659 else if (TREE_CODE (ptr) == SSA_NAME)
11661 /* If object size is not known yet, delay folding until
11662 later. Maybe subsequent passes will help determine
11663 it. */
11664 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11665 && ((object_size_type & OST_DYNAMIC)
11666 || int_fits_type_p (bytes, size_type_node)))
11667 return fold_convert (size_type_node, bytes);
11670 return NULL_TREE;
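/* Illustrative foldings (not part of the original source):

     char buf[8];
     __builtin_object_size (&buf[2], 0)   ->  (size_t) 6
     __builtin_object_size (f (), 0)      ->  (size_t) -1   /* side effects */
     __builtin_object_size (f (), 2)      ->  (size_t) 0    /* side effects */

   SSA_NAME operands whose pointed-to object is still unknown are left
   unfolded so later passes can retry.  */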
11673 /* Builtins with folding operations that operate on "..." arguments
11674 need special handling; we need to store the arguments in a convenient
11675 data structure before attempting any folding. Fortunately there are
11676 only a few builtins that fall into this category. FNDECL is the
11677 function; ARGS is an array of its NARGS arguments. */
11679 static tree
11680 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11682 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11683 tree ret = NULL_TREE;
11685 switch (fcode)
11687 case BUILT_IN_FPCLASSIFY:
11688 ret = fold_builtin_fpclassify (loc, args, nargs);
11689 break;
11691 case BUILT_IN_ADDC:
11692 case BUILT_IN_ADDCL:
11693 case BUILT_IN_ADDCLL:
11694 case BUILT_IN_SUBC:
11695 case BUILT_IN_SUBCL:
11696 case BUILT_IN_SUBCLL:
11697 return fold_builtin_addc_subc (loc, fcode, args);
11699 default:
11700 break;
11702 if (ret)
11704 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11705 SET_EXPR_LOCATION (ret, loc);
11706 suppress_warning (ret);
11707 return ret;
11709 return NULL_TREE;
11712 /* Initialize format string characters in the target charset. */
11714 bool
11715 init_target_chars (void)
11717 static bool init;
11718 if (!init)
11720 target_newline = lang_hooks.to_target_charset ('\n');
11721 target_percent = lang_hooks.to_target_charset ('%');
11722 target_c = lang_hooks.to_target_charset ('c');
11723 target_s = lang_hooks.to_target_charset ('s');
11724 if (target_newline == 0 || target_percent == 0 || target_c == 0
11725 || target_s == 0)
11726 return false;
11728 target_percent_c[0] = target_percent;
11729 target_percent_c[1] = target_c;
11730 target_percent_c[2] = '\0';
11732 target_percent_s[0] = target_percent;
11733 target_percent_s[1] = target_s;
11734 target_percent_s[2] = '\0';
11736 target_percent_s_newline[0] = target_percent;
11737 target_percent_s_newline[1] = target_s;
11738 target_percent_s_newline[2] = target_newline;
11739 target_percent_s_newline[3] = '\0';
11741 init = true;
11743 return true;
11746 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11747 and no overflow/underflow occurred. INEXACT is true if M was not
11748 exactly calculated. TYPE is the tree type for the result. This
11749 function assumes that you cleared the MPFR flags and then
11750 calculated M to see if anything subsequently set a flag prior to
11751 entering this function. Return NULL_TREE if any checks fail. */
11753 static tree
11754 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11756 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11757 overflow/underflow occurred. If -frounding-math, proceed iff the
11758 result of calling FUNC was exact. */
11759 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11760 && (!flag_rounding_math || !inexact))
11762 REAL_VALUE_TYPE rr;
11764 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11765 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11766 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11767 but the mpfr_t is not, then we underflowed in the
11768 conversion. */
11769 if (real_isfinite (&rr)
11770 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11772 REAL_VALUE_TYPE rmode;
11774 real_convert (&rmode, TYPE_MODE (type), &rr);
11775 /* Proceed iff the specified mode can hold the value. */
11776 if (real_identical (&rmode, &rr))
11777 return build_real (type, rmode);
11780 return NULL_TREE;
11783 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11784 number and no overflow/underflow occurred. INEXACT is true if M
11785 was not exactly calculated. TYPE is the tree type for the result.
11786 This function assumes that you cleared the MPFR flags and then
11787 calculated M to see if anything subsequently set a flag prior to
11788 entering this function. Return NULL_TREE if any checks fail, if
11789 FORCE_CONVERT is true, then bypass the checks. */
11791 static tree
11792 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11794 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11795 overflow/underflow occurred. If -frounding-math, proceed iff the
11796 result of calling FUNC was exact. */
11797 if (force_convert
11798 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11799 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11800 && (!flag_rounding_math || !inexact)))
11802 REAL_VALUE_TYPE re, im;
11804 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11805 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11806 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11807 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11808 but the mpfr_t is not, then we underflowed in the
11809 conversion. */
11810 if (force_convert
11811 || (real_isfinite (&re) && real_isfinite (&im)
11812 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11813 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11815 REAL_VALUE_TYPE re_mode, im_mode;
11817 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11818 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11819 /* Proceed iff the specified mode can hold the value. */
11820 if (force_convert
11821 || (real_identical (&re_mode, &re)
11822 && real_identical (&im_mode, &im)))
11823 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11824 build_real (TREE_TYPE (type), im_mode));
11827 return NULL_TREE;
11830 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11831 the pointer *(ARG_QUO) and return the result. The type is taken
11832 from the type of ARG0 and is used for setting the precision of the
11833 calculation and results. */
11835 static tree
11836 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11838 tree const type = TREE_TYPE (arg0);
11839 tree result = NULL_TREE;
11841 STRIP_NOPS (arg0);
11842 STRIP_NOPS (arg1);
11844 /* To proceed, MPFR must exactly represent the target floating point
11845 format, which only happens when the target base equals two. */
11846 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11847 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11848 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11850 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11851 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11853 if (real_isfinite (ra0) && real_isfinite (ra1))
11855 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11856 const int prec = fmt->p;
11857 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11858 tree result_rem;
11859 long integer_quo;
11860 mpfr_t m0, m1;
11862 mpfr_inits2 (prec, m0, m1, NULL);
11863 mpfr_from_real (m0, ra0, MPFR_RNDN);
11864 mpfr_from_real (m1, ra1, MPFR_RNDN);
11865 mpfr_clear_flags ();
11866 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11867 /* Remquo is independent of the rounding mode, so pass
11868 inexact=0 to do_mpfr_ckconv(). */
11869 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11870 mpfr_clears (m0, m1, NULL);
11871 if (result_rem)
11873 /* MPFR calculates quo in the host's long so it may
11874 return more bits in quo than the target int can hold
11875 if sizeof(host long) > sizeof(target int). This can
11876 happen even for native compilers in LP64 mode. In
11877 these cases, modulo the quo value with the largest
11878 number that the target int can hold while leaving one
11879 bit for the sign. */
11880 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11881 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11883 /* Dereference the quo pointer argument. */
11884 arg_quo = build_fold_indirect_ref (arg_quo);
11885 /* Proceed iff a valid pointer type was passed in. */
11886 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11888 /* Set the value. */
11889 tree result_quo
11890 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11891 build_int_cst (TREE_TYPE (arg_quo),
11892 integer_quo));
11893 TREE_SIDE_EFFECTS (result_quo) = 1;
11894 /* Combine the quo assignment with the rem. */
11895 result = fold_build2 (COMPOUND_EXPR, type,
11896 result_quo, result_rem);
11897 suppress_warning (result, OPT_Wunused_value);
11898 result = non_lvalue (result);
11903 return result;
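/* Illustrative folding (not part of the original source): with both
   floating-point arguments constant,

     remquo (5.0, 3.0, &q)

   folds to the compound expression (q = 2, -1.0): the quotient rounded to
   nearest is 2 and the IEEE remainder is 5.0 - 2 * 3.0 == -1.0.  */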
11906 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11907 resulting value as a tree with type TYPE. The mpfr precision is
11908 set to the precision of TYPE. We assume that this mpfr function
11909 returns zero if the result could be calculated exactly within the
11910 requested precision. In addition, the integer pointer represented
11911 by ARG_SG will be dereferenced and set to the appropriate signgam
11912 (-1,1) value. */
11914 static tree
11915 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11917 tree result = NULL_TREE;
11919 STRIP_NOPS (arg);
11921 /* To proceed, MPFR must exactly represent the target floating point
11922 format, which only happens when the target base equals two. Also
11923 verify ARG is a constant and that ARG_SG is an int pointer. */
11924 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11925 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11926 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11927 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11929 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11931 /* In addition to NaN and Inf, the argument cannot be zero or a
11932 negative integer. */
11933 if (real_isfinite (ra)
11934 && ra->cl != rvc_zero
11935 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11937 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11938 const int prec = fmt->p;
11939 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11940 int inexact, sg;
11941 tree result_lg;
11943 auto_mpfr m (prec);
11944 mpfr_from_real (m, ra, MPFR_RNDN);
11945 mpfr_clear_flags ();
11946 inexact = mpfr_lgamma (m, &sg, m, rnd);
11947 result_lg = do_mpfr_ckconv (m, type, inexact);
11948 if (result_lg)
11950 tree result_sg;
11952 /* Dereference the arg_sg pointer argument. */
11953 arg_sg = build_fold_indirect_ref (arg_sg);
11954 /* Assign the signgam value into *arg_sg. */
11955 result_sg = fold_build2 (MODIFY_EXPR,
11956 TREE_TYPE (arg_sg), arg_sg,
11957 build_int_cst (TREE_TYPE (arg_sg), sg));
11958 TREE_SIDE_EFFECTS (result_sg) = 1;
11959 /* Combine the signgam assignment with the lgamma result. */
11960 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11961 result_sg, result_lg));
11966 return result;
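/* Illustrative folding (not part of the original source):

     lgamma_r (0.5, &sg)

   folds to roughly the compound expression (sg = 1, 0.57236...), since
   gamma (0.5) == sqrt (pi) is positive and log (sqrt (pi)) ~= 0.57236.  */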
11969 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11970 mpc function FUNC on it and return the resulting value as a tree
11971 with type TYPE. The mpfr precision is set to the precision of
11972 TYPE. We assume that function FUNC returns zero if the result
11973 could be calculated exactly within the requested precision. If
11974 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11975 in the arguments and/or results. */
11977 tree
11978 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11979 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11981 tree result = NULL_TREE;
11983 STRIP_NOPS (arg0);
11984 STRIP_NOPS (arg1);
11986 /* To proceed, MPFR must exactly represent the target floating point
11987 format, which only happens when the target base equals two. */
11988 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11989 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg0)))
11990 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11991 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg1)))
11992 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11994 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11995 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11996 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11997 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11999 if (do_nonfinite
12000 || (real_isfinite (re0) && real_isfinite (im0)
12001 && real_isfinite (re1) && real_isfinite (im1)))
12003 const struct real_format *const fmt =
12004 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12005 const int prec = fmt->p;
12006 const mpfr_rnd_t rnd = fmt->round_towards_zero
12007 ? MPFR_RNDZ : MPFR_RNDN;
12008 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12009 int inexact;
12010 mpc_t m0, m1;
12012 mpc_init2 (m0, prec);
12013 mpc_init2 (m1, prec);
12014 mpfr_from_real (mpc_realref (m0), re0, rnd);
12015 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12016 mpfr_from_real (mpc_realref (m1), re1, rnd);
12017 mpfr_from_real (mpc_imagref (m1), im1, rnd);
12018 mpfr_clear_flags ();
12019 inexact = func (m0, m0, m1, crnd);
12020 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12021 mpc_clear (m0);
12022 mpc_clear (m1);
12026 return result;
12029 /* A wrapper function for builtin folding that prevents warnings for
12030 "statement without effect" and the like, caused by removing the
12031 call node earlier than the warning is generated. */
12033 tree
12034 fold_call_stmt (gcall *stmt, bool ignore)
12036 tree ret = NULL_TREE;
12037 tree fndecl = gimple_call_fndecl (stmt);
12038 location_t loc = gimple_location (stmt);
12039 if (fndecl && fndecl_built_in_p (fndecl)
12040 && !gimple_call_va_arg_pack_p (stmt))
12042 int nargs = gimple_call_num_args (stmt);
12043 tree *args = (nargs > 0
12044 ? gimple_call_arg_ptr (stmt, 0)
12045 : &error_mark_node);
12047 if (avoid_folding_inline_builtin (fndecl))
12048 return NULL_TREE;
12049 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12051 return targetm.fold_builtin (fndecl, nargs, args, ignore);
12053 else
12055 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
12056 if (ret)
12058 /* Propagate location information from original call to
12059 expansion of builtin. Otherwise things like
12060 maybe_emit_chk_warning, that operate on the expansion
12061 of a builtin, will use the wrong location information. */
12062 if (gimple_has_location (stmt))
12064 tree realret = ret;
12065 if (TREE_CODE (ret) == NOP_EXPR)
12066 realret = TREE_OPERAND (ret, 0);
12067 if (CAN_HAVE_LOCATION_P (realret)
12068 && !EXPR_HAS_LOCATION (realret))
12069 SET_EXPR_LOCATION (realret, loc);
12070 return realret;
12072 return ret;
12076 return NULL_TREE;
12079 /* Look up the function in builtin_decl that corresponds to DECL
12080 and set ASMSPEC as its user assembler name. DECL must be a
12081 function decl that declares a builtin. */
12083 void
12084 set_builtin_user_assembler_name (tree decl, const char *asmspec)
12086 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
12087 && asmspec != 0);
12089 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12090 set_user_assembler_name (builtin, asmspec);
12092 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
12093 && INT_TYPE_SIZE < BITS_PER_WORD)
12095 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
12096 set_user_assembler_libfunc ("ffs", asmspec);
12097 set_optab_libfunc (ffs_optab, mode, "ffs");
12101 /* Return true if DECL is a builtin that expands to a constant or similarly
12102 simple code. */
12103 bool
12104 is_simple_builtin (tree decl)
12106 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
12107 switch (DECL_FUNCTION_CODE (decl))
12109 /* Builtins that expand to constants. */
12110 case BUILT_IN_CONSTANT_P:
12111 case BUILT_IN_EXPECT:
12112 case BUILT_IN_OBJECT_SIZE:
12113 case BUILT_IN_UNREACHABLE:
12114 /* Simple register moves or loads from stack. */
12115 case BUILT_IN_ASSUME_ALIGNED:
12116 case BUILT_IN_RETURN_ADDRESS:
12117 case BUILT_IN_EXTRACT_RETURN_ADDR:
12118 case BUILT_IN_FROB_RETURN_ADDR:
12119 case BUILT_IN_RETURN:
12120 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12121 case BUILT_IN_FRAME_ADDRESS:
12122 case BUILT_IN_VA_END:
12123 case BUILT_IN_STACK_SAVE:
12124 case BUILT_IN_STACK_RESTORE:
12125 case BUILT_IN_DWARF_CFA:
12126 /* Exception state returns or moves registers around. */
12127 case BUILT_IN_EH_FILTER:
12128 case BUILT_IN_EH_POINTER:
12129 case BUILT_IN_EH_COPY_VALUES:
12130 return true;
12132 default:
12133 return false;
12136 return false;
12139 /* Return true if DECL is a builtin that is not expensive, i.e., one that
12140 is most probably expanded inline into reasonably simple code. This is a
12141 superset of is_simple_builtin. */
12142 bool
12143 is_inexpensive_builtin (tree decl)
12145 if (!decl)
12146 return false;
12147 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12148 return true;
12149 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12150 switch (DECL_FUNCTION_CODE (decl))
12152 case BUILT_IN_ABS:
12153 CASE_BUILT_IN_ALLOCA:
12154 case BUILT_IN_BSWAP16:
12155 case BUILT_IN_BSWAP32:
12156 case BUILT_IN_BSWAP64:
12157 case BUILT_IN_BSWAP128:
12158 case BUILT_IN_CLZ:
12159 case BUILT_IN_CLZIMAX:
12160 case BUILT_IN_CLZL:
12161 case BUILT_IN_CLZLL:
12162 case BUILT_IN_CTZ:
12163 case BUILT_IN_CTZIMAX:
12164 case BUILT_IN_CTZL:
12165 case BUILT_IN_CTZLL:
12166 case BUILT_IN_FFS:
12167 case BUILT_IN_FFSIMAX:
12168 case BUILT_IN_FFSL:
12169 case BUILT_IN_FFSLL:
12170 case BUILT_IN_IMAXABS:
12171 case BUILT_IN_FINITE:
12172 case BUILT_IN_FINITEF:
12173 case BUILT_IN_FINITEL:
12174 case BUILT_IN_FINITED32:
12175 case BUILT_IN_FINITED64:
12176 case BUILT_IN_FINITED128:
12177 case BUILT_IN_FPCLASSIFY:
12178 case BUILT_IN_ISFINITE:
12179 case BUILT_IN_ISINF_SIGN:
12180 case BUILT_IN_ISINF:
12181 case BUILT_IN_ISINFF:
12182 case BUILT_IN_ISINFL:
12183 case BUILT_IN_ISINFD32:
12184 case BUILT_IN_ISINFD64:
12185 case BUILT_IN_ISINFD128:
12186 case BUILT_IN_ISNAN:
12187 case BUILT_IN_ISNANF:
12188 case BUILT_IN_ISNANL:
12189 case BUILT_IN_ISNAND32:
12190 case BUILT_IN_ISNAND64:
12191 case BUILT_IN_ISNAND128:
12192 case BUILT_IN_ISNORMAL:
12193 case BUILT_IN_ISGREATER:
12194 case BUILT_IN_ISGREATEREQUAL:
12195 case BUILT_IN_ISLESS:
12196 case BUILT_IN_ISLESSEQUAL:
12197 case BUILT_IN_ISLESSGREATER:
12198 case BUILT_IN_ISUNORDERED:
12199 case BUILT_IN_ISEQSIG:
12200 case BUILT_IN_VA_ARG_PACK:
12201 case BUILT_IN_VA_ARG_PACK_LEN:
12202 case BUILT_IN_VA_COPY:
12203 case BUILT_IN_TRAP:
12204 case BUILT_IN_UNREACHABLE_TRAP:
12205 case BUILT_IN_SAVEREGS:
12206 case BUILT_IN_POPCOUNTL:
12207 case BUILT_IN_POPCOUNTLL:
12208 case BUILT_IN_POPCOUNTIMAX:
12209 case BUILT_IN_POPCOUNT:
12210 case BUILT_IN_PARITYL:
12211 case BUILT_IN_PARITYLL:
12212 case BUILT_IN_PARITYIMAX:
12213 case BUILT_IN_PARITY:
12214 case BUILT_IN_LABS:
12215 case BUILT_IN_LLABS:
12216 case BUILT_IN_PREFETCH:
12217 case BUILT_IN_ACC_ON_DEVICE:
12218 return true;
12220 default:
12221 return is_simple_builtin (decl);
12224 return false;
12227 /* Return true if T is a constant and the value cast to a target char
12228 can be represented by a host char.
12229 Store the casted char constant in *P if so. */
12231 bool
12232 target_char_cst_p (tree t, char *p)
12234 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
12235 return false;
12237 *p = (char)tree_to_uhwi (t);
12238 return true;
12241 /* Return true if the builtin DECL is implemented in a standard library.
12242 Otherwise return false, which does not guarantee that it is not (the list
12243 of handled builtins below may be incomplete). */
12245 bool
12246 builtin_with_linkage_p (tree decl)
12248 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12249 switch (DECL_FUNCTION_CODE (decl))
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
        return true;
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
        /* stpcpy is both referenced in libiberty's pex-win32.c and provided
           by libiberty's stpcpy.c for MinGW targets so we need to return true
           in order to be able to build libiberty in LTO mode for them.  */
        return true;

      default:
        break;
      }
  return false;
}
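/* Illustrative note: __builtin_sqrt and the other math builtins matched
   above have real library entry points (sqrt, sqrtf, ...) they can resolve
   to, whereas a builtin such as __builtin_expect has no library counterpart
   and falls through to the default case.  As the comment above says, the
   list is not guaranteed to be complete.  */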
/* Return true if OFFRNG is bounded to a subrange of offset values
   valid for the largest possible object.  */

bool
access_ref::offset_bounded () const
{
  tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
  tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
  return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
}
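/* Illustrative note: with a 64-bit ptrdiff_t an access_ref whose offset
   range is, say, [0, 4096] is considered bounded, while a range whose
   upper bound exceeds PTRDIFF_MAX (for example one covering the whole
   unsigned sizetype domain) is not.  */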
/* Return an fnspec string describing the known memory side effects of the
   builtin CALLEE, or the empty fnspec if nothing is known.
   See tree-ssa-structalias.cc:find_func_aliases
   for the list of builtins we might need to handle here.  */
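/* A rough guide to the fnspec strings returned below (attr-fnspec.h has the
   authoritative description): the first character describes the return
   value ('1'-'4' = returns that argument, 'm' = returns fresh non-aliased
   memory, '.' = unknown); the second describes the function as a whole
   ('c'/'C' = const, 'p'/'P' = pure, apart from the described argument
   effects, the uppercase form also noting a possible errno clobber); and
   each following pair of characters describes one argument: whether its
   pointed-to memory is read ('r'/'R'), only written ('o'/'O'), possibly
   both ('w'/'W'), or copied into the memory pointed to by another argument
   (a digit naming that argument), optionally followed by a size bound
   (a digit naming the size argument, or 't' for the size of the pointed-to
   type).

   For example, strcat's "1cW 1 " below decodes roughly as: returns its
   first argument; otherwise const; argument 1's memory is both read and
   written with no known size bound; argument 2's memory is copied into
   the memory pointed to by argument 1.  */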
attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);

  switch (code)
    {
      /* All the following functions read memory pointed to by
         their second argument and write memory pointed to by first
         argument.
         strcat/strncat additionally reads memory pointed to by the first
         argument.  */
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRCAT_CHK:
        return "1cW 1 ";
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRNCAT_CHK:
        return "1cW 13";
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRCPY_CHK:
        return "1cO 1 ";
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPCPY_CHK:
        return ".cO 1 ";
      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
        return "1cO313";
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_MEMPCPY_CHK:
        return ".cO313";
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STPNCPY_CHK:
        return ".cO313";
      case BUILT_IN_BCOPY:
        return ".c23O3";
      case BUILT_IN_BZERO:
        return ".cO2";
      case BUILT_IN_MEMCMP:
      case BUILT_IN_MEMCMP_EQ:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRNCASECMP:
        return ".cR3R3";
      /* The following functions read memory pointed to by their
         first argument.  */
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
      case BUILT_IN_TM_LOG:
      case BUILT_IN_TM_LOG_1:
      case BUILT_IN_TM_LOG_2:
      case BUILT_IN_TM_LOG_4:
      case BUILT_IN_TM_LOG_8:
      case BUILT_IN_TM_LOG_FLOAT:
      case BUILT_IN_TM_LOG_DOUBLE:
      case BUILT_IN_TM_LOG_LDOUBLE:
      case BUILT_IN_TM_LOG_M64:
      case BUILT_IN_TM_LOG_M128:
      case BUILT_IN_TM_LOG_M256:
        return ".cR ";

      case BUILT_IN_INDEX:
      case BUILT_IN_RINDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRLEN:
      case BUILT_IN_STRRCHR:
        return ".cR ";
      case BUILT_IN_STRNLEN:
        return ".cR2";
      /* These read memory pointed to by the first argument.
         Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.
         Unix98 specifies that errno is set on allocation failure.  */
      case BUILT_IN_STRDUP:
        return "mCR ";
      case BUILT_IN_STRNDUP:
        return "mCR2";
      /* Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.  */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALIGNED_ALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_GOMP_ALLOC:
        return "mC";
      CASE_BUILT_IN_ALLOCA:
        return "mc";
      /* These read memory pointed to by the first argument with size
         in the third argument.  */
      case BUILT_IN_MEMCHR:
        return ".cR3";
      /* These read memory pointed to by the first and second arguments.  */
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRCSPN:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
        return ".cR R ";
      /* Freeing memory kills the pointed-to memory.  More importantly
         the call has to serve as a barrier for moving loads and stores
         across it.  */
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_FREE:
      case BUILT_IN_GOMP_FREE:
        return ".co ";
      case BUILT_IN_VA_END:
        return ".cO ";
      /* Realloc serves both as allocation point and deallocation point.  */
      case BUILT_IN_REALLOC:
      case BUILT_IN_GOMP_REALLOC:
        return ".Cw ";
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
        return ".C. Ot";
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
        return ".c. Ot";
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
        return ".c. . Ot";
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
        return ".c. OtOt";
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
        return "1cO3";
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
        return ".cO ";
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_RETURN:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_DWARF_CFA:
      case BUILT_IN_RETURN_ADDRESS:
        return ".c";
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_EXPECT:
      case BUILT_IN_EXPECT_WITH_PROBABILITY:
        return "1cX ";
      /* But posix_memalign stores a pointer into the memory pointed to
         by its first argument.  */
      case BUILT_IN_POSIX_MEMALIGN:
        return ".cOt";

      default:
        return "";