1 /* Definitions of the pointer_query and related classes.
3 Copyright (C) 2020-2021 Free Software Foundation, Inc.
5 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
27 #include "stringpool.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "tree-ssa-strlen.h"
33 #include "langhooks.h"
34 #include "stringpool.h"
36 #include "gimple-fold.h"
37 #include "gimple-ssa.h"
39 #include "attr-fnspec.h"
40 #include "gimple-range.h"
41 #include "pointer-query.h"
42 #include "tree-pretty-print.h"
43 #include "tree-ssanames.h"
45 static bool compute_objsize_r (tree
, int, access_ref
*, ssa_name_limit_t
&,
48 /* Wrapper around the wide_int overload of get_range that accepts
49 offset_int instead. For middle end expressions returns the same
50 result. For a subset of nonconstamt expressions emitted by the front
51 end determines a more precise range than would be possible otherwise. */
54 get_offset_range (tree x
, gimple
*stmt
, offset_int r
[2], range_query
*rvals
)
57 if (TREE_CODE (x
) == PLUS_EXPR
)
59 /* Handle constant offsets in pointer addition expressions seen
60 n the front end IL. */
61 tree op
= TREE_OPERAND (x
, 1);
62 if (TREE_CODE (op
) == INTEGER_CST
)
64 op
= fold_convert (signed_type_for (TREE_TYPE (op
)), op
);
65 add
= wi::to_offset (op
);
66 x
= TREE_OPERAND (x
, 0);
70 if (TREE_CODE (x
) == NOP_EXPR
)
71 /* Also handle conversions to sizetype seen in the front end IL. */
72 x
= TREE_OPERAND (x
, 0);
74 tree type
= TREE_TYPE (x
);
75 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
78 if (TREE_CODE (x
) != INTEGER_CST
79 && TREE_CODE (x
) != SSA_NAME
)
81 if (TYPE_UNSIGNED (type
)
82 && TYPE_PRECISION (type
) == TYPE_PRECISION (sizetype
))
83 type
= signed_type_for (type
);
85 r
[0] = wi::to_offset (TYPE_MIN_VALUE (type
)) + add
;
86 r
[1] = wi::to_offset (TYPE_MAX_VALUE (type
)) + add
;
91 if (!get_range (x
, stmt
, wr
, rvals
))
95 /* Only convert signed integers or unsigned sizetype to a signed
96 offset and avoid converting large positive values in narrower
97 types to negative offsets. */
98 if (TYPE_UNSIGNED (type
)
99 && wr
[0].get_precision () < TYPE_PRECISION (sizetype
))
102 r
[0] = offset_int::from (wr
[0], sgn
);
103 r
[1] = offset_int::from (wr
[1], sgn
);
107 /* Return the argument that the call STMT to a built-in function returns
108 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
109 from the argument reflected in the value returned by the built-in if it
110 can be determined, otherwise to 0 and HWI_M1U respectively. Set
111 *PAST_END for functions like mempcpy that might return a past the end
112 pointer (most functions return a dereferenceable pointer to an existing
113 element of an array). */
116 gimple_call_return_array (gimple
*stmt
, offset_int offrng
[2], bool *past_end
,
119 /* Clear and set below for the rare function(s) that might return
120 a past-the-end pointer. */
124 /* Check for attribute fn spec to see if the function returns one
126 attr_fnspec fnspec
= gimple_call_fnspec (as_a
<gcall
*>(stmt
));
128 if (fnspec
.returns_arg (&argno
))
130 /* Functions return the first argument (not a range). */
131 offrng
[0] = offrng
[1] = 0;
132 return gimple_call_arg (stmt
, argno
);
136 if (gimple_call_num_args (stmt
) < 1)
139 tree fn
= gimple_call_fndecl (stmt
);
140 if (!gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
142 /* See if this is a call to placement new. */
144 || !DECL_IS_OPERATOR_NEW_P (fn
)
145 || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn
))
148 /* Check the mangling, keeping in mind that operator new takes
149 a size_t which could be unsigned int or unsigned long. */
150 tree fname
= DECL_ASSEMBLER_NAME (fn
);
151 if (!id_equal (fname
, "_ZnwjPv") // ordinary form
152 && !id_equal (fname
, "_ZnwmPv") // ordinary form
153 && !id_equal (fname
, "_ZnajPv") // array form
154 && !id_equal (fname
, "_ZnamPv")) // array form
157 if (gimple_call_num_args (stmt
) != 2)
160 /* Allocation functions return a pointer to the beginning. */
161 offrng
[0] = offrng
[1] = 0;
162 return gimple_call_arg (stmt
, 1);
165 switch (DECL_FUNCTION_CODE (fn
))
167 case BUILT_IN_MEMCPY
:
168 case BUILT_IN_MEMCPY_CHK
:
169 case BUILT_IN_MEMMOVE
:
170 case BUILT_IN_MEMMOVE_CHK
:
171 case BUILT_IN_MEMSET
:
172 case BUILT_IN_STRCAT
:
173 case BUILT_IN_STRCAT_CHK
:
174 case BUILT_IN_STRCPY
:
175 case BUILT_IN_STRCPY_CHK
:
176 case BUILT_IN_STRNCAT
:
177 case BUILT_IN_STRNCAT_CHK
:
178 case BUILT_IN_STRNCPY
:
179 case BUILT_IN_STRNCPY_CHK
:
180 /* Functions return the first argument (not a range). */
181 offrng
[0] = offrng
[1] = 0;
182 return gimple_call_arg (stmt
, 0);
184 case BUILT_IN_MEMPCPY
:
185 case BUILT_IN_MEMPCPY_CHK
:
187 /* The returned pointer is in a range constrained by the smaller
188 of the upper bound of the size argument and the source object
191 offrng
[1] = HOST_WIDE_INT_M1U
;
192 tree off
= gimple_call_arg (stmt
, 2);
193 bool off_valid
= get_offset_range (off
, stmt
, offrng
, rvals
);
194 if (!off_valid
|| offrng
[0] != offrng
[1])
196 /* If the offset is either indeterminate or in some range,
197 try to constrain its upper bound to at most the size
198 of the source object. */
200 tree src
= gimple_call_arg (stmt
, 1);
201 if (compute_objsize (src
, 1, &aref
, rvals
)
202 && aref
.sizrng
[1] < offrng
[1])
203 offrng
[1] = aref
.sizrng
[1];
206 /* Mempcpy may return a past-the-end pointer. */
208 return gimple_call_arg (stmt
, 0);
211 case BUILT_IN_MEMCHR
:
213 tree off
= gimple_call_arg (stmt
, 2);
214 if (get_offset_range (off
, stmt
, offrng
, rvals
))
217 offrng
[1] = HOST_WIDE_INT_M1U
;
220 return gimple_call_arg (stmt
, 0);
223 case BUILT_IN_STRCHR
:
224 case BUILT_IN_STRRCHR
:
225 case BUILT_IN_STRSTR
:
227 offrng
[1] = HOST_WIDE_INT_M1U
;
228 return gimple_call_arg (stmt
, 0);
230 case BUILT_IN_STPCPY
:
231 case BUILT_IN_STPCPY_CHK
:
234 tree src
= gimple_call_arg (stmt
, 1);
235 if (compute_objsize (src
, 1, &aref
, rvals
))
236 offrng
[1] = aref
.sizrng
[1] - 1;
238 offrng
[1] = HOST_WIDE_INT_M1U
;
241 return gimple_call_arg (stmt
, 0);
244 case BUILT_IN_STPNCPY
:
245 case BUILT_IN_STPNCPY_CHK
:
247 /* The returned pointer is in a range between the first argument
248 and it plus the smaller of the upper bound of the size argument
249 and the source object size. */
250 offrng
[1] = HOST_WIDE_INT_M1U
;
251 tree off
= gimple_call_arg (stmt
, 2);
252 if (!get_offset_range (off
, stmt
, offrng
, rvals
)
253 || offrng
[0] != offrng
[1])
255 /* If the offset is either indeterminate or in some range,
256 try to constrain its upper bound to at most the size
257 of the source object. */
259 tree src
= gimple_call_arg (stmt
, 1);
260 if (compute_objsize (src
, 1, &aref
, rvals
)
261 && aref
.sizrng
[1] < offrng
[1])
262 offrng
[1] = aref
.sizrng
[1];
265 /* When the source is the empty string the returned pointer is
266 a copy of the argument. Otherwise stpcpy can also return
267 a past-the-end pointer. */
270 return gimple_call_arg (stmt
, 0);
280 /* Return true when EXP's range can be determined and set RANGE[] to it
281 after adjusting it if necessary to make EXP a represents a valid size
282 of object, or a valid size argument to an allocation function declared
283 with attribute alloc_size (whose argument may be signed), or to a string
284 manipulation function like memset.
285 When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
286 a size in an anti-range [1, N] where N > PTRDIFF_MAX. A zero range is
287 a (nearly) invalid argument to allocation functions like malloc but it
288 is a valid argument to functions like memset.
289 When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
290 in a multi-range, otherwise to the smallest valid subrange. */
293 get_size_range (range_query
*query
, tree exp
, gimple
*stmt
, tree range
[2],
299 if (tree_fits_uhwi_p (exp
))
301 /* EXP is a constant. */
302 range
[0] = range
[1] = exp
;
306 tree exptype
= TREE_TYPE (exp
);
307 bool integral
= INTEGRAL_TYPE_P (exptype
);
310 enum value_range_kind range_type
;
313 query
= get_range_query (cfun
);
319 query
->range_of_expr (vr
, exp
, stmt
);
321 if (vr
.undefined_p ())
322 vr
.set_varying (TREE_TYPE (exp
));
323 range_type
= vr
.kind ();
324 min
= wi::to_wide (vr
.min ());
325 max
= wi::to_wide (vr
.max ());
328 range_type
= VR_VARYING
;
330 if (range_type
== VR_VARYING
)
334 /* Use the full range of the type of the expression when
335 no value range information is available. */
336 range
[0] = TYPE_MIN_VALUE (exptype
);
337 range
[1] = TYPE_MAX_VALUE (exptype
);
341 range
[0] = NULL_TREE
;
342 range
[1] = NULL_TREE
;
346 unsigned expprec
= TYPE_PRECISION (exptype
);
348 bool signed_p
= !TYPE_UNSIGNED (exptype
);
350 if (range_type
== VR_ANTI_RANGE
)
354 if (wi::les_p (max
, 0))
356 /* EXP is not in a strictly negative range. That means
357 it must be in some (not necessarily strictly) positive
358 range which includes zero. Since in signed to unsigned
359 conversions negative values end up converted to large
360 positive values, and otherwise they are not valid sizes,
361 the resulting range is in both cases [0, TYPE_MAX]. */
362 min
= wi::zero (expprec
);
363 max
= wi::to_wide (TYPE_MAX_VALUE (exptype
));
365 else if (wi::les_p (min
- 1, 0))
367 /* EXP is not in a negative-positive range. That means EXP
368 is either negative, or greater than max. Since negative
369 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
371 max
= wi::to_wide (TYPE_MAX_VALUE (exptype
));
376 min
= wi::zero (expprec
);
381 wide_int maxsize
= wi::to_wide (max_object_size ());
382 min
= wide_int::from (min
, maxsize
.get_precision (), UNSIGNED
);
383 max
= wide_int::from (max
, maxsize
.get_precision (), UNSIGNED
);
384 if (wi::eq_p (0, min
- 1))
386 /* EXP is unsigned and not in the range [1, MAX]. That means
387 it's either zero or greater than MAX. Even though 0 would
388 normally be detected by -Walloc-zero, unless ALLOW_ZERO
389 is set, set the range to [MAX, TYPE_MAX] so that when MAX
390 is greater than the limit the whole range is diagnosed. */
391 wide_int maxsize
= wi::to_wide (max_object_size ());
392 if (flags
& SR_ALLOW_ZERO
)
394 if (wi::leu_p (maxsize
, max
+ 1)
395 || !(flags
& SR_USE_LARGEST
))
396 min
= max
= wi::zero (expprec
);
400 max
= wi::to_wide (TYPE_MAX_VALUE (exptype
));
406 max
= wi::to_wide (TYPE_MAX_VALUE (exptype
));
409 else if ((flags
& SR_USE_LARGEST
)
410 && wi::ltu_p (max
+ 1, maxsize
))
412 /* When USE_LARGEST is set and the larger of the two subranges
413 is a valid size, use it... */
419 /* ...otherwise use the smaller subrange. */
421 min
= wi::zero (expprec
);
426 range
[0] = wide_int_to_tree (exptype
, min
);
427 range
[1] = wide_int_to_tree (exptype
, max
);
433 get_size_range (tree exp
, tree range
[2], int flags
/* = 0 */)
435 return get_size_range (/*query=*/NULL
, exp
, /*stmt=*/NULL
, range
, flags
);
438 /* If STMT is a call to an allocation function, returns the constant
439 maximum size of the object allocated by the call represented as
440 sizetype. If nonnull, sets RNG1[] to the range of the size.
441 When nonnull, uses RVALS for range information, otherwise gets global
443 Returns null when STMT is not a call to a valid allocation function. */
446 gimple_call_alloc_size (gimple
*stmt
, wide_int rng1
[2] /* = NULL */,
447 range_query
* /* = NULL */)
449 if (!stmt
|| !is_gimple_call (stmt
))
453 if (tree fndecl
= gimple_call_fndecl (stmt
))
454 allocfntype
= TREE_TYPE (fndecl
);
456 allocfntype
= gimple_call_fntype (stmt
);
461 unsigned argidx1
= UINT_MAX
, argidx2
= UINT_MAX
;
462 tree at
= lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype
));
465 if (!gimple_call_builtin_p (stmt
, BUILT_IN_ALLOCA_WITH_ALIGN
))
471 unsigned nargs
= gimple_call_num_args (stmt
);
473 if (argidx1
== UINT_MAX
)
475 tree atval
= TREE_VALUE (at
);
479 argidx1
= TREE_INT_CST_LOW (TREE_VALUE (atval
)) - 1;
480 if (nargs
<= argidx1
)
483 atval
= TREE_CHAIN (atval
);
486 argidx2
= TREE_INT_CST_LOW (TREE_VALUE (atval
)) - 1;
487 if (nargs
<= argidx2
)
492 tree size
= gimple_call_arg (stmt
, argidx1
);
494 wide_int rng1_buf
[2];
495 /* If RNG1 is not set, use the buffer. */
499 /* Use maximum precision to avoid overflow below. */
500 const int prec
= ADDR_MAX_PRECISION
;
504 /* Determine the largest valid range size, including zero. */
505 if (!get_size_range (size
, r
, SR_ALLOW_ZERO
| SR_USE_LARGEST
))
507 rng1
[0] = wi::to_wide (r
[0], prec
);
508 rng1
[1] = wi::to_wide (r
[1], prec
);
511 if (argidx2
> nargs
&& TREE_CODE (size
) == INTEGER_CST
)
512 return fold_convert (sizetype
, size
);
514 /* To handle ranges do the math in wide_int and return the product
515 of the upper bounds as a constant. Ignore anti-ranges. */
516 tree n
= argidx2
< nargs
? gimple_call_arg (stmt
, argidx2
) : integer_one_node
;
520 /* As above, use the full non-negative range on failure. */
521 if (!get_size_range (n
, r
, SR_ALLOW_ZERO
| SR_USE_LARGEST
))
523 rng2
[0] = wi::to_wide (r
[0], prec
);
524 rng2
[1] = wi::to_wide (r
[1], prec
);
527 /* Compute products of both bounds for the caller but return the lesser
528 of SIZE_MAX and the product of the upper bounds as a constant. */
529 rng1
[0] = rng1
[0] * rng2
[0];
530 rng1
[1] = rng1
[1] * rng2
[1];
532 const tree size_max
= TYPE_MAX_VALUE (sizetype
);
533 if (wi::gtu_p (rng1
[1], wi::to_wide (size_max
, prec
)))
535 rng1
[1] = wi::to_wide (size_max
, prec
);
539 return wide_int_to_tree (sizetype
, rng1
[1]);
542 /* For an access to an object referenced to by the function parameter PTR
543 of pointer type, and set RNG[] to the range of sizes of the object
544 obtainedfrom the attribute access specification for the current function.
545 Set STATIC_ARRAY if the array parameter has been declared [static].
546 Return the function parameter on success and null otherwise. */
549 gimple_parm_array_size (tree ptr
, wide_int rng
[2],
550 bool *static_array
/* = NULL */)
552 /* For a function argument try to determine the byte size of the array
553 from the current function declaratation (e.g., attribute access or
555 tree var
= SSA_NAME_VAR (ptr
);
556 if (TREE_CODE (var
) != PARM_DECL
)
559 const unsigned prec
= TYPE_PRECISION (sizetype
);
562 attr_access
*access
= get_parm_access (rdwr_idx
, var
);
566 if (access
->sizarg
!= UINT_MAX
)
568 /* TODO: Try to extract the range from the argument based on
569 those of subsequent assertions or based on known calls to
570 the current function. */
574 if (!access
->minsize
)
577 /* Only consider ordinary array bound at level 2 (or above if it's
579 if (warn_array_parameter
< 2 && !access
->static_p
)
583 *static_array
= access
->static_p
;
585 rng
[0] = wi::zero (prec
);
586 rng
[1] = wi::uhwi (access
->minsize
, prec
);
587 /* Multiply the array bound encoded in the attribute by the size
588 of what the pointer argument to which it decays points to. */
589 tree eltype
= TREE_TYPE (TREE_TYPE (ptr
));
590 tree size
= TYPE_SIZE_UNIT (eltype
);
591 if (!size
|| TREE_CODE (size
) != INTEGER_CST
)
594 rng
[1] *= wi::to_wide (size
, prec
);
598 access_ref::access_ref (tree bound
/* = NULL_TREE */,
599 bool minaccess
/* = false */)
600 : ref (), eval ([](tree x
){ return x
; }), deref (), trail1special (true),
601 base0 (true), parmarray ()
604 offrng
[0] = offrng
[1] = 0;
605 offmax
[0] = offmax
[1] = 0;
607 sizrng
[0] = sizrng
[1] = -1;
609 /* Set the default bounds of the access and adjust below. */
610 bndrng
[0] = minaccess
? 1 : 0;
611 bndrng
[1] = HOST_WIDE_INT_M1U
;
613 /* When BOUND is nonnull and a range can be extracted from it,
614 set the bounds of the access to reflect both it and MINACCESS.
615 BNDRNG[0] is the size of the minimum access. */
617 if (bound
&& get_size_range (bound
, rng
, SR_ALLOW_ZERO
))
619 bndrng
[0] = wi::to_offset (rng
[0]);
620 bndrng
[1] = wi::to_offset (rng
[1]);
621 bndrng
[0] = bndrng
[0] > 0 && minaccess
? 1 : 0;
625 /* Return the PHI node REF refers to or null if it doesn't. */
628 access_ref::phi () const
630 if (!ref
|| TREE_CODE (ref
) != SSA_NAME
)
633 gimple
*def_stmt
= SSA_NAME_DEF_STMT (ref
);
634 if (!def_stmt
|| gimple_code (def_stmt
) != GIMPLE_PHI
)
637 return as_a
<gphi
*> (def_stmt
);
640 /* Determine and return the largest object to which *THIS refers. If
641 *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
642 of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
646 access_ref::get_ref (vec
<access_ref
> *all_refs
,
647 access_ref
*pref
/* = NULL */,
648 int ostype
/* = 1 */,
649 ssa_name_limit_t
*psnlim
/* = NULL */,
650 pointer_query
*qry
/* = NULL */) const
652 gphi
*phi_stmt
= this->phi ();
656 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
657 cause unbounded recursion. */
658 ssa_name_limit_t snlim_buf
;
662 if (!psnlim
->visit_phi (ref
))
665 pointer_query empty_qry
;
669 /* The conservative result of the PHI reflecting the offset and size
670 of the largest PHI argument, regardless of whether or not they all
671 refer to the same object. */
675 /* The identity of the object has not been determined yet but
676 PREF->REF is set by the caller to the PHI for convenience.
677 The size is negative/invalid and the offset is zero (it's
678 updated only after the identity of the object has been
680 gcc_assert (pref
->sizrng
[0] < 0);
681 gcc_assert (pref
->offrng
[0] == 0 && pref
->offrng
[1] == 0);
686 /* Set if any argument is a function array (or VLA) parameter not
687 declared [static]. */
688 bool parmarray
= false;
689 /* The size of the smallest object referenced by the PHI arguments. */
690 offset_int minsize
= 0;
691 const offset_int maxobjsize
= wi::to_offset (max_object_size ());
693 const unsigned nargs
= gimple_phi_num_args (phi_stmt
);
694 for (unsigned i
= 0; i
< nargs
; ++i
)
696 access_ref phi_arg_ref
;
697 tree arg
= gimple_phi_arg_def (phi_stmt
, i
);
698 if (!compute_objsize_r (arg
, ostype
, &phi_arg_ref
, *psnlim
, qry
)
699 || phi_arg_ref
.sizrng
[0] < 0)
700 /* A PHI with all null pointer arguments. */
703 if (TREE_CODE (arg
) == SSA_NAME
)
704 qry
->put_ref (arg
, phi_arg_ref
);
707 all_refs
->safe_push (phi_arg_ref
);
709 parmarray
|= phi_arg_ref
.parmarray
;
711 const bool nullp
= integer_zerop (arg
) && (i
|| i
+ 1 < nargs
);
713 if (phi_ref
.sizrng
[0] < 0)
715 /* If PHI_REF doesn't contain a meaningful result yet set it
716 to the result for the first argument. */
718 phi_ref
= phi_arg_ref
;
720 /* Set if the current argument refers to one or more objects of
721 known size (or range of sizes), as opposed to referring to
722 one or more unknown object(s). */
723 const bool arg_known_size
= (phi_arg_ref
.sizrng
[0] != 0
724 || phi_arg_ref
.sizrng
[1] != maxobjsize
);
726 minsize
= phi_arg_ref
.sizrng
[0];
731 const bool phi_known_size
= (phi_ref
.sizrng
[0] != 0
732 || phi_ref
.sizrng
[1] != maxobjsize
);
734 if (phi_known_size
&& phi_arg_ref
.sizrng
[0] < minsize
)
735 minsize
= phi_arg_ref
.sizrng
[0];
737 /* Disregard null pointers in PHIs with two or more arguments.
738 TODO: Handle this better! */
742 /* Determine the amount of remaining space in the argument. */
743 offset_int argrem
[2];
744 argrem
[1] = phi_arg_ref
.size_remaining (argrem
);
746 /* Determine the amount of remaining space computed so far and
747 if the remaining space in the argument is more use it instead. */
748 offset_int phirem
[2];
749 phirem
[1] = phi_ref
.size_remaining (phirem
);
751 /* Reset the PHI's BASE0 flag if any of the nonnull arguments
752 refers to an object at an unknown offset. */
753 if (!phi_arg_ref
.base0
)
754 phi_ref
.base0
= false;
756 if (phirem
[1] < argrem
[1]
757 || (phirem
[1] == argrem
[1]
758 && phi_ref
.sizrng
[1] < phi_arg_ref
.sizrng
[1]))
759 /* Use the argument with the most space remaining as the result,
760 or the larger one if the space is equal. */
761 phi_ref
= phi_arg_ref
;
764 /* Replace the lower bound of the largest argument with the size
765 of the smallest argument, and set PARMARRAY if any argument
767 phi_ref
.sizrng
[0] = minsize
;
768 phi_ref
.parmarray
= parmarray
;
770 if (phi_ref
.sizrng
[0] < 0)
772 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
773 (perhaps because they have all been already visited by prior
775 psnlim
->leave_phi (ref
);
779 /* Avoid changing *THIS. */
780 if (pref
&& pref
!= this)
783 psnlim
->leave_phi (ref
);
788 /* Return the maximum amount of space remaining and if non-null, set
789 argument to the minimum. */
792 access_ref::size_remaining (offset_int
*pmin
/* = NULL */) const
800 /* If the identity of the object hasn't been determined return
801 the maximum size range. */
803 return wi::to_offset (max_object_size ());
806 /* add_offset() ensures the offset range isn't inverted. */
807 gcc_checking_assert (offrng
[0] <= offrng
[1]);
811 /* The offset into referenced object is zero-based (i.e., it's
812 not referenced by a pointer into middle of some unknown object). */
813 if (offrng
[0] < 0 && offrng
[1] < 0)
815 /* If the offset is negative the remaining size is zero. */
820 if (sizrng
[1] <= offrng
[0])
822 /* If the starting offset is greater than or equal to the upper
823 bound on the size of the object, the space remaining is zero.
824 As a special case, if it's equal, set *PMIN to -1 to let
825 the caller know the offset is valid and just past the end. */
826 *pmin
= sizrng
[1] == offrng
[0] ? -1 : 0;
830 /* Otherwise return the size minus the lower bound of the offset. */
831 offset_int or0
= offrng
[0] < 0 ? 0 : offrng
[0];
833 *pmin
= sizrng
[0] - or0
;
834 return sizrng
[1] - or0
;
837 /* The offset to the referenced object isn't zero-based (i.e., it may
838 refer to a byte other than the first. The size of such an object
839 is constrained only by the size of the address space (the result
840 of max_object_size()). */
841 if (sizrng
[1] <= offrng
[0])
847 offset_int or0
= offrng
[0] < 0 ? 0 : offrng
[0];
849 *pmin
= sizrng
[0] - or0
;
850 return sizrng
[1] - or0
;
853 /* Return true if the offset and object size are in range for SIZE. */
856 access_ref::offset_in_range (const offset_int
&size
) const
858 if (size_remaining () < size
)
862 return offmax
[0] >= 0 && offmax
[1] <= sizrng
[1];
864 offset_int maxoff
= wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node
));
865 return offmax
[0] > -maxoff
&& offmax
[1] < maxoff
;
868 /* Add the range [MIN, MAX] to the offset range. For known objects (with
869 zero-based offsets) at least one of whose offset's bounds is in range,
870 constrain the other (or both) to the bounds of the object (i.e., zero
871 and the upper bound of its size). This improves the quality of
874 void access_ref::add_offset (const offset_int
&min
, const offset_int
&max
)
878 /* To add an ordinary range just add it to the bounds. */
884 /* To add an inverted range to an offset to an unknown object
885 expand it to the maximum. */
891 /* To add an inverted range to an offset to an known object set
892 the upper bound to the maximum representable offset value
893 (which may be greater than MAX_OBJECT_SIZE).
894 The lower bound is either the sum of the current offset and
895 MIN when abs(MAX) is greater than the former, or zero otherwise.
896 Zero because then then inverted range includes the negative of
898 offset_int maxoff
= wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node
));
909 offset_int absmax
= wi::abs (max
);
910 if (offrng
[0] < absmax
)
913 /* Cap the lower bound at the upper (set to MAXOFF above)
914 to avoid inadvertently recreating an inverted range. */
915 if (offrng
[1] < offrng
[0])
916 offrng
[0] = offrng
[1];
922 /* Set the minimum and maximmum computed so far. */
923 if (offrng
[1] < 0 && offrng
[1] < offmax
[0])
924 offmax
[0] = offrng
[1];
925 if (offrng
[0] > 0 && offrng
[0] > offmax
[1])
926 offmax
[1] = offrng
[0];
931 /* When referencing a known object check to see if the offset computed
932 so far is in bounds... */
933 offset_int remrng
[2];
934 remrng
[1] = size_remaining (remrng
);
935 if (remrng
[1] > 0 || remrng
[0] < 0)
937 /* ...if so, constrain it so that neither bound exceeds the size of
938 the object. Out of bounds offsets are left unchanged, and, for
939 better or worse, become in bounds later. They should be detected
940 and diagnosed at the point they first become invalid by
944 if (offrng
[1] > sizrng
[1])
945 offrng
[1] = sizrng
[1];
949 /* Issue one inform message describing each target of an access REF.
950 WRITE is set for a write access and clear for a read access. */
953 access_ref::inform_access (access_mode mode
) const
955 const access_ref
&aref
= *this;
961 /* Set MAXREF to refer to the largest object and fill ALL_REFS
962 with data for all objects referenced by the PHI arguments. */
964 auto_vec
<access_ref
> all_refs
;
965 if (!get_ref (&all_refs
, &maxref
))
968 /* Except for MAXREF, the rest of the arguments' offsets need not
969 reflect one added to the PHI itself. Determine the latter from
970 MAXREF on which the result is based. */
971 const offset_int orng
[] =
973 offrng
[0] - maxref
.offrng
[0],
974 wi::smax (offrng
[1] - maxref
.offrng
[1], offrng
[0]),
977 /* Add the final PHI's offset to that of each of the arguments
978 and recurse to issue an inform message for it. */
979 for (unsigned i
= 0; i
!= all_refs
.length (); ++i
)
981 /* Skip any PHIs; those could lead to infinite recursion. */
982 if (all_refs
[i
].phi ())
985 all_refs
[i
].add_offset (orng
[0], orng
[1]);
986 all_refs
[i
].inform_access (mode
);
991 /* Convert offset range and avoid including a zero range since it
992 isn't necessarily meaningful. */
993 HOST_WIDE_INT diff_min
= tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node
));
994 HOST_WIDE_INT diff_max
= tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node
));
995 HOST_WIDE_INT minoff
;
996 HOST_WIDE_INT maxoff
= diff_max
;
997 if (wi::fits_shwi_p (aref
.offrng
[0]))
998 minoff
= aref
.offrng
[0].to_shwi ();
1000 minoff
= aref
.offrng
[0] < 0 ? diff_min
: diff_max
;
1002 if (wi::fits_shwi_p (aref
.offrng
[1]))
1003 maxoff
= aref
.offrng
[1].to_shwi ();
1005 if (maxoff
<= diff_min
|| maxoff
>= diff_max
)
1006 /* Avoid mentioning an upper bound that's equal to or in excess
1007 of the maximum of ptrdiff_t. */
1010 /* Convert size range and always include it since all sizes are
1012 unsigned long long minsize
= 0, maxsize
= 0;
1013 if (wi::fits_shwi_p (aref
.sizrng
[0])
1014 && wi::fits_shwi_p (aref
.sizrng
[1]))
1016 minsize
= aref
.sizrng
[0].to_shwi ();
1017 maxsize
= aref
.sizrng
[1].to_shwi ();
1020 /* SIZRNG doesn't necessarily have the same range as the allocation
1021 size determined by gimple_call_alloc_size (). */
1023 if (minsize
== maxsize
)
1024 sprintf (sizestr
, "%llu", minsize
);
1026 sprintf (sizestr
, "[%llu, %llu]", minsize
, maxsize
);
1030 && (maxoff
== 0 || aref
.sizrng
[1] <= maxoff
))
1032 else if (minoff
== maxoff
)
1033 sprintf (offstr
, "%lli", (long long) minoff
);
1035 sprintf (offstr
, "[%lli, %lli]", (long long) minoff
, (long long) maxoff
);
1037 location_t loc
= UNKNOWN_LOCATION
;
1039 tree ref
= this->ref
;
1040 tree allocfn
= NULL_TREE
;
1041 if (TREE_CODE (ref
) == SSA_NAME
)
1043 gimple
*stmt
= SSA_NAME_DEF_STMT (ref
);
1047 if (is_gimple_call (stmt
))
1049 loc
= gimple_location (stmt
);
1050 if (gimple_call_builtin_p (stmt
, BUILT_IN_ALLOCA_WITH_ALIGN
))
1052 /* Strip the SSA_NAME suffix from the variable name and
1053 recreate an identifier with the VLA's original name. */
1054 ref
= gimple_call_lhs (stmt
);
1055 if (SSA_NAME_IDENTIFIER (ref
))
1057 ref
= SSA_NAME_IDENTIFIER (ref
);
1058 const char *id
= IDENTIFIER_POINTER (ref
);
1059 size_t len
= strcspn (id
, ".$");
1062 ref
= get_identifier_with_length (id
, len
);
1067 /* Except for VLAs, retrieve the allocation function. */
1068 allocfn
= gimple_call_fndecl (stmt
);
1070 allocfn
= gimple_call_fn (stmt
);
1071 if (TREE_CODE (allocfn
) == SSA_NAME
)
1073 /* For an ALLOC_CALL via a function pointer make a small
1074 effort to determine the destination of the pointer. */
1075 gimple
*def
= SSA_NAME_DEF_STMT (allocfn
);
1076 if (gimple_assign_single_p (def
))
1078 tree rhs
= gimple_assign_rhs1 (def
);
1081 else if (TREE_CODE (rhs
) == COMPONENT_REF
)
1082 allocfn
= TREE_OPERAND (rhs
, 1);
1087 else if (gimple_nop_p (stmt
))
1088 /* Handle DECL_PARM below. */
1089 ref
= SSA_NAME_VAR (ref
);
1090 else if (is_gimple_assign (stmt
)
1091 && (gimple_assign_rhs_code (stmt
) == MIN_EXPR
1092 || gimple_assign_rhs_code (stmt
) == MAX_EXPR
))
1094 /* MIN or MAX_EXPR here implies a reference to a known object
1095 and either an unknown or distinct one (the latter being
1096 the result of an invalid relational expression). Determine
1097 the identity of the former and point to it in the note.
1098 TODO: Consider merging with PHI handling. */
1099 access_ref arg_ref
[2];
1100 tree arg
= gimple_assign_rhs1 (stmt
);
1101 compute_objsize (arg
, /* ostype = */ 1 , &arg_ref
[0]);
1102 arg
= gimple_assign_rhs2 (stmt
);
1103 compute_objsize (arg
, /* ostype = */ 1 , &arg_ref
[1]);
1105 /* Use the argument that references a known object with more
1108 = (!arg_ref
[0].ref
|| !arg_ref
[0].base0
1109 || (arg_ref
[0].base0
&& arg_ref
[1].base0
1110 && (arg_ref
[0].size_remaining ()
1111 < arg_ref
[1].size_remaining ())));
1113 arg_ref
[idx
].offrng
[0] = offrng
[0];
1114 arg_ref
[idx
].offrng
[1] = offrng
[1];
1115 arg_ref
[idx
].inform_access (mode
);
1121 loc
= DECL_SOURCE_LOCATION (ref
);
1122 else if (EXPR_P (ref
) && EXPR_HAS_LOCATION (ref
))
1123 loc
= EXPR_LOCATION (ref
);
1124 else if (TREE_CODE (ref
) != IDENTIFIER_NODE
1125 && TREE_CODE (ref
) != SSA_NAME
)
1128 if (mode
== access_read_write
|| mode
== access_write_only
)
1130 if (allocfn
== NULL_TREE
)
1133 inform (loc
, "at offset %s into destination object %qE of size %s",
1134 offstr
, ref
, sizestr
);
1136 inform (loc
, "destination object %qE of size %s", ref
, sizestr
);
1142 "at offset %s into destination object of size %s "
1143 "allocated by %qE", offstr
, sizestr
, allocfn
);
1145 inform (loc
, "destination object of size %s allocated by %qE",
1150 if (mode
== access_read_only
)
1152 if (allocfn
== NULL_TREE
)
1155 inform (loc
, "at offset %s into source object %qE of size %s",
1156 offstr
, ref
, sizestr
);
1158 inform (loc
, "source object %qE of size %s", ref
, sizestr
);
1165 "at offset %s into source object of size %s allocated by %qE",
1166 offstr
, sizestr
, allocfn
);
1168 inform (loc
, "source object of size %s allocated by %qE",
1173 if (allocfn
== NULL_TREE
)
1176 inform (loc
, "at offset %s into object %qE of size %s",
1177 offstr
, ref
, sizestr
);
1179 inform (loc
, "object %qE of size %s", ref
, sizestr
);
1186 "at offset %s into object of size %s allocated by %qE",
1187 offstr
, sizestr
, allocfn
);
1189 inform (loc
, "object of size %s allocated by %qE",
/* NOTE(review): this extract is line-fragmented and the embedded
   original line numbering has gaps (e.g. 1224-1229 absent), so
   braces, guards, and return statements are missing from view.
   Code below is documented as-is; do not treat it as complete.  */
1193 /* Set a bit for the PHI in VISITED and return true if it wasn't
1197 ssa_name_limit_t::visit_phi (tree ssa_name
)
/* Lazily allocate the visited-set bitmap on first use -- presumably
   guarded by a null check in the elided lines; TODO confirm.  */
1200 visited
= BITMAP_ALLOC (NULL
);
1202 /* Return false if SSA_NAME has already been visited. */
1203 return bitmap_set_bit (visited
, SSA_NAME_VERSION (ssa_name
));
1206 /* Clear a bit for the PHI in VISITED. */
1209 ssa_name_limit_t::leave_phi (tree ssa_name
)
1211 /* Return false if SSA_NAME has already been visited. */
1212 bitmap_clear_bit (visited
, SSA_NAME_VERSION (ssa_name
));
1215 /* Return false if the SSA_NAME chain length counter has reached
1216 the limit, otherwise increment the counter and return true. */
1219 ssa_name_limit_t::next ()
1221 /* Return a negative value to let caller avoid recursing beyond
1222 the specified limit. */
1223 if (ssa_def_max
== 0)
1230 /* If the SSA_NAME has already been "seen" return a positive value.
1231 Otherwise add it to VISITED. If the SSA_NAME limit has been
1232 reached, return a negative value. Otherwise return zero. */
1235 ssa_name_limit_t::next_phi (tree ssa_name
)
1238 gimple
*def_stmt
= SSA_NAME_DEF_STMT (ssa_name
);
1239 /* Return a positive value if the PHI has already been visited. */
1240 if (gimple_code (def_stmt
) == GIMPLE_PHI
1241 && !visit_phi (ssa_name
))
1245 /* Return a negative value to let caller avoid recursing beyond
1246 the specified limit. */
1247 if (ssa_def_max
== 0)
/* Dtor: release the PHI visited-set bitmap if one was allocated.  */
1255 ssa_name_limit_t::~ssa_name_limit_t ()
1258 BITMAP_FREE (visited
);
1261 /* Default ctor. Initialize object with pointers to the range_query
1262 and cache_type instances to use or null. */
/* All statistics counters (HITS, MISSES, FAILURES, DEPTH, MAX_DEPTH)
   are value-initialized to zero by the empty mem-initializers.  */
1264 pointer_query::pointer_query (range_query
*qry
/* = NULL */,
1265 cache_type
*cache
/* = NULL */)
1266 : rvals (qry
), var_cache (cache
), hits (), misses (),
1267 failures (), depth (), max_depth ()
1272 /* Return a pointer to the cached access_ref instance for the SSA_NAME
1273 PTR if it's there or null otherwise. */
/* NOTE(review): the early returns between the numbered lines were
   lost in extraction; only the index computation and bounds checks
   are visible here.  */
1276 pointer_query::get_ref (tree ptr
, int ostype
/* = 1 */) const
/* The level-1 key packs the SSA version with the low bit of the
   Object Size Type (see pointer_query::put_ref for the layout).  */
1284 unsigned version
= SSA_NAME_VERSION (ptr
);
1285 unsigned idx
= version
<< 1 | (ostype
& 1);
1286 if (var_cache
->indices
.length () <= idx
)
1292 unsigned cache_idx
= var_cache
->indices
[idx
];
1293 if (var_cache
->access_refs
.length () <= cache_idx
)
1299 access_ref
&cache_ref
= var_cache
->access_refs
[cache_idx
];
1310 /* Retrieve the access_ref instance for a variable from the cache if it's
1311 there or compute it and insert it into the cache if it's nonnull. */
1314 pointer_query::get_ref (tree ptr
, access_ref
*pref
, int ostype
/* = 1 */)
/* Only SSA_NAMEs are cached; VERSION zero marks PTR as uncacheable.  */
1316 const unsigned version
1317 = TREE_CODE (ptr
) == SSA_NAME
? SSA_NAME_VERSION (ptr
) : 0;
1319 if (var_cache
&& version
)
/* Probe the two-level cache: INDICES maps the packed version/ostype
   key to an index into ACCESS_REFS, stored off by one so that zero
   can mean "no entry".  */
1321 unsigned idx
= version
<< 1 | (ostype
& 1);
1322 if (idx
< var_cache
->indices
.length ())
1324 unsigned cache_idx
= var_cache
->indices
[idx
] - 1;
1325 if (cache_idx
< var_cache
->access_refs
.length ()
1326 && var_cache
->access_refs
[cache_idx
].ref
)
/* Cache hit: copy the cached access_ref into *PREF.  */
1329 *pref
= var_cache
->access_refs
[cache_idx
];
/* Cache miss: compute the result; the pointer_query overload of
   compute_objsize also inserts it into this cache.  */
1337 if (!compute_objsize (ptr
, ostype
, pref
, this))
1346 /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
1350 pointer_query::put_ref (tree ptr
, const access_ref
&ref
, int ostype
/* = 1 */)
1352 /* Only add populated/valid entries. */
1353 if (!var_cache
|| !ref
.ref
|| ref
.sizrng
[0] < 0)
1356 /* Add REF to the two-level cache. */
1357 unsigned version
= SSA_NAME_VERSION (ptr
);
1358 unsigned idx
= version
<< 1 | (ostype
& 1);
1360 /* Grow INDICES if necessary. An index is valid if it's nonzero.
1361 Its value minus one is the index into ACCESS_REFS. Not all
1362 entries are valid. */
1363 if (var_cache
->indices
.length () <= idx
)
1364 var_cache
->indices
.safe_grow_cleared (idx
+ 1);
/* First insertion for this key: point it at a fresh ACCESS_REFS
   slot (stored off by one so zero can mean "invalid").  */
1366 if (!var_cache
->indices
[idx
])
1367 var_cache
->indices
[idx
] = var_cache
->access_refs
.length () + 1;
1369 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
1370 REF member is nonnull. All entries except for the last two
1371 are valid. Once nonnull, the REF value must stay unchanged. */
1372 unsigned cache_idx
= var_cache
->indices
[idx
];
1373 if (var_cache
->access_refs
.length () <= cache_idx
)
1374 var_cache
->access_refs
.safe_grow_cleared (cache_idx
+ 1);
1376 access_ref
&cache_ref
= var_cache
->access_refs
[cache_idx
];
/* A previously cached entry for this key must refer to the same
   object (the REF member is immutable once set).  */
1379 gcc_checking_assert (cache_ref
.ref
== ref
.ref
);
1386 /* Flush the cache if it's nonnull. */
1389 pointer_query::flush_cache ()
/* Release both levels of the cache; vec::release frees the
   underlying storage.  */
1393 var_cache
->indices
.release ();
1394 var_cache
->access_refs
.release ();
1397 /* Dump statistics and, optionally, cache contents to DUMP_FILE. */
1400 pointer_query::dump (FILE *dump_file
, bool contents
/* = false */)
/* First pass over the level-1 index: count used entries and valid
   access_refs for the statistics summary below.  */
1402 unsigned nused
= 0, nrefs
= 0;
1403 unsigned nidxs
= var_cache
->indices
.length ();
1404 for (unsigned i
= 0; i
!= nidxs
; ++i
)
1406 unsigned ari
= var_cache
->indices
[i
];
1412 const access_ref
&aref
= var_cache
->access_refs
[ari
];
1419 fprintf (dump_file
, "pointer_query counters:\n"
1420 " index cache size: %u\n"
1421 " index entries: %u\n"
1422 " access cache size: %u\n"
1423 " access entries: %u\n"
1429 var_cache
->access_refs
.length (), nrefs
,
1430 hits
, misses
, failures
, max_depth
);
/* Bail unless the caller asked for contents and there are some.  */
1432 if (!contents
|| !nidxs
)
1435 fputs ("\npointer_query cache contents:\n", dump_file
);
/* Second pass: print one line per valid cache entry.  */
1437 for (unsigned i
= 0; i
!= nidxs
; ++i
)
1439 unsigned ari
= var_cache
->indices
[i
];
1443 const access_ref
&aref
= var_cache
->access_refs
[ari
];
1447 /* The level-1 cache index corresponds to the SSA_NAME_VERSION
1448 shifted left by one and ORed with the Object Size Type in
1449 the lowest bit. Print the two separately. */
1450 unsigned ver
= i
>> 1;
1451 unsigned ost
= i
& 1;
1453 fprintf (dump_file
, " %u.%u[%u]: ", ver
, ost
, ari
);
1454 if (tree name
= ssa_name (ver
))
/* The SSA_NAME for this version still exists; print it.  */
1456 print_generic_expr (dump_file
, name
);
1457 fputs (" = ", dump_file
);
/* The SSA_NAME has been released; print just its version.  */
1460 fprintf (dump_file
, " _%u = ", ver
);
/* PHI-backed entries are printed as "PHI <arg, arg, ...>".  */
1462 if (gphi
*phi
= aref
.phi ())
1464 fputs ("PHI <", dump_file
);
1465 unsigned nargs
= gimple_phi_num_args (phi
);
1466 for (unsigned i
= 0; i
!= nargs
; ++i
)
1468 tree arg
= gimple_phi_arg_def (phi
, i
);
1469 print_generic_expr (dump_file
, arg
);
1471 fputs (", ", dump_file
);
1473 fputc ('>', dump_file
);
1476 print_generic_expr (dump_file
, aref
.ref
);
/* Print the offset: as a range when the bounds differ, or as
   a single signed value when nonzero.  */
1478 if (aref
.offrng
[0] != aref
.offrng
[1])
1479 fprintf (dump_file
, " + [%lli, %lli]",
1480 (long long) aref
.offrng
[0].to_shwi (),
1481 (long long) aref
.offrng
[1].to_shwi ());
1482 else if (aref
.offrng
[0] != 0)
1483 fprintf (dump_file
, " %c %lli",
1484 aref
.offrng
[0] < 0 ? '-' : '+',
1485 (long long) aref
.offrng
[0].to_shwi ());
1487 fputc ('\n', dump_file
);
1490 fputc ('\n', dump_file
);
1493 /* A helper of compute_objsize_r() to determine the size from an assignment
1494 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. On success
1495 set PREF->REF to the operand with more or less space remaining,
1496 respectively, if both refer to the same (sub)object, or to PTR if they
1497 might not, and return true. Otherwise, if the identity of neither
1498 operand can be determined, return false. */
1501 handle_min_max_size (tree ptr
, int ostype
, access_ref
*pref
,
1502 ssa_name_limit_t
&snlim
, pointer_query
*qry
)
1504 const gimple
*stmt
= SSA_NAME_DEF_STMT (ptr
);
1505 const tree_code code
= gimple_assign_rhs_code (stmt
);
1507 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
1508 Determine the size/offset of each and use the one with more or less
1509 space remaining, respectively. If either fails, use the information
1510 determined from the other instead, adjusted up or down as appropriate
1511 for the expression. */
/* Seed both working access_refs from *PREF.  */
1512 access_ref aref
[2] = { *pref
, *pref
 };
1513 tree arg1
= gimple_assign_rhs1 (stmt
);
/* On failure for operand 0, fall back to a conservative "unknown
   base, maximum size/offset" result.  */
1514 if (!compute_objsize_r (arg1
, ostype
, &aref
[0], snlim
, qry
))
1516 aref
[0].base0
= false;
1517 aref
[0].offrng
[0] = aref
[0].offrng
[1] = 0;
1518 aref
[0].add_max_offset ();
1519 aref
[0].set_max_size_range ();
1522 tree arg2
= gimple_assign_rhs2 (stmt
);
/* Likewise for operand 1.  */
1523 if (!compute_objsize_r (arg2
, ostype
, &aref
[1], snlim
, qry
))
1525 aref
[1].base0
= false;
1526 aref
[1].offrng
[0] = aref
[1].offrng
[1] = 0;
1527 aref
[1].add_max_offset ();
1528 aref
[1].set_max_size_range ();
1531 if (!aref
[0].ref
&& !aref
[1].ref
)
1532 /* Fail if the identity of neither argument could be determined. */
1536 if (aref
[0].ref
&& aref
[0].base0
)
1538 if (aref
[1].ref
&& aref
[1].base0
)
1540 /* If the object referenced by both arguments has been determined
1541 set *PREF to the one with more or less space remaining, whichever
1542 is appropriate for CODE.
1543 TODO: Indicate when the objects are distinct so it can be
/* I0 indexes the operand favored for MAX_EXPR; I1 the other.  */
1545 i0
= code
== MAX_EXPR
;
1546 const bool i1
= !i0
;
1548 if (aref
[i0
].size_remaining () < aref
[i1
].size_remaining ())
1553 if (aref
[i0
].ref
!= aref
[i1
].ref
)
1554 /* If the operands don't refer to the same (sub)object set
1555 PREF->REF to the SSA_NAME from which STMT was obtained
1556 so that both can be identified in a diagnostic. */
1562 /* If only the object referenced by one of the arguments could be
1563 determined, use it and... */
1570 const bool i1
= !i0
;
1571 /* ...see if the offset obtained from the other pointer can be used
1572 to tighten up the bound on the offset obtained from the first. */
1573 if ((code
== MAX_EXPR
&& aref
[i1
].offrng
[1] < aref
[i0
].offrng
[0])
1574 || (code
== MIN_EXPR
&& aref
[i0
].offrng
[0] < aref
[i1
].offrng
[1]))
1576 pref
->offrng
[0] = aref
[i0
].offrng
[0];
1577 pref
->offrng
[1] = aref
[i0
].offrng
[1];
1580 /* Replace PTR->REF with the SSA_NAME to indicate the expression
1581 might not refer to the same (sub)object. */
1586 /* A helper of compute_objsize_r() to determine the size from ARRAY_REF
1587 AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true
1588 on success and false on failure. */
1591 handle_array_ref (tree aref
, bool addr
, int ostype
, access_ref
*pref
,
1592 ssa_name_limit_t
&snlim
, pointer_query
*qry
)
1594 gcc_assert (TREE_CODE (aref
) == ARRAY_REF
);
1598 tree arefop
= TREE_OPERAND (aref
, 0);
1599 tree reftype
= TREE_TYPE (arefop
);
1600 if (!addr
&& TREE_CODE (TREE_TYPE (reftype
)) == POINTER_TYPE
)
1601 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
/* Determine the size of the underlying array object first.  */
1605 if (!compute_objsize_r (arefop
, ostype
, pref
, snlim
, qry
))
/* Determine the range of the array index (operand 1).  */
1609 tree off
= pref
->eval (TREE_OPERAND (aref
, 1));
1610 range_query
*const rvals
= qry
? qry
->rvals
: NULL
;
1611 if (!get_offset_range (off
, NULL
, orng
, rvals
))
1613 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1614 orng
[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node
));
1615 orng
[0] = -orng
[1] - 1;
1618 /* Convert the array index range determined above to a byte
1620 tree lowbnd
= array_ref_low_bound (aref
);
1621 if (!integer_zerop (lowbnd
) && tree_fits_uhwi_p (lowbnd
))
1623 /* Adjust the index by the low bound of the array domain
1624 (normally zero but 1 in Fortran). */
1625 unsigned HOST_WIDE_INT lb
= tree_to_uhwi (lowbnd
);
/* Scale the index range by the element size; bail to the maximum
   offset when the element size isn't a constant.  */
1630 tree eltype
= TREE_TYPE (aref
);
1631 tree tpsize
= TYPE_SIZE_UNIT (eltype
);
1632 if (!tpsize
|| TREE_CODE (tpsize
) != INTEGER_CST
)
1634 pref
->add_max_offset ();
1638 offset_int sz
= wi::to_offset (tpsize
);
1642 if (ostype
&& TREE_CODE (eltype
) == ARRAY_TYPE
)
1644 /* Except for the permissive raw memory functions which use
1645 the size of the whole object determined above, use the size
1646 of the referenced array. Because the overall offset is from
1647 the beginning of the complete array object add this overall
1648 offset to the size of array. */
1649 offset_int sizrng
[2] =
1651 pref
->offrng
[0] + orng
[0] + sz
,
1652 pref
->offrng
[1] + orng
[1] + sz
/* Normalize the bounds and only tighten (never widen) PREF's
   size range with them.  */
1654 if (sizrng
[1] < sizrng
[0])
1655 std::swap (sizrng
[0], sizrng
[1]);
1656 if (sizrng
[0] >= 0 && sizrng
[0] <= pref
->sizrng
[0])
1657 pref
->sizrng
[0] = sizrng
[0];
1658 if (sizrng
[1] >= 0 && sizrng
[1] <= pref
->sizrng
[1])
1659 pref
->sizrng
[1] = sizrng
[1];
/* Fold the byte offset range into the overall offset.  */
1662 pref
->add_offset (orng
[0], orng
[1]);
1666 /* A helper of compute_objsize_r() to determine the size from MEM_REF
1667 MREF. Return true on success and false on failure. */
1670 handle_mem_ref (tree mref
, int ostype
, access_ref
*pref
,
1671 ssa_name_limit_t
&snlim
, pointer_query
*qry
)
1673 gcc_assert (TREE_CODE (mref
) == MEM_REF
);
1677 if (VECTOR_TYPE_P (TREE_TYPE (mref
)))
1679 /* Hack: Handle MEM_REFs of vector types as those to complete
1680 objects; those may be synthesized from multiple assignments
1681 to consecutive data members (see PR 93200 and 96963).
1682 FIXME: Vectorized assignments should only be present after
1683 vectorization so this hack is only necessary after it has
1684 run and could be avoided in calls from prior passes (e.g.,
1686 FIXME: Deal with this more generally, e.g., by marking up
1687 such MEM_REFs at the time they're created. */
/* Determine the size of the pointed-to object first.  */
1691 tree mrefop
= TREE_OPERAND (mref
, 0);
1692 if (!compute_objsize_r (mrefop
, ostype
, pref
, snlim
, qry
))
/* Add the byte offset (operand 1) to the overall offset, falling
   back to the maximum representable range when it's unknown.  */
1696 tree off
= pref
->eval (TREE_OPERAND (mref
, 1));
1697 range_query
*const rvals
= qry
? qry
->rvals
: NULL
;
1698 if (!get_offset_range (off
, NULL
, orng
, rvals
))
1700 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1701 orng
[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node
));
1702 orng
[0] = -orng
[1] - 1;
1705 pref
->add_offset (orng
[0], orng
[1]);
1709 /* Helper to compute the size of the object referenced by the PTR
1710 expression which must have pointer type, using Object Size type
1711 OSTYPE (only the least significant 2 bits are used).
1712 On success, sets PREF->REF to the DECL of the referenced object
1713 if it's unique, otherwise to null, PREF->OFFRNG to the range of
1714 offsets into it, and PREF->SIZRNG to the range of sizes of
1716 SNLIM is used to avoid visiting the same PHI operand multiple
1717 times, and, when nonnull, RVALS to determine range information.
1718 Returns true on success, false when a meaningful size (or range)
1719 cannot be determined.
1721 The function is intended for diagnostics and should not be used
1722 to influence code generation or optimization. */
/* NOTE(review): the extraction dropped many interior lines (gaps in
   the original numbering); the dispatch structure below is visible
   but several early returns/guards are not.  */
1725 compute_objsize_r (tree ptr
, int ostype
, access_ref
*pref
,
1726 ssa_name_limit_t
&snlim
, pointer_query
*qry
)
/* Strip an outer ADDR_EXPR, remembering that PTR was its operand.  */
1730 const bool addr
= TREE_CODE (ptr
) == ADDR_EXPR
;
1734 ptr
= TREE_OPERAND (ptr
, 0);
1741 /* Reset the offset in case it was set by a prior call and not
1742 cleared by the caller. The offset is only adjusted after
1743 the identity of the object has been determined. */
1744 pref
->offrng
[0] = pref
->offrng
[1] = 0;
1746 if (!addr
&& POINTER_TYPE_P (TREE_TYPE (ptr
)))
1748 /* Set the maximum size if the reference is to the pointer
1749 itself (as opposed to what it points to), and clear
1750 BASE0 since the offset isn't necessarily zero-based. */
1751 pref
->set_max_size_range ();
1752 pref
->base0
= false;
1756 /* Valid offsets into the object are nonnegative. */
/* For DECLs with a constant initializer use its exact size;
   otherwise fall back to the maximum size range.  */
1759 if (tree size
= decl_init_size (ptr
, false))
1760 if (TREE_CODE (size
) == INTEGER_CST
)
1762 pref
->sizrng
[0] = pref
->sizrng
[1] = wi::to_offset (size
);
1766 pref
->set_max_size_range ();
/* Dispatch on the tree code of PTR.  */
1770 const tree_code code
= TREE_CODE (ptr
);
1771 range_query
*const rvals
= qry
? qry
->rvals
: NULL
;
1773 if (code
== BIT_FIELD_REF
)
/* Size of the container, plus the bit offset (operand 2)
   converted to bytes.  */
1775 tree ref
= TREE_OPERAND (ptr
, 0);
1776 if (!compute_objsize_r (ref
, ostype
, pref
, snlim
, qry
))
1779 offset_int off
= wi::to_offset (pref
->eval (TREE_OPERAND (ptr
, 2)));
1780 pref
->add_offset (off
/ BITS_PER_UNIT
);
1784 if (code
== COMPONENT_REF
)
1786 tree ref
= TREE_OPERAND (ptr
, 0);
1787 if (TREE_CODE (TREE_TYPE (ref
)) == UNION_TYPE
)
1788 /* In accesses through union types consider the entire unions
1789 rather than just their members. */
1791 tree field
= TREE_OPERAND (ptr
, 1);
1795 /* In OSTYPE zero (for raw memory functions like memcpy), use
1796 the maximum size instead if the identity of the enclosing
1797 object cannot be determined. */
1798 if (!compute_objsize_r (ref
, ostype
, pref
, snlim
, qry
))
1801 /* Otherwise, use the size of the enclosing object and add
1802 the offset of the member to the offset computed so far. */
1803 tree offset
= byte_position (field
);
1804 if (TREE_CODE (offset
) == INTEGER_CST
)
1805 pref
->add_offset (wi::to_offset (offset
));
1807 pref
->add_max_offset ();
1810 /* REF may have been already set to an SSA_NAME earlier
1811 to provide better context for diagnostics. In that case,
1812 leave it unchanged. */
1819 if (!addr
&& POINTER_TYPE_P (TREE_TYPE (field
)))
1821 /* Set maximum size if the reference is to the pointer member
1822 itself (as opposed to what it points to). */
1823 pref
->set_max_size_range ();
1827 /* SAM is set for array members that might need special treatment. */
1828 special_array_member sam
;
1829 tree size
= component_ref_size (ptr
, &sam
);
1830 if (sam
== special_array_member::int_0
)
1831 pref
->sizrng
[0] = pref
->sizrng
[1] = 0;
1832 else if (!pref
->trail1special
&& sam
== special_array_member::trail_1
)
1833 pref
->sizrng
[0] = pref
->sizrng
[1] = 1;
1834 else if (size
&& TREE_CODE (size
) == INTEGER_CST
)
1835 pref
->sizrng
[0] = pref
->sizrng
[1] = wi::to_offset (size
);
1838 /* When the size of the member is unknown it's either a flexible
1839 array member or a trailing special array member (either zero
1840 length or one-element). Set the size to the maximum minus
1841 the constant size of the type. */
1842 pref
->sizrng
[0] = 0;
1843 pref
->sizrng
[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node
));
1844 if (tree recsize
= TYPE_SIZE_UNIT (TREE_TYPE (ref
)))
1845 if (TREE_CODE (recsize
) == INTEGER_CST
)
1846 pref
->sizrng
[1] -= wi::to_offset (recsize
);
/* ARRAY_REF and MEM_REF are handled by dedicated helpers above.  */
1851 if (code
== ARRAY_REF
)
1852 return handle_array_ref (ptr
, addr
, ostype
, pref
, snlim
, qry
);
1854 if (code
== MEM_REF
)
1855 return handle_mem_ref (ptr
, ostype
, pref
, snlim
, qry
);
1857 if (code
== TARGET_MEM_REF
)
1859 tree ref
= TREE_OPERAND (ptr
, 0);
1860 if (!compute_objsize_r (ref
, ostype
, pref
, snlim
, qry
))
1863 /* TODO: Handle remaining operands. Until then, add maximum offset. */
1865 pref
->add_max_offset ();
1869 if (code
== INTEGER_CST
)
1871 /* Pointer constants other than null are most likely the result
1872 of erroneous null pointer addition/subtraction. Set size to
1873 zero. For null pointers, set size to the maximum for now
1874 since those may be the result of jump threading. */
1875 if (integer_zerop (ptr
))
1876 pref
->set_max_size_range ();
1878 pref
->sizrng
[0] = pref
->sizrng
[1] = 0;
1884 if (code
== STRING_CST
)
/* TREE_STRING_LENGTH includes the terminating nul for C strings.  */
1886 pref
->sizrng
[0] = pref
->sizrng
[1] = TREE_STRING_LENGTH (ptr
);
1891 if (code
== POINTER_PLUS_EXPR
)
1893 tree ref
= TREE_OPERAND (ptr
, 0);
1894 if (!compute_objsize_r (ref
, ostype
, pref
, snlim
, qry
))
1897 /* Clear DEREF since the offset is being applied to the target
1898 of the dereference. */
1902 tree off
= pref
->eval (TREE_OPERAND (ptr
, 1));
1903 if (get_offset_range (off
, NULL
, orng
, rvals
))
1904 pref
->add_offset (orng
[0], orng
[1]);
1906 pref
->add_max_offset ();
1910 if (code
== VIEW_CONVERT_EXPR
)
/* Look through the conversion at the underlying object.  */
1912 ptr
= TREE_OPERAND (ptr
, 0);
1913 return compute_objsize_r (ptr
, ostype
, pref
, snlim
, qry
);
1916 if (code
== SSA_NAME
)
1921 /* Only process an SSA_NAME if the recursion limit has not yet
/* Track the deepest recursion reached for the dump statistics.  */
1926 qry
->max_depth
= qry
->depth
;
1927 if (const access_ref
*cache_ref
= qry
->get_ref (ptr
))
1929 /* If the pointer is in the cache set *PREF to what it refers
1930 to and return success.
1931 FIXME: BNDRNG is determined by each access and so it doesn't
1932 belong in access_ref. Until the design is changed, keep it
1934 const offset_int bndrng
[2] = { pref
->bndrng
[0], pref
->bndrng
[1] };
1936 pref
->bndrng
[0] = bndrng
[0];
1937 pref
->bndrng
[1] = bndrng
[1];
1942 gimple
*stmt
= SSA_NAME_DEF_STMT (ptr
);
1943 if (is_gimple_call (stmt
))
1945 /* If STMT is a call to an allocation function get the size
1946 from its argument(s). If successful, also set *PREF->REF
1947 to PTR for the caller to include in diagnostics. */
1949 if (gimple_call_alloc_size (stmt
, wr
, rvals
))
1952 pref
->sizrng
[0] = offset_int::from (wr
[0], UNSIGNED
);
1953 pref
->sizrng
[1] = offset_int::from (wr
[1], UNSIGNED
);
1954 /* Constrain both bounds to a valid size. */
1955 offset_int maxsize
= wi::to_offset (max_object_size ());
1956 if (pref
->sizrng
[0] > maxsize
)
1957 pref
->sizrng
[0] = maxsize
;
1958 if (pref
->sizrng
[1] > maxsize
)
1959 pref
->sizrng
[1] = maxsize
;
1963 /* For functions known to return one of their pointer arguments
1964 try to determine what the returned pointer points to, and on
1965 success add OFFRNG which was set to the offset added by
1966 the function (e.g., memchr) to the overall offset. */
1968 offset_int offrng
[2];
1969 if (tree ret
= gimple_call_return_array (stmt
, offrng
,
1972 if (!compute_objsize_r (ret
, ostype
, pref
, snlim
, qry
))
1975 /* Cap OFFRNG[1] to at most the remaining size of
1977 offset_int remrng
[2];
1978 remrng
[1] = pref
->size_remaining (remrng
);
1979 if (remrng
[1] != 0 && !past_end
)
1980 /* Decrement the size for functions that never return
1981 a past-the-end pointer. */
1984 if (remrng
[1] < offrng
[1])
1985 offrng
[1] = remrng
[1];
1986 pref
->add_offset (offrng
[0], offrng
[1]);
1990 /* For other calls that might return arbitrary pointers
1991 including into the middle of objects set the size
1992 range to maximum, clear PREF->BASE0, and also set
1993 PREF->REF to include in diagnostics. */
1994 pref
->set_max_size_range ();
1995 pref
->base0
= false;
/* Cache the result before returning.  */
1999 qry
->put_ref (ptr
, *pref
);
2003 if (gimple_nop_p (stmt
))
2005 /* For a function argument try to determine the byte size
2006 of the array from the current function declaration
2007 (e.g., attribute access or related). */
2009 bool static_array
= false;
2010 if (tree ref
= gimple_parm_array_size (ptr
, wr
, &static_array
))
2012 pref
->parmarray
= !static_array
;
2013 pref
->sizrng
[0] = offset_int::from (wr
[0], UNSIGNED
);
2014 pref
->sizrng
[1] = offset_int::from (wr
[1], UNSIGNED
);
2016 qry
->put_ref (ptr
, *pref
);
/* No array-size information on the parameter: unknown object.  */
2020 pref
->set_max_size_range ();
2021 pref
->base0
= false;
2023 qry
->put_ref (ptr
, *pref
);
2027 if (gimple_code (stmt
) == GIMPLE_PHI
)
/* Compute the result over all PHI arguments into a scratch
   access_ref, then merge it back.  */
2030 access_ref phi_ref
= *pref
;
2031 if (!pref
->get_ref (NULL
, &phi_ref
, ostype
, &snlim
, qry
))
2035 qry
->put_ref (ptr
, *pref
);
2039 if (!is_gimple_assign (stmt
))
2041 /* Clear BASE0 since the assigned pointer might point into
2042 the middle of the object, set the maximum size range and,
2043 if the SSA_NAME refers to a function argument, set
2045 pref
->base0
= false;
2046 pref
->set_max_size_range ();
/* Dispatch on the RHS code of the defining assignment.  */
2051 tree_code code
= gimple_assign_rhs_code (stmt
);
2053 if (code
== MAX_EXPR
|| code
== MIN_EXPR
)
2055 if (!handle_min_max_size (ptr
, ostype
, pref
, snlim
, qry
))
2058 qry
->put_ref (ptr
, *pref
);
2062 tree rhs
= gimple_assign_rhs1 (stmt
);
2064 if (code
== ASSERT_EXPR
)
/* Look through ASSERT_EXPRs at the asserted variable.  */
2066 rhs
= TREE_OPERAND (rhs
, 0);
2067 return compute_objsize_r (rhs
, ostype
, pref
, snlim
, qry
);
2070 if (code
== POINTER_PLUS_EXPR
2071 && TREE_CODE (TREE_TYPE (rhs
)) == POINTER_TYPE
)
2073 /* Compute the size of the object first. */
2074 if (!compute_objsize_r (rhs
, ostype
, pref
, snlim
, qry
))
/* Then add the offset (RHS2), or the maximum offset when its
   range cannot be determined.  */
2078 tree off
= gimple_assign_rhs2 (stmt
);
2079 if (get_offset_range (off
, stmt
, orng
, rvals
))
2080 pref
->add_offset (orng
[0], orng
[1]);
2082 pref
->add_max_offset ();
2084 qry
->put_ref (ptr
, *pref
);
2088 if (code
== ADDR_EXPR
|| code
== SSA_NAME
)
/* Simple copy or address-of: recurse on the RHS and cache.  */
2090 if (!compute_objsize_r (rhs
, ostype
, pref
, snlim
, qry
))
2092 qry
->put_ref (ptr
, *pref
);
2096 /* (This could also be an assignment from a nonlocal pointer.) Save
2097 PTR to mention in diagnostics but otherwise treat it as a pointer
2098 to an unknown object. */
2100 pref
->base0
= false;
2101 pref
->set_max_size_range ();
2105 /* Assume all other expressions point into an unknown object
2106 of the maximum valid size. */
2108 pref
->base0
= false;
2109 pref
->set_max_size_range ();
2110 if (TREE_CODE (ptr
) == SSA_NAME
)
2111 qry
->put_ref (ptr
, *pref
);
2115 /* A "public" wrapper around the above. Clients should use this overload
2119 compute_objsize (tree ptr
, int ostype
, access_ref
*pref
,
2120 range_query
*rvals
/* = NULL */)
2125 /* Clear and invalidate in case *PREF is being reused. */
2126 pref
->offrng
[0] = pref
->offrng
[1] = 0;
2127 pref
->sizrng
[0] = pref
->sizrng
[1] = -1;
/* SNLIM bounds the SSA_NAME chains and PHI arguments visited.  */
2129 ssa_name_limit_t snlim
;
2130 if (!compute_objsize_r (ptr
, ostype
, pref
, snlim
, &qry
))
/* Return the size of the object PTR points to, less the offset
   into it; a negative lower-bound offset is clamped to zero for
   objects with a known base.  */
2133 offset_int maxsize
= pref
->size_remaining ();
2134 if (pref
->base0
&& pref
->offrng
[0] < 0 && pref
->offrng
[1] >= 0)
2135 pref
->offrng
[0] = 0;
2136 return wide_int_to_tree (sizetype
, maxsize
);
2139 /* Transitional wrapper. The function should be removed once callers
2140 transition to the pointer_query API. */
2143 compute_objsize (tree ptr
, int ostype
, access_ref
*pref
, pointer_query
*ptr_qry
)
2151 /* Clear and invalidate in case *PREF is being reused. */
2152 pref
->offrng
[0] = pref
->offrng
[1] = 0;
2153 pref
->sizrng
[0] = pref
->sizrng
[1] = -1;
/* Same logic as the range_query overload above, but using the
   caller-supplied pointer_query (and its cache) directly.  */
2155 ssa_name_limit_t snlim
;
2156 if (!compute_objsize_r (ptr
, ostype
, pref
, snlim
, ptr_qry
))
2159 offset_int maxsize
= pref
->size_remaining ();
2160 if (pref
->base0
&& pref
->offrng
[0] < 0 && pref
->offrng
[1] >= 0)
2161 pref
->offrng
[0] = 0;
2162 return wide_int_to_tree (sizetype
, maxsize
);
2165 /* Legacy wrapper around the above. The function should be removed
2166 once callers transition to one of the two above. */
2169 compute_objsize (tree ptr
, int ostype
, tree
*pdecl
/* = NULL */,
2170 tree
*poff
/* = NULL */, range_query
*rvals
/* = NULL */)
2172 /* Set the initial offsets to zero and size to negative to indicate
2173 none has been computed yet. */
2175 tree size
= compute_objsize (ptr
, ostype
, &ref
, rvals
);
2176 if (!size
|| !ref
.base0
)
2183 *poff
= wide_int_to_tree (ptrdiff_type_node
, ref
.offrng
[ref
.offrng
[0] < 0]);