/* tree-object-size: use size_for_offset in more cases
   [official-gcc.git] / gcc / tree-object-size.cc
   blob 78faae7ad0d34b16c01fcba88e87c4a2607fdcdd  */
/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2024 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-iterator.h"
33 #include "gimple-fold.h"
34 #include "tree-cfg.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "builtins.h"
39 #include "gimplify-me.h"
40 #include "gimplify.h"
41 #include "tree-ssa-dce.h"
43 struct object_size_info
45 int object_size_type;
46 unsigned char pass;
47 bool changed;
48 bitmap visited, reexamine;
49 unsigned int *depths;
50 unsigned int *stack, *tos;
53 struct GTY(()) object_size
55 /* Estimate of bytes till the end of the object. */
56 tree size;
57 /* Estimate of the size of the whole object. */
58 tree wholesize;
61 static tree compute_object_offset (tree, const_tree);
62 static bool addr_object_size (struct object_size_info *,
63 const_tree, int, tree *, tree *t = NULL);
64 static tree alloc_object_size (const gcall *, int);
65 static tree access_with_size_object_size (const gcall *, int);
66 static tree pass_through_call (const gcall *);
67 static void collect_object_sizes_for (struct object_size_info *, tree);
68 static void expr_object_size (struct object_size_info *, tree, tree);
69 static bool merge_object_sizes (struct object_size_info *, tree, tree);
70 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
71 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
72 static void init_offset_limit (void);
73 static void check_for_plus_in_loops (struct object_size_info *, tree);
74 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
75 unsigned int);
77 /* object_sizes[0] is upper bound for the object size and number of bytes till
78 the end of the object.
79 object_sizes[1] is upper bound for the object size and number of bytes till
80 the end of the subobject (innermost array or field with address taken).
81 object_sizes[2] is lower bound for the object size and number of bytes till
82 the end of the object and object_sizes[3] lower bound for subobject.
84 For static object sizes, the object size and the bytes till the end of the
85 object are both INTEGER_CST. In the dynamic case, they are finally either a
86 gimple variable or an INTEGER_CST. */
87 static vec<object_size> object_sizes[OST_END];
89 /* Bitmaps what object sizes have been computed already. */
90 static bitmap computed[OST_END];
92 /* Maximum value of offset we consider to be addition. */
93 static unsigned HOST_WIDE_INT offset_limit;
95 /* Tell the generic SSA updater what kind of update is needed after the pass
96 executes. */
97 static unsigned todo;
99 /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
101 static inline bool
102 size_initval_p (tree val, int object_size_type)
104 return ((object_size_type & OST_MINIMUM)
105 ? integer_all_onesp (val) : integer_zerop (val));
108 /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
110 static inline bool
111 size_unknown_p (tree val, int object_size_type)
113 return ((object_size_type & OST_MINIMUM)
114 ? integer_zerop (val) : integer_all_onesp (val));
117 /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
119 static inline bool
120 size_valid_p (tree val, int object_size_type)
122 return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
125 /* Return true if VAL is usable as an object size in the object_sizes
126 vectors. */
128 static inline bool
129 size_usable_p (tree val)
131 return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
134 /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
136 static inline tree
137 size_initval (int object_size_type)
139 return ((object_size_type & OST_MINIMUM)
140 ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
143 /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
145 static inline tree
146 size_unknown (int object_size_type)
148 return ((object_size_type & OST_MINIMUM)
149 ? size_zero_node : TYPE_MAX_VALUE (sizetype));
152 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
154 static inline void
155 object_sizes_grow (int object_size_type)
157 if (num_ssa_names > object_sizes[object_size_type].length ())
158 object_sizes[object_size_type].safe_grow (num_ssa_names, true);
161 /* Release object_sizes[OBJECT_SIZE_TYPE]. */
163 static inline void
164 object_sizes_release (int object_size_type)
166 object_sizes[object_size_type].release ();
169 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
171 static inline bool
172 object_sizes_unknown_p (int object_size_type, unsigned varno)
174 return size_unknown_p (object_sizes[object_size_type][varno].size,
175 object_size_type);
178 /* Return the raw size expression for VARNO corresponding to OSI. This returns
179 the TREE_VEC as is and should only be used during gimplification. */
181 static inline object_size
182 object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
184 gcc_assert (osi->pass != 0);
185 return object_sizes[osi->object_size_type][varno];
188 /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
189 the whole object size. Use this for building size expressions based on size
190 of VARNO. */
192 static inline tree
193 object_sizes_get (struct object_size_info *osi, unsigned varno,
194 bool whole = false)
196 tree ret;
197 int object_size_type = osi->object_size_type;
199 if (whole)
200 ret = object_sizes[object_size_type][varno].wholesize;
201 else
202 ret = object_sizes[object_size_type][varno].size;
204 if (object_size_type & OST_DYNAMIC)
206 if (TREE_CODE (ret) == MODIFY_EXPR)
207 return TREE_OPERAND (ret, 0);
208 else if (TREE_CODE (ret) == TREE_VEC)
209 return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
210 else
211 gcc_checking_assert (size_usable_p (ret));
214 return ret;
217 /* Set size for VARNO corresponding to OSI to VAL. */
219 static inline void
220 object_sizes_initialize (struct object_size_info *osi, unsigned varno,
221 tree val, tree wholeval)
223 int object_size_type = osi->object_size_type;
225 object_sizes[object_size_type][varno].size = val;
226 object_sizes[object_size_type][varno].wholesize = wholeval;
229 /* Return a MODIFY_EXPR for cases where SSA and EXPR have the same type. The
230 TREE_VEC is returned only in case of PHI nodes. */
232 static tree
233 bundle_sizes (tree name, tree expr)
235 gcc_checking_assert (TREE_TYPE (name) == sizetype);
237 if (TREE_CODE (expr) == TREE_VEC)
239 TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
240 return expr;
243 gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
244 return build2 (MODIFY_EXPR, sizetype, name, expr);
247 /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
248 maximum. For static sizes, each element of TREE_VEC is always INTEGER_CST
249 throughout the computation. For dynamic sizes, each element may either be a
250 gimple variable, a MODIFY_EXPR or a TREE_VEC. The MODIFY_EXPR is for
251 expressions that need to be gimplified. TREE_VECs are special, they're
252 emitted only for GIMPLE_PHI and the PHI result variable is the last element
253 of the vector. */
255 static bool
256 object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
257 tree wholeval)
259 int object_size_type = osi->object_size_type;
260 object_size osize = object_sizes[object_size_type][varno];
261 bool changed = true;
263 tree oldval = osize.size;
264 tree old_wholeval = osize.wholesize;
266 if (object_size_type & OST_DYNAMIC)
268 if (bitmap_bit_p (osi->reexamine, varno))
270 val = bundle_sizes (oldval, val);
271 wholeval = bundle_sizes (old_wholeval, wholeval);
273 else
275 gcc_checking_assert (size_initval_p (oldval, object_size_type));
276 gcc_checking_assert (size_initval_p (old_wholeval,
277 object_size_type));
278 /* For dynamic object sizes, all object sizes that are not gimple
279 variables will need to be gimplified. */
280 if (wholeval != val && !size_usable_p (wholeval))
282 bitmap_set_bit (osi->reexamine, varno);
283 wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
285 if (!size_usable_p (val))
287 bitmap_set_bit (osi->reexamine, varno);
288 tree newval = bundle_sizes (make_ssa_name (sizetype), val);
289 if (val == wholeval)
290 wholeval = newval;
291 val = newval;
293 /* If the new value is a temporary variable, mark it for
294 reexamination. */
295 else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
296 bitmap_set_bit (osi->reexamine, varno);
299 else
301 enum tree_code code = (object_size_type & OST_MINIMUM
302 ? MIN_EXPR : MAX_EXPR);
304 val = size_binop (code, val, oldval);
305 wholeval = size_binop (code, wholeval, old_wholeval);
306 changed = (tree_int_cst_compare (val, oldval) != 0
307 || tree_int_cst_compare (old_wholeval, wholeval) != 0);
310 object_sizes[object_size_type][varno].size = val;
311 object_sizes[object_size_type][varno].wholesize = wholeval;
313 return changed;
316 /* Set temporary SSA names for object size and whole size to resolve dependency
317 loops in dynamic size computation. */
319 static inline void
320 object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
322 tree val = object_sizes_get (osi, varno);
324 if (size_initval_p (val, osi->object_size_type))
325 object_sizes_set (osi, varno,
326 make_ssa_name (sizetype),
327 make_ssa_name (sizetype));
330 /* Initialize OFFSET_LIMIT variable. */
331 static void
332 init_offset_limit (void)
334 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
335 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
336 else
337 offset_limit = -1;
338 offset_limit /= 2;
341 /* Bytes at end of the object with SZ from offset OFFSET. If WHOLESIZE is not
342 NULL_TREE, use it to get the net offset of the pointer, which should always
343 be positive and hence, be within OFFSET_LIMIT for valid offsets. */
345 static tree
346 size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE)
348 gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));
350 /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
351 offset from the whole object. */
352 if (wholesize && wholesize != sz
353 && (TREE_CODE (sz) != INTEGER_CST
354 || TREE_CODE (wholesize) != INTEGER_CST
355 || tree_int_cst_compare (sz, wholesize)))
357 gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
358 sizetype));
360 /* Restructure SZ - OFFSET as
361 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
362 WHOLESIZE + OFFSET - SZ is only allowed to be positive. */
363 tree tmp = size_binop (MAX_EXPR, wholesize, sz);
364 offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
365 offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
366 sz = tmp;
369 /* Safe to convert now, since a valid net offset should be non-negative. */
370 if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
371 offset = fold_convert (sizetype, offset);
373 if (TREE_CODE (offset) == INTEGER_CST)
375 if (integer_zerop (offset))
376 return sz;
378 /* Negative or too large offset even after adjustment, cannot be within
379 bounds of an object. */
380 if (compare_tree_int (offset, offset_limit) > 0)
381 return size_zero_node;
384 return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
387 /* Compute offset of EXPR within VAR. Return error_mark_node
388 if unknown. */
390 static tree
391 compute_object_offset (tree expr, const_tree var)
393 enum tree_code code = PLUS_EXPR;
394 tree base, off, t;
396 if (expr == var)
397 return size_zero_node;
399 switch (TREE_CODE (expr))
401 case COMPONENT_REF:
402 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
403 if (base == error_mark_node)
404 return base;
406 t = TREE_OPERAND (expr, 1);
407 off = size_binop (PLUS_EXPR,
408 component_ref_field_offset (expr),
409 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
410 / BITS_PER_UNIT));
411 break;
413 case REALPART_EXPR:
414 CASE_CONVERT:
415 case VIEW_CONVERT_EXPR:
416 case NON_LVALUE_EXPR:
417 return compute_object_offset (TREE_OPERAND (expr, 0), var);
419 case IMAGPART_EXPR:
420 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
421 if (base == error_mark_node)
422 return base;
424 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
425 break;
427 case ARRAY_REF:
428 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
429 if (base == error_mark_node)
430 return base;
432 t = TREE_OPERAND (expr, 1);
433 tree low_bound, unit_size;
434 low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
435 unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
436 if (! integer_zerop (low_bound))
437 t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
438 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
440 code = MINUS_EXPR;
441 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
443 t = fold_convert (sizetype, t);
444 off = size_binop (MULT_EXPR, unit_size, t);
445 break;
447 case MEM_REF:
448 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
449 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
451 default:
452 return error_mark_node;
455 return size_binop (code, base, off);
458 /* Returns the size of the object designated by DECL considering its
459 initializer if it either has one or if it would not affect its size,
460 otherwise the size of the object without the initializer when MIN
461 is true, else null. An object's initializer affects the object's
462 size if it's a struct type with a flexible array member. */
464 tree
465 decl_init_size (tree decl, bool min)
467 tree size = DECL_SIZE_UNIT (decl);
468 tree type = TREE_TYPE (decl);
469 if (TREE_CODE (type) != RECORD_TYPE)
470 return size;
472 tree last = last_field (type);
473 if (!last)
474 return size;
476 tree last_type = TREE_TYPE (last);
477 if (TREE_CODE (last_type) != ARRAY_TYPE
478 || TYPE_SIZE (last_type))
479 return size;
481 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
482 of the initializer and sometimes doesn't. */
483 size = TYPE_SIZE_UNIT (type);
484 tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
485 tree compsize = component_ref_size (ref);
486 if (!compsize)
487 return min ? size : NULL_TREE;
489 /* The size includes tail padding and initializer elements. */
490 tree pos = byte_position (last);
491 size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
492 return size;
495 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
496 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
497 If unknown, return size_unknown (object_size_type). */
499 static bool
500 addr_object_size (struct object_size_info *osi, const_tree ptr,
501 int object_size_type, tree *psize, tree *pwholesize)
503 tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
504 tree var_size, bytes, wholebytes;
506 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
508 /* Set to unknown and overwrite just before returning if the size
509 could be determined. */
510 *psize = size_unknown (object_size_type);
511 if (pwholesize)
512 *pwholesize = size_unknown (object_size_type);
514 pt_var = TREE_OPERAND (ptr, 0);
515 while (handled_component_p (pt_var))
516 pt_var = TREE_OPERAND (pt_var, 0);
518 if (!pt_var)
519 return false;
521 if (TREE_CODE (pt_var) == MEM_REF)
523 tree sz, wholesize;
525 if (!osi || (object_size_type & OST_SUBOBJECT) != 0
526 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
528 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
529 object_size_type & ~OST_SUBOBJECT, &sz);
530 wholesize = sz;
532 else
534 tree var = TREE_OPERAND (pt_var, 0);
535 if (osi->pass == 0)
536 collect_object_sizes_for (osi, var);
537 if (bitmap_bit_p (computed[object_size_type],
538 SSA_NAME_VERSION (var)))
540 sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
541 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
543 else
544 sz = wholesize = size_unknown (object_size_type);
546 if (!size_unknown_p (sz, object_size_type))
547 sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);
549 if (!size_unknown_p (sz, object_size_type)
550 && (TREE_CODE (sz) != INTEGER_CST
551 || compare_tree_int (sz, offset_limit) < 0))
553 pt_var_size = sz;
554 pt_var_wholesize = wholesize;
557 else if (DECL_P (pt_var))
559 pt_var_size = pt_var_wholesize
560 = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
561 if (!pt_var_size)
562 return false;
564 else if (TREE_CODE (pt_var) == STRING_CST)
565 pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
566 else
567 return false;
569 if (pt_var_size)
571 /* Validate the size determined above if it is a constant. */
572 if (TREE_CODE (pt_var_size) == INTEGER_CST
573 && compare_tree_int (pt_var_size, offset_limit) >= 0)
574 return false;
577 if (pt_var != TREE_OPERAND (ptr, 0))
579 tree var;
581 if (object_size_type & OST_SUBOBJECT)
583 var = TREE_OPERAND (ptr, 0);
585 while (var != pt_var
586 && TREE_CODE (var) != BIT_FIELD_REF
587 && TREE_CODE (var) != COMPONENT_REF
588 && TREE_CODE (var) != ARRAY_REF
589 && TREE_CODE (var) != ARRAY_RANGE_REF
590 && TREE_CODE (var) != REALPART_EXPR
591 && TREE_CODE (var) != IMAGPART_EXPR)
592 var = TREE_OPERAND (var, 0);
593 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
594 var = TREE_OPERAND (var, 0);
595 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
596 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
597 || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
598 && tree_int_cst_lt (pt_var_size,
599 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
600 var = pt_var;
601 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
603 tree v = var;
604 /* For &X->fld, compute object size if fld isn't a flexible array
605 member. */
606 bool is_flexible_array_mem_ref = false;
607 while (v && v != pt_var)
608 switch (TREE_CODE (v))
610 case ARRAY_REF:
611 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
613 tree domain
614 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
615 if (domain && TYPE_MAX_VALUE (domain))
617 v = NULL_TREE;
618 break;
621 v = TREE_OPERAND (v, 0);
622 break;
623 case REALPART_EXPR:
624 case IMAGPART_EXPR:
625 v = NULL_TREE;
626 break;
627 case COMPONENT_REF:
628 /* When the ref is not to an aggregate type, i.e, an array,
629 a record or a union, it will not have flexible size,
630 compute the object size directly. */
631 if (!AGGREGATE_TYPE_P (TREE_TYPE (v)))
633 v = NULL_TREE;
634 break;
636 /* if the ref is to a record or union type, but the type
637 does not include a flexible array recursively, compute
638 the object size directly. */
639 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (v)))
641 if (!TYPE_INCLUDES_FLEXARRAY (TREE_TYPE (v)))
643 v = NULL_TREE;
644 break;
646 else
648 v = TREE_OPERAND (v, 0);
649 break;
652 /* Now the ref is to an array type. */
653 gcc_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
654 is_flexible_array_mem_ref = array_ref_flexible_size_p (v);
655 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
656 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
657 != UNION_TYPE
658 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
659 != QUAL_UNION_TYPE)
660 break;
661 else
662 v = TREE_OPERAND (v, 0);
663 if (TREE_CODE (v) == COMPONENT_REF
664 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
665 == RECORD_TYPE)
667 /* compute object size only if v is not a
668 flexible array member. */
669 if (!is_flexible_array_mem_ref)
671 v = NULL_TREE;
672 break;
674 v = TREE_OPERAND (v, 0);
676 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
677 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
678 != UNION_TYPE
679 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
680 != QUAL_UNION_TYPE)
681 break;
682 else
683 v = TREE_OPERAND (v, 0);
684 if (v != pt_var)
685 v = NULL_TREE;
686 else
687 v = pt_var;
688 break;
689 default:
690 v = pt_var;
691 break;
693 if (v == pt_var)
694 var = pt_var;
697 else
698 var = pt_var;
700 if (var != pt_var)
702 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
703 if (!TREE_CONSTANT (var_size))
704 var_size = get_or_create_ssa_default_def (cfun, var_size);
705 if (!var_size)
706 return false;
708 else if (!pt_var_size)
709 return false;
710 else
711 var_size = pt_var_size;
712 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
713 if (bytes != error_mark_node)
715 bytes = size_for_offset (var_size, bytes);
716 if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
718 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
719 pt_var);
720 if (bytes2 != error_mark_node)
722 bytes2 = size_for_offset (pt_var_size, bytes2);
723 bytes = size_binop (MIN_EXPR, bytes, bytes2);
727 else
728 bytes = size_unknown (object_size_type);
730 wholebytes
731 = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
733 else if (!pt_var_size)
734 return false;
735 else
737 bytes = pt_var_size;
738 wholebytes = pt_var_wholesize;
741 if (!size_unknown_p (bytes, object_size_type)
742 && size_valid_p (bytes, object_size_type)
743 && !size_unknown_p (bytes, object_size_type)
744 && size_valid_p (wholebytes, object_size_type))
746 *psize = bytes;
747 if (pwholesize)
748 *pwholesize = wholebytes;
749 return true;
752 return false;
755 /* Compute __builtin_object_size for a CALL to .ACCESS_WITH_SIZE,
756 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
757 The 2nd, 3rd, and the 4th parameters of the call determine the size of
758 the CALL:
760 2nd argument REF_TO_SIZE: The reference to the size of the object,
761 3rd argument CLASS_OF_SIZE: The size referenced by the REF_TO_SIZE represents
762 0: the number of bytes;
763 1: the number of the elements of the object type;
764 4th argument TYPE_OF_SIZE: A constant 0 with its TYPE being the same as the TYPE
765 of the object referenced by REF_TO_SIZE
766 6th argument: A constant 0 with the pointer TYPE to the original flexible
767 array type.
769 The size of the element can be retrived from the TYPE of the 6th argument
770 of the call, which is the pointer to the array type. */
771 static tree
772 access_with_size_object_size (const gcall *call, int object_size_type)
774 /* If not for dynamic object size, return. */
775 if ((object_size_type & OST_DYNAMIC) == 0)
776 return size_unknown (object_size_type);
778 gcc_assert (gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE));
779 /* The type of the 6th argument type is the pointer TYPE to the original
780 flexible array type. */
781 tree pointer_to_array_type = TREE_TYPE (gimple_call_arg (call, 5));
782 gcc_assert (POINTER_TYPE_P (pointer_to_array_type));
783 tree element_type = TREE_TYPE (TREE_TYPE (pointer_to_array_type));
784 tree element_size = TYPE_SIZE_UNIT (element_type);
785 tree ref_to_size = gimple_call_arg (call, 1);
786 unsigned int class_of_size = TREE_INT_CST_LOW (gimple_call_arg (call, 2));
787 tree type = TREE_TYPE (gimple_call_arg (call, 3));
789 tree size = fold_build2 (MEM_REF, type, ref_to_size,
790 build_int_cst (ptr_type_node, 0));
792 /* If size is negative value, treat it as zero. */
793 if (!TYPE_UNSIGNED (type))
795 tree cond_expr = fold_build2 (LT_EXPR, boolean_type_node,
796 unshare_expr (size), build_zero_cst (type));
797 size = fold_build3 (COND_EXPR, integer_type_node, cond_expr,
798 build_zero_cst (type), size);
801 if (class_of_size == 1)
802 size = size_binop (MULT_EXPR,
803 fold_convert (sizetype, size),
804 fold_convert (sizetype, element_size));
805 else
806 size = fold_convert (sizetype, size);
808 if (!todo)
809 todo = TODO_update_ssa_only_virtuals;
811 return size;
814 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
815 Handles calls to functions declared with attribute alloc_size.
816 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
817 If unknown, return size_unknown (object_size_type). */
819 static tree
820 alloc_object_size (const gcall *call, int object_size_type)
822 gcc_assert (is_gimple_call (call));
824 tree calltype;
825 tree callfn = gimple_call_fndecl (call);
826 if (callfn)
827 calltype = TREE_TYPE (callfn);
828 else
829 calltype = gimple_call_fntype (call);
831 if (!calltype)
832 return size_unknown (object_size_type);
834 /* Set to positions of alloc_size arguments. */
835 int arg1 = -1, arg2 = -1;
836 tree alloc_size = lookup_attribute ("alloc_size",
837 TYPE_ATTRIBUTES (calltype));
838 if (alloc_size && TREE_VALUE (alloc_size))
840 tree p = TREE_VALUE (alloc_size);
842 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
843 if (TREE_CHAIN (p))
844 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
846 else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
847 && callfn
848 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
849 arg1 = 0;
851 /* Non-const arguments are OK here, let the caller handle constness. */
852 if (arg1 < 0
853 || (unsigned) arg1 >= gimple_call_num_args (call)
854 || (arg2 >= 0 && (unsigned) arg2 >= gimple_call_num_args (call)))
855 return size_unknown (object_size_type);
857 tree targ1 = gimple_call_arg (call, arg1);
858 if (!INTEGRAL_TYPE_P (TREE_TYPE (targ1))
859 || TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (sizetype))
860 return size_unknown (object_size_type);
861 targ1 = fold_convert (sizetype, targ1);
862 tree bytes = NULL_TREE;
863 if (arg2 >= 0)
865 tree targ2 = gimple_call_arg (call, arg2);
866 if (!INTEGRAL_TYPE_P (TREE_TYPE (targ2))
867 || TYPE_PRECISION (TREE_TYPE (targ2)) > TYPE_PRECISION (sizetype))
868 return size_unknown (object_size_type);
869 targ2 = fold_convert (sizetype, targ2);
870 bytes = size_binop (MULT_EXPR, targ1, targ2);
872 else
873 bytes = targ1;
875 return bytes ? bytes : size_unknown (object_size_type);
878 /* Compute __builtin_object_size for CALL, which is a call to either
879 BUILT_IN_STRDUP or BUILT_IN_STRNDUP; IS_STRNDUP indicates which it is.
880 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
881 If unknown, return size_unknown (object_size_type). */
883 static tree
884 strdup_object_size (const gcall *call, int object_size_type, bool is_strndup)
886 tree src = gimple_call_arg (call, 0);
887 tree sz = size_unknown (object_size_type);
888 tree n = NULL_TREE;
890 if (is_strndup)
891 n = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
892 gimple_call_arg (call, 1));
893 /* For strdup, simply emit strlen (SRC) + 1 and let the optimizer fold it the
894 way it likes. */
895 else
897 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
898 if (strlen_fn)
900 sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
901 build_call_expr (strlen_fn, 1, src));
902 todo = TODO_update_ssa_only_virtuals;
906 /* In all other cases, return the size of SRC since the object size cannot
907 exceed that. We cannot do this for OST_MINIMUM unless SRC points into a
908 string constant since otherwise the object size could go all the way down
909 to zero. */
910 if (!size_valid_p (sz, object_size_type)
911 || size_unknown_p (sz, object_size_type))
913 tree wholesrc = NULL_TREE;
914 if (TREE_CODE (src) == ADDR_EXPR)
915 wholesrc = get_base_address (TREE_OPERAND (src, 0));
917 /* If the source points within a string constant, we try to get its
918 length. */
919 if (wholesrc && TREE_CODE (wholesrc) == STRING_CST)
921 tree len = c_strlen (src, 0);
922 if (len)
923 sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node, len);
926 /* For maximum estimate, our next best guess is the object size of the
927 source. */
928 if (size_unknown_p (sz, object_size_type)
929 && !(object_size_type & OST_MINIMUM))
930 compute_builtin_object_size (src, object_size_type, &sz);
933 /* String duplication allocates at least one byte, so we should never fail
934 for OST_MINIMUM. */
935 if ((!size_valid_p (sz, object_size_type)
936 || size_unknown_p (sz, object_size_type))
937 && (object_size_type & OST_MINIMUM))
938 sz = size_one_node;
940 /* Factor in the N. */
941 return n ? fold_build2 (MIN_EXPR, sizetype, n, sz) : sz;
944 /* If object size is propagated from one of function's arguments directly
945 to its return value, return that argument for GIMPLE_CALL statement CALL.
946 Otherwise return NULL. */
948 static tree
949 pass_through_call (const gcall *call)
951 unsigned rf = gimple_call_return_flags (call);
952 if (rf & ERF_RETURNS_ARG)
954 unsigned argnum = rf & ERF_RETURN_ARG_MASK;
955 if (argnum < gimple_call_num_args (call))
956 return gimple_call_arg (call, argnum);
959 /* __builtin_assume_aligned is intentionally not marked RET1. */
960 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
961 return gimple_call_arg (call, 0);
963 return NULL_TREE;
966 /* Emit PHI nodes for size expressions fo. */
968 static void
969 emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
971 tree phires;
972 gphi *wholephi = NULL;
974 if (wholesize != size)
976 phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
977 wholephi = create_phi_node (phires, gimple_bb (stmt));
980 phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
981 gphi *phi = create_phi_node (phires, gimple_bb (stmt));
982 gphi *obj_phi = as_a <gphi *> (stmt);
984 gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
985 gcc_checking_assert (TREE_CODE (size) == TREE_VEC);
987 for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
989 gimple_seq seq = NULL;
990 tree wsz = TREE_VEC_ELT (wholesize, i);
991 tree sz = TREE_VEC_ELT (size, i);
993 /* If we built an expression, we will need to build statements
994 and insert them on the edge right away. */
995 if (TREE_CODE (wsz) != SSA_NAME)
996 wsz = force_gimple_operand (wsz, &seq, true, NULL);
997 if (TREE_CODE (sz) != SSA_NAME)
999 gimple_seq s;
1000 sz = force_gimple_operand (sz, &s, true, NULL);
1001 gimple_seq_add_seq (&seq, s);
1004 if (seq)
1005 gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);
1007 if (wholephi)
1008 add_phi_arg (wholephi, wsz,
1009 gimple_phi_arg_edge (obj_phi, i),
1010 gimple_phi_arg_location (obj_phi, i));
1012 add_phi_arg (phi, sz,
1013 gimple_phi_arg_edge (obj_phi, i),
1014 gimple_phi_arg_location (obj_phi, i));
/* Descend through EXPR and return size_unknown if it uses any SSA variable
   object_size_set or object_size_set_temp generated, which turned out to be
   size_unknown, as noted in UNKNOWNS.  */

static tree
propagate_unknowns (object_size_info *osi, tree expr, bitmap unknowns)
{
  int object_size_type = osi->object_size_type;

  switch (TREE_CODE (expr))
    {
    case SSA_NAME:
      /* A placeholder SSA name whose version is recorded in UNKNOWNS has
	 been resolved to an unknown size; poison this expression too.  */
      if (bitmap_bit_p (unknowns, SSA_NAME_VERSION (expr)))
	return size_unknown (object_size_type);
      return expr;
    case MIN_EXPR:
    case MAX_EXPR:
      {
	/* Both operands must remain known for the MIN/MAX to be known.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	res = propagate_unknowns (osi, TREE_OPERAND (expr, 1), unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    case MODIFY_EXPR:
      {
	/* Only the RHS is examined; the LHS is the placeholder being
	   assigned to.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;
	return expr;
      }
    case TREE_VEC:
      /* Per-PHI-argument size vectors: every element must be known.  */
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	{
	  tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i),
					 unknowns);
	  if (size_unknown_p (res, object_size_type))
	    return res;
	}
      return expr;
    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	/* Offset adjustments: only operand 0 (the size) is checked here;
	   operand 1 is the offset and is not descended into.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    default:
      /* Any other expression is left untouched.  */
      return expr;
    }
}
/* Walk through size expressions that need reexamination and generate
   statements for them.  */

static void
gimplify_size_expressions (object_size_info *osi)
{
  int object_size_type = osi->object_size_type;
  bitmap_iterator bi;
  unsigned int i;
  bool changed;

  /* Step 1: Propagate unknowns into expressions.  Iterate to a fixed point
     because resolving one placeholder to unknown may make other size
     expressions that reference it unknown as well.  */
  bitmap reexamine = BITMAP_ALLOC (NULL);
  bitmap_copy (reexamine, osi->reexamine);
  bitmap unknowns = BITMAP_ALLOC (NULL);
  do
    {
      changed = false;
      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
	{
	  object_size cur = object_sizes_get_raw (osi, i);

	  if (size_unknown_p (propagate_unknowns (osi, cur.size, unknowns),
			      object_size_type)
	      || size_unknown_p (propagate_unknowns (osi, cur.wholesize,
						     unknowns),
				 object_size_type))
	    {
	      /* Record the SSAs we're overwriting to propagate the
		 unknowns.  */
	      tree oldval = object_sizes_get (osi, i);
	      tree old_wholeval = object_sizes_get (osi, i, true);

	      bitmap_set_bit (unknowns, SSA_NAME_VERSION (oldval));
	      bitmap_set_bit (unknowns, SSA_NAME_VERSION (old_wholeval));
	      object_sizes_initialize (osi, i,
				       size_unknown (object_size_type),
				       size_unknown (object_size_type));
	      bitmap_clear_bit (osi->reexamine, i);
	      changed = true;
	    }
	}
      /* Refresh the iteration copy; entries resolved above are no longer
	 in osi->reexamine.  */
      bitmap_copy (reexamine, osi->reexamine);
    }
  while (changed);

  /* Release all unknowns.  These were placeholder SSA names that ended up
     unused, so they must be returned to the SSA name pool.  */
  EXECUTE_IF_SET_IN_BITMAP (unknowns, 0, i, bi)
    release_ssa_name (ssa_name (i));

  BITMAP_FREE (unknowns);
  BITMAP_FREE (reexamine);

  /* Expand all size expressions to put their definitions close to the objects
     for which size is being computed.  */
  EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
    {
      gimple_seq seq = NULL;
      object_size osize = object_sizes_get_raw (osi, i);

      gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
      enum gimple_code code = gimple_code (stmt);

      /* PHI nodes need special attention.  */
      if (code == GIMPLE_PHI)
	emit_phi_nodes (stmt, osize.size, osize.wholesize);
      else
	{
	  tree size_expr = NULL_TREE;

	  /* Bundle wholesize in with the size to gimplify if needed.  */
	  if (osize.wholesize != osize.size
	      && !size_usable_p (osize.wholesize))
	    size_expr = size_binop (COMPOUND_EXPR,
				    osize.wholesize,
				    osize.size);
	  else if (!size_usable_p (osize.size))
	    size_expr = osize.size;

	  if (size_expr)
	    {
	      gimple_stmt_iterator gsi;
	      /* For a GIMPLE_NOP definition (e.g. a default def) emit at
		 the start of the first real block; otherwise right before
		 the defining statement.  */
	      if (code == GIMPLE_NOP)
		gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	      else
		gsi = gsi_for_stmt (stmt);

	      force_gimple_operand (size_expr, &seq, true, NULL);
	      gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
	    }
	}

      /* We're done, so replace the MODIFY_EXPRs with the SSA names.  */
      object_sizes_initialize (osi, i,
			       object_sizes_get (osi, i),
			       object_sizes_get (osi, i, true));
    }
}
/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  If the declared object is known and PDECL
   is nonnull, sets *PDECL to the object's DECL.  OBJECT_SIZE_TYPE
   is the second argument to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     tree *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type < OST_END);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = size_unknown (object_size_type);

  if (! offset_limit)
    init_offset_limit ();

  /* A bare address of a declared object can be sized directly.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & OST_SUBOBJECT)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      /* For the static case, reject offsets that exceed
		 offset_limit; the dynamic case builds an expression
		 instead and needs no such bound.  */
	      if (((object_size_type & OST_DYNAMIC)
		   || (tree_fits_shwi_p (offset)
		       && compare_tree_int (offset, offset_limit) <= 0))
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize))
		{
		  *psize = size_for_offset (*psize, offset);
		  return true;
		}
	    }
	}
      return false;
    }

  struct object_size_info osi;
  osi.object_size_type = object_size_type;
  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      bitmap_iterator bi;
      unsigned int i;

      object_sizes_grow (object_size_type);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %s%sobject size for ",
		   (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
		   (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
		   (object_size_type & OST_SUBOBJECT) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);

      /* The depth-tracking fields are only used by the OST_MINIMUM loop
	 detection below; clear them otherwise.  */
      if (!(object_size_type & OST_DYNAMIC))
	{
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;
	}

      /* First pass: walk UD chains, compute object sizes that can be computed.
	 osi.reexamine bitmap at the end will contain versions of SSA_NAMES
	 that need to be reexamined.  For both static and dynamic size
	 computation, reexamination is for propagation across dependency loops.
	 The dynamic case has the additional use case where the computed
	 expression needs to be gimplified.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      if (object_size_type & OST_DYNAMIC)
	{
	  osi.pass = 1;
	  gimplify_size_expressions (&osi);
	  bitmap_clear (osi.reexamine);
	}

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & OST_MINIMUM)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      /* Whatever is still marked for reexamination has converged; record
	 it as computed.  */
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (!object_sizes_unknown_p (object_size_type, i))
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %s%sobject size ",
			 ((object_size_type & OST_MINIMUM) ? "minimum"
			  : "maximum"),
			 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
			 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
		print_generic_expr (dump_file, object_sizes_get (&osi, i),
				    dump_flags);
		fprintf (dump_file, "\n");
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
  return !size_unknown_p (*psize, object_size_type);
}
1370 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
1372 static void
1373 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
1375 int object_size_type = osi->object_size_type;
1376 unsigned int varno = SSA_NAME_VERSION (ptr);
1377 tree bytes, wholesize;
1379 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1380 gcc_assert (osi->pass == 0);
1382 if (TREE_CODE (value) == WITH_SIZE_EXPR)
1383 value = TREE_OPERAND (value, 0);
1385 /* Pointer variables should have been handled by merge_object_sizes. */
1386 gcc_assert (TREE_CODE (value) != SSA_NAME
1387 || !POINTER_TYPE_P (TREE_TYPE (value)));
1389 if (TREE_CODE (value) == ADDR_EXPR)
1390 addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
1391 else
1392 bytes = wholesize = size_unknown (object_size_type);
1394 object_sizes_set (osi, varno, bytes, wholesize);
1398 /* Compute object_sizes for PTR, defined to the result of a call. */
1400 static void
1401 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
1403 int object_size_type = osi->object_size_type;
1404 unsigned int varno = SSA_NAME_VERSION (ptr);
1405 tree bytes = NULL_TREE;
1407 gcc_assert (is_gimple_call (call));
1409 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1410 gcc_assert (osi->pass == 0);
1412 bool is_strdup = gimple_call_builtin_p (call, BUILT_IN_STRDUP);
1413 bool is_strndup = gimple_call_builtin_p (call, BUILT_IN_STRNDUP);
1414 bool is_access_with_size
1415 = gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE);
1416 if (is_strdup || is_strndup)
1417 bytes = strdup_object_size (call, object_size_type, is_strndup);
1418 else if (is_access_with_size)
1419 bytes = access_with_size_object_size (call, object_size_type);
1420 else
1421 bytes = alloc_object_size (call, object_size_type);
1423 if (!size_valid_p (bytes, object_size_type))
1424 bytes = size_unknown (object_size_type);
1426 object_sizes_set (osi, varno, bytes, bytes);
1430 /* Compute object_sizes for PTR, defined to an unknown value. */
1432 static void
1433 unknown_object_size (struct object_size_info *osi, tree ptr)
1435 int object_size_type = osi->object_size_type;
1436 unsigned int varno = SSA_NAME_VERSION (ptr);
1438 gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
1439 gcc_checking_assert (osi->pass == 0);
1440 tree bytes = size_unknown (object_size_type);
1442 object_sizes_set (osi, varno, bytes, bytes);
1446 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
1447 the object size might need reexamination later. */
1449 static bool
1450 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
1452 int object_size_type = osi->object_size_type;
1453 unsigned int varno = SSA_NAME_VERSION (dest);
1454 tree orig_bytes, wholesize;
1456 if (object_sizes_unknown_p (object_size_type, varno))
1457 return false;
1459 if (osi->pass == 0)
1460 collect_object_sizes_for (osi, orig);
1462 orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
1463 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);
1465 if (object_sizes_set (osi, varno, orig_bytes, wholesize))
1466 osi->changed = true;
1468 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  tree bytes, wholesize;
  tree op0, op1;
  bool reexamine = false;

  /* Extract the base pointer (op0) and the offset (op1) from either a
     POINTER_PLUS_EXPR or an ADDR_EXPR of a MEM_REF.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  /* Handle PTR + OFFSET here.  */
  if (size_valid_p (op1, object_size_type)
      && (TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, op0);

	  bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
	  wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
	  reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
	}
      else
	{
	  /* op0 will be ADDR_EXPR here.  We should never come here during
	     reexamination.  */
	  gcc_checking_assert (osi->pass == 0);
	  addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
	}

      /* size_for_offset doesn't make sense for -1 size, but it does for size 0
	 since the wholesize could be non-zero and a negative offset could give
	 a non-zero size.  */
      if (size_unknown_p (bytes, 0))
	;
      else if ((object_size_type & OST_DYNAMIC)
	       || bytes != wholesize
	       || compare_tree_int (op1, offset_limit) <= 0)
	bytes = size_for_offset (bytes, op1, wholesize);
      /* In the static case, with a negative offset, the best estimate for
	 minimum size is size_unknown but for maximum size, the wholesize is a
	 better estimate than size_unknown.  */
      else if (object_size_type & OST_MINIMUM)
	bytes = size_unknown (object_size_type);
      else
	bytes = wholesize;
    }
  else
    bytes = wholesize = size_unknown (object_size_type);

  /* Guard against invalid sizes produced above.  */
  if (!size_valid_p (bytes, object_size_type)
      || !size_valid_p (wholesize, object_size_type))
    bytes = wholesize = size_unknown (object_size_type);

  if (object_sizes_set (osi, varno, bytes, wholesize))
    osi->changed = true;
  return reexamine;
}
1553 /* Compute the dynamic object size for VAR. Return the result in SIZE and
1554 WHOLESIZE. */
1556 static void
1557 dynamic_object_size (struct object_size_info *osi, tree var,
1558 tree *size, tree *wholesize)
1560 int object_size_type = osi->object_size_type;
1562 if (TREE_CODE (var) == SSA_NAME)
1564 unsigned varno = SSA_NAME_VERSION (var);
1566 collect_object_sizes_for (osi, var);
1567 *size = object_sizes_get (osi, varno);
1568 *wholesize = object_sizes_get (osi, varno, true);
1570 else if (TREE_CODE (var) == ADDR_EXPR)
1571 addr_object_size (osi, var, object_size_type, size, wholesize);
1572 else
1573 *size = *wholesize = size_unknown (object_size_type);
/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (object_size_type & OST_DYNAMIC)
    {
      tree then_size, then_wholesize, else_size, else_wholesize;

      /* Skip the else branch if the then branch is already unknown; the
	 combined result would be unknown either way.  */
      dynamic_object_size (osi, then_, &then_size, &then_wholesize);
      if (!size_unknown_p (then_size, object_size_type))
	dynamic_object_size (osi, else_, &else_size, &else_wholesize);

      tree cond_size, cond_wholesize;
      if (size_unknown_p (then_size, object_size_type)
	  || size_unknown_p (else_size, object_size_type))
	cond_size = cond_wholesize = size_unknown (object_size_type);
      else
	{
	  /* Build a runtime selection between the two branch sizes, keyed
	     on the same condition as the original COND_EXPR.  */
	  cond_size = fold_build3 (COND_EXPR, sizetype,
				   gimple_assign_rhs1 (stmt),
				   then_size, else_size);
	  cond_wholesize = fold_build3 (COND_EXPR, sizetype,
					gimple_assign_rhs1 (stmt),
					then_wholesize, else_wholesize);
	}

      object_sizes_set (osi, varno, cond_size, cond_wholesize);

      return false;
    }

  /* Static case: merge the branch sizes into VAR one at a time.  */
  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes_unknown_p (object_size_type, varno))
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
/* Find size of an object passed as a parameter to the function.  */

static void
parm_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  tree parm = SSA_NAME_VAR (var);

  /* Only the dynamic pointer case gets the access-attribute treatment
     below; everything else is handled like a plain expression.  */
  if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      expr_object_size (osi, var, parm);
      return;
    }

  /* Look for access attribute.  */
  rdwr_map rdwr_idx;

  tree fndecl = cfun->decl;
  const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
  tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
  tree sz = NULL_TREE;

  /* If we have an access attribute with a usable size argument... */
  if (access && access->sizarg != UINT_MAX
      /* ... and either PARM is void * or has a type that is complete and has a
	 constant size... */
      && ((typesize && poly_int_tree_p (typesize))
	  || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
    {
      tree fnargs = DECL_ARGUMENTS (fndecl);
      tree arg = NULL_TREE;
      unsigned argpos = 0;

      /* ... then walk through the parameters to pick the size parameter and
	 safely scale it by the type size if needed.

	 TODO: we could also compute the size of VLAs where the size is
	 given by a function parameter.  */
      for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
	if (argpos == access->sizarg)
	  {
	    gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (arg)));
	    /* Use the default def of the size parameter, i.e. its value
	       on entry to the function.  */
	    sz = get_or_create_ssa_default_def (cfun, arg);
	    if (sz != NULL_TREE)
	      {
		sz = fold_convert (sizetype, sz);
		if (typesize)
		  sz = size_binop (MULT_EXPR, sz, typesize);
	      }
	    break;
	  }
    }
  if (!sz)
    sz = size_unknown (object_size_type);

  object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
}
/* Compute an object size expression for VAR, which is the result of a PHI
   node.  */

static void
phi_dynamic_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned i, num_args = gimple_phi_num_args (stmt);
  bool wholesize_needed = false;

  /* The extra space is for the PHI result at the end, which object_sizes_set
     sets for us.  */
  tree sizes = make_tree_vec (num_args + 1);
  tree wholesizes = make_tree_vec (num_args + 1);

  /* Bail out if the size of any of the PHI arguments cannot be
     determined.  */
  for (i = 0; i < num_args; i++)
    {
      /* Abnormal/EH edges cannot take the size statements that would be
	 emitted on them later; give up on those too.  */
      edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
      if (e->flags & EDGE_COMPLEX)
	break;

      tree rhs = gimple_phi_arg_def (stmt, i);
      tree size, wholesize;

      dynamic_object_size (osi, rhs, &size, &wholesize);

      if (size_unknown_p (size, object_size_type))
	break;

      if (size != wholesize)
	wholesize_needed = true;

      TREE_VEC_ELT (sizes, i) = size;
      TREE_VEC_ELT (wholesizes, i) = wholesize;
    }

  /* I < NUM_ARGS means one of the breaks above fired: discard both
     vectors and record unknown.  */
  if (i < num_args)
    {
      ggc_free (sizes);
      ggc_free (wholesizes);
      sizes = wholesizes = size_unknown (object_size_type);
    }
  /* Point to the same TREE_VEC so that we can avoid emitting two PHI
     nodes.  */
  else if (!wholesize_needed)
    {
      ggc_free (wholesizes);
      wholesizes = sizes;
    }

  object_sizes_set (osi, varno, sizes, wholesizes);
}
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is size_unknown (object_size_type).
   To differentiate addition from subtraction, ADDR_EXPR returns
   size_unknown (object_size_type) for all objects bigger than half of the
   address space, and constants less than half of the address space are
   considered addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  /* Already finalized in an earlier walk; nothing to do.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* Initialize to 0 for maximum size and M1U for minimum size so that
	     it gets immediately overridden.  */
	  object_sizes_initialize (osi, varno,
				   size_initval (object_size_type),
				   size_initval (object_size_type));
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  if (object_size_type & OST_DYNAMIC)
	    object_sizes_set_temp (osi, varno);

	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of defining statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	/* A pass-through call (e.g. memcpy returning its first argument)
	   takes the size of that argument.  */
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      unknown_object_size (osi, var);
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	parm_object_size (osi, var);
      else
	/* Uninitialized SSA names point nowhere.  */
	unknown_object_size (osi, var);
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	if (object_size_type & OST_DYNAMIC)
	  {
	    phi_dynamic_object_size (osi, var);
	    break;
	  }

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes_unknown_p (object_size_type, varno))
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Dynamic sizes use placeholder temps to return an answer, so it is always
     safe to set COMPUTED for them.  */
  if ((object_size_type & OST_DYNAMIC)
      || !reexamine || object_sizes_unknown_p (object_size_type, varno))
    {
      bitmap_set_bit (computed[object_size_type], varno);
      if (!(object_size_type & OST_DYNAMIC))
	bitmap_clear_bit (osi->reexamine, varno);
      else if (reexamine)
	bitmap_set_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      /* Revisiting VAR at a different depth means some POINTER_PLUS_EXPR
	 with a nonzero offset lies on the cycle back to it.  */
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Unwind the walk
	     stack, forcing every name on the cycle to minimum size 0.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes_set (osi, *sp, size_zero_node,
				object_sizes_get (osi, *sp, true));
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  /* Push VAR onto the walk stack with its current depth.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A nonzero increment bumps the depth, so a cycle through it
	       is seen as a depth mismatch above.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the walk stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      /* Skip offsets that are zero or exceed offset_limit; only a real,
	 in-range increment can grow a pointer in a loop.  */
      if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
	return;

      /* Seed the depth stack with the base pointer at depth 1, then walk
	 VAR's use-def chain at depth 2 looking for a cycle back.  */
      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
2069 /* Initialize data structures for the object size computation. */
2071 void
2072 init_object_sizes (void)
2074 int object_size_type;
2076 if (computed[0])
2077 return;
2079 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2081 object_sizes_grow (object_size_type);
2082 computed[object_size_type] = BITMAP_ALLOC (NULL);
2085 init_offset_limit ();
2089 /* Destroy data structures after the object size computation. */
2091 void
2092 fini_object_sizes (void)
2094 int object_size_type;
2096 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2098 object_sizes_release (object_size_type);
2099 BITMAP_FREE (computed[object_size_type]);
2103 /* Dummy valueize function. */
2105 static tree
2106 do_valueize (tree t)
2108 return t;
/* Process a __builtin_object_size or __builtin_dynamic_object_size call in
   CALL early for subobjects before any object information is lost due to
   optimization.  Insert a MIN or MAX expression of the result and
   __builtin_object_size at I so that it may be processed in the second pass.
   __builtin_dynamic_object_size is treated like __builtin_object_size here
   since we're only looking for constant bounds.  */

static void
early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  tree ost = gimple_call_arg (call, 1);
  tree lhs = gimple_call_lhs (call);
  gcc_assert (lhs != NULL_TREE);

  if (!tree_fits_uhwi_p (ost))
    return;

  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
  tree ptr = gimple_call_arg (call, 0);

  /* Only the subobject variants (OST 1 and 3) are handled early.  */
  if (object_size_type != 1 && object_size_type != 3)
    return;

  if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
    return;

  tree type = TREE_TYPE (lhs);
  tree bytes;
  if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
      || !int_fits_type_p (bytes, type))
    return;

  /* Redirect the call's result into a temporary and combine it with the
     computed constant: MAX for minimum-size queries, MIN otherwise, so the
     later pass can only tighten the bound.  */
  tree tem = make_ssa_name (type);
  gimple_call_set_lhs (call, tem);
  enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
  tree cst = fold_convert (type, bytes);
  gimple *g = gimple_build_assign (lhs, code, tem, cst);
  gsi_insert_after (i, g, GSI_NEW_STMT);
  update_stmt (call);
}
/* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
   expression and insert it at I.  Return true if it succeeds.  */

static bool
dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  gcc_assert (gimple_call_num_args (call) == 2);

  tree args[2];
  args[0] = gimple_call_arg (call, 0);
  args[1] = gimple_call_arg (call, 1);

  location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
  tree result_type = gimple_call_return_type (as_a <gcall *> (call));
  tree result = fold_builtin_call_array (loc, result_type,
					 gimple_call_fn (call), 2, args);

  if (!result)
    return false;

  /* fold_builtin_call_array may wrap the result inside a
     NOP_EXPR.  */
  STRIP_NOPS (result);
  gimplify_and_update_call_from_tree (i, result);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Simplified (dynamic)\n  ");
      print_gimple_stmt (dump_file, call, 0, dump_flags);
      fprintf (dump_file, "  to ");
      print_generic_expr (dump_file, result);
      fprintf (dump_file, "\n");
    }
  return true;
}
2188 static unsigned int
2189 object_sizes_execute (function *fun, bool early)
2191 todo = 0;
2192 auto_bitmap sdce_worklist;
2194 basic_block bb;
2195 FOR_EACH_BB_FN (bb, fun)
2197 gimple_stmt_iterator i;
2198 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
2200 tree result;
2201 bool dynamic = false;
2203 gimple *call = gsi_stmt (i);
2204 if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
2205 dynamic = true;
2206 else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
2207 continue;
2209 tree lhs = gimple_call_lhs (call);
2210 if (!lhs)
2211 continue;
2213 init_object_sizes ();
2215 /* If early, only attempt to fold
2216 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
2217 and rather than folding the builtin to the constant if any,
2218 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
2219 call result and the computed constant. Do the same for
2220 __builtin_dynamic_object_size too. */
2221 if (early)
2223 early_object_sizes_execute_one (&i, call);
2224 continue;
2227 if (dynamic)
2229 if (dynamic_object_sizes_execute_one (&i, call))
2230 continue;
2231 else
2233 /* If we could not find a suitable size expression, lower to
2234 __builtin_object_size so that we may at least get a
2235 constant lower or higher estimate. */
2236 tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
2237 gimple_call_set_fndecl (call, bosfn);
2238 update_stmt (call);
2240 if (dump_file && (dump_flags & TDF_DETAILS))
2242 print_generic_expr (dump_file, gimple_call_arg (call, 0),
2243 dump_flags);
2244 fprintf (dump_file,
2245 ": Retrying as __builtin_object_size\n");
2250 result = gimple_fold_stmt_to_constant (call, do_valueize);
2251 if (!result)
2253 tree ost = gimple_call_arg (call, 1);
2255 if (tree_fits_uhwi_p (ost))
2257 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
2259 if (object_size_type & OST_MINIMUM)
2260 result = build_zero_cst (size_type_node);
2261 else if (object_size_type < OST_END)
2262 result = fold_convert (size_type_node,
2263 integer_minus_one_node);
2266 if (!result)
2267 continue;
2270 gcc_assert (TREE_CODE (result) == INTEGER_CST);
2272 if (dump_file && (dump_flags & TDF_DETAILS))
2274 fprintf (dump_file, "Simplified\n ");
2275 print_gimple_stmt (dump_file, call, 0, dump_flags);
2276 fprintf (dump_file, " to ");
2277 print_generic_expr (dump_file, result);
2278 fprintf (dump_file, "\n");
2281 /* Propagate into all uses and fold those stmts. */
2282 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2284 replace_uses_by (lhs, result);
2285 /* Mark lhs as being possiblely DCEd. */
2286 bitmap_set_bit (sdce_worklist, SSA_NAME_VERSION (lhs));
2288 else
2289 replace_call_with_value (&i, result);
2293 fini_object_sizes ();
2294 simple_dce_from_worklist (sdce_worklist);
2295 return todo;
/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_objsz, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Late objsz pass: folds the builtins to constants (early = false).  */

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
  unsigned int execute (function *fun) final override
  {
    return object_sizes_execute (fun, false);
  }
}; // class pass_object_sizes

} // anon namespace

/* Factory used by the pass manager to instantiate the late objsz pass.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}
/* Early version of pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_early_object_sizes =
{
  GIMPLE_PASS, /* type */
  "early_objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Early objsz pass: clamps subobject sizes before information is lost
   to optimization (early = true).  Not cloneable, unlike the late pass.  */

class pass_early_object_sizes : public gimple_opt_pass
{
public:
  pass_early_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *fun) final override
  {
    return object_sizes_execute (fun, true);
  }
}; // class pass_early_object_sizes

} // anon namespace

/* Factory used by the pass manager to instantiate the early objsz pass.  */

gimple_opt_pass *
make_pass_early_object_sizes (gcc::context *ctxt)
{
  return new pass_early_object_sizes (ctxt);
}