libstdc++: Simplify std::any to fix -Wdeprecated-declarations warning
[official-gcc.git] / gcc / tree-object-size.cc
blob4c1fa9b555fa926e0d6268899fe24265b5e643e2
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-iterator.h"
33 #include "gimple-fold.h"
34 #include "tree-cfg.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "builtins.h"
39 #include "gimplify-me.h"
40 #include "gimplify.h"
42 struct object_size_info
44 int object_size_type;
45 unsigned char pass;
46 bool changed;
47 bitmap visited, reexamine;
48 unsigned int *depths;
49 unsigned int *stack, *tos;
52 struct GTY(()) object_size
54 /* Estimate of bytes till the end of the object. */
55 tree size;
56 /* Estimate of the size of the whole object. */
57 tree wholesize;
60 static tree compute_object_offset (tree, const_tree);
61 static bool addr_object_size (struct object_size_info *,
62 const_tree, int, tree *, tree *t = NULL);
63 static tree alloc_object_size (const gcall *, int);
64 static tree access_with_size_object_size (const gcall *, int);
65 static tree pass_through_call (const gcall *);
66 static void collect_object_sizes_for (struct object_size_info *, tree);
67 static void expr_object_size (struct object_size_info *, tree, tree);
68 static bool merge_object_sizes (struct object_size_info *, tree, tree);
69 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
70 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
71 static void init_offset_limit (void);
72 static void check_for_plus_in_loops (struct object_size_info *, tree);
73 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
74 unsigned int);
76 /* object_sizes[0] is upper bound for the object size and number of bytes till
77 the end of the object.
78 object_sizes[1] is upper bound for the object size and number of bytes till
79 the end of the subobject (innermost array or field with address taken).
80 object_sizes[2] is lower bound for the object size and number of bytes till
81 the end of the object and object_sizes[3] lower bound for subobject.
83 For static object sizes, the object size and the bytes till the end of the
84 object are both INTEGER_CST. In the dynamic case, they are finally either a
85 gimple variable or an INTEGER_CST. */
86 static vec<object_size> object_sizes[OST_END];
88 /* Bitmaps what object sizes have been computed already. */
89 static bitmap computed[OST_END];
91 /* Maximum value of offset we consider to be addition. */
92 static unsigned HOST_WIDE_INT offset_limit;
94 /* Tell the generic SSA updater what kind of update is needed after the pass
95 executes. */
96 static unsigned todo;
98 /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
100 static inline bool
101 size_initval_p (tree val, int object_size_type)
103 return ((object_size_type & OST_MINIMUM)
104 ? integer_all_onesp (val) : integer_zerop (val));
107 /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
109 static inline bool
110 size_unknown_p (tree val, int object_size_type)
112 return ((object_size_type & OST_MINIMUM)
113 ? integer_zerop (val) : integer_all_onesp (val));
116 /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
118 static inline bool
119 size_valid_p (tree val, int object_size_type)
121 return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
124 /* Return true if VAL is usable as an object size in the object_sizes
125 vectors. */
127 static inline bool
128 size_usable_p (tree val)
130 return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
133 /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
135 static inline tree
136 size_initval (int object_size_type)
138 return ((object_size_type & OST_MINIMUM)
139 ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
142 /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
144 static inline tree
145 size_unknown (int object_size_type)
147 return ((object_size_type & OST_MINIMUM)
148 ? size_zero_node : TYPE_MAX_VALUE (sizetype));
151 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
153 static inline void
154 object_sizes_grow (int object_size_type)
156 if (num_ssa_names > object_sizes[object_size_type].length ())
157 object_sizes[object_size_type].safe_grow (num_ssa_names, true);
160 /* Release object_sizes[OBJECT_SIZE_TYPE]. */
162 static inline void
163 object_sizes_release (int object_size_type)
165 object_sizes[object_size_type].release ();
168 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
170 static inline bool
171 object_sizes_unknown_p (int object_size_type, unsigned varno)
173 return size_unknown_p (object_sizes[object_size_type][varno].size,
174 object_size_type);
177 /* Return the raw size expression for VARNO corresponding to OSI. This returns
178 the TREE_VEC as is and should only be used during gimplification. */
180 static inline object_size
181 object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
183 gcc_assert (osi->pass != 0);
184 return object_sizes[osi->object_size_type][varno];
187 /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
188 the whole object size. Use this for building size expressions based on size
189 of VARNO. */
191 static inline tree
192 object_sizes_get (struct object_size_info *osi, unsigned varno,
193 bool whole = false)
195 tree ret;
196 int object_size_type = osi->object_size_type;
198 if (whole)
199 ret = object_sizes[object_size_type][varno].wholesize;
200 else
201 ret = object_sizes[object_size_type][varno].size;
203 if (object_size_type & OST_DYNAMIC)
205 if (TREE_CODE (ret) == MODIFY_EXPR)
206 return TREE_OPERAND (ret, 0);
207 else if (TREE_CODE (ret) == TREE_VEC)
208 return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
209 else
210 gcc_checking_assert (size_usable_p (ret));
213 return ret;
216 /* Set size for VARNO corresponding to OSI to VAL. */
218 static inline void
219 object_sizes_initialize (struct object_size_info *osi, unsigned varno,
220 tree val, tree wholeval)
222 int object_size_type = osi->object_size_type;
224 object_sizes[object_size_type][varno].size = val;
225 object_sizes[object_size_type][varno].wholesize = wholeval;
228 /* Return a MODIFY_EXPR for cases where SSA and EXPR have the same type. The
229 TREE_VEC is returned only in case of PHI nodes. */
231 static tree
232 bundle_sizes (tree name, tree expr)
234 gcc_checking_assert (TREE_TYPE (name) == sizetype);
236 if (TREE_CODE (expr) == TREE_VEC)
238 TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
239 return expr;
242 gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
243 return build2 (MODIFY_EXPR, sizetype, name, expr);
246 /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
247 maximum. For static sizes, each element of TREE_VEC is always INTEGER_CST
248 throughout the computation. For dynamic sizes, each element may either be a
249 gimple variable, a MODIFY_EXPR or a TREE_VEC. The MODIFY_EXPR is for
250 expressions that need to be gimplified. TREE_VECs are special, they're
251 emitted only for GIMPLE_PHI and the PHI result variable is the last element
252 of the vector. */
254 static bool
255 object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
256 tree wholeval)
258 int object_size_type = osi->object_size_type;
259 object_size osize = object_sizes[object_size_type][varno];
260 bool changed = true;
262 tree oldval = osize.size;
263 tree old_wholeval = osize.wholesize;
265 if (object_size_type & OST_DYNAMIC)
267 if (bitmap_bit_p (osi->reexamine, varno))
269 val = bundle_sizes (oldval, val);
270 wholeval = bundle_sizes (old_wholeval, wholeval);
272 else
274 gcc_checking_assert (size_initval_p (oldval, object_size_type));
275 gcc_checking_assert (size_initval_p (old_wholeval,
276 object_size_type));
277 /* For dynamic object sizes, all object sizes that are not gimple
278 variables will need to be gimplified. */
279 if (wholeval != val && !size_usable_p (wholeval))
281 bitmap_set_bit (osi->reexamine, varno);
282 wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
284 if (!size_usable_p (val))
286 bitmap_set_bit (osi->reexamine, varno);
287 tree newval = bundle_sizes (make_ssa_name (sizetype), val);
288 if (val == wholeval)
289 wholeval = newval;
290 val = newval;
292 /* If the new value is a temporary variable, mark it for
293 reexamination. */
294 else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
295 bitmap_set_bit (osi->reexamine, varno);
298 else
300 enum tree_code code = (object_size_type & OST_MINIMUM
301 ? MIN_EXPR : MAX_EXPR);
303 val = size_binop (code, val, oldval);
304 wholeval = size_binop (code, wholeval, old_wholeval);
305 changed = (tree_int_cst_compare (val, oldval) != 0
306 || tree_int_cst_compare (old_wholeval, wholeval) != 0);
309 object_sizes[object_size_type][varno].size = val;
310 object_sizes[object_size_type][varno].wholesize = wholeval;
312 return changed;
315 /* Set temporary SSA names for object size and whole size to resolve dependency
316 loops in dynamic size computation. */
318 static inline void
319 object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
321 tree val = object_sizes_get (osi, varno);
323 if (size_initval_p (val, osi->object_size_type))
324 object_sizes_set (osi, varno,
325 make_ssa_name (sizetype),
326 make_ssa_name (sizetype));
329 /* Initialize OFFSET_LIMIT variable. */
330 static void
331 init_offset_limit (void)
333 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
334 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
335 else
336 offset_limit = -1;
337 offset_limit /= 2;
340 /* Bytes at end of the object with SZ from offset OFFSET. If WHOLESIZE is not
341 NULL_TREE, use it to get the net offset of the pointer, which should always
342 be positive and hence, be within OFFSET_LIMIT for valid offsets. */
344 static tree
345 size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE)
347 gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));
349 /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
350 offset from the whole object. */
351 if (wholesize && wholesize != sz
352 && (TREE_CODE (sz) != INTEGER_CST
353 || TREE_CODE (wholesize) != INTEGER_CST
354 || tree_int_cst_compare (sz, wholesize)))
356 gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
357 sizetype));
359 /* Restructure SZ - OFFSET as
360 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
361 WHOLESIZE + OFFSET - SZ is only allowed to be positive. */
362 tree tmp = size_binop (MAX_EXPR, wholesize, sz);
363 offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
364 offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
365 sz = tmp;
368 /* Safe to convert now, since a valid net offset should be non-negative. */
369 if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
370 offset = fold_convert (sizetype, offset);
372 if (TREE_CODE (offset) == INTEGER_CST)
374 if (integer_zerop (offset))
375 return sz;
377 /* Negative or too large offset even after adjustment, cannot be within
378 bounds of an object. */
379 if (compare_tree_int (offset, offset_limit) > 0)
380 return size_zero_node;
383 return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
386 /* Compute offset of EXPR within VAR. Return error_mark_node
387 if unknown. */
389 static tree
390 compute_object_offset (tree expr, const_tree var)
392 enum tree_code code = PLUS_EXPR;
393 tree base, off, t;
395 if (expr == var)
396 return size_zero_node;
398 switch (TREE_CODE (expr))
400 case COMPONENT_REF:
401 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
402 if (base == error_mark_node)
403 return base;
405 t = TREE_OPERAND (expr, 1);
406 off = size_binop (PLUS_EXPR,
407 component_ref_field_offset (expr),
408 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
409 / BITS_PER_UNIT));
410 break;
412 case REALPART_EXPR:
413 CASE_CONVERT:
414 case VIEW_CONVERT_EXPR:
415 case NON_LVALUE_EXPR:
416 return compute_object_offset (TREE_OPERAND (expr, 0), var);
418 case IMAGPART_EXPR:
419 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
420 if (base == error_mark_node)
421 return base;
423 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
424 break;
426 case ARRAY_REF:
427 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
428 if (base == error_mark_node)
429 return base;
431 t = TREE_OPERAND (expr, 1);
432 tree low_bound, unit_size;
433 low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
434 unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
435 if (! integer_zerop (low_bound))
436 t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
437 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
439 code = MINUS_EXPR;
440 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
442 t = fold_convert (sizetype, t);
443 off = size_binop (MULT_EXPR, unit_size, t);
444 break;
446 case MEM_REF:
447 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
448 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
450 default:
451 return error_mark_node;
454 return size_binop (code, base, off);
457 /* Returns the size of the object designated by DECL considering its
458 initializer if it either has one or if it would not affect its size,
459 otherwise the size of the object without the initializer when MIN
460 is true, else null. An object's initializer affects the object's
461 size if it's a struct type with a flexible array member. */
463 tree
464 decl_init_size (tree decl, bool min)
466 tree size = DECL_SIZE_UNIT (decl);
467 tree type = TREE_TYPE (decl);
468 if (TREE_CODE (type) != RECORD_TYPE)
469 return size;
471 tree last = last_field (type);
472 if (!last)
473 return size;
475 tree last_type = TREE_TYPE (last);
476 if (TREE_CODE (last_type) != ARRAY_TYPE
477 || TYPE_SIZE (last_type))
478 return size;
480 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
481 of the initializer and sometimes doesn't. */
482 size = TYPE_SIZE_UNIT (type);
483 tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
484 tree compsize = component_ref_size (ref);
485 if (!compsize)
486 return min ? size : NULL_TREE;
488 /* The size includes tail padding and initializer elements. */
489 tree pos = byte_position (last);
490 size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
491 return size;
494 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
495 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
496 If unknown, return size_unknown (object_size_type). */
498 static bool
499 addr_object_size (struct object_size_info *osi, const_tree ptr,
500 int object_size_type, tree *psize, tree *pwholesize)
502 tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
503 tree var_size, bytes, wholebytes;
505 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
507 /* Set to unknown and overwrite just before returning if the size
508 could be determined. */
509 *psize = size_unknown (object_size_type);
510 if (pwholesize)
511 *pwholesize = size_unknown (object_size_type);
513 pt_var = TREE_OPERAND (ptr, 0);
514 while (handled_component_p (pt_var))
515 pt_var = TREE_OPERAND (pt_var, 0);
517 if (!pt_var)
518 return false;
520 if (TREE_CODE (pt_var) == MEM_REF)
522 tree sz, wholesize;
524 if (!osi || (object_size_type & OST_SUBOBJECT) != 0
525 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
527 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
528 object_size_type & ~OST_SUBOBJECT, &sz);
529 wholesize = sz;
531 else
533 tree var = TREE_OPERAND (pt_var, 0);
534 if (osi->pass == 0)
535 collect_object_sizes_for (osi, var);
536 if (bitmap_bit_p (computed[object_size_type],
537 SSA_NAME_VERSION (var)))
539 sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
540 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
542 else
543 sz = wholesize = size_unknown (object_size_type);
545 if (!size_unknown_p (sz, object_size_type))
546 sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);
548 if (!size_unknown_p (sz, object_size_type)
549 && (TREE_CODE (sz) != INTEGER_CST
550 || compare_tree_int (sz, offset_limit) < 0))
552 pt_var_size = sz;
553 pt_var_wholesize = wholesize;
556 else if (DECL_P (pt_var))
558 pt_var_size = pt_var_wholesize
559 = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
560 if (!pt_var_size)
561 return false;
563 else if (TREE_CODE (pt_var) == STRING_CST)
564 pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
565 else
566 return false;
568 if (pt_var_size)
570 /* Validate the size determined above if it is a constant. */
571 if (TREE_CODE (pt_var_size) == INTEGER_CST
572 && compare_tree_int (pt_var_size, offset_limit) >= 0)
573 return false;
576 if (pt_var != TREE_OPERAND (ptr, 0))
578 tree var;
580 if (object_size_type & OST_SUBOBJECT)
582 var = TREE_OPERAND (ptr, 0);
584 while (var != pt_var
585 && TREE_CODE (var) != BIT_FIELD_REF
586 && TREE_CODE (var) != COMPONENT_REF
587 && TREE_CODE (var) != ARRAY_REF
588 && TREE_CODE (var) != ARRAY_RANGE_REF
589 && TREE_CODE (var) != REALPART_EXPR
590 && TREE_CODE (var) != IMAGPART_EXPR)
591 var = TREE_OPERAND (var, 0);
592 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
593 var = TREE_OPERAND (var, 0);
594 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
595 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
596 || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
597 && tree_int_cst_lt (pt_var_size,
598 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
599 var = pt_var;
600 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
602 tree v = var;
603 /* For &X->fld, compute object size if fld isn't a flexible array
604 member. */
605 bool is_flexible_array_mem_ref = false;
606 while (v && v != pt_var)
607 switch (TREE_CODE (v))
609 case ARRAY_REF:
610 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
612 tree domain
613 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
614 if (domain && TYPE_MAX_VALUE (domain))
616 v = NULL_TREE;
617 break;
620 v = TREE_OPERAND (v, 0);
621 break;
622 case REALPART_EXPR:
623 case IMAGPART_EXPR:
624 v = NULL_TREE;
625 break;
626 case COMPONENT_REF:
627 /* When the ref is not to an aggregate type, i.e, an array,
628 a record or a union, it will not have flexible size,
629 compute the object size directly. */
630 if (!AGGREGATE_TYPE_P (TREE_TYPE (v)))
632 v = NULL_TREE;
633 break;
635 /* if the ref is to a record or union type, but the type
636 does not include a flexible array recursively, compute
637 the object size directly. */
638 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (v)))
640 if (!TYPE_INCLUDES_FLEXARRAY (TREE_TYPE (v)))
642 v = NULL_TREE;
643 break;
645 else
647 v = TREE_OPERAND (v, 0);
648 break;
651 /* Now the ref is to an array type. */
652 gcc_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
653 is_flexible_array_mem_ref = array_ref_flexible_size_p (v);
654 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
655 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
656 != UNION_TYPE
657 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
658 != QUAL_UNION_TYPE)
659 break;
660 else
661 v = TREE_OPERAND (v, 0);
662 if (TREE_CODE (v) == COMPONENT_REF
663 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
664 == RECORD_TYPE)
666 /* compute object size only if v is not a
667 flexible array member. */
668 if (!is_flexible_array_mem_ref)
670 v = NULL_TREE;
671 break;
673 v = TREE_OPERAND (v, 0);
675 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
676 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
677 != UNION_TYPE
678 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
679 != QUAL_UNION_TYPE)
680 break;
681 else
682 v = TREE_OPERAND (v, 0);
683 if (v != pt_var)
684 v = NULL_TREE;
685 else
686 v = pt_var;
687 break;
688 default:
689 v = pt_var;
690 break;
692 if (v == pt_var)
693 var = pt_var;
696 else
697 var = pt_var;
699 if (var != pt_var)
701 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
702 if (!TREE_CONSTANT (var_size))
703 var_size = get_or_create_ssa_default_def (cfun, var_size);
704 if (!var_size)
705 return false;
707 else if (!pt_var_size)
708 return false;
709 else
710 var_size = pt_var_size;
711 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
712 if (bytes != error_mark_node)
714 bytes = size_for_offset (var_size, bytes);
715 if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
717 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
718 pt_var);
719 if (bytes2 != error_mark_node)
721 bytes2 = size_for_offset (pt_var_size, bytes2);
722 bytes = size_binop (MIN_EXPR, bytes, bytes2);
726 else
727 bytes = size_unknown (object_size_type);
729 wholebytes
730 = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
732 else if (!pt_var_size)
733 return false;
734 else
736 bytes = pt_var_size;
737 wholebytes = pt_var_wholesize;
740 if (!size_unknown_p (bytes, object_size_type)
741 && size_valid_p (bytes, object_size_type)
742 && !size_unknown_p (bytes, object_size_type)
743 && size_valid_p (wholebytes, object_size_type))
745 *psize = bytes;
746 if (pwholesize)
747 *pwholesize = wholebytes;
748 return true;
751 return false;
754 /* Compute __builtin_object_size for a CALL to .ACCESS_WITH_SIZE,
755 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
756 The 2nd, 3rd, and the 4th parameters of the call determine the size of
757 the CALL:
759 2nd argument REF_TO_SIZE: The reference to the size of the object,
760 3rd argument CLASS_OF_SIZE: The size referenced by the REF_TO_SIZE represents
761 0: the number of bytes;
762 1: the number of the elements of the object type;
763 4th argument TYPE_OF_SIZE: A constant 0 with its TYPE being the same as the TYPE
764 of the object referenced by REF_TO_SIZE
765 6th argument: A constant 0 with the pointer TYPE to the original flexible
766 array type.
768 The size of the element can be retrived from the TYPE of the 6th argument
769 of the call, which is the pointer to the array type. */
770 static tree
771 access_with_size_object_size (const gcall *call, int object_size_type)
773 /* If not for dynamic object size, return. */
774 if ((object_size_type & OST_DYNAMIC) == 0)
775 return size_unknown (object_size_type);
777 gcc_assert (gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE));
778 /* The type of the 6th argument type is the pointer TYPE to the original
779 flexible array type. */
780 tree pointer_to_array_type = TREE_TYPE (gimple_call_arg (call, 5));
781 gcc_assert (POINTER_TYPE_P (pointer_to_array_type));
782 tree element_type = TREE_TYPE (TREE_TYPE (pointer_to_array_type));
783 tree element_size = TYPE_SIZE_UNIT (element_type);
784 tree ref_to_size = gimple_call_arg (call, 1);
785 unsigned int class_of_size = TREE_INT_CST_LOW (gimple_call_arg (call, 2));
786 tree type = TREE_TYPE (gimple_call_arg (call, 3));
788 tree size = fold_build2 (MEM_REF, type, ref_to_size,
789 build_int_cst (ptr_type_node, 0));
791 /* If size is negative value, treat it as zero. */
792 if (!TYPE_UNSIGNED (type))
794 tree cond_expr = fold_build2 (LT_EXPR, boolean_type_node,
795 unshare_expr (size), build_zero_cst (type));
796 size = fold_build3 (COND_EXPR, integer_type_node, cond_expr,
797 build_zero_cst (type), size);
800 if (class_of_size == 1)
801 size = size_binop (MULT_EXPR,
802 fold_convert (sizetype, size),
803 fold_convert (sizetype, element_size));
804 else
805 size = fold_convert (sizetype, size);
807 if (!todo)
808 todo = TODO_update_ssa_only_virtuals;
810 return size;
813 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
814 Handles calls to functions declared with attribute alloc_size.
815 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
816 If unknown, return size_unknown (object_size_type). */
818 static tree
819 alloc_object_size (const gcall *call, int object_size_type)
821 gcc_assert (is_gimple_call (call));
823 tree calltype;
824 tree callfn = gimple_call_fndecl (call);
825 if (callfn)
826 calltype = TREE_TYPE (callfn);
827 else
828 calltype = gimple_call_fntype (call);
830 if (!calltype)
831 return size_unknown (object_size_type);
833 /* Set to positions of alloc_size arguments. */
834 int arg1 = -1, arg2 = -1;
835 tree alloc_size = lookup_attribute ("alloc_size",
836 TYPE_ATTRIBUTES (calltype));
837 if (alloc_size && TREE_VALUE (alloc_size))
839 tree p = TREE_VALUE (alloc_size);
841 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
842 if (TREE_CHAIN (p))
843 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
845 else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
846 && callfn
847 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
848 arg1 = 0;
850 /* Non-const arguments are OK here, let the caller handle constness. */
851 if (arg1 < 0
852 || (unsigned) arg1 >= gimple_call_num_args (call)
853 || (arg2 >= 0 && (unsigned) arg2 >= gimple_call_num_args (call)))
854 return size_unknown (object_size_type);
856 tree targ1 = gimple_call_arg (call, arg1);
857 if (!INTEGRAL_TYPE_P (TREE_TYPE (targ1))
858 || TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (sizetype))
859 return size_unknown (object_size_type);
860 targ1 = fold_convert (sizetype, targ1);
861 tree bytes = NULL_TREE;
862 if (arg2 >= 0)
864 tree targ2 = gimple_call_arg (call, arg2);
865 if (!INTEGRAL_TYPE_P (TREE_TYPE (targ2))
866 || TYPE_PRECISION (TREE_TYPE (targ2)) > TYPE_PRECISION (sizetype))
867 return size_unknown (object_size_type);
868 targ2 = fold_convert (sizetype, targ2);
869 bytes = size_binop (MULT_EXPR, targ1, targ2);
871 else
872 bytes = targ1;
874 return bytes ? bytes : size_unknown (object_size_type);
877 /* Compute __builtin_object_size for CALL, which is a call to either
878 BUILT_IN_STRDUP or BUILT_IN_STRNDUP; IS_STRNDUP indicates which it is.
879 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
880 If unknown, return size_unknown (object_size_type). */
882 static tree
883 strdup_object_size (const gcall *call, int object_size_type, bool is_strndup)
885 tree src = gimple_call_arg (call, 0);
886 tree sz = size_unknown (object_size_type);
887 tree n = NULL_TREE;
889 if (is_strndup)
890 n = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
891 gimple_call_arg (call, 1));
892 /* For strdup, simply emit strlen (SRC) + 1 and let the optimizer fold it the
893 way it likes. */
894 else
896 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
897 if (strlen_fn)
899 sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
900 build_call_expr (strlen_fn, 1, src));
901 todo = TODO_update_ssa_only_virtuals;
905 /* In all other cases, return the size of SRC since the object size cannot
906 exceed that. We cannot do this for OST_MINIMUM unless SRC points into a
907 string constant since otherwise the object size could go all the way down
908 to zero. */
909 if (!size_valid_p (sz, object_size_type)
910 || size_unknown_p (sz, object_size_type))
912 tree wholesrc = NULL_TREE;
913 if (TREE_CODE (src) == ADDR_EXPR)
914 wholesrc = get_base_address (TREE_OPERAND (src, 0));
916 /* If the source points within a string constant, we try to get its
917 length. */
918 if (wholesrc && TREE_CODE (wholesrc) == STRING_CST)
920 tree len = c_strlen (src, 0);
921 if (len)
922 sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node, len);
925 /* For maximum estimate, our next best guess is the object size of the
926 source. */
927 if (size_unknown_p (sz, object_size_type)
928 && !(object_size_type & OST_MINIMUM))
929 compute_builtin_object_size (src, object_size_type, &sz);
932 /* String duplication allocates at least one byte, so we should never fail
933 for OST_MINIMUM. */
934 if ((!size_valid_p (sz, object_size_type)
935 || size_unknown_p (sz, object_size_type))
936 && (object_size_type & OST_MINIMUM))
937 sz = size_one_node;
939 /* Factor in the N. */
940 return n ? fold_build2 (MIN_EXPR, sizetype, n, sz) : sz;
943 /* If object size is propagated from one of function's arguments directly
944 to its return value, return that argument for GIMPLE_CALL statement CALL.
945 Otherwise return NULL. */
947 static tree
948 pass_through_call (const gcall *call)
950 unsigned rf = gimple_call_return_flags (call);
951 if (rf & ERF_RETURNS_ARG)
953 unsigned argnum = rf & ERF_RETURN_ARG_MASK;
954 if (argnum < gimple_call_num_args (call))
955 return gimple_call_arg (call, argnum);
958 /* __builtin_assume_aligned is intentionally not marked RET1. */
959 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
960 return gimple_call_arg (call, 0);
962 return NULL_TREE;
965 /* Emit PHI nodes for size expressions fo. */
967 static void
968 emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
970 tree phires;
971 gphi *wholephi = NULL;
973 if (wholesize != size)
975 phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
976 wholephi = create_phi_node (phires, gimple_bb (stmt));
979 phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
980 gphi *phi = create_phi_node (phires, gimple_bb (stmt));
981 gphi *obj_phi = as_a <gphi *> (stmt);
983 gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
984 gcc_checking_assert (TREE_CODE (size) == TREE_VEC);
986 for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
988 gimple_seq seq = NULL;
989 tree wsz = TREE_VEC_ELT (wholesize, i);
990 tree sz = TREE_VEC_ELT (size, i);
992 /* If we built an expression, we will need to build statements
993 and insert them on the edge right away. */
994 if (TREE_CODE (wsz) != SSA_NAME)
995 wsz = force_gimple_operand (wsz, &seq, true, NULL);
996 if (TREE_CODE (sz) != SSA_NAME)
998 gimple_seq s;
999 sz = force_gimple_operand (sz, &s, true, NULL);
1000 gimple_seq_add_seq (&seq, s);
1003 if (seq)
1004 gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);
1006 if (wholephi)
1007 add_phi_arg (wholephi, wsz,
1008 gimple_phi_arg_edge (obj_phi, i),
1009 gimple_phi_arg_location (obj_phi, i));
1011 add_phi_arg (phi, sz,
1012 gimple_phi_arg_edge (obj_phi, i),
1013 gimple_phi_arg_location (obj_phi, i));
/* Descend through EXPR and return size_unknown if it uses any SSA variable
   object_size_set or object_size_set_temp generated, which turned out to be
   size_unknown, as noted in UNKNOWNS.  */

static tree
propagate_unknowns (object_size_info *osi, tree expr, bitmap unknowns)
{
  int object_size_type = osi->object_size_type;

  switch (TREE_CODE (expr))
    {
    case SSA_NAME:
      /* An SSA name recorded in UNKNOWNS is a placeholder whose size turned
	 out to be unknown; poison the whole expression.  */
      if (bitmap_bit_p (unknowns, SSA_NAME_VERSION (expr)))
	return size_unknown (object_size_type);
      return expr;

    case MIN_EXPR:
    case MAX_EXPR:
      {
	/* Both operands contribute to the result; either being unknown
	   makes the whole expression unknown.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	res = propagate_unknowns (osi, TREE_OPERAND (expr, 1), unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    case MODIFY_EXPR:
      {
	/* Only the RHS (operand 1) carries the size value.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;
	return expr;
      }
    case TREE_VEC:
      /* PHI-argument size vectors: every element must be known.  */
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	{
	  tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i),
					 unknowns);
	  if (size_unknown_p (res, object_size_type))
	    return res;
	}
      return expr;
    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	/* Operand 0 holds the size; operand 1 is the (constant or known)
	   offset and need not be checked.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    default:
      return expr;
    }
}
/* Walk through size expressions that need reexamination and generate
   statements for them.  */

static void
gimplify_size_expressions (object_size_info *osi)
{
  int object_size_type = osi->object_size_type;
  bitmap_iterator bi;
  unsigned int i;
  bool changed;

  /* Step 1: Propagate unknowns into expressions.  Iterate to a fixed point
     since marking one size unknown may make expressions that reference it
     unknown as well.  */
  bitmap reexamine = BITMAP_ALLOC (NULL);
  bitmap_copy (reexamine, osi->reexamine);
  bitmap unknowns = BITMAP_ALLOC (NULL);
  do
    {
      changed = false;
      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
	{
	  object_size cur = object_sizes_get_raw (osi, i);

	  if (size_unknown_p (propagate_unknowns (osi, cur.size, unknowns),
			      object_size_type)
	      || size_unknown_p (propagate_unknowns (osi, cur.wholesize,
						     unknowns),
				 object_size_type))
	    {
	      /* Record the SSAs we're overwriting to propagate the
		 unknowns.  */
	      tree oldval = object_sizes_get (osi, i);
	      tree old_wholeval = object_sizes_get (osi, i, true);

	      bitmap_set_bit (unknowns, SSA_NAME_VERSION (oldval));
	      bitmap_set_bit (unknowns, SSA_NAME_VERSION (old_wholeval));
	      object_sizes_initialize (osi, i,
				       size_unknown (object_size_type),
				       size_unknown (object_size_type));
	      bitmap_clear_bit (osi->reexamine, i);
	      changed = true;
	    }
	}
      /* Refresh the worklist copy; osi->reexamine shrank above.  */
      bitmap_copy (reexamine, osi->reexamine);
    }
  while (changed);

  /* Release all unknowns.  These were placeholder SSA names that will never
     get real definitions now.  */
  EXECUTE_IF_SET_IN_BITMAP (unknowns, 0, i, bi)
    release_ssa_name (ssa_name (i));

  BITMAP_FREE (unknowns);
  BITMAP_FREE (reexamine);

  /* Expand all size expressions to put their definitions close to the objects
     for which size is being computed.  */
  EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
    {
      gimple_seq seq = NULL;
      object_size osize = object_sizes_get_raw (osi, i);

      gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
      enum gimple_code code = gimple_code (stmt);

      /* PHI nodes need special attention.  */
      if (code == GIMPLE_PHI)
	emit_phi_nodes (stmt, osize.size, osize.wholesize);
      else
	{
	  tree size_expr = NULL_TREE;

	  /* Bundle wholesize in with the size to gimplify if needed.  */
	  if (osize.wholesize != osize.size
	      && !size_usable_p (osize.wholesize))
	    size_expr = size_binop (COMPOUND_EXPR,
				    osize.wholesize,
				    osize.size);
	  else if (!size_usable_p (osize.size))
	    size_expr = osize.size;

	  if (size_expr)
	    {
	      gimple_stmt_iterator gsi;
	      /* GIMPLE_NOP means a default definition; insert at the start
		 of the function instead of at the (nonexistent) defining
		 statement.  */
	      if (code == GIMPLE_NOP)
		gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	      else
		gsi = gsi_for_stmt (stmt);

	      force_gimple_operand (size_expr, &seq, true, NULL);
	      gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
	    }
	}

      /* We're done, so replace the MODIFY_EXPRs with the SSA names.  */
      object_sizes_initialize (osi, i,
			       object_sizes_get (osi, i),
			       object_sizes_get (osi, i, true));
    }
}
/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  If the declared object is known and PDECL
   is nonnull, sets *PDECL to the object's DECL.  OBJECT_SIZE_TYPE
   is the second argument to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     tree *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type < OST_END);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = size_unknown (object_size_type);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  /* COMPUTED bitmaps are only allocated by init_object_sizes; if they are
     missing we're outside the objsz pass proper.  */
  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & OST_SUBOBJECT)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      /* For the static case only accept offsets that fit in a
		 signed HWI and are within the address-space limit.  */
	      if (((object_size_type & OST_DYNAMIC)
		   || (tree_fits_shwi_p (offset)
		       && compare_tree_int (offset, offset_limit) <= 0))
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize))
		{
		  *psize = size_for_offset (*psize, offset);
		  return true;
		}
	    }
	}
      return false;
    }

  struct object_size_info osi;
  osi.object_size_type = object_size_type;
  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      bitmap_iterator bi;
      unsigned int i;

      object_sizes_grow (object_size_type);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %s%sobject size for ",
		   (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
		   (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
		   (object_size_type & OST_SUBOBJECT) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);

      if (!(object_size_type & OST_DYNAMIC))
	{
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;
	}

      /* First pass: walk UD chains, compute object sizes that can be computed.
	 osi.reexamine bitmap at the end will contain versions of SSA_NAMES
	 that need to be reexamined.  For both static and dynamic size
	 computation, reexamination is for propagation across dependency loops.
	 The dynamic case has the additional use case where the computed
	 expression needs to be gimplified.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      if (object_size_type & OST_DYNAMIC)
	{
	  osi.pass = 1;
	  gimplify_size_expressions (&osi);
	  bitmap_clear (osi.reexamine);
	}

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & OST_MINIMUM)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      /* Whatever is still marked for reexamination has converged; mark it
	 computed so it is never revisited.  */
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (!object_sizes_unknown_p (object_size_type, i))
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %s%sobject size ",
			 ((object_size_type & OST_MINIMUM) ? "minimum"
			  : "maximum"),
			 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
			 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
		print_generic_expr (dump_file, object_sizes_get (&osi, i),
				    dump_flags);
		fprintf (dump_file, "\n");
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
  return !size_unknown_p (*psize, object_size_type);
}
1369 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
1371 static void
1372 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
1374 int object_size_type = osi->object_size_type;
1375 unsigned int varno = SSA_NAME_VERSION (ptr);
1376 tree bytes, wholesize;
1378 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1379 gcc_assert (osi->pass == 0);
1381 if (TREE_CODE (value) == WITH_SIZE_EXPR)
1382 value = TREE_OPERAND (value, 0);
1384 /* Pointer variables should have been handled by merge_object_sizes. */
1385 gcc_assert (TREE_CODE (value) != SSA_NAME
1386 || !POINTER_TYPE_P (TREE_TYPE (value)));
1388 if (TREE_CODE (value) == ADDR_EXPR)
1389 addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
1390 else
1391 bytes = wholesize = size_unknown (object_size_type);
1393 object_sizes_set (osi, varno, bytes, wholesize);
1397 /* Compute object_sizes for PTR, defined to the result of a call. */
1399 static void
1400 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
1402 int object_size_type = osi->object_size_type;
1403 unsigned int varno = SSA_NAME_VERSION (ptr);
1404 tree bytes = NULL_TREE;
1406 gcc_assert (is_gimple_call (call));
1408 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1409 gcc_assert (osi->pass == 0);
1411 bool is_strdup = gimple_call_builtin_p (call, BUILT_IN_STRDUP);
1412 bool is_strndup = gimple_call_builtin_p (call, BUILT_IN_STRNDUP);
1413 bool is_access_with_size
1414 = gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE);
1415 if (is_strdup || is_strndup)
1416 bytes = strdup_object_size (call, object_size_type, is_strndup);
1417 else if (is_access_with_size)
1418 bytes = access_with_size_object_size (call, object_size_type);
1419 else
1420 bytes = alloc_object_size (call, object_size_type);
1422 if (!size_valid_p (bytes, object_size_type))
1423 bytes = size_unknown (object_size_type);
1425 object_sizes_set (osi, varno, bytes, bytes);
1429 /* Compute object_sizes for PTR, defined to an unknown value. */
1431 static void
1432 unknown_object_size (struct object_size_info *osi, tree ptr)
1434 int object_size_type = osi->object_size_type;
1435 unsigned int varno = SSA_NAME_VERSION (ptr);
1437 gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
1438 gcc_checking_assert (osi->pass == 0);
1439 tree bytes = size_unknown (object_size_type);
1441 object_sizes_set (osi, varno, bytes, bytes);
1445 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
1446 the object size might need reexamination later. */
1448 static bool
1449 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
1451 int object_size_type = osi->object_size_type;
1452 unsigned int varno = SSA_NAME_VERSION (dest);
1453 tree orig_bytes, wholesize;
1455 if (object_sizes_unknown_p (object_size_type, varno))
1456 return false;
1458 if (osi->pass == 0)
1459 collect_object_sizes_for (osi, orig);
1461 orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
1462 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);
1464 if (object_sizes_set (osi, varno, orig_bytes, wholesize))
1465 osi->changed = true;
1467 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  tree bytes, wholesize;
  tree op0, op1;
  bool reexamine = false;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* &MEM_REF[base + offset] is equivalent to base + offset for size
	 purposes.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  /* Handle PTR + OFFSET here.  */
  if (size_valid_p (op1, object_size_type)
      && (TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, op0);

	  bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
	  wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
	  reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
	}
      else
	{
	  /* op0 will be ADDR_EXPR here.  We should never come here during
	     reexamination.  */
	  gcc_checking_assert (osi->pass == 0);
	  addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
	}

      /* size_for_offset doesn't make sense for -1 size, but it does for size 0
	 since the wholesize could be non-zero and a negative offset could give
	 a non-zero size.  */
      if (size_unknown_p (bytes, 0))
	;
      else if ((object_size_type & OST_DYNAMIC)
	       || compare_tree_int (op1, offset_limit) <= 0)
	bytes = size_for_offset (bytes, op1, wholesize);
      /* In the static case, with a negative offset, the best estimate for
	 minimum size is size_unknown but for maximum size, the wholesize is a
	 better estimate than size_unknown.  */
      else if (object_size_type & OST_MINIMUM)
	bytes = size_unknown (object_size_type);
      else
	bytes = wholesize;
    }
  else
    bytes = wholesize = size_unknown (object_size_type);

  if (!size_valid_p (bytes, object_size_type)
      || !size_valid_p (wholesize, object_size_type))
    bytes = wholesize = size_unknown (object_size_type);

  if (object_sizes_set (osi, varno, bytes, wholesize))
    osi->changed = true;
  return reexamine;
}
1551 /* Compute the dynamic object size for VAR. Return the result in SIZE and
1552 WHOLESIZE. */
1554 static void
1555 dynamic_object_size (struct object_size_info *osi, tree var,
1556 tree *size, tree *wholesize)
1558 int object_size_type = osi->object_size_type;
1560 if (TREE_CODE (var) == SSA_NAME)
1562 unsigned varno = SSA_NAME_VERSION (var);
1564 collect_object_sizes_for (osi, var);
1565 *size = object_sizes_get (osi, varno);
1566 *wholesize = object_sizes_get (osi, varno, true);
1568 else if (TREE_CODE (var) == ADDR_EXPR)
1569 addr_object_size (osi, var, object_size_type, size, wholesize);
1570 else
1571 *size = *wholesize = size_unknown (object_size_type);
/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (object_size_type & OST_DYNAMIC)
    {
      tree then_size, then_wholesize, else_size, else_wholesize;

      /* Only evaluate the else branch if the then branch produced a known
	 size; otherwise the result is unknown regardless.  */
      dynamic_object_size (osi, then_, &then_size, &then_wholesize);
      if (!size_unknown_p (then_size, object_size_type))
	dynamic_object_size (osi, else_, &else_size, &else_wholesize);

      tree cond_size, cond_wholesize;
      if (size_unknown_p (then_size, object_size_type)
	  || size_unknown_p (else_size, object_size_type))
	cond_size = cond_wholesize = size_unknown (object_size_type);
      else
	{
	  /* Build COND_EXPRs mirroring the condition of STMT so the size
	     follows whichever branch is taken at run time.  */
	  cond_size = fold_build3 (COND_EXPR, sizetype,
				   gimple_assign_rhs1 (stmt),
				   then_size, else_size);
	  cond_wholesize = fold_build3 (COND_EXPR, sizetype,
					gimple_assign_rhs1 (stmt),
					then_wholesize, else_wholesize);
	}

      object_sizes_set (osi, varno, cond_size, cond_wholesize);

      return false;
    }

  /* Static case: merge the sizes of both arms into VAR.  */
  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes_unknown_p (object_size_type, varno))
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
/* Find size of an object passed as a parameter to the function.  */

static void
parm_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  tree parm = SSA_NAME_VAR (var);

  /* Only the dynamic case can exploit the access attribute; for static
     sizes (or non-pointer parameters) fall back to the generic path.  */
  if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      expr_object_size (osi, var, parm);
      return;
    }

  /* Look for access attribute.  */
  rdwr_map rdwr_idx;

  tree fndecl = cfun->decl;
  const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
  tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
  tree sz = NULL_TREE;

  /* If we have an access attribute with a usable size argument... */
  if (access && access->sizarg != UINT_MAX
      /* ... and either PARM is void * or has a type that is complete and has a
	 constant size... */
      && ((typesize && poly_int_tree_p (typesize))
	  || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
    {
      tree fnargs = DECL_ARGUMENTS (fndecl);
      tree arg = NULL_TREE;
      unsigned argpos = 0;

      /* ... then walk through the parameters to pick the size parameter and
	 safely scale it by the type size if needed.

	 TODO: we could also compute the size of VLAs where the size is
	 given by a function parameter.  */
      for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
	if (argpos == access->sizarg)
	  {
	    gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (arg)));
	    sz = get_or_create_ssa_default_def (cfun, arg);
	    if (sz != NULL_TREE)
	      {
		sz = fold_convert (sizetype, sz);
		/* Scale element count by element size when the pointee type
		   is known; for void * the count is already in bytes.  */
		if (typesize)
		  sz = size_binop (MULT_EXPR, sz, typesize);
	      }
	    break;
	  }
    }
  if (!sz)
    sz = size_unknown (object_size_type);

  object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
}
/* Compute an object size expression for VAR, which is the result of a PHI
   node.  */

static void
phi_dynamic_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned i, num_args = gimple_phi_num_args (stmt);
  bool wholesize_needed = false;

  /* The extra space is for the PHI result at the end, which object_sizes_set
     sets for us.  */
  tree sizes = make_tree_vec (num_args + 1);
  tree wholesizes = make_tree_vec (num_args + 1);

  /* Bail out if the size of any of the PHI arguments cannot be
     determined.  */
  for (i = 0; i < num_args; i++)
    {
      /* Abnormal/EH edges cannot take inserted statements; give up.  */
      edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
      if (e->flags & EDGE_COMPLEX)
	break;

      tree rhs = gimple_phi_arg_def (stmt, i);
      tree size, wholesize;

      dynamic_object_size (osi, rhs, &size, &wholesize);

      if (size_unknown_p (size, object_size_type))
	break;

      if (size != wholesize)
	wholesize_needed = true;

      TREE_VEC_ELT (sizes, i) = size;
      TREE_VEC_ELT (wholesizes, i) = wholesize;
    }

  /* Loop exited early: some argument was unusable, so the result is
     unknown; return the vectors to the GC.  */
  if (i < num_args)
    {
      ggc_free (sizes);
      ggc_free (wholesizes);
      sizes = wholesizes = size_unknown (object_size_type);
    }
  /* Point to the same TREE_VEC so that we can avoid emitting two PHI
     nodes.  */
  else if (!wholesize_needed)
    {
      ggc_free (wholesizes);
      wholesizes = sizes;
    }

  object_sizes_set (osi, varno, sizes, wholesizes);
}
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is size_unknown (object_size_type).
   To differentiate addition from subtraction, ADDR_EXPR returns
   size_unknown (object_size_type) for all objects bigger than half of the
   address space, and constants less than half of the address space are
   considered addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* Initialize to 0 for maximum size and M1U for minimum size so that
	     it gets immediately overridden.  */
	  object_sizes_initialize (osi, varno,
				   size_initval (object_size_type),
				   size_initval (object_size_type));
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  if (object_size_type & OST_DYNAMIC)
	    object_sizes_set_temp (osi, varno);

	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of defining statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	/* Calls like memcpy that return one of their arguments are handled
	   by propagating the argument's size.  */
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      unknown_object_size (osi, var);
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	parm_object_size (osi, var);
      else
	/* Uninitialized SSA names point nowhere.  */
	unknown_object_size (osi, var);
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	if (object_size_type & OST_DYNAMIC)
	  {
	    phi_dynamic_object_size (osi, var);
	    break;
	  }

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    /* Once unknown, further arguments cannot improve the result.  */
	    if (object_sizes_unknown_p (object_size_type, varno))
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Dynamic sizes use placeholder temps to return an answer, so it is always
     safe to set COMPUTED for them.  */
  if ((object_size_type & OST_DYNAMIC)
      || !reexamine || object_sizes_unknown_p (object_size_type, varno))
    {
      bitmap_set_bit (computed[object_size_type], varno);
      if (!(object_size_type & OST_DYNAMIC))
	bitmap_clear_bit (osi->reexamine, varno);
      else if (reexamine)
	bitmap_set_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      /* VAR is already on the DFS stack.  A differing depth means the cycle
	 includes at least one nonzero pointer increment.  */
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Force every SSA name
	     on the stack down to (and including) VAR to minimum size 0 and
	     mark it computed so pass 2 won't revisit it.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes_set (osi, *sp, size_zero_node,
				object_sizes_get (osi, *sp, true));
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  /* Push VAR onto the DFS stack.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A nonzero increment bumps the depth, which is how a loop with
	       pointer addition is distinguished on re-encounter.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the DFS stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      /* Skip non-positive offsets.  */
      if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
	return;

      /* Seed the DFS with BASEVAR at depth 1, then walk VAR's chain at
	 depth 2; revisiting BASEVAR at a different depth signals a loop
	 with pointer addition.  */
      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
2067 /* Initialize data structures for the object size computation. */
2069 void
2070 init_object_sizes (void)
2072 int object_size_type;
2074 if (computed[0])
2075 return;
2077 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2079 object_sizes_grow (object_size_type);
2080 computed[object_size_type] = BITMAP_ALLOC (NULL);
2083 init_offset_limit ();
2087 /* Destroy data structures after the object size computation. */
2089 void
2090 fini_object_sizes (void)
2092 int object_size_type;
2094 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2096 object_sizes_release (object_size_type);
2097 BITMAP_FREE (computed[object_size_type]);
2101 /* Dummy valueize function. */
2103 static tree
2104 do_valueize (tree t)
2106 return t;
/* Process a __builtin_object_size or __builtin_dynamic_object_size call in
   CALL early for subobjects before any object information is lost due to
   optimization.  Insert a MIN or MAX expression of the result and
   __builtin_object_size at I so that it may be processed in the second pass.
   __builtin_dynamic_object_size is treated like __builtin_object_size here
   since we're only looking for constant bounds.  */

static void
early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  tree ost = gimple_call_arg (call, 1);
  tree lhs = gimple_call_lhs (call);
  gcc_assert (lhs != NULL_TREE);

  if (!tree_fits_uhwi_p (ost))
    return;

  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
  tree ptr = gimple_call_arg (call, 0);

  /* Only the subobject variants (OST 1 and 3) benefit from the early
     pass; others are left for the late pass.  */
  if (object_size_type != 1 && object_size_type != 3)
    return;

  if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
    return;

  tree type = TREE_TYPE (lhs);
  tree bytes;
  if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
      || !int_fits_type_p (bytes, type))
    return;

  /* Redirect the call's result into a fresh temp and combine it with the
     early estimate: MAX for minimum-size queries, MIN for maximum-size
     ones, so the late pass can only tighten the bound.  */
  tree tem = make_ssa_name (type);
  gimple_call_set_lhs (call, tem);
  enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
  tree cst = fold_convert (type, bytes);
  gimple *g = gimple_build_assign (lhs, code, tem, cst);
  gsi_insert_after (i, g, GSI_NEW_STMT);
  update_stmt (call);
}
/* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
   expression and insert it at I.  Return true if it succeeds.  */

static bool
dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  gcc_assert (gimple_call_num_args (call) == 2);

  tree args[2];
  args[0] = gimple_call_arg (call, 0);
  args[1] = gimple_call_arg (call, 1);

  location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
  tree result_type = gimple_call_return_type (as_a <gcall *> (call));
  tree result = fold_builtin_call_array (loc, result_type,
					 gimple_call_fn (call), 2, args);

  if (!result)
    return false;

  /* fold_builtin_call_array may wrap the result inside a
     NOP_EXPR.  */
  STRIP_NOPS (result);
  gimplify_and_update_call_from_tree (i, result);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Simplified (dynamic)\n  ");
      print_gimple_stmt (dump_file, call, 0, dump_flags);
      fprintf (dump_file, "  to ");
      print_generic_expr (dump_file, result);
      fprintf (dump_file, "\n");
    }
  return true;
}
2186 static unsigned int
2187 object_sizes_execute (function *fun, bool early)
2189 todo = 0;
2191 basic_block bb;
2192 FOR_EACH_BB_FN (bb, fun)
2194 gimple_stmt_iterator i;
2195 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
2197 tree result;
2198 bool dynamic = false;
2200 gimple *call = gsi_stmt (i);
2201 if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
2202 dynamic = true;
2203 else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
2204 continue;
2206 tree lhs = gimple_call_lhs (call);
2207 if (!lhs)
2208 continue;
2210 init_object_sizes ();
2212 /* If early, only attempt to fold
2213 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
2214 and rather than folding the builtin to the constant if any,
2215 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
2216 call result and the computed constant. Do the same for
2217 __builtin_dynamic_object_size too. */
2218 if (early)
2220 early_object_sizes_execute_one (&i, call);
2221 continue;
2224 if (dynamic)
2226 if (dynamic_object_sizes_execute_one (&i, call))
2227 continue;
2228 else
2230 /* If we could not find a suitable size expression, lower to
2231 __builtin_object_size so that we may at least get a
2232 constant lower or higher estimate. */
2233 tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
2234 gimple_call_set_fndecl (call, bosfn);
2235 update_stmt (call);
2237 if (dump_file && (dump_flags & TDF_DETAILS))
2239 print_generic_expr (dump_file, gimple_call_arg (call, 0),
2240 dump_flags);
2241 fprintf (dump_file,
2242 ": Retrying as __builtin_object_size\n");
2247 result = gimple_fold_stmt_to_constant (call, do_valueize);
2248 if (!result)
2250 tree ost = gimple_call_arg (call, 1);
2252 if (tree_fits_uhwi_p (ost))
2254 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
2256 if (object_size_type & OST_MINIMUM)
2257 result = build_zero_cst (size_type_node);
2258 else if (object_size_type < OST_END)
2259 result = fold_convert (size_type_node,
2260 integer_minus_one_node);
2263 if (!result)
2264 continue;
2267 gcc_assert (TREE_CODE (result) == INTEGER_CST);
2269 if (dump_file && (dump_flags & TDF_DETAILS))
2271 fprintf (dump_file, "Simplified\n ");
2272 print_gimple_stmt (dump_file, call, 0, dump_flags);
2273 fprintf (dump_file, " to ");
2274 print_generic_expr (dump_file, result);
2275 fprintf (dump_file, "\n");
2278 /* Propagate into all uses and fold those stmts. */
2279 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2280 replace_uses_by (lhs, result);
2281 else
2282 replace_call_with_value (&i, result);
2286 fini_object_sizes ();
2287 return todo;
2290 /* Simple pass to optimize all __builtin_object_size () builtins. */
2292 namespace {
2294 const pass_data pass_data_object_sizes =
2296 GIMPLE_PASS, /* type */
2297 "objsz", /* name */
2298 OPTGROUP_NONE, /* optinfo_flags */
2299 TV_NONE, /* tv_id */
2300 ( PROP_cfg | PROP_ssa ), /* properties_required */
2301 PROP_objsz, /* properties_provided */
2302 0, /* properties_destroyed */
2303 0, /* todo_flags_start */
2304 0, /* todo_flags_finish */
2307 class pass_object_sizes : public gimple_opt_pass
2309 public:
2310 pass_object_sizes (gcc::context *ctxt)
2311 : gimple_opt_pass (pass_data_object_sizes, ctxt)
2314 /* opt_pass methods: */
2315 opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
2316 unsigned int execute (function *fun) final override
2318 return object_sizes_execute (fun, false);
2320 }; // class pass_object_sizes
2322 } // anon namespace
2324 gimple_opt_pass *
2325 make_pass_object_sizes (gcc::context *ctxt)
2327 return new pass_object_sizes (ctxt);
2330 /* Early version of pass to optimize all __builtin_object_size () builtins. */
2332 namespace {
2334 const pass_data pass_data_early_object_sizes =
2336 GIMPLE_PASS, /* type */
2337 "early_objsz", /* name */
2338 OPTGROUP_NONE, /* optinfo_flags */
2339 TV_NONE, /* tv_id */
2340 ( PROP_cfg | PROP_ssa ), /* properties_required */
2341 0, /* properties_provided */
2342 0, /* properties_destroyed */
2343 0, /* todo_flags_start */
2344 0, /* todo_flags_finish */
2347 class pass_early_object_sizes : public gimple_opt_pass
2349 public:
2350 pass_early_object_sizes (gcc::context *ctxt)
2351 : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
2354 /* opt_pass methods: */
2355 unsigned int execute (function *fun) final override
2357 return object_sizes_execute (fun, true);
2359 }; // class pass_object_sizes
2361 } // anon namespace
2363 gimple_opt_pass *
2364 make_pass_early_object_sizes (gcc::context *ctxt)
2366 return new pass_early_object_sizes (ctxt);