libcpp, c, middle-end: Optimize initializers using #embed in C
[official-gcc.git] / gcc / tree-object-size.cc
blob6544730e1539e8326c114e242f79c18061679efb
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-iterator.h"
33 #include "gimple-fold.h"
34 #include "tree-cfg.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "builtins.h"
39 #include "gimplify-me.h"
40 #include "gimplify.h"
41 #include "tree-ssa-dce.h"
43 struct object_size_info
45 int object_size_type;
46 unsigned char pass;
47 bool changed;
48 bitmap visited, reexamine;
49 unsigned int *depths;
50 unsigned int *stack, *tos;
53 struct GTY(()) object_size
55 /* Estimate of bytes till the end of the object. */
56 tree size;
57 /* Estimate of the size of the whole object. */
58 tree wholesize;
/* Forward declarations of the local helpers defined below.  */
61 static tree compute_object_offset (tree, const_tree);
62 static bool addr_object_size (struct object_size_info *,
63 const_tree, int, tree *, tree *t = NULL);
64 static tree alloc_object_size (const gcall *, int);
65 static tree access_with_size_object_size (const gcall *, int);
66 static tree pass_through_call (const gcall *);
67 static void collect_object_sizes_for (struct object_size_info *, tree);
68 static void expr_object_size (struct object_size_info *, tree, tree);
69 static bool merge_object_sizes (struct object_size_info *, tree, tree);
70 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
71 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
72 static void init_offset_limit (void);
73 static void check_for_plus_in_loops (struct object_size_info *, tree);
74 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
75 unsigned int);
77 /* object_sizes[0] is upper bound for the object size and number of bytes till
78 the end of the object.
79 object_sizes[1] is upper bound for the object size and number of bytes till
80 the end of the subobject (innermost array or field with address taken).
81 object_sizes[2] is lower bound for the object size and number of bytes till
82 the end of the object and object_sizes[3] lower bound for subobject.
84 For static object sizes, the object size and the bytes till the end of the
85 object are both INTEGER_CST. In the dynamic case, they are finally either a
86 gimple variable or an INTEGER_CST. */
/* Indexed by SSA name version within each of the OST_END kinds.  */
87 static vec<object_size> object_sizes[OST_END];
89 /* Bitmaps what object sizes have been computed already. */
90 static bitmap computed[OST_END];
92 /* Maximum value of offset we consider to be addition. */
93 static unsigned HOST_WIDE_INT offset_limit;
95 /* Tell the generic SSA updater what kind of update is needed after the pass
96 executes. */
97 static unsigned todo;
99 /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
101 static inline bool
102 size_initval_p (tree val, int object_size_type)
104 return ((object_size_type & OST_MINIMUM)
105 ? integer_all_onesp (val) : integer_zerop (val));
108 /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
110 static inline bool
111 size_unknown_p (tree val, int object_size_type)
113 return ((object_size_type & OST_MINIMUM)
114 ? integer_zerop (val) : integer_all_onesp (val));
117 /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
119 static inline bool
120 size_valid_p (tree val, int object_size_type)
122 return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
125 /* Return true if VAL is usable as an object size in the object_sizes
126 vectors. */
128 static inline bool
129 size_usable_p (tree val)
131 return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
134 /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
136 static inline tree
137 size_initval (int object_size_type)
139 return ((object_size_type & OST_MINIMUM)
140 ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
143 /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
145 static inline tree
146 size_unknown (int object_size_type)
148 return ((object_size_type & OST_MINIMUM)
149 ? size_zero_node : TYPE_MAX_VALUE (sizetype));
152 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
154 static inline void
155 object_sizes_grow (int object_size_type)
157 if (num_ssa_names > object_sizes[object_size_type].length ())
158 object_sizes[object_size_type].safe_grow (num_ssa_names, true);
161 /* Release object_sizes[OBJECT_SIZE_TYPE]. */
163 static inline void
164 object_sizes_release (int object_size_type)
166 object_sizes[object_size_type].release ();
169 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
171 static inline bool
172 object_sizes_unknown_p (int object_size_type, unsigned varno)
174 return size_unknown_p (object_sizes[object_size_type][varno].size,
175 object_size_type);
178 /* Return the raw size expression for VARNO corresponding to OSI. This returns
179 the TREE_VEC as is and should only be used during gimplification. */
181 static inline object_size
182 object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
184 gcc_assert (osi->pass != 0);
185 return object_sizes[osi->object_size_type][varno];
188 /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
189 the whole object size. Use this for building size expressions based on size
190 of VARNO. */
192 static inline tree
193 object_sizes_get (struct object_size_info *osi, unsigned varno,
194 bool whole = false)
196 tree ret;
197 int object_size_type = osi->object_size_type;
199 if (whole)
200 ret = object_sizes[object_size_type][varno].wholesize;
201 else
202 ret = object_sizes[object_size_type][varno].size;
204 if (object_size_type & OST_DYNAMIC)
206 if (TREE_CODE (ret) == MODIFY_EXPR)
207 return TREE_OPERAND (ret, 0);
208 else if (TREE_CODE (ret) == TREE_VEC)
209 return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
210 else
211 gcc_checking_assert (size_usable_p (ret));
214 return ret;
217 /* Set size for VARNO corresponding to OSI to VAL. */
219 static inline void
220 object_sizes_initialize (struct object_size_info *osi, unsigned varno,
221 tree val, tree wholeval)
223 int object_size_type = osi->object_size_type;
225 object_sizes[object_size_type][varno].size = val;
226 object_sizes[object_size_type][varno].wholesize = wholeval;
229 /* Return a MODIFY_EXPR for cases where SSA and EXPR have the same type. The
230 TREE_VEC is returned only in case of PHI nodes. */
232 static tree
233 bundle_sizes (tree name, tree expr)
235 gcc_checking_assert (TREE_TYPE (name) == sizetype);
237 if (TREE_CODE (expr) == TREE_VEC)
239 TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
240 return expr;
243 gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
244 return build2 (MODIFY_EXPR, sizetype, name, expr);
247 /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
248 maximum. For static sizes, each element of TREE_VEC is always INTEGER_CST
249 throughout the computation. For dynamic sizes, each element may either be a
250 gimple variable, a MODIFY_EXPR or a TREE_VEC. The MODIFY_EXPR is for
251 expressions that need to be gimplified. TREE_VECs are special, they're
252 emitted only for GIMPLE_PHI and the PHI result variable is the last element
253 of the vector. Returns true when the recorded size changed. */
255 static bool
256 object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
257 tree wholeval)
259 int object_size_type = osi->object_size_type;
260 object_size osize = object_sizes[object_size_type][varno];
261 bool changed = true;
263 tree oldval = osize.size;
264 tree old_wholeval = osize.wholesize;
266 if (object_size_type & OST_DYNAMIC)
/* Dynamic sizes: when VARNO is already marked for reexamination, its SSA
   names were created earlier; rebind them to the new expressions.  */
268 if (bitmap_bit_p (osi->reexamine, varno))
270 val = bundle_sizes (oldval, val);
271 wholeval = bundle_sizes (old_wholeval, wholeval);
273 else
/* First assignment: the stored values must still be the initval
   placeholders.  */
275 gcc_checking_assert (size_initval_p (oldval, object_size_type));
276 gcc_checking_assert (size_initval_p (old_wholeval,
277 object_size_type));
278 /* For dynamic object sizes, all object sizes that are not gimple
279 variables will need to be gimplified. */
280 if (wholeval != val && !size_usable_p (wholeval))
282 bitmap_set_bit (osi->reexamine, varno);
283 wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
285 if (!size_usable_p (val))
287 bitmap_set_bit (osi->reexamine, varno);
288 tree newval = bundle_sizes (make_ssa_name (sizetype), val);
/* Keep VAL and WHOLEVAL sharing one binding when they were equal.  */
289 if (val == wholeval)
290 wholeval = newval;
291 val = newval;
293 /* If the new value is a temporary variable, mark it for
294 reexamination. */
295 else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
296 bitmap_set_bit (osi->reexamine, varno);
299 else
/* Static sizes: fold the new estimate into the old one with MIN (for
   lower bounds) or MAX (for upper bounds) and report whether either
   the size or the whole size actually moved.  */
301 enum tree_code code = (object_size_type & OST_MINIMUM
302 ? MIN_EXPR : MAX_EXPR);
304 val = size_binop (code, val, oldval);
305 wholeval = size_binop (code, wholeval, old_wholeval);
306 changed = (tree_int_cst_compare (val, oldval) != 0
307 || tree_int_cst_compare (old_wholeval, wholeval) != 0);
310 object_sizes[object_size_type][varno].size = val;
311 object_sizes[object_size_type][varno].wholesize = wholeval;
313 return changed;
316 /* Set temporary SSA names for object size and whole size to resolve dependency
317 loops in dynamic size computation. */
319 static inline void
320 object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
322 tree val = object_sizes_get (osi, varno);
324 if (size_initval_p (val, osi->object_size_type))
325 object_sizes_set (osi, varno,
326 make_ssa_name (sizetype),
327 make_ssa_name (sizetype));
330 /* Initialize OFFSET_LIMIT variable. */
331 static void
332 init_offset_limit (void)
334 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
335 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
336 else
337 offset_limit = -1;
338 offset_limit /= 2;
341 /* Bytes at end of the object with SZ from offset OFFSET. If WHOLESIZE is not
342 NULL_TREE, use it to get the net offset of the pointer, which should always
343 be positive and hence, be within OFFSET_LIMIT for valid offsets. */
345 static tree
346 size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE)
348 gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));
350 /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
351 offset from the whole object. */
352 if (wholesize && wholesize != sz
353 && (TREE_CODE (sz) != INTEGER_CST
354 || TREE_CODE (wholesize) != INTEGER_CST
355 || tree_int_cst_compare (sz, wholesize)))
357 gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
358 sizetype));
360 /* Restructure SZ - OFFSET as
361 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
362 WHOLESIZE + OFFSET - SZ is only allowed to be positive. */
363 tree tmp = size_binop (MAX_EXPR, wholesize, sz);
364 offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
365 offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
366 sz = tmp;
369 /* Safe to convert now, since a valid net offset should be non-negative. */
370 if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
371 offset = fold_convert (sizetype, offset);
373 if (TREE_CODE (offset) == INTEGER_CST)
375 if (integer_zerop (offset))
376 return sz;
378 /* Negative or too large offset even after adjustment, cannot be within
379 bounds of an object. */
380 if (compare_tree_int (offset, offset_limit) > 0)
381 return size_zero_node;
384 return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
387 /* Compute offset of EXPR within VAR. Return error_mark_node
388 if unknown. Recurses through handled components, accumulating byte
   offsets; CODE flips to MINUS_EXPR for negative array indices. */
390 static tree
391 compute_object_offset (tree expr, const_tree var)
393 enum tree_code code = PLUS_EXPR;
394 tree base, off, t;
396 if (expr == var)
397 return size_zero_node;
399 switch (TREE_CODE (expr))
401 case COMPONENT_REF:
402 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
403 if (base == error_mark_node)
404 return base;
/* Field offset is the byte offset plus the byte part of the bit
   offset of the FIELD_DECL.  */
406 t = TREE_OPERAND (expr, 1);
407 off = size_binop (PLUS_EXPR,
408 component_ref_field_offset (expr),
409 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
410 / BITS_PER_UNIT));
411 break;
/* These do not change the offset; look through them.  */
413 case REALPART_EXPR:
414 CASE_CONVERT:
415 case VIEW_CONVERT_EXPR:
416 case NON_LVALUE_EXPR:
417 return compute_object_offset (TREE_OPERAND (expr, 0), var);
419 case IMAGPART_EXPR:
420 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
421 if (base == error_mark_node)
422 return base;
/* The imaginary part sits one element past the real part.  */
424 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
425 break;
427 case ARRAY_REF:
428 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
429 if (base == error_mark_node)
430 return base;
432 t = TREE_OPERAND (expr, 1);
433 tree low_bound, unit_size;
434 low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
435 unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
/* Normalize the index against a nonzero array low bound.  */
436 if (! integer_zerop (low_bound))
437 t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
/* Negative constant index: negate and subtract instead of add, since
   the arithmetic below is done in unsigned sizetype.  */
438 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
440 code = MINUS_EXPR;
441 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
443 t = fold_convert (sizetype, t);
444 off = size_binop (MULT_EXPR, unit_size, t);
445 break;
447 case MEM_REF:
448 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
449 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
451 default:
452 return error_mark_node;
455 return size_binop (code, base, off);
458 /* Returns the size of the object designated by DECL considering its
459 initializer if it either has one or if it would not affect its size,
460 otherwise the size of the object without the initializer when MIN
461 is true, else null. An object's initializer affects the object's
462 size if it's a struct type with a flexible array member. */
464 tree
465 decl_init_size (tree decl, bool min)
467 tree size = DECL_SIZE_UNIT (decl);
468 tree type = TREE_TYPE (decl);
469 if (TREE_CODE (type) != RECORD_TYPE)
470 return size;
472 tree last = last_field (type);
473 if (!last)
474 return size;
476 tree last_type = TREE_TYPE (last);
477 if (TREE_CODE (last_type) != ARRAY_TYPE
478 || TYPE_SIZE (last_type))
479 return size;
481 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
482 of the initializer and sometimes doesn't. */
483 size = TYPE_SIZE_UNIT (type);
484 tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
485 tree compsize = component_ref_size (ref);
486 if (!compsize)
487 return min ? size : NULL_TREE;
489 /* The size includes tail padding and initializer elements. */
490 tree pos = byte_position (last);
491 size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
492 return size;
495 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
496 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
497 If unknown, return size_unknown (object_size_type). */
499 static bool
500 addr_object_size (struct object_size_info *osi, const_tree ptr,
501 int object_size_type, tree *psize, tree *pwholesize)
503 tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
504 tree var_size, bytes, wholebytes;
506 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
508 /* Set to unknown and overwrite just before returning if the size
509 could be determined. */
510 *psize = size_unknown (object_size_type);
511 if (pwholesize)
512 *pwholesize = size_unknown (object_size_type);
514 pt_var = TREE_OPERAND (ptr, 0);
515 while (handled_component_p (pt_var))
516 pt_var = TREE_OPERAND (pt_var, 0);
518 if (!pt_var)
519 return false;
521 if (TREE_CODE (pt_var) == MEM_REF)
523 tree sz, wholesize;
525 if (!osi || (object_size_type & OST_SUBOBJECT) != 0
526 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
528 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
529 object_size_type & ~OST_SUBOBJECT, &sz);
530 wholesize = sz;
532 else
534 tree var = TREE_OPERAND (pt_var, 0);
535 if (osi->pass == 0)
536 collect_object_sizes_for (osi, var);
537 if (bitmap_bit_p (computed[object_size_type],
538 SSA_NAME_VERSION (var)))
540 sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
541 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
543 else
544 sz = wholesize = size_unknown (object_size_type);
546 if (!size_unknown_p (sz, object_size_type))
547 sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);
549 if (!size_unknown_p (sz, object_size_type)
550 && (TREE_CODE (sz) != INTEGER_CST
551 || compare_tree_int (sz, offset_limit) < 0))
553 pt_var_size = sz;
554 pt_var_wholesize = wholesize;
557 else if (DECL_P (pt_var))
559 pt_var_size = pt_var_wholesize
560 = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
561 if (!pt_var_size)
562 return false;
564 else if (TREE_CODE (pt_var) == STRING_CST)
565 pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
566 else
567 return false;
569 if (pt_var_size)
571 /* Validate the size determined above if it is a constant. */
572 if (TREE_CODE (pt_var_size) == INTEGER_CST
573 && compare_tree_int (pt_var_size, offset_limit) >= 0)
574 return false;
577 if (pt_var != TREE_OPERAND (ptr, 0))
579 tree var;
581 if (object_size_type & OST_SUBOBJECT)
583 var = TREE_OPERAND (ptr, 0);
585 while (var != pt_var
586 && TREE_CODE (var) != BIT_FIELD_REF
587 && TREE_CODE (var) != COMPONENT_REF
588 && TREE_CODE (var) != ARRAY_REF
589 && TREE_CODE (var) != ARRAY_RANGE_REF
590 && TREE_CODE (var) != REALPART_EXPR
591 && TREE_CODE (var) != IMAGPART_EXPR)
592 var = TREE_OPERAND (var, 0);
593 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
594 var = TREE_OPERAND (var, 0);
595 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
596 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
597 || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
598 && tree_int_cst_lt (pt_var_size,
599 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
600 var = pt_var;
601 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
603 tree v = var;
604 /* For &X->fld, compute object size if fld isn't a flexible array
605 member. */
606 bool is_flexible_array_mem_ref = false;
607 while (v && v != pt_var)
608 switch (TREE_CODE (v))
610 case ARRAY_REF:
611 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
613 tree domain
614 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
615 if (domain && TYPE_MAX_VALUE (domain))
617 v = NULL_TREE;
618 break;
621 v = TREE_OPERAND (v, 0);
622 break;
623 case REALPART_EXPR:
624 case IMAGPART_EXPR:
625 v = NULL_TREE;
626 break;
627 case COMPONENT_REF:
628 /* When the ref is not to an aggregate type, i.e, an array,
629 a record or a union, it will not have flexible size,
630 compute the object size directly. */
631 if (!AGGREGATE_TYPE_P (TREE_TYPE (v)))
633 v = NULL_TREE;
634 break;
636 /* if the ref is to a record or union type, but the type
637 does not include a flexible array recursively, compute
638 the object size directly. */
639 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (v)))
641 if (!TYPE_INCLUDES_FLEXARRAY (TREE_TYPE (v)))
643 v = NULL_TREE;
644 break;
646 else
648 v = TREE_OPERAND (v, 0);
649 break;
652 /* Now the ref is to an array type. */
653 gcc_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
654 is_flexible_array_mem_ref = array_ref_flexible_size_p (v);
655 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
656 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
657 != UNION_TYPE
658 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
659 != QUAL_UNION_TYPE)
660 break;
661 else
662 v = TREE_OPERAND (v, 0);
663 if (TREE_CODE (v) == COMPONENT_REF
664 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
665 == RECORD_TYPE)
667 /* compute object size only if v is not a
668 flexible array member. */
669 if (!is_flexible_array_mem_ref)
671 v = NULL_TREE;
672 break;
674 v = TREE_OPERAND (v, 0);
676 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
677 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
678 != UNION_TYPE
679 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
680 != QUAL_UNION_TYPE)
681 break;
682 else
683 v = TREE_OPERAND (v, 0);
684 if (v != pt_var)
685 v = NULL_TREE;
686 else
687 v = pt_var;
688 break;
689 default:
690 v = pt_var;
691 break;
693 if (v == pt_var)
694 var = pt_var;
697 else
698 var = pt_var;
700 if (var != pt_var)
702 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
703 if (!TREE_CONSTANT (var_size))
704 var_size = get_or_create_ssa_default_def (cfun, var_size);
705 if (!var_size)
706 return false;
708 else if (!pt_var_size)
709 return false;
710 else
711 var_size = pt_var_size;
712 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
713 if (bytes != error_mark_node)
715 bytes = size_for_offset (var_size, bytes);
716 if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
718 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
719 pt_var);
720 if (bytes2 != error_mark_node)
722 bytes2 = size_for_offset (pt_var_size, bytes2);
723 bytes = size_binop (MIN_EXPR, bytes, bytes2);
727 else
728 bytes = size_unknown (object_size_type);
730 wholebytes
731 = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
733 else if (!pt_var_size)
734 return false;
735 else
737 bytes = pt_var_size;
738 wholebytes = pt_var_wholesize;
741 if (!size_unknown_p (bytes, object_size_type)
742 && size_valid_p (bytes, object_size_type)
743 && !size_unknown_p (bytes, object_size_type)
744 && size_valid_p (wholebytes, object_size_type))
746 *psize = bytes;
747 if (pwholesize)
748 *pwholesize = wholebytes;
749 return true;
752 return false;
755 /* Compute __builtin_object_size for a CALL to .ACCESS_WITH_SIZE,
756 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
757 The 2nd, 3rd, and the 4th parameters of the call determine the size of
758 the CALL:
760 2nd argument REF_TO_SIZE: The reference to the size of the object,
761 3rd argument CLASS_OF_SIZE: The size referenced by the REF_TO_SIZE represents
762 0: the number of bytes;
763 1: the number of the elements of the object type;
764 4th argument TYPE_OF_SIZE: A constant 0 with its TYPE being the same as the TYPE
765 of the object referenced by REF_TO_SIZE
766 6th argument: A constant 0 with the pointer TYPE to the original flexible
767 array type.
769 The size of the element can be retrived from the TYPE of the 6th argument
770 of the call, which is the pointer to the array type. */
771 static tree
772 access_with_size_object_size (const gcall *call, int object_size_type)
774 /* If not for dynamic object size, return. */
775 if ((object_size_type & OST_DYNAMIC) == 0)
776 return size_unknown (object_size_type);
778 gcc_assert (gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE));
779 /* The type of the 6th argument type is the pointer TYPE to the original
780 flexible array type. */
781 tree pointer_to_array_type = TREE_TYPE (gimple_call_arg (call, 5));
782 gcc_assert (POINTER_TYPE_P (pointer_to_array_type));
783 tree element_type = TREE_TYPE (TREE_TYPE (pointer_to_array_type));
784 tree element_size = TYPE_SIZE_UNIT (element_type);
785 tree ref_to_size = gimple_call_arg (call, 1);
786 unsigned int class_of_size = TREE_INT_CST_LOW (gimple_call_arg (call, 2));
787 tree type = TREE_TYPE (gimple_call_arg (call, 3));
789 tree size = fold_build2 (MEM_REF, type, ref_to_size,
790 build_int_cst (ptr_type_node, 0));
792 /* If size is negative value, treat it as zero. */
793 if (!TYPE_UNSIGNED (type))
795 tree cond_expr = fold_build2 (LT_EXPR, boolean_type_node,
796 unshare_expr (size), build_zero_cst (type));
797 size = fold_build3 (COND_EXPR, integer_type_node, cond_expr,
798 build_zero_cst (type), size);
801 if (class_of_size == 1)
802 size = size_binop (MULT_EXPR,
803 fold_convert (sizetype, size),
804 fold_convert (sizetype, element_size));
805 else
806 size = fold_convert (sizetype, size);
808 if (!todo)
809 todo = TODO_update_ssa_only_virtuals;
811 return size;
814 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
815 Handles calls to functions declared with attribute alloc_size.
816 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
817 If unknown, return size_unknown (object_size_type). */
819 static tree
820 alloc_object_size (const gcall *call, int object_size_type)
822 gcc_assert (is_gimple_call (call));
824 tree calltype;
825 tree callfn = gimple_call_fndecl (call);
826 if (callfn)
827 calltype = TREE_TYPE (callfn);
828 else
829 calltype = gimple_call_fntype (call);
831 if (!calltype)
832 return size_unknown (object_size_type);
834 /* Set to positions of alloc_size arguments. */
835 int arg1 = -1, arg2 = -1;
836 tree alloc_size = lookup_attribute ("alloc_size",
837 TYPE_ATTRIBUTES (calltype));
838 if (alloc_size && TREE_VALUE (alloc_size))
840 tree p = TREE_VALUE (alloc_size);
842 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
843 if (TREE_CHAIN (p))
844 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
846 else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
847 && callfn
848 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
849 arg1 = 0;
851 /* Non-const arguments are OK here, let the caller handle constness. */
852 if (arg1 < 0
853 || (unsigned) arg1 >= gimple_call_num_args (call)
854 || (arg2 >= 0 && (unsigned) arg2 >= gimple_call_num_args (call)))
855 return size_unknown (object_size_type);
857 tree targ1 = gimple_call_arg (call, arg1);
858 if (!INTEGRAL_TYPE_P (TREE_TYPE (targ1))
859 || TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (sizetype))
860 return size_unknown (object_size_type);
861 targ1 = fold_convert (sizetype, targ1);
862 tree bytes = NULL_TREE;
863 if (arg2 >= 0)
865 tree targ2 = gimple_call_arg (call, arg2);
866 if (!INTEGRAL_TYPE_P (TREE_TYPE (targ2))
867 || TYPE_PRECISION (TREE_TYPE (targ2)) > TYPE_PRECISION (sizetype))
868 return size_unknown (object_size_type);
869 targ2 = fold_convert (sizetype, targ2);
870 bytes = size_binop (MULT_EXPR, targ1, targ2);
872 else
873 bytes = targ1;
875 return bytes ? bytes : size_unknown (object_size_type);
878 /* Compute __builtin_object_size for CALL, which is a call to either
879 BUILT_IN_STRDUP or BUILT_IN_STRNDUP; IS_STRNDUP indicates which it is.
880 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
881 If unknown, return size_unknown (object_size_type). */
883 static tree
884 strdup_object_size (const gcall *call, int object_size_type, bool is_strndup)
886 tree src = gimple_call_arg (call, 0);
887 tree sz = size_unknown (object_size_type);
888 tree n = NULL_TREE;
/* strndup allocates at most N + 1 bytes; remember the bound so it can be
   MIN-ed with the source-based estimate at the end.  */
890 if (is_strndup)
891 n = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
892 gimple_call_arg (call, 1));
893 /* For strdup, simply emit strlen (SRC) + 1 and let the optimizer fold it the
894 way it likes. */
895 else
897 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
898 if (strlen_fn)
900 sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
901 build_call_expr (strlen_fn, 1, src));
/* The emitted call changes virtual operands; request an SSA update.  */
902 todo = TODO_update_ssa_only_virtuals;
906 /* In all other cases, return the size of SRC since the object size cannot
907 exceed that. We cannot do this for OST_MINIMUM unless SRC points into a
908 string constant since otherwise the object size could go all the way down
909 to zero. */
910 if (!size_valid_p (sz, object_size_type)
911 || size_unknown_p (sz, object_size_type))
913 tree wholesrc = NULL_TREE;
914 if (TREE_CODE (src) == ADDR_EXPR)
915 wholesrc = get_base_address (TREE_OPERAND (src, 0));
917 /* If the source points within a string constant, we try to get its
918 length. */
919 if (wholesrc && TREE_CODE (wholesrc) == STRING_CST)
921 tree len = c_strlen (src, 0);
922 if (len)
923 sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node, len);
926 /* For maximum estimate, our next best guess is the object size of the
927 source. */
928 if (size_unknown_p (sz, object_size_type)
929 && !(object_size_type & OST_MINIMUM))
930 compute_builtin_object_size (src, object_size_type, &sz);
933 /* String duplication allocates at least one byte, so we should never fail
934 for OST_MINIMUM. */
935 if ((!size_valid_p (sz, object_size_type)
936 || size_unknown_p (sz, object_size_type))
937 && (object_size_type & OST_MINIMUM))
938 sz = size_one_node;
940 /* Factor in the N. */
941 return n ? fold_build2 (MIN_EXPR, sizetype, n, sz) : sz;
944 /* If object size is propagated from one of function's arguments directly
945 to its return value, return that argument for GIMPLE_CALL statement CALL.
946 Otherwise return NULL. */
948 static tree
949 pass_through_call (const gcall *call)
951 unsigned rf = gimple_call_return_flags (call);
952 if (rf & ERF_RETURNS_ARG)
954 unsigned argnum = rf & ERF_RETURN_ARG_MASK;
955 if (argnum < gimple_call_num_args (call))
956 return gimple_call_arg (call, argnum);
959 /* __builtin_assume_aligned is intentionally not marked RET1. */
960 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
961 return gimple_call_arg (call, 0);
963 return NULL_TREE;
966 /* Emit PHI nodes for the size (and, when distinct, whole-size) TREE_VEC
   bundles SIZE and WHOLESIZE, mirroring the object PHI STMT: one size
   argument per incoming edge, with any non-SSA expressions gimplified
   and inserted on the corresponding edge. */
968 static void
969 emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
971 tree phires;
972 gphi *wholephi = NULL;
/* Only create a separate whole-size PHI when it differs from SIZE.  */
974 if (wholesize != size)
976 phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
977 wholephi = create_phi_node (phires, gimple_bb (stmt));
/* The PHI result was stashed as the last TREE_VEC element by
   bundle_sizes.  */
980 phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
981 gphi *phi = create_phi_node (phires, gimple_bb (stmt));
982 gphi *obj_phi = as_a <gphi *> (stmt);
984 gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
985 gcc_checking_assert (TREE_CODE (size) == TREE_VEC);
987 for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
989 gimple_seq seq = NULL;
990 tree wsz = TREE_VEC_ELT (wholesize, i);
991 tree sz = TREE_VEC_ELT (size, i);
993 /* If we built an expression, we will need to build statements
994 and insert them on the edge right away. */
995 if (TREE_CODE (wsz) != SSA_NAME)
996 wsz = force_gimple_operand (wsz, &seq, true, NULL);
997 if (TREE_CODE (sz) != SSA_NAME)
999 gimple_seq s;
1000 sz = force_gimple_operand (sz, &s, true, NULL);
1001 gimple_seq_add_seq (&seq, s);
1004 if (seq)
1005 gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);
/* Attach the per-edge size arguments with the same edge/location as
   the object PHI.  */
1007 if (wholephi)
1008 add_phi_arg (wholephi, wsz,
1009 gimple_phi_arg_edge (obj_phi, i),
1010 gimple_phi_arg_location (obj_phi, i));
1012 add_phi_arg (phi, sz,
1013 gimple_phi_arg_edge (obj_phi, i),
1014 gimple_phi_arg_location (obj_phi, i));
1018 /* Descend through EXPR and return size_unknown if it uses any SSA variable
1019 object_size_set or object_size_set_temp generated, which turned out to be
1020 size_unknown, as noted in UNKNOWNS. */
1022 static tree
1023 propagate_unknowns (object_size_info *osi, tree expr, bitmap unknowns)
1025 int object_size_type = osi->object_size_type;
1027 switch (TREE_CODE (expr))
/* An SSA name is unknown iff it was recorded in UNKNOWNS.  */
1029 case SSA_NAME:
1030 if (bitmap_bit_p (unknowns, SSA_NAME_VERSION (expr)))
1031 return size_unknown (object_size_type);
1032 return expr;
/* Binary size expressions: unknown if either operand is unknown.  */
1034 case MIN_EXPR:
1035 case MAX_EXPR:
1037 tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
1038 unknowns);
1039 if (size_unknown_p (res, object_size_type))
1040 return res;
1042 res = propagate_unknowns (osi, TREE_OPERAND (expr, 1), unknowns);
1043 if (size_unknown_p (res, object_size_type))
1044 return res;
1046 return expr;
/* A MODIFY_EXPR is unknown if its RHS (operand 1) is unknown.  */
1048 case MODIFY_EXPR:
1050 tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1),
1051 unknowns);
1052 if (size_unknown_p (res, object_size_type))
1053 return res;
1054 return expr;
/* A PHI bundle is unknown if any of its elements is unknown.  */
1056 case TREE_VEC:
1057 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
1059 tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i),
1060 unknowns);
1061 if (size_unknown_p (res, object_size_type))
1062 return res;
1064 return expr;
/* For offset adjustments only the size operand (operand 0) matters.  */
1065 case PLUS_EXPR:
1066 case MINUS_EXPR:
1068 tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
1069 unknowns);
1070 if (size_unknown_p (res, object_size_type))
1071 return res;
1073 return expr;
1075 default:
1076 return expr;
1080 /* Walk through size expressions that need reexamination and generate
1081 statements for them. */
/* Dynamic (OST_DYNAMIC) sizes are built as trees during pass 0; this pass-1
   helper resolves unknowns to a fixpoint and then materializes the surviving
   expressions as GIMPLE close to their defining statements.  */
1083 static void
1084 gimplify_size_expressions (object_size_info *osi)
1086 int object_size_type = osi->object_size_type;
1087 bitmap_iterator bi;
1088 unsigned int i;
1089 bool changed;
1091 /* Step 1: Propagate unknowns into expressions. */
1092 bitmap reexamine = BITMAP_ALLOC (NULL);
1093 bitmap_copy (reexamine, osi->reexamine);
1094 bitmap unknowns = BITMAP_ALLOC (NULL);
/* Iterate to a fixpoint: marking one size unknown may make expressions
   that reference its placeholder SSA unknown too.  */
1097 changed = false;
1098 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1100 object_size cur = object_sizes_get_raw (osi, i);
1102 if (size_unknown_p (propagate_unknowns (osi, cur.size, unknowns),
1103 object_size_type)
1104 || size_unknown_p (propagate_unknowns (osi, cur.wholesize,
1105 unknowns),
1106 object_size_type))
1108 /* Record the placeholder SSAs we're overwriting so the next
1109 iteration propagates the unknowns through their users. */
1110 tree oldval = object_sizes_get (osi, i);
1111 tree old_wholeval = object_sizes_get (osi, i, true);
1113 bitmap_set_bit (unknowns, SSA_NAME_VERSION (oldval));
1114 bitmap_set_bit (unknowns, SSA_NAME_VERSION (old_wholeval));
1115 object_sizes_initialize (osi, i,
1116 size_unknown (object_size_type),
1117 size_unknown (object_size_type));
1118 bitmap_clear_bit (osi->reexamine, i);
1119 changed = true;
1122 bitmap_copy (reexamine, osi->reexamine);
1124 while (changed);
1126 /* Release all unknowns. */
/* The placeholder SSA names resolved to unknown are dead now.  */
1127 EXECUTE_IF_SET_IN_BITMAP (unknowns, 0, i, bi)
1128 release_ssa_name (ssa_name (i));
1130 BITMAP_FREE (unknowns);
1131 BITMAP_FREE (reexamine);
1133 /* Expand all size expressions to put their definitions close to the objects
1134 for which size is being computed. */
1135 EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
1137 gimple_seq seq = NULL;
1138 object_size osize = object_sizes_get_raw (osi, i);
1140 gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
1141 enum gimple_code code = gimple_code (stmt);
1143 /* PHI nodes need special attention. */
1144 if (code == GIMPLE_PHI)
1145 emit_phi_nodes (stmt, osize.size, osize.wholesize);
1146 else
1148 tree size_expr = NULL_TREE;
1150 /* Bundle wholesize in with the size to gimplify if needed. */
1151 if (osize.wholesize != osize.size
1152 && !size_usable_p (osize.wholesize))
1153 size_expr = size_binop (COMPOUND_EXPR,
1154 osize.wholesize,
1155 osize.size);
1156 else if (!size_usable_p (osize.size))
1157 size_expr = osize.size;
1159 if (size_expr)
1161 gimple_stmt_iterator gsi;
/* GIMPLE_NOP means a default definition (e.g. a parameter); insert at
   the start of the function entry's successor block instead.  */
1162 if (code == GIMPLE_NOP)
1163 gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1164 else
1165 gsi = gsi_for_stmt (stmt);
1167 force_gimple_operand (size_expr, &seq, true, NULL);
1168 gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
1172 /* We're done, so replace the MODIFY_EXPRs with the SSA names. */
1173 object_sizes_initialize (osi, i,
1174 object_sizes_get (osi, i),
1175 object_sizes_get (osi, i, true));
1179 /* Compute __builtin_object_size value for PTR and set *PSIZE to
1180 the resulting value. OBJECT_SIZE_TYPE is the second argument
1181 to __builtin_object_size (a combination of the OST_* flags:
1182 minimum/maximum, subobject, dynamic).
1183 Returns true on success and false when the object size could not
1184 be determined; *PSIZE is set to size_unknown in the latter case. */
1186 bool
1187 compute_builtin_object_size (tree ptr, int object_size_type,
1188 tree *psize)
1190 gcc_assert (object_size_type >= 0 && object_size_type < OST_END);
1192 /* Set to unknown and overwrite just before returning if the size
1193 could be determined. */
1194 *psize = size_unknown (object_size_type);
1196 if (! offset_limit)
1197 init_offset_limit ();
/* &object is resolvable directly without walking the SSA graph.  */
1199 if (TREE_CODE (ptr) == ADDR_EXPR)
1200 return addr_object_size (NULL, ptr, object_size_type, psize);
1202 if (TREE_CODE (ptr) != SSA_NAME
1203 || !POINTER_TYPE_P (TREE_TYPE (ptr)))
1204 return false;
/* COMPUTED is only allocated by init_object_sizes; if it is missing we
   are outside the objsz passes (e.g. called from warning code).  */
1206 if (computed[object_size_type] == NULL)
1208 if (optimize || object_size_type & OST_SUBOBJECT)
1209 return false;
1211 /* When not optimizing, rather than failing, make a small effort
1212 to determine the object size without the full benefit of
1213 the (costly) computation below. */
1214 gimple *def = SSA_NAME_DEF_STMT (ptr);
1215 if (gimple_code (def) == GIMPLE_ASSIGN)
1217 tree_code code = gimple_assign_rhs_code (def);
1218 if (code == POINTER_PLUS_EXPR)
1220 tree offset = gimple_assign_rhs2 (def);
1221 ptr = gimple_assign_rhs1 (def);
/* Static sizes only accept constant offsets within the address-space
   limit; dynamic sizes can fold any offset into the expression.  */
1223 if (((object_size_type & OST_DYNAMIC)
1224 || (tree_fits_shwi_p (offset)
1225 && compare_tree_int (offset, offset_limit) <= 0))
1226 && compute_builtin_object_size (ptr, object_size_type,
1227 psize))
1229 *psize = size_for_offset (*psize, offset);
1230 return true;
1234 return false;
1237 struct object_size_info osi;
1238 osi.object_size_type = object_size_type;
1239 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
1241 bitmap_iterator bi;
1242 unsigned int i;
1244 object_sizes_grow (object_size_type);
1245 if (dump_file)
1247 fprintf (dump_file, "Computing %s %s%sobject size for ",
1248 (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
1249 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1250 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1251 print_generic_expr (dump_file, ptr, dump_flags);
1252 fprintf (dump_file, ":\n");
1255 osi.visited = BITMAP_ALLOC (NULL);
1256 osi.reexamine = BITMAP_ALLOC (NULL);
1258 if (!(object_size_type & OST_DYNAMIC))
1260 osi.depths = NULL;
1261 osi.stack = NULL;
1262 osi.tos = NULL;
1265 /* First pass: walk UD chains, compute object sizes that can be computed.
1266 osi.reexamine bitmap at the end will contain versions of SSA_NAMES
1267 that need to be reexamined. For both static and dynamic size
1268 computation, reexamination is for propagation across dependency loops.
1269 The dynamic case has the additional use case where the computed
1270 expression needs to be gimplified. */
1271 osi.pass = 0;
1272 osi.changed = false;
1273 collect_object_sizes_for (&osi, ptr);
/* Dynamic sizes resolve loops by materializing GIMPLE; no pass-2
   fixpoint iteration is needed afterwards.  */
1275 if (object_size_type & OST_DYNAMIC)
1277 osi.pass = 1;
1278 gimplify_size_expressions (&osi);
1279 bitmap_clear (osi.reexamine);
1282 /* Second pass: keep recomputing object sizes of variables
1283 that need reexamination, until no object sizes are
1284 increased or all object sizes are computed. */
1285 if (! bitmap_empty_p (osi.reexamine))
1287 bitmap reexamine = BITMAP_ALLOC (NULL);
1289 /* If looking for minimum instead of maximum object size,
1290 detect cases where a pointer is increased in a loop.
1291 Although even without this detection pass 2 would eventually
1292 terminate, it could take a long time. If a pointer is
1293 increasing this way, we need to assume 0 object size.
1294 E.g. p = &buf[0]; while (cond) p = p + 4; */
1295 if (object_size_type & OST_MINIMUM)
1297 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
1298 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
1299 osi.tos = osi.stack;
1300 osi.pass = 1;
1301 /* collect_object_sizes_for is changing
1302 osi.reexamine bitmap, so iterate over a copy. */
1303 bitmap_copy (reexamine, osi.reexamine);
1304 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1305 if (bitmap_bit_p (osi.reexamine, i))
1306 check_for_plus_in_loops (&osi, ssa_name (i));
1308 free (osi.depths);
1309 osi.depths = NULL;
1310 free (osi.stack);
1311 osi.stack = NULL;
1312 osi.tos = NULL;
/* Fixpoint loop: repeat until no size estimate changes.  */
1317 osi.pass = 2;
1318 osi.changed = false;
1319 /* collect_object_sizes_for is changing
1320 osi.reexamine bitmap, so iterate over a copy. */
1321 bitmap_copy (reexamine, osi.reexamine);
1322 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1323 if (bitmap_bit_p (osi.reexamine, i))
1325 collect_object_sizes_for (&osi, ssa_name (i));
1326 if (dump_file && (dump_flags & TDF_DETAILS))
1328 fprintf (dump_file, "Reexamining ");
1329 print_generic_expr (dump_file, ssa_name (i),
1330 dump_flags);
1331 fprintf (dump_file, "\n");
1335 while (osi.changed);
1337 BITMAP_FREE (reexamine);
/* Whatever is still flagged kept its initval/partial estimate; mark it
   computed so it is not revisited.  */
1339 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
1340 bitmap_set_bit (computed[object_size_type], i);
1342 /* Debugging dumps. */
1343 if (dump_file)
1345 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
1346 if (!object_sizes_unknown_p (object_size_type, i))
1348 print_generic_expr (dump_file, ssa_name (i),
1349 dump_flags);
1350 fprintf (dump_file,
1351 ": %s %s%sobject size ",
1352 ((object_size_type & OST_MINIMUM) ? "minimum"
1353 : "maximum"),
1354 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1355 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1356 print_generic_expr (dump_file, object_sizes_get (&osi, i),
1357 dump_flags);
1358 fprintf (dump_file, "\n");
1362 BITMAP_FREE (osi.reexamine);
1363 BITMAP_FREE (osi.visited);
1366 *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
1367 return !size_unknown_p (*psize, object_size_type);
1370 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
/* Only pass 0 reaches here; the result is recorded via object_sizes_set.
   Anything other than an ADDR_EXPR yields size_unknown.  */
1372 static void
1373 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
1375 int object_size_type = osi->object_size_type;
1376 unsigned int varno = SSA_NAME_VERSION (ptr);
1377 tree bytes, wholesize;
1379 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1380 gcc_assert (osi->pass == 0);
/* WITH_SIZE_EXPR wraps the actual value in operand 0; unwrap it.  */
1382 if (TREE_CODE (value) == WITH_SIZE_EXPR)
1383 value = TREE_OPERAND (value, 0);
1385 /* Pointer variables should have been handled by merge_object_sizes. */
1386 gcc_assert (TREE_CODE (value) != SSA_NAME
1387 || !POINTER_TYPE_P (TREE_TYPE (value)));
1389 if (TREE_CODE (value) == ADDR_EXPR)
1390 addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
1391 else
1392 bytes = wholesize = size_unknown (object_size_type);
1394 object_sizes_set (osi, varno, bytes, wholesize);
1398 /* Compute object_sizes for PTR, defined to the result of a call. */
/* Handles strdup/strndup, the IFN_ACCESS_WITH_SIZE internal function and
   plain allocation calls; size and wholesize are the same for all of them.  */
1400 static void
1401 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
1403 int object_size_type = osi->object_size_type;
1404 unsigned int varno = SSA_NAME_VERSION (ptr);
1405 tree bytes = NULL_TREE;
1407 gcc_assert (is_gimple_call (call));
1409 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1410 gcc_assert (osi->pass == 0);
1412 bool is_strdup = gimple_call_builtin_p (call, BUILT_IN_STRDUP);
1413 bool is_strndup = gimple_call_builtin_p (call, BUILT_IN_STRNDUP);
1414 bool is_access_with_size
1415 = gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE);
1416 if (is_strdup || is_strndup)
1417 bytes = strdup_object_size (call, object_size_type, is_strndup);
1418 else if (is_access_with_size)
1419 bytes = access_with_size_object_size (call, object_size_type);
1420 else
1421 bytes = alloc_object_size (call, object_size_type);
/* Reject sizes that are not representable for this size type (e.g.
   non-constant results in static mode).  */
1423 if (!size_valid_p (bytes, object_size_type))
1424 bytes = size_unknown (object_size_type);
1426 object_sizes_set (osi, varno, bytes, bytes);
1430 /* Compute object_sizes for PTR, defined to an unknown value. */
1432 static void
1433 unknown_object_size (struct object_size_info *osi, tree ptr)
1435 int object_size_type = osi->object_size_type;
1436 unsigned int varno = SSA_NAME_VERSION (ptr);
1438 gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
1439 gcc_checking_assert (osi->pass == 0);
1440 tree bytes = size_unknown (object_size_type);
1442 object_sizes_set (osi, varno, bytes, bytes);
1446 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
1447 the object size might need reexamination later. */
/* DEST and ORIG are SSA names; ORIG's (whole)size estimates are folded
   into DEST's via object_sizes_set, setting osi->changed when DEST's
   estimate actually moved.  */
1449 static bool
1450 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
1452 int object_size_type = osi->object_size_type;
1453 unsigned int varno = SSA_NAME_VERSION (dest);
1454 tree orig_bytes, wholesize;
1456 if (object_sizes_unknown_p (object_size_type, varno))
1457 return false;
/* Pass 0 still discovers sizes on demand; later passes only reuse them.  */
1459 if (osi->pass == 0)
1460 collect_object_sizes_for (osi, orig);
1462 orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
1463 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);
1465 if (object_sizes_set (osi, varno, orig_bytes, wholesize))
1466 osi->changed = true;
/* DEST needs reexamination iff ORIG itself is still tentative.  */
1468 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
1472 /* Compute object_sizes for VAR, defined to the result of an assignment
1473 with operator POINTER_PLUS_EXPR. Return true if the object size might
1474 need reexamination later. */
/* Also handles VAR = &MEM_REF, which is decomposed into the MEM_REF's
   base pointer plus its constant offset.  */
1476 static bool
1477 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1479 int object_size_type = osi->object_size_type;
1480 unsigned int varno = SSA_NAME_VERSION (var);
1481 tree bytes, wholesize;
1482 tree op0, op1;
1483 bool reexamine = false;
1485 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1487 op0 = gimple_assign_rhs1 (stmt);
1488 op1 = gimple_assign_rhs2 (stmt);
1490 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
1492 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
1493 gcc_assert (TREE_CODE (rhs) == MEM_REF);
/* &MEM_REF (base, off) acts like base + off.  */
1494 op0 = TREE_OPERAND (rhs, 0);
1495 op1 = TREE_OPERAND (rhs, 1);
1497 else
1498 gcc_unreachable ();
1500 if (object_sizes_unknown_p (object_size_type, varno))
1501 return false;
1503 /* Handle PTR + OFFSET here. */
1504 if (size_valid_p (op1, object_size_type)
1505 && (TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
1507 if (TREE_CODE (op0) == SSA_NAME)
1509 if (osi->pass == 0)
1510 collect_object_sizes_for (osi, op0)1;
1512 bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
1513 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
1514 reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
1516 else
1518 /* op0 will be ADDR_EXPR here. We should never come here during
1519 reexamination. */
1520 gcc_checking_assert (osi->pass == 0);
1521 addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
1524 /* size_for_offset doesn't make sense for -1 size, but it does for size 0
1525 since the wholesize could be non-zero and a negative offset could give
1526 a non-zero size. */
1527 if (size_unknown_p (bytes, 0))
1529 else if ((object_size_type & OST_DYNAMIC)
1530 || compare_tree_int (op1, offset_limit) <= 0)
1531 bytes = size_for_offset (bytes, op1, wholesize);
1532 /* In the static case, with a negative offset, the best estimate for
1533 minimum size is size_unknown but for maximum size, the wholesize is a
1534 better estimate than size_unknown. */
1535 else if (object_size_type & OST_MINIMUM)
1536 bytes = size_unknown (object_size_type);
1537 else
1538 bytes = wholesize;
1540 else
1541 bytes = wholesize = size_unknown (object_size_type);
1543 if (!size_valid_p (bytes, object_size_type)
1544 || !size_valid_p (wholesize, object_size_type))
1545 bytes = wholesize = size_unknown (object_size_type);
1547 if (object_sizes_set (osi, varno, bytes, wholesize))
1548 osi->changed = true;
1549 return reexamine;
1552 /* Compute the dynamic object size for VAR. Return the result in SIZE and
1553 WHOLESIZE. */
/* Dispatches on VAR's form: SSA names recurse through
   collect_object_sizes_for, address expressions are resolved directly,
   everything else is unknown.  */
1555 static void
1556 dynamic_object_size (struct object_size_info *osi, tree var,
1557 tree *size, tree *wholesize)
1559 int object_size_type = osi->object_size_type;
1561 if (TREE_CODE (var) == SSA_NAME)
1563 unsigned varno = SSA_NAME_VERSION (var);
1565 collect_object_sizes_for (osi, var);
1566 *size = object_sizes_get (osi, varno);
1567 *wholesize = object_sizes_get (osi, varno, true);
1569 else if (TREE_CODE (var) == ADDR_EXPR)
1570 addr_object_size (osi, var, object_size_type, size, wholesize);
1571 else
1572 *size = *wholesize = size_unknown (object_size_type);
1575 /* Compute object_sizes for VAR, defined at STMT, which is
1576 a COND_EXPR. Return true if the object size might need reexamination
1577 later. */
1579 static bool
1580 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1582 tree then_, else_;
1583 int object_size_type = osi->object_size_type;
1584 unsigned int varno = SSA_NAME_VERSION (var);
1585 bool reexamine = false;
1587 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
1589 if (object_sizes_unknown_p (object_size_type, varno))
1590 return false;
1592 then_ = gimple_assign_rhs2 (stmt);
1593 else_ = gimple_assign_rhs3 (stmt);
/* Dynamic mode builds a runtime COND_EXPR selecting between the two
   arms' sizes instead of merging to a single static bound.  */
1595 if (object_size_type & OST_DYNAMIC)
1597 tree then_size, then_wholesize, else_size, else_wholesize;
1599 dynamic_object_size (osi, then_, &then_size, &then_wholesize);
/* Skip the else arm when the then arm is already unknown; the result
   is unknown either way.  */
1600 if (!size_unknown_p (then_size, object_size_type))
1601 dynamic_object_size (osi, else_, &else_size, &else_wholesize);
1603 tree cond_size, cond_wholesize;
1604 if (size_unknown_p (then_size, object_size_type)
1605 || size_unknown_p (else_size, object_size_type))
1606 cond_size = cond_wholesize = size_unknown (object_size_type);
1607 else
1609 cond_size = fold_build3 (COND_EXPR, sizetype,
1610 gimple_assign_rhs1 (stmt),
1611 then_size, else_size);
1612 cond_wholesize = fold_build3 (COND_EXPR, sizetype,
1613 gimple_assign_rhs1 (stmt),
1614 then_wholesize, else_wholesize);
1617 object_sizes_set (osi, varno, cond_size, cond_wholesize);
1619 return false;
/* Static mode: fold both arms into VAR's estimate.  */
1622 if (TREE_CODE (then_) == SSA_NAME)
1623 reexamine |= merge_object_sizes (osi, var, then_);
1624 else
1625 expr_object_size (osi, var, then_);
1627 if (object_sizes_unknown_p (object_size_type, varno))
1628 return reexamine;
1630 if (TREE_CODE (else_) == SSA_NAME)
1631 reexamine |= merge_object_sizes (osi, var, else_);
1632 else
1633 expr_object_size (osi, var, else_);
1635 return reexamine;
1638 /* Find size of an object passed as a parameter to the function. */
/* VAR is the default-definition SSA name of a PARM_DECL.  Static sizes
   and non-pointer parameters fall back to expr_object_size; dynamic
   pointer parameters may get a size from an "access" attribute whose
   size argument is another parameter.  */
1640 static void
1641 parm_object_size (struct object_size_info *osi, tree var)
1643 int object_size_type = osi->object_size_type;
1644 tree parm = SSA_NAME_VAR (var);
1646 if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
1648 expr_object_size (osi, var, parm);
1649 return;
1652 /* Look for access attribute. */
1653 rdwr_map rdwr_idx;
1655 tree fndecl = cfun->decl;
1656 const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
1657 tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
1658 tree sz = NULL_TREE;
1660 /* If we have an access attribute with a usable size argument... */
1661 if (access && access->sizarg != UINT_MAX
1662 /* ... and either PARM is void * or has a type that is complete and has a
1663 constant size... */
1664 && ((typesize && poly_int_tree_p (typesize))
1665 || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
1667 tree fnargs = DECL_ARGUMENTS (fndecl);
1668 tree arg = NULL_TREE;
1669 unsigned argpos = 0;
1671 /* ... then walk through the parameters to pick the size parameter and
1672 safely scale it by the type size if needed.
1674 TODO: we could also compute the size of VLAs where the size is
1675 given by a function parameter. */
1676 for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
1677 if (argpos == access->sizarg)
1679 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (arg)));
/* The size parameter may have no SSA default def yet; create one so
   the size expression can reference it.  */
1680 sz = get_or_create_ssa_default_def (cfun, arg);
1681 if (sz != NULL_TREE)
1683 sz = fold_convert (sizetype, sz);
/* The attribute counts elements; scale to bytes unless PARM is
   void * (no typesize).  */
1684 if (typesize)
1685 sz = size_binop (MULT_EXPR, sz, typesize);
1687 break;
1690 if (!sz)
1691 sz = size_unknown (object_size_type);
1693 object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
1696 /* Compute an object size expression for VAR, which is the result of a PHI
1697 node. */
/* Dynamic mode only: the per-argument sizes are stored as TREE_VECs and
   turned into real PHIs later by emit_phi_nodes.  */
1699 static void
1700 phi_dynamic_object_size (struct object_size_info *osi, tree var)
1702 int object_size_type = osi->object_size_type;
1703 unsigned int varno = SSA_NAME_VERSION (var);
1704 gimple *stmt = SSA_NAME_DEF_STMT (var);
1705 unsigned i, num_args = gimple_phi_num_args (stmt);
1706 bool wholesize_needed = false;
1708 /* The extra space is for the PHI result at the end, which object_sizes_set
1709 sets for us. */
1710 tree sizes = make_tree_vec (num_args + 1);
1711 tree wholesizes = make_tree_vec (num_args + 1);
1713 /* Bail out if the size of any of the PHI arguments cannot be
1714 determined. */
1715 for (i = 0; i < num_args; i++)
1717 edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
/* Statements cannot be inserted on abnormal/EH edges, so give up.  */
1718 if (e->flags & EDGE_COMPLEX)
1719 break;
1721 tree rhs = gimple_phi_arg_def (stmt, i);
1722 tree size, wholesize;
1724 dynamic_object_size (osi, rhs, &size, &wholesize);
1726 if (size_unknown_p (size, object_size_type))
1727 break;
1729 if (size != wholesize)
1730 wholesize_needed = true;
1732 TREE_VEC_ELT (sizes, i) = size;
1733 TREE_VEC_ELT (wholesizes, i) = wholesize;
/* I < NUM_ARGS means one of the breaks above fired: discard everything.  */
1736 if (i < num_args)
1738 ggc_free (sizes);
1739 ggc_free (wholesizes);
1740 sizes = wholesizes = size_unknown (object_size_type);
1743 /* Point to the same TREE_VEC so that we can avoid emitting two PHI
1744 nodes. */
1745 else if (!wholesize_needed)
1747 ggc_free (wholesizes);
1748 wholesizes = sizes;
1751 object_sizes_set (osi, varno, sizes, wholesizes);
1754 /* Compute object sizes for VAR.
1755 For ADDR_EXPR an object size is the number of remaining bytes
1756 to the end of the object (where what is considered an object depends on
1757 OSI->object_size_type).
1758 For allocation GIMPLE_CALL like malloc or calloc object size is the size
1759 of the allocation.
1760 For POINTER_PLUS_EXPR where second operand is a constant integer,
1761 object size is object size of the first operand minus the constant.
1762 If the constant is bigger than the number of remaining bytes until the
1763 end of the object, object size is 0, but if it is instead a pointer
1764 subtraction, object size is size_unknown (object_size_type).
1765 To differentiate addition from subtraction, ADDR_EXPR returns
1766 size_unknown (object_size_type) for all objects bigger than half of the
1767 address space, and constants less than half of the address space are
1768 considered addition, while bigger constants subtraction.
1769 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
1770 object size is object size of that argument.
1771 Otherwise, object size is the maximum of object sizes of variables
1772 that it might be set to. */
1774 static void
1775 collect_object_sizes_for (struct object_size_info *osi, tree var)
1777 int object_size_type = osi->object_size_type;
1778 unsigned int varno = SSA_NAME_VERSION (var);
1779 gimple *stmt;
1780 bool reexamine;
/* Already finalized for this size type; nothing to do.  */
1782 if (bitmap_bit_p (computed[object_size_type], varno))
1783 return;
1785 if (osi->pass == 0)
1787 if (bitmap_set_bit (osi->visited, varno))
1789 /* Initialize to 0 for maximum size and M1U for minimum size so that
1790 it gets immediately overridden. */
1791 object_sizes_initialize (osi, varno,
1792 size_initval (object_size_type),
1793 size_initval (object_size_type));
1795 else
1797 /* Found a dependency loop. Mark the variable for later
1798 re-examination. */
1799 if (object_size_type & OST_DYNAMIC)
1800 object_sizes_set_temp (osi, varno);
1802 bitmap_set_bit (osi->reexamine, varno);
1803 if (dump_file && (dump_flags & TDF_DETAILS))
1805 fprintf (dump_file, "Found a dependency loop at ");
1806 print_generic_expr (dump_file, var, dump_flags);
1807 fprintf (dump_file, "\n");
1809 return;
1813 if (dump_file && (dump_flags & TDF_DETAILS))
1815 fprintf (dump_file, "Visiting use-def links for ");
1816 print_generic_expr (dump_file, var, dump_flags);
1817 fprintf (dump_file, "\n");
/* Dispatch on the defining statement of VAR.  */
1820 stmt = SSA_NAME_DEF_STMT (var);
1821 reexamine = false;
1823 switch (gimple_code (stmt))
1825 case GIMPLE_ASSIGN:
1827 tree rhs = gimple_assign_rhs1 (stmt);
1828 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1829 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1830 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
1831 reexamine = plus_stmt_object_size (osi, var, stmt);
1832 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1833 reexamine = cond_expr_object_size (osi, var, stmt);
1834 else if (gimple_assign_single_p (stmt)
1835 || gimple_assign_unary_nop_p (stmt))
1837 if (TREE_CODE (rhs) == SSA_NAME
1838 && POINTER_TYPE_P (TREE_TYPE (rhs)))
1839 reexamine = merge_object_sizes (osi, var, rhs);
1840 else
1841 expr_object_size (osi, var, rhs);
1843 else
1844 unknown_object_size (osi, var);
1845 break;
1848 case GIMPLE_CALL:
1850 gcall *call_stmt = as_a <gcall *> (stmt);
/* Calls like memcpy that return one of their arguments pass the size
   of that argument through.  */
1851 tree arg = pass_through_call (call_stmt);
1852 if (arg)
1854 if (TREE_CODE (arg) == SSA_NAME
1855 && POINTER_TYPE_P (TREE_TYPE (arg)))
1856 reexamine = merge_object_sizes (osi, var, arg);
1857 else
1858 expr_object_size (osi, var, arg);
1860 else
1861 call_object_size (osi, var, call_stmt);
1862 break;
1865 case GIMPLE_ASM:
1866 /* Pointers defined by __asm__ statements can point anywhere. */
1867 unknown_object_size (osi, var);
1868 break;
1870 case GIMPLE_NOP:
1871 if (SSA_NAME_VAR (var)
1872 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1873 parm_object_size (osi, var);
1874 else
1875 /* Uninitialized SSA names point nowhere. */
1876 unknown_object_size (osi, var);
1877 break;
1879 case GIMPLE_PHI:
1881 unsigned i;
1883 if (object_size_type & OST_DYNAMIC)
1885 phi_dynamic_object_size (osi, var);
1886 break;
1889 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1891 tree rhs = gimple_phi_arg (stmt, i)->def;
1893 if (object_sizes_unknown_p (object_size_type, varno))
1894 break;
1896 if (TREE_CODE (rhs) == SSA_NAME)
1897 reexamine |= merge_object_sizes (osi, var, rhs);
1898 else if (osi->pass == 0)
1899 expr_object_size (osi, var, rhs);
1901 break;
1904 default:
1905 gcc_unreachable ();
1908 /* Dynamic sizes use placeholder temps to return an answer, so it is always
1909 safe to set COMPUTED for them. */
1910 if ((object_size_type & OST_DYNAMIC)
1911 || !reexamine || object_sizes_unknown_p (object_size_type, varno))
1913 bitmap_set_bit (computed[object_size_type], varno);
1914 if (!(object_size_type & OST_DYNAMIC))
1915 bitmap_clear_bit (osi->reexamine, varno);
1916 else if (reexamine)
1917 bitmap_set_bit (osi->reexamine, varno);
1919 else
1921 bitmap_set_bit (osi->reexamine, varno);
1922 if (dump_file && (dump_flags & TDF_DETAILS))
1924 fprintf (dump_file, "Need to reexamine ");
1925 print_generic_expr (dump_file, var, dump_flags);
1926 fprintf (dump_file, "\n");
1932 /* Helper function for check_for_plus_in_loops. Called recursively
1933 to detect loops. */
/* DEPTH increases every time a POINTER_PLUS_EXPR with a non-zero constant
   is crossed; revisiting VAR at a different depth therefore means the
   pointer grows around a cycle.  osi->depths/stack/tos track the walk.  */
1935 static void
1936 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1937 unsigned int depth)
1939 gimple *stmt = SSA_NAME_DEF_STMT (var);
1940 unsigned int varno = SSA_NAME_VERSION (var);
1942 if (osi->depths[varno])
1944 if (osi->depths[varno] != depth)
1946 unsigned int *sp;
1948 /* Found a loop involving pointer addition. */
/* Assume minimum object size 0 for every SSA name on the cycle and
   finalize them so pass 2 does not keep growing them.  */
1949 for (sp = osi->tos; sp > osi->stack; )
1951 --sp;
1952 bitmap_clear_bit (osi->reexamine, *sp);
1953 bitmap_set_bit (computed[osi->object_size_type], *sp);
1954 object_sizes_set (osi, *sp, size_zero_node,
1955 object_sizes_get (osi, *sp, true));
1956 if (*sp == varno)
1957 break;
1960 return;
1962 else if (! bitmap_bit_p (osi->reexamine, varno))
1963 return;
1965 osi->depths[varno] = depth;
1966 *osi->tos++ = varno;
1968 switch (gimple_code (stmt))
1971 case GIMPLE_ASSIGN:
1973 if ((gimple_assign_single_p (stmt)
1974 || gimple_assign_unary_nop_p (stmt))
1975 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1977 tree rhs = gimple_assign_rhs1 (stmt);
1979 check_for_plus_in_loops_1 (osi, rhs, depth);
1981 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1983 tree basevar = gimple_assign_rhs1 (stmt);
1984 tree cst = gimple_assign_rhs2 (stmt);
1986 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
/* Only a non-zero increment advances the depth.  */
1988 check_for_plus_in_loops_1 (osi, basevar,
1989 depth + !integer_zerop (cst));
1991 else
1992 gcc_unreachable ();
1993 break;
1996 case GIMPLE_CALL:
1998 gcall *call_stmt = as_a <gcall *> (stmt);
1999 tree arg = pass_through_call (call_stmt);
2000 if (arg)
2002 if (TREE_CODE (arg) == SSA_NAME)
2003 check_for_plus_in_loops_1 (osi, arg, depth);
2004 else
2005 gcc_unreachable ();
2007 break;
2010 case GIMPLE_PHI:
2012 unsigned i;
2014 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2016 tree rhs = gimple_phi_arg (stmt, i)->def;
2018 if (TREE_CODE (rhs) == SSA_NAME)
2019 check_for_plus_in_loops_1 (osi, rhs, depth);
2021 break;
2024 default:
2025 gcc_unreachable ();
/* Unwind this frame's bookkeeping before returning to the caller.  */
2028 osi->depths[varno] = 0;
2029 osi->tos--;
2033 /* Check if some pointer we are computing object size of is being increased
2034 within a loop. If yes, assume all the SSA variables participating in
2035 that loop have minimum object sizes 0. */
/* Entry point for the OST_MINIMUM loop detection; only starts a walk when
   VAR itself is defined by a pointer addition with a positive constant.  */
2037 static void
2038 check_for_plus_in_loops (struct object_size_info *osi, tree var)
2040 gimple *stmt = SSA_NAME_DEF_STMT (var);
2042 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
2043 and looked for a POINTER_PLUS_EXPR in the pass-through
2044 argument, if any. In GIMPLE, however, such an expression
2045 is not a valid call operand. */
2047 if (is_gimple_assign (stmt)
2048 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2050 tree basevar = gimple_assign_rhs1 (stmt);
2051 tree cst = gimple_assign_rhs2 (stmt);
2053 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2055 /* Skip non-positive offsets. */
2056 if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
2057 return;
/* Seed the walk with BASEVAR at depth 1; VAR, one addition later,
   starts at depth 2.  A revisit at a different depth is a loop.  */
2059 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
2060 *osi->tos++ = SSA_NAME_VERSION (basevar);
2061 check_for_plus_in_loops_1 (osi, var, 2);
2062 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
2063 osi->tos--;
2068 /* Initialize data structures for the object size computation. */
/* Idempotent: a non-NULL computed[0] means initialization already ran.  */
2070 void
2071 init_object_sizes (void)
2073 int object_size_type;
2075 if (computed[0])
2076 return;
2078 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2080 object_sizes_grow (object_size_type);
2081 computed[object_size_type] = BITMAP_ALLOC (NULL);
2084 init_offset_limit ();
2088 /* Destroy data structures after the object size computation. */
/* Releases the per-size-type caches; BITMAP_FREE also NULLs the pointer,
   so init_object_sizes can run again afterwards.  */
2090 void
2091 fini_object_sizes (void)
2093 int object_size_type;
2095 for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2097 object_sizes_release (object_size_type);
2098 BITMAP_FREE (computed[object_size_type]);
2102 /* Dummy valueize function. Returns T unchanged; used where a valueization
2103 callback is required but no substitution is wanted. */
2104 static tree
2105 do_valueize (tree t)
2107 return t;
2110 /* Process a __builtin_object_size or __builtin_dynamic_object_size call in
2111 CALL early for subobjects before any object information is lost due to
2112 optimization. Insert a MIN or MAX expression of the result and
2113 __builtin_object_size at I so that it may be processed in the second pass.
2114 __builtin_dynamic_object_size is treated like __builtin_object_size here
2115 since we're only looking for constant bounds. */
2117 static void
2118 early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
2120 tree ost = gimple_call_arg (call, 1);
2121 tree lhs = gimple_call_lhs (call);
2122 gcc_assert (lhs != NULL_TREE);
2124 if (!tree_fits_uhwi_p (ost))
2125 return;
2127 unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
2128 tree ptr = gimple_call_arg (call, 0);
/* Only the subobject variants (types 1 and 3) lose precision to later
   optimization; the rest are handled in the late pass alone.  */
2130 if (object_size_type != 1 && object_size_type != 3)
2131 return;
2133 if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
2134 return;
2136 tree type = TREE_TYPE (lhs);
2137 tree bytes;
2138 if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
2139 || !int_fits_type_p (bytes, type))
2140 return;
/* Rewrite  lhs = bos (ptr, ost)  into
   tem = bos (ptr, ost); lhs = MIN/MAX (tem, bytes)
   so the late pass can still refine TEM while BYTES bounds the result.
   MAX for minimum sizes, MIN for maximum sizes.  */
2142 tree tem = make_ssa_name (type);
2143 gimple_call_set_lhs (call, tem);
2144 enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
2145 tree cst = fold_convert (type, bytes);
2146 gimple *g = gimple_build_assign (lhs, code, tem, cst);
2147 gsi_insert_after (i, g, GSI_NEW_STMT);
2148 update_stmt (call);
2151 /* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
2152 expression and insert it at I. Return true if it succeeds. */
2154 static bool
2155 dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
2157 gcc_assert (gimple_call_num_args (call) == 2);
2159 tree args[2];
2160 args[0] = gimple_call_arg (call, 0);
2161 args[1] = gimple_call_arg (call, 1);
2163 location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
2164 tree result_type = gimple_call_return_type (as_a <gcall *> (call));
/* Let the generic builtin folder compute the (possibly non-constant)
   size expression.  */
2165 tree result = fold_builtin_call_array (loc, result_type,
2166 gimple_call_fn (call), 2, args);
2168 if (!result)
2169 return false;
2171 /* fold_builtin_call_array may wrap the result inside a
2172 NOP_EXPR. */
2173 STRIP_NOPS (result);
2174 gimplify_and_update_call_from_tree (i, result);
2176 if (dump_file && (dump_flags & TDF_DETAILS))
2178 fprintf (dump_file, "Simplified (dynamic)\n ");
2179 print_gimple_stmt (dump_file, call, 0, dump_flags);
2180 fprintf (dump_file, " to ");
2181 print_generic_expr (dump_file, result);
2182 fprintf (dump_file, "\n");
2184 return true;
/* Main driver shared by the late ("objsz") and early ("early_objsz") passes.
   Walk every statement of FUN looking for __builtin_object_size and
   __builtin_dynamic_object_size calls with an SSA lhs and fold them.
   When EARLY is true, only insert MIN/MAX clamps for subobject queries
   (see early_object_sizes_execute_one); otherwise fold to a constant,
   lowering unfoldable dynamic calls to __builtin_object_size first so the
   documented fallback values (0 for minimum, (size_t)-1 for maximum) can
   still be produced.  Returns the TODO flags for the pass manager.  */

static unsigned int
object_sizes_execute (function *fun, bool early)
{
  unsigned todo = 0;
  /* SSA names whose uses we replace; candidates for simple DCE below.  */
  auto_bitmap sdce_worklist;

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  bool dynamic = false;

	  gimple *call = gsi_stmt (i);
	  if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
	    dynamic = true;
	  else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* A call whose result is unused has nothing to fold.  */
	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  /* Lazily allocate the per-pass size tables (freed at the end).  */
	  init_object_sizes ();

	  /* If early, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  Do the same for
	     __builtin_dynamic_object_size too.  */
	  if (early)
	    {
	      early_object_sizes_execute_one (&i, call);
	      continue;
	    }

	  if (dynamic)
	    {
	      if (dynamic_object_sizes_execute_one (&i, call))
		continue;
	      else
		{
		  /* If we could not find a suitable size expression, lower to
		     __builtin_object_size so that we may at least get a
		     constant lower or higher estimate.  */
		  tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
		  gimple_call_set_fndecl (call, bosfn);
		  update_stmt (call);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      print_generic_expr (dump_file, gimple_call_arg (call, 0),
					  dump_flags);
		      fprintf (dump_file,
			       ": Retrying as __builtin_object_size\n");
		    }
		}
	    }

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      /* If folding failed, fall back to the documented failure
		 values: 0 for a minimum query, (size_t)-1 for a maximum
		 query with a known OST.  */
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  if (object_size_type & OST_MINIMUM)
		    result = build_zero_cst (size_type_node);
		  else if (object_size_type < OST_END)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, "  to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    {
	      replace_uses_by (lhs, result);
	      /* Mark lhs as possibly DCEd.  */
	      bitmap_set_bit (sdce_worklist, SSA_NAME_VERSION (lhs));
	    }
	  else
	    replace_call_with_value (&i, result);
	}
    }

  fini_object_sizes ();
  simple_dce_from_worklist (sdce_worklist);
  return todo;
}
/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_objsz, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Late object-size pass: folds the builtins to constants (EARLY == false
   in object_sizes_execute).  Clonable so it can be scheduled more than
   once in the pass pipeline.  */

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
  unsigned int execute (function *fun) final override
  {
    return object_sizes_execute (fun, false);
  }
}; // class pass_object_sizes

} // anon namespace
/* Factory used by the pass manager to instantiate the late objsz pass.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}
/* Early version of pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_early_object_sizes =
{
  GIMPLE_PASS, /* type */
  "early_objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Early object-size pass: runs object_sizes_execute with EARLY == true,
   which only inserts MIN/MAX clamps for subobject queries before
   optimizations lose the subobject information.  */

class pass_early_object_sizes : public gimple_opt_pass
{
public:
  pass_early_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *fun) final override
  {
    return object_sizes_execute (fun, true);
  }
}; // class pass_early_object_sizes

} // anon namespace
/* Factory used by the pass manager to instantiate the early objsz pass.  */

gimple_opt_pass *
make_pass_early_object_sizes (gcc::context *ctxt)
{
  return new pass_early_object_sizes (ctxt);
}