/* Consolidation of svalues and regions.
   Copyright (C) 2020-2025 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_VECTOR
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "bitmap.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"

#if ENABLE_ANALYZER

namespace ana {

/* class region_model_manager.  */

/* region_model_manager's ctor.  */

region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_next_symbol_id (0),
  m_empty_call_string (),
  m_root_region (alloc_symbol_id ()),
  m_stack_region (alloc_symbol_id (), &m_root_region),
  m_heap_region (alloc_symbol_id (), &m_root_region),
  m_unknown_NULL (NULL),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_symbol_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_symbol_id (), &m_root_region),
  m_globals_map (),
  m_thread_local_region (alloc_symbol_id (), &m_root_region),
  m_errno_region (alloc_symbol_id (), &m_thread_local_region),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ()),
  m_known_fn_mgr (logger)
{
}

/* region_model_manager's dtor.  Delete all of the managed svalues
   and regions.  */

region_model_manager::~region_model_manager ()
{
  /* Delete consolidated svalues.  */
  for (constants_map_t::iterator iter = m_constants_map.begin ();
       iter != m_constants_map.end (); ++iter)
    delete (*iter).second;
  for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
       iter != m_unknowns_map.end (); ++iter)
    delete (*iter).second;
  delete m_unknown_NULL;
  for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
       iter != m_poisoned_values_map.end (); ++iter)
    delete (*iter).second;
  for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
       iter != m_setjmp_values_map.end (); ++iter)
    delete (*iter).second;
  for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
       iter != m_initial_values_map.end (); ++iter)
    delete (*iter).second;
  for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
       iter != m_pointer_values_map.end (); ++iter)
    delete (*iter).second;
  for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
       iter != m_unaryop_values_map.end (); ++iter)
    delete (*iter).second;
  for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
       iter != m_binop_values_map.end (); ++iter)
    delete (*iter).second;
  for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
       iter != m_sub_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_repeated_values_map)
    delete iter.second;
  for (auto iter : m_bits_within_values_map)
    delete iter.second;
  for (unmergeable_values_map_t::iterator iter
	 = m_unmergeable_values_map.begin ();
       iter != m_unmergeable_values_map.end (); ++iter)
    delete (*iter).second;
  for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
       iter != m_widening_values_map.end (); ++iter)
    delete (*iter).second;
  for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
       iter != m_compound_values_map.end (); ++iter)
    delete (*iter).second;
  for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
       iter != m_conjured_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_asm_output_values_map)
    delete iter.second;
  for (auto iter : m_const_fn_result_values_map)
    delete iter.second;

  /* Delete consolidated regions.  */
  for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
       iter != m_fndecls_map.end (); ++iter)
    delete (*iter).second;
  for (labels_map_t::iterator iter = m_labels_map.begin ();
       iter != m_labels_map.end (); ++iter)
    delete (*iter).second;
  for (globals_map_t::iterator iter = m_globals_map.begin ();
       iter != m_globals_map.end (); ++iter)
    delete (*iter).second;
  for (string_map_t::iterator iter = m_string_map.begin ();
       iter != m_string_map.end (); ++iter)
    delete (*iter).second;

  delete m_range_mgr;
}

/* Return true if C exceeds the complexity limit for svalues.  */

bool
region_model_manager::too_complex_p (const complexity &c) const
{
  if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
    return true;
  return false;
}

/* If SVAL exceeds the complexity limit for svalues, delete it
   and return true.
   Otherwise update m_max_complexity and return false.  */

bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  if (m_checking_feasibility)
    return false;

  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
	m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
	m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  sval->dump_to_pp (&pp, true);
  if (warning_at (input_location, OPT_Wanalyzer_symbol_too_complex,
		  "symbol too complicated: %qs",
		  pp_formatted_text (&pp)))
    inform (input_location,
	    "max_depth %i exceeds --param=analyzer-max-svalue-depth=%i",
	    c.m_max_depth, param_analyzer_max_svalue_depth);

  delete sval;
  return true;
}

/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL)		\
  do {							\
    svalue *sval_ = (SVAL);				\
    tree type_ = sval_->get_type ();			\
    if (reject_if_too_complex (sval_))			\
      return get_or_create_unknown_svalue (type_);	\
  } while (0)

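/* Illustrative sketch (not part of GCC): a typical consolidation getter
   below follows this pattern, with the macro deleting an overly-complex
   svalue and falling back to an unknown svalue of the same type:

     const svalue *
     region_model_manager::get_or_create_foo_svalue (tree type)
     {
       foo_svalue::key_t key (type);
       if (foo_svalue **slot = m_foo_values_map.get (key))
	 return *slot;
       foo_svalue *foo_sval = new foo_svalue (alloc_symbol_id (), type);
       RETURN_UNKNOWN_IF_TOO_COMPLEX (foo_sval);
       m_foo_values_map.put (key, foo_sval);
       return foo_sval;
     }

   where "foo_svalue" and "m_foo_values_map" are hypothetical stand-ins
   for the concrete svalue kinds and maps used below.  */
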
/* svalue consolidation.  */

/* Return the svalue * for a constant_svalue for CST_EXPR,
   creating it if necessary.
   The constant_svalue instances are reused, based on pointer equality
   of trees.  */

const svalue *
region_model_manager::get_or_create_constant_svalue (tree type, tree cst_expr)
{
  gcc_assert (cst_expr);
  gcc_assert (CONSTANT_CLASS_P (cst_expr));
  gcc_assert (type == TREE_TYPE (cst_expr) || type == NULL_TREE);

  constant_svalue::key_t key (type, cst_expr);
  constant_svalue **slot = m_constants_map.get (key);
  if (slot)
    return *slot;
  constant_svalue *cst_sval
    = new constant_svalue (alloc_symbol_id (), type, cst_expr);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
  m_constants_map.put (key, cst_sval);
  return cst_sval;
}

const svalue *
region_model_manager::get_or_create_constant_svalue (tree cst_expr)
{
  return get_or_create_constant_svalue (TREE_TYPE (cst_expr), cst_expr);
}

/* Return the svalue * for a constant_svalue for the INTEGER_CST
   for CST of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_int_cst (tree type,
					     const poly_wide_int_ref &cst)
{
  tree effective_type = type;
  if (!type)
    effective_type = ptrdiff_type_node;
  gcc_assert (INTEGRAL_TYPE_P (effective_type)
	      || POINTER_TYPE_P (effective_type));
  tree tree_cst = wide_int_to_tree (effective_type, cst);
  return get_or_create_constant_svalue (type, tree_cst);
}

/* Return the svalue * for the constant_svalue for the NULL pointer
   of POINTER_TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_null_ptr (tree pointer_type)
{
  gcc_assert (pointer_type);
  gcc_assert (POINTER_TYPE_P (pointer_type));
  return get_or_create_int_cst (pointer_type, 0);
}

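/* Usage sketch (illustrative, not part of GCC): given a
   region_model_manager MGR, callers such as region_model obtain
   consolidated constants via e.g.:

     const svalue *zero = mgr->get_or_create_int_cst (integer_type_node, 0);
     const svalue *np = mgr->get_or_create_null_ptr (ptr_type_node);

   Repeated calls with equal arguments return the same svalue instance,
   which is what makes comparing svalues by pointer valid.  */
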
/* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
   creating it if necessary.
   The unknown_svalue instances are reused, based on pointer equality
   of the types.  */

const svalue *
region_model_manager::get_or_create_unknown_svalue (tree type)
{
  /* Don't create unknown values when doing feasibility testing;
     instead, create a unique svalue.  */
  if (m_checking_feasibility)
    return create_unique_svalue (type);

  /* Special-case NULL, so that the hash_map can use NULL as the
     "empty" value.  */
  if (type == NULL_TREE)
    {
      if (!m_unknown_NULL)
	m_unknown_NULL = new unknown_svalue (alloc_symbol_id (), type);
      return m_unknown_NULL;
    }

  unknown_svalue **slot = m_unknowns_map.get (type);
  if (slot)
    return *slot;
  unknown_svalue *sval = new unknown_svalue (alloc_symbol_id (), type);
  m_unknowns_map.put (type, sval);
  return sval;
}

/* Return a freshly-allocated svalue of TYPE, owned by this manager.  */

const svalue *
region_model_manager::create_unique_svalue (tree type)
{
  svalue *sval = new placeholder_svalue (alloc_symbol_id (), type, "unique");
  m_managed_dynamic_svalues.safe_push (sval);
  return sval;
}

/* Return the svalue * for the initial value of REG, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_initial_value (const region *reg,
						   bool check_poisoned)
{
  if (!reg->can_have_initial_svalue_p () && check_poisoned)
    return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
					  reg->get_type ());

  /* The initial value of a cast is a cast of the initial value.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    {
      const region *original_reg = cast_reg->get_parent_region ();
      return get_or_create_cast (cast_reg->get_type (),
				 get_or_create_initial_value (original_reg));
    }

  /* Simplify:
       INIT_VAL(ELEMENT_REG(STRING_REG), CONSTANT_SVAL)
     to:
       CONSTANT_SVAL(STRING[N]).  */
  if (const element_region *element_reg = reg->dyn_cast_element_region ())
    if (tree cst_idx = element_reg->get_index ()->maybe_get_constant ())
      if (const string_region *string_reg
	    = element_reg->get_parent_region ()->dyn_cast_string_region ())
	if (tree_fits_shwi_p (cst_idx))
	  {
	    HOST_WIDE_INT idx = tree_to_shwi (cst_idx);
	    tree string_cst = string_reg->get_string_cst ();
	    if (idx >= 0 && idx <= TREE_STRING_LENGTH (string_cst))
	      {
		int ch = TREE_STRING_POINTER (string_cst)[idx];
		return get_or_create_int_cst (reg->get_type (), ch);
	      }
	  }

  /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL.  */
  if (reg->symbolic_for_unknown_ptr_p ())
    return get_or_create_unknown_svalue (reg->get_type ());

  if (initial_svalue **slot = m_initial_values_map.get (reg))
    return *slot;
  initial_svalue *initial_sval
    = new initial_svalue (alloc_symbol_id (), reg->get_type (), reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
  m_initial_values_map.put (reg, initial_sval);
  return initial_sval;
}

/* Return the svalue * for R using type TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
						   tree type)
{
  setjmp_svalue::key_t key (r, type);
  if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
    return *slot;
  setjmp_svalue *setjmp_sval = new setjmp_svalue (r, alloc_symbol_id (), type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
  m_setjmp_values_map.put (key, setjmp_sval);
  return setjmp_sval;
}

/* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
						      tree type)
{
  poisoned_svalue::key_t key (kind, type);
  if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
    return *slot;
  poisoned_svalue *poisoned_sval
    = new poisoned_svalue (kind, alloc_symbol_id (), type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
  m_poisoned_values_map.put (key, poisoned_sval);
  return poisoned_sval;
}

/* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
{
  /* If this is a symbolic region from dereferencing a pointer, and the types
     match, then return the original pointer.  */
  if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
    if (ptr_type == sym_reg->get_pointer ()->get_type ())
      return sym_reg->get_pointer ();

  region_svalue::key_t key (ptr_type, pointee);
  if (region_svalue **slot = m_pointer_values_map.get (key))
    return *slot;
  region_svalue *sval
    = new region_svalue (alloc_symbol_id (), ptr_type, pointee);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
  m_pointer_values_map.put (key, sval);
  return sval;
}

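/* Illustrative sketch (not part of GCC): the symbolic_region special
   case above means "&*p" folds back to "p".  Assuming P_SVAL is a
   hypothetical svalue of pointer type:

     const region *star_p = mgr->get_symbolic_region (p_sval);
     const svalue *addr
       = mgr->get_ptr_svalue (p_sval->get_type (), star_p);

   here ADDR is P_SVAL itself, rather than a fresh region_svalue.  */
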
/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned".  */
  else if (const poisoned_svalue *poisoned_sval
	     = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	if (!type)
	  return nullptr;

	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))"
	     => "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE.  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    if (tree inner_type = arg->get_type ())
	      if (TYPE_SIZE (type)
		  && TYPE_SIZE (inner_type)
		  && (fold_binary (LE_EXPR, boolean_type_node,
				   TYPE_SIZE (type), TYPE_SIZE (inner_type))
		      == boolean_true_node))
		return maybe_fold_unaryop (type, op, innermost_arg);
	  }

	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());

	/* Casting all zeroes should give all zeroes.  */
	if (type
	    && arg->all_zeroes_p ()
	    && (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)))
	  return get_or_create_int_cst (type, 0);
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
	    {
	      enum tree_code inv_op
		= invert_tree_comparison (binop->get_op (),
					  HONOR_NANS (binop->get_type ()));
	      if (inv_op != ERROR_MARK)
		return get_or_create_binop (binop->get_type (), inv_op,
					    binop->get_arg0 (),
					    binop->get_arg1 ());
	    }
      }
      break;
    case NEGATE_EXPR:
      {
	/* -(-(VAL)) is VAL, for integer types.  */
	if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
	  if (unaryop->get_op () == NEGATE_EXPR
	      && type == unaryop->get_type ()
	      && type
	      && INTEGRAL_TYPE_P (type))
	    return unaryop->get_arg ();
      }
      break;
    }

  /* Constants.  */
  if (tree cst = arg->maybe_get_constant ())
    if (tree result = fold_unary (op, type, cst))
      {
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);

	/* fold_unary can return casts of constants; try to handle them.  */
	if (op != NOP_EXPR
	    && type
	    && TREE_CODE (result) == NOP_EXPR
	    && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
	  {
	    const svalue *inner_cst
	      = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
	    return get_or_create_cast (type,
				       get_or_create_cast (TREE_TYPE (result),
							   inner_cst));
	  }
      }

  return NULL;
}

/* Return the svalue * for a unary operation OP on ARG with a result of
   type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
					     const svalue *arg)
{
  if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
    return folded;
  unaryop_svalue::key_t key (type, op, arg);
  if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
    return *slot;
  unaryop_svalue *unaryop_sval
    = new unaryop_svalue (alloc_symbol_id (), type, op, arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
  m_unaryop_values_map.put (key, unaryop_sval);
  return unaryop_sval;
}

/* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
   Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
   of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
   and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
   on.  */

static enum tree_code
get_code_for_cast (tree dst_type, tree src_type)
{
  if (!dst_type)
    return NOP_EXPR;
  if (!src_type)
    return NOP_EXPR;

  if (SCALAR_FLOAT_TYPE_P (src_type))
    {
      if (TREE_CODE (dst_type) == INTEGER_TYPE)
	return FIX_TRUNC_EXPR;
      else
	return VIEW_CONVERT_EXPR;
    }

  return NOP_EXPR;
}

/* Return the svalue * for a cast of ARG to type TYPE, creating it
   if necessary.  */

const svalue *
region_model_manager::get_or_create_cast (tree type, const svalue *arg)
{
  /* No-op if the types are the same.  */
  if (type == arg->get_type ())
    return arg;

  /* Don't attempt to handle casts involving vector types for now.  */
  if (type)
    if (VECTOR_TYPE_P (type)
	|| (arg->get_type ()
	    && VECTOR_TYPE_P (arg->get_type ())))
      return get_or_create_unknown_svalue (type);

  enum tree_code op = get_code_for_cast (type, arg->get_type ());
  return get_or_create_unaryop (type, op, arg);
}

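/* Illustrative examples (not part of GCC), assuming SVAL is a
   hypothetical svalue of type int:

     mgr->get_or_create_cast (integer_type_node, sval);
       // no-op: same type, returns SVAL itself
     mgr->get_or_create_cast (long_integer_type_node, sval);
       // becomes get_or_create_unaryop (long, NOP_EXPR, sval)

   A float-to-integer cast instead goes through FIX_TRUNC_EXPR via
   get_code_for_cast, so that fold_unary can fold constant cases.  */
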
/* Subroutine of region_model_manager::maybe_fold_binop for handling
   (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
   optimize_bit_field_compare, where CST is from ARG1.

   Support masking out bits from a compound_svalue for comparing a bitfield
   against a value, as generated by optimize_bit_field_compare for
   BITFIELD == VALUE.

   If COMPOUND_SVAL has a value for the appropriate bits, return it,
   shifted accordingly.
   Otherwise return NULL.  */

const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  if (!type)
    return nullptr;
  if (!INTEGRAL_TYPE_P (type))
    return NULL;

  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return NULL;

  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return NULL;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}

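/* Worked example (illustrative; exact layouts are target-dependent):
   given
     struct S { int a : 3; int c : 5; } s;
   optimize_bit_field_compare can turn "s.c == 1" into a byte load,
   a mask with 0xf8 and a compare.  bit_range::from_mask (0xf8, ...)
   yields bit_range (3, 5); if the compound_svalue binds those bits,
   the bound value is cast, shifted left by 3 and re-masked, letting
   the analyzer compare it against the shifted constant directly.  */
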
/* Subroutine of region_model_manager::get_or_create_binop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
					const svalue *arg0,
					const svalue *arg1)
{
  tree cst0 = arg0->maybe_get_constant ();
  tree cst1 = arg1->maybe_get_constant ();
  /* (CST OP CST).  */
  if (cst0 && cst1)
    {
      if (type)
	{
	  if (tree result = fold_binary (op, type, cst0, cst1))
	    if (CONSTANT_CLASS_P (result))
	      return get_or_create_constant_svalue (result);
	}
      else
	{
	  if (tree result = int_const_binop (op, cst0, cst1, -1))
	    return get_or_create_constant_svalue (NULL_TREE, result);
	}
    }

  if ((type && FLOAT_TYPE_P (type))
      || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
      || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
    return NULL;

  switch (op)
    {
    default:
      break;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* (VAL + 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      break;
    case MINUS_EXPR:
      /* (VAL - 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      /* (0 - VAL) -> -VAL.  */
      if (cst0 && zerop (cst0))
	return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
      /* (X + Y) - X -> Y.  */
      if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
	if (binop->get_op () == PLUS_EXPR)
	  if (binop->get_arg0 () == arg1)
	    return get_or_create_cast (type, binop->get_arg1 ());
      break;
    case MULT_EXPR:
      /* (VAL * 0) -> 0.  */
      if (cst1
	  && zerop (cst1)
	  && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
	return get_or_create_int_cst (type, 0);
      /* (VAL * 1) -> VAL.  */
      if (cst1 && integer_onep (cst1))
	return get_or_create_cast (type, arg0);
      break;
    case BIT_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1)
	      && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
	    /* "(ARG0 & 0)" -> "0".  */
	    return get_or_create_int_cst (type, 0);

	  if (const compound_svalue *compound_sval
		= arg0->dyn_cast_compound_svalue ())
	    if (const svalue *sval
		  = maybe_undo_optimize_bit_field_compare (type,
							   compound_sval,
							   cst1, arg1))
	      return sval;
	}
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then...  */
	  /* ..."(1 & x) -> x".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x & 1) -> x".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_cast (type, arg0);
	  /* ..."(0 & x) -> 0".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_int_cst (type, 0);
	  /* ..."(x & 0) -> 0".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_int_cst (type, 0);
	}
      break;
    case BIT_IOR_EXPR:
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then...  */
	  /* ..."(1 | x) -> 1".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(x | 1) -> 1".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(0 | x) -> x".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x | 0) -> x".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 && 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));
	  else
	    /* "(ARG0 && nonzero-cst)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ORIF_EXPR:
    case TRUTH_OR_EXPR:
      if (cst1)
	{
	  if (zerop (cst1))
	    /* "(ARG0 || 0)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	  else
	    /* "(ARG0 || nonzero-cst)" -> "nonzero-cst".  */
	    return get_or_create_cast (type, arg1);
	}
      break;
    }

  /* For associative ops, fold "(X op CST_A) op CST_B" to
     "X op (CST_A op CST_B)".  */
  if (cst1 && associative_tree_code (op))
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == op
	  && binop->get_arg1 ()->maybe_get_constant ())
	return get_or_create_binop
	  (type, op, binop->get_arg0 (),
	   get_or_create_binop (type, op,
				binop->get_arg1 (), arg1));

  /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
     can fold:
       "(PTR ptr+ CST_A) ptr+ CST_B" to "PTR ptr+ (CST_A ptr+ CST_B)"
     e.g. in data-model-1.c: test_4c.  */
  if (cst1 && op == POINTER_PLUS_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == POINTER_PLUS_EXPR)
	if (binop->get_arg1 ()->maybe_get_constant ())
	  return get_or_create_binop
	    (type, op, binop->get_arg0 (),
	     get_or_create_binop (size_type_node, op,
				  binop->get_arg1 (), arg1));

  /* Distribute multiplication by a constant through addition/subtraction:
     (X + Y) * CST => (X * CST) + (Y * CST).  */
  if (cst1 && op == MULT_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == PLUS_EXPR
	  || binop->get_op () == MINUS_EXPR)
	return get_or_create_binop
	  (type, binop->get_op (),
	   get_or_create_binop (type, op,
				binop->get_arg0 (), arg1),
	   get_or_create_binop (type, op,
				binop->get_arg1 (), arg1));

  /* Typeless operations, assumed to be effectively arbitrary sized
     integers following normal arithmetic rules.  */
  if (!type)
    switch (op)
      {
      default:
	break;
      case MINUS_EXPR:
	{
	  /* (X - X) -> 0.  */
	  if (arg0 == arg1)
	    return get_or_create_int_cst (type, 0);

	  /* (X + A) - (X + B) -> (A - B).  */
	  if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
	    if (const binop_svalue *binop1 = arg1->dyn_cast_binop_svalue ())
	      if (binop0->get_op () == PLUS_EXPR
		  && binop1->get_op () == PLUS_EXPR
		  && binop0->get_arg0 () == binop1->get_arg0 ())
		return get_or_create_binop (NULL_TREE, op,
					    binop0->get_arg1 (),
					    binop1->get_arg1 ());
	}
	break;

      case EXACT_DIV_EXPR:
	{
	  if (const unaryop_svalue *unaryop0 = arg0->dyn_cast_unaryop_svalue ())
	    {
	      if (unaryop0->get_op () == NOP_EXPR)
		if (const svalue *sval = maybe_fold_binop (NULL_TREE, op,
							   unaryop0->get_arg (),
							   arg1))
		  return sval;
	    }
	  if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
	    {
	      switch (binop0->get_op ())
		{
		default:
		  break;

		case PLUS_EXPR:
		case MINUS_EXPR:
		  /* (A op B) / C -> (A / C) op (B / C).  */
		  {
		    if (const svalue *op_on_a
			  = maybe_fold_binop (NULL_TREE, op,
					      binop0->get_arg0 (), arg1))
		      if (const svalue *op_on_b
			    = maybe_fold_binop (NULL_TREE, op,
						binop0->get_arg1 (), arg1))
			return get_or_create_binop (NULL_TREE,
						    binop0->get_op (),
						    op_on_a, op_on_b);
		  }
		  break;

		case MULT_EXPR:
		  /* (A * B) / C -> A * (B / C) if C is a divisor of B.
		     In particular, this should also handle the case
		     (A * B) / B -> A.  */
		  if (const svalue *b_div_c
			= maybe_fold_binop (NULL_TREE, op,
					    binop0->get_arg1 (), arg1))
		    return get_or_create_binop (NULL_TREE, binop0->get_op (),
						binop0->get_arg0 (), b_div_c);
		}
	    }
	}
	break;
      }

  /* etc.  */

  return NULL;
}

/* Return the svalue * for a binary operation OP on ARG0 and ARG1
   with a result of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					   const svalue *arg0,
					   const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     it via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval
    = new binop_svalue (alloc_symbol_id (), type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}

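/* Illustrative sketch (not part of GCC): since constants are
   canonicalized to the RHS for commutative ops, both of these calls
   (with hypothetical svalues X_SVAL and CST_SVAL) yield the same
   instance:

     mgr->get_or_create_binop (integer_type_node, PLUS_EXPR,
			       cst_sval, x_sval);
     mgr->get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_sval, cst_sval);

   and folding runs first, so e.g. "x + 0" returns a cast of X_SVAL
   rather than a new binop_svalue.  */
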
/* Subroutine of region_model_manager::get_or_create_sub_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subregion of a zero-fill, it's zero.  */
  if (const unaryop_svalue *unary
	= parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst) && type)
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST)
      {
	/* If we have a concrete 1-byte access within the parent region... */
	byte_range subregion_bytes (0, 0);
	if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
	    && subregion_bytes.m_size_in_bytes == 1
	    && type)
	  {
	    /* ...then attempt to get that char from the STRING_CST.  */
	    HOST_WIDE_INT hwi_start_byte
	      = subregion_bytes.m_start_byte_offset.to_shwi ();
	    tree cst_idx
	      = build_int_cst_type (size_type_node, hwi_start_byte);
	    if (const svalue *char_sval
		  = maybe_get_char_from_string_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	  }
      }

  if (const initial_svalue *init_sval
	= parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	 -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r)[ELEMENT]) -> INIT(r[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	 -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  if (const repeated_svalue *repeated_sval
	= parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  return NULL;
}

/* Return the svalue * for extracting a subvalue of type TYPE from
   PARENT_SVALUE based on SUBREGION, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_sub_svalue (tree type,
						const svalue *parent_svalue,
						const region *subregion)
{
  if (const svalue *folded
	= maybe_fold_sub_svalue (type, parent_svalue, subregion))
    return folded;

  sub_svalue::key_t key (type, parent_svalue, subregion);
  if (sub_svalue **slot = m_sub_values_map.get (key))
    return *slot;
  sub_svalue *sub_sval
    = new sub_svalue (alloc_symbol_id (), type, parent_svalue, subregion);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
  m_sub_values_map.put (key, sub_sval);
  return sub_sval;
}

/* Subroutine of region_model_manager::get_or_create_repeated_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn into simply a cast.  */
  if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
    {
      HOST_WIDE_INT num_bytes_inner_svalue
	= int_size_in_bytes (inner_svalue->get_type ());
      if (num_bytes_inner_svalue != -1)
	if (num_bytes_inner_svalue
	    == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Handle zero-fill of a specific type.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  return NULL;
}

/* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
   enough times to be of size OUTER_SIZE, creating it if necessary.
   e.g. for filling buffers with a constant value.  */

const svalue *
region_model_manager::get_or_create_repeated_svalue (tree type,
						     const svalue *outer_size,
						     const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
    return folded;

  repeated_svalue::key_t key (type, outer_size, inner_svalue);
  if (repeated_svalue **slot = m_repeated_values_map.get (key))
    return *slot;
  repeated_svalue *repeated_sval
    = new repeated_svalue (alloc_symbol_id (), type, outer_size, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
  m_repeated_values_map.put (key, repeated_sval);
  return repeated_sval;
}

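/* Usage sketch (illustrative): a call such as
     memset (buf, 0, sizeof (buf));
   can be modelled by binding a repeated_svalue to BUF's region, with
   OUTER_SIZE the svalue for sizeof (buf) and INNER_SVALUE the svalue
   for (char)0; the zero-fill folding above then turns later typed
   reads from the buffer into casts of zero.  */
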
/* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_bit_range_for_field (tree field, bit_range *out)
{
  bit_size_t bit_size;
  if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
    return false;
  int field_bit_offset = int_bit_position (field);
  *out = bit_range (field_bit_offset, bit_size);
  return true;
}

/* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_byte_range_for_field (tree field, byte_range *out)
{
  bit_range field_bits (0, 0);
  if (!get_bit_range_for_field (field, &field_bits))
    return false;
  return field_bits.as_byte_range (out);
}

/* Attempt to determine if there is a specific field within RECORD_TYPE
   at BYTES.  If so, return it, and write the location of BYTES relative
   to the field to *OUT_RANGE_WITHIN_FIELD.
   Otherwise, return NULL_TREE.
   For example, given:
     struct foo { uint32 a; uint32 b; };
   and
     bytes = {bytes 6-7} (of foo)
   we have bytes 2-3 of field b.  */

static tree
get_field_at_byte_range (tree record_type, const byte_range &bytes,
			 byte_range *out_range_within_field)
{
  bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;

  tree field = get_field_at_bit_offset (record_type, bit_offset);
  if (!field)
    return NULL_TREE;

  byte_range field_bytes (0,0);
  if (!get_byte_range_for_field (field, &field_bytes))
    return NULL_TREE;

  /* Is BYTES fully within field_bytes?  */
  byte_range bytes_within_field (0,0);
  if (!field_bytes.contains_p (bytes, &bytes_within_field))
    return NULL_TREE;

  *out_range_within_field = bytes_within_field;
  return field;
}

/* Subroutine of region_model_manager::get_or_create_bits_within.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN ((0, sizeof (VAL)), VAL)
     to:
       CAST (TYPE, VAL).  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding.  */
  if (const svalue *sval
	= inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  byte_range bytes (0,0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if RANGE is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
			= inner_svalue->dyn_cast_initial_svalue ())
		    {
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BYTES_WITHIN (range, KIND(REG))
	     to:
	       BYTES_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if RANGE is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		    = inner_svalue->dyn_cast_initial_svalue ())
		{
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  return NULL;
}

/* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_bits_within (tree type,
						 const bit_range &bits,
						 const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_bits_within_svalue (type, bits, inner_svalue))
    return folded;

  bits_within_svalue::key_t key (type, bits, inner_svalue);
  if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
    return *slot;
  bits_within_svalue *bits_within_sval
    = new bits_within_svalue (alloc_symbol_id (), type, bits, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
  m_bits_within_values_map.put (key, bits_within_sval);
  return bits_within_sval;
}

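/* Illustrative example (not part of GCC): given a global
     struct foo { unsigned a; unsigned b; } g;
   with 32-bit "unsigned", extracting bits 32-63 of INIT_VAL (g) hits
   the RECORD_TYPE case in maybe_fold_bits_within_svalue, folding
     BITS_WITHIN (bits 32-63, INIT_VAL (g))
   to
     CAST (unsigned, INIT_VAL (g.b)).  */
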
/* Return the svalue * that decorates ARG as being unmergeable,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unmergeable (const svalue *arg)
{
  if (arg->get_kind () == SK_UNMERGEABLE)
    return arg;

  if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
    return *slot;
  unmergeable_svalue *unmergeable_sval
    = new unmergeable_svalue (alloc_symbol_id (), arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
  m_unmergeable_values_map.put (arg, unmergeable_sval);
  return unmergeable_sval;
}

/* Return the svalue * of type TYPE for the merger of value BASE_SVAL
   and ITER_SVAL at POINT, creating it if necessary.  */

const svalue *
region_model_manager::
get_or_create_widening_svalue (tree type,
			       const function_point &point,
			       const svalue *base_sval,
			       const svalue *iter_sval)
{
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  widening_svalue::key_t key (type, point, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (alloc_symbol_id (), type, point, base_sval,
			   iter_sval);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}

/* Return the svalue * of type TYPE for the compound values in MAP,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						      const binding_map &map)
{
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  compound_svalue *compound_sval
    = new compound_svalue (alloc_symbol_id (), type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}

/* class conjured_purge.  */

/* Purge state relating to SVAL.  */

void
conjured_purge::purge (const conjured_svalue *sval) const
{
  m_model->purge_state_involving (sval, m_ctxt);
}

/* Return the svalue * of type TYPE for the value conjured for ID_REG
   at STMT (using IDX for any further disambiguation),
   creating it if necessary.
   Use P to purge existing state from the svalue, for the case where a
   conjured_svalue would be reused along an execution path.  */

const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg,
						     const conjured_purge &p,
						     unsigned idx)
{
  conjured_svalue::key_t key (type, stmt, id_reg, idx);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    {
      const conjured_svalue *sval = *slot;
      /* We're reusing an existing conjured_svalue, perhaps from a different
	 state within this analysis, or perhaps from an earlier state on this
	 execution path.  For the latter, purge any state involving the "new"
	 svalue from the current program_state.  */
      p.purge (sval);
      return sval;
    }
  conjured_svalue *conjured_sval
    = new conjured_svalue (alloc_symbol_id (), type, stmt, id_reg, idx);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}

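/* Usage sketch (illustrative): conjured svalues model e.g. the return
   value of an unknown function:
     int x = external_fn ();
   Re-visiting the same stmt along an execution path (e.g. in a loop)
   reuses the same conjured_svalue, so conjured_purge::purge first
   clears any stale state bound to it in the current program_state.  */
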
/* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::
maybe_fold_asm_output_svalue (tree type,
			      const vec<const svalue *> &inputs)
{
  /* Unknown inputs should lead to unknown results.  */
  for (const auto &iter : inputs)
    if (iter->get_kind () == SK_UNKNOWN)
      return get_or_create_unknown_svalue (type);

  return NULL;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
   asm stmt ASM_STMT, given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const gasm *asm_stmt,
				 unsigned output_idx,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  const char *asm_string = gimple_asm_string (asm_stmt);
  const unsigned noutputs = gimple_asm_noutputs (asm_stmt);

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     noutputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
   asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
   INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const char *asm_string,
				 unsigned output_idx,
				 unsigned num_outputs,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     num_outputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for the result of a call to FNDECL
   with __attribute__((const)), given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_const_fn_result_svalue (tree type,
				      tree fndecl,
				      const vec<const svalue *> &inputs)
{
  gcc_assert (fndecl);
  gcc_assert (DECL_P (fndecl));
  gcc_assert (TREE_READONLY (fndecl));
  gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);

  const_fn_result_svalue::key_t key (type, fndecl, inputs);
  if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
    return *slot;
  const_fn_result_svalue *const_fn_result_sval
    = new const_fn_result_svalue (alloc_symbol_id (), type, fndecl, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
  m_const_fn_result_values_map.put (key, const_fn_result_sval);
  return const_fn_result_sval;
}

/* Get a tree for the size of STRING_CST, or NULL_TREE.
   Note that this may be larger than TREE_STRING_LENGTH (implying
   a run of trailing zero bytes from TREE_STRING_LENGTH up to this
   higher limit).  */

tree
get_string_cst_size (const_tree string_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);
  gcc_assert (TREE_CODE (TREE_TYPE (string_cst)) == ARRAY_TYPE);

  return TYPE_SIZE_UNIT (TREE_TYPE (string_cst));
}

/* Given STRING_CST, a STRING_CST, and BYTE_OFFSET_CST, a constant,
   attempt to get the character at that offset, returning either
   the svalue for the character constant, or NULL if unsuccessful.  */

const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      /* If we're beyond the string_cst, the read is unsuccessful.  */
      if (compare_constants (byte_offset_cst,
			     GE_EXPR,
			     get_string_cst_size (string_cst)).is_true ())
	return NULL;

      int char_val;
      if (compare_tree_int (byte_offset_cst,
			    TREE_STRING_LENGTH (string_cst)) < 0)
	/* We're within the area defined by TREE_STRING_POINTER.  */
	char_val = (TREE_STRING_POINTER (string_cst)
		    [TREE_INT_CST_LOW (byte_offset_cst)]);
      else
	/* We're in the padding area of trailing zeroes.  */
	char_val = 0;
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)), char_val);
      return get_or_create_constant_svalue (char_cst);
    }
  return NULL;
}

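/* Illustrative example (assuming the front end gives the STRING_CST
   the declared array type): for
     char buf[8] = "foo";
   the STRING_CST has TREE_STRING_LENGTH of 4 (including the trailing
   NUL) but a size of 8 bytes; offsets 0-3 are read from
   TREE_STRING_POINTER, offsets 4-7 fall in the zero padding and yield
   the constant 0, and offsets >= 8 return NULL.  */
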
/* region consolidation.  */

/* Return the region for FNDECL, creating it if necessary.  */

const function_region *
region_model_manager::get_region_for_fndecl (tree fndecl)
{
  gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);

  function_region **slot = m_fndecls_map.get (fndecl);
  if (slot)
    return *slot;
  function_region *reg
    = new function_region (alloc_symbol_id (), &m_code_region, fndecl);
  m_fndecls_map.put (fndecl, reg);
  return reg;
}

/* Return the region for LABEL, creating it if necessary.  */

const label_region *
region_model_manager::get_region_for_label (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  label_region **slot = m_labels_map.get (label);
  if (slot)
    return *slot;

  tree fndecl = DECL_CONTEXT (label);
  gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);

  const function_region *func_reg = get_region_for_fndecl (fndecl);
  label_region *reg
    = new label_region (alloc_symbol_id (), func_reg, label);
  m_labels_map.put (label, reg);
  return reg;
}

/* Return the region for EXPR, creating it if necessary.  */

const decl_region *
region_model_manager::get_region_for_global (tree expr)
{
  gcc_assert (VAR_P (expr));

  decl_region **slot = m_globals_map.get (expr);
  if (slot)
    return *slot;
  decl_region *reg
    = new decl_region (alloc_symbol_id (), &m_globals_region, expr);
  m_globals_map.put (expr, reg);
  return reg;
}

/* Return the region for an unknown access of type REGION_TYPE,
   creating it if necessary.
   This is a symbolic_region, where the pointer is an unknown_svalue
   of type &REGION_TYPE.  */

const region *
region_model_manager::get_unknown_symbolic_region (tree region_type)
{
  tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
  const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
  return get_symbolic_region (unknown_ptr);
}

/* Return the region that describes accessing field FIELD of PARENT,
   creating it if necessary.  */

const region *
region_model_manager::get_field_region (const region *parent, tree field)
{
  gcc_assert (TREE_CODE (field) == FIELD_DECL);

  /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (TREE_TYPE (field));

  field_region::key_t key (parent, field);
  if (field_region *reg = m_field_regions.get (key))
    return reg;

  field_region *field_reg
    = new field_region (alloc_symbol_id (), parent, field);
  m_field_regions.put (key, field_reg);
  return field_reg;
}

/* Return the region that describes accessing the element of type
   ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary.  */

const region *
region_model_manager::get_element_region (const region *parent,
					  tree element_type,
					  const svalue *index)
{
  /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (element_type);

  element_region::key_t key (parent, element_type, index);
  if (element_region *reg = m_element_regions.get (key))
    return reg;

  element_region *element_reg
    = new element_region (alloc_symbol_id (), parent, element_type, index);
  m_element_regions.put (key, element_reg);
  return element_reg;
}

/* Return the region that describes accessing the subregion of type
   TYPE at offset BYTE_OFFSET within PARENT, creating it if
   necessary.  */

const region *
region_model_manager::get_offset_region (const region *parent,
					 tree type,
					 const svalue *byte_offset)
{
  /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  /* If BYTE_OFFSET is zero, return PARENT, viewed as TYPE.  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);

  /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
     to OFFSET_REGION(REG, (X + Y)).  */
  if (const offset_region *parent_offset_reg
	= parent->dyn_cast_offset_region ())
    {
      const svalue *sval_x = parent_offset_reg->get_byte_offset ();
      const svalue *sval_sum
	= get_or_create_binop (byte_offset->get_type (),
			       PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
    }

  offset_region::key_t key (parent, type, byte_offset);
  if (offset_region *reg = m_offset_regions.get (key))
    return reg;

  offset_region *offset_reg
    = new offset_region (alloc_symbol_id (), parent, type, byte_offset);
  m_offset_regions.put (key, offset_reg);
  return offset_reg;
}

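/* Illustrative example (not part of GCC): successive pointer
   arithmetic such as "(p + 2) + 3" folds via the rule above:
     OFFSET_REGION (OFFSET_REGION (*P, 2), 3)
   becomes
     OFFSET_REGION (*P, 5)
   with the byte offsets combined by get_or_create_binop, while a zero
   offset degenerates to a cast_region of the parent.  */
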
/* Return the region that describes accessing the subregion of type
   TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary.  */

const region *
region_model_manager::get_sized_region (const region *parent,
					tree type,
					const svalue *byte_size_sval)
{
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  if (byte_size_sval->get_type () != size_type_node)
    byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);

  /* If PARENT is already that size, return it.  */
  const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
  if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
    if (tree size_cst = byte_size_sval->maybe_get_constant ())
      {
	tree comparison
	  = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
	if (comparison == boolean_true_node)
	  return parent;
      }

  sized_region::key_t key (parent, type, byte_size_sval);
  if (sized_region *reg = m_sized_regions.get (key))
    return reg;

  sized_region *sized_reg
    = new sized_region (alloc_symbol_id (), parent, type, byte_size_sval);
  m_sized_regions.put (key, sized_reg);
  return sized_reg;
}


/* Return the region that describes accessing ORIGINAL_REGION as if
   it were of type TYPE, creating it if necessary.  */

const region *
region_model_manager::get_cast_region (const region *original_region,
				       tree type)
{
  /* If types match, return ORIGINAL_REGION.  */
  if (type == original_region->get_type ())
    return original_region;

  if (original_region->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  cast_region::key_t key (original_region, type);
  if (cast_region *reg = m_cast_regions.get (key))
    return reg;

  cast_region *cast_reg
    = new cast_region (alloc_symbol_id (), original_region, type);
  m_cast_regions.put (key, cast_reg);
  return cast_reg;
}
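
/* For instance (hypothetical `mgr` and `reg`), casting a region to its
   own type is the identity:

     gcc_assert (mgr.get_cast_region (reg, reg->get_type ()) == reg);

   so only genuinely type-punning views allocate a cast_region.  */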

/* Return the frame_region for a call to FUN from CALLING_FRAME, creating
   it if necessary.  CALLING_FRAME may be NULL.  */

const frame_region *
region_model_manager::get_frame_region (const frame_region *calling_frame,
					const function &fun)
{
  int index = calling_frame ? calling_frame->get_index () + 1 : 0;

  frame_region::key_t key (calling_frame, fun);
  if (frame_region *reg = m_frame_regions.get (key))
    return reg;

  frame_region *frame_reg
    = new frame_region (alloc_symbol_id (), &m_stack_region, calling_frame,
			fun, index);
  m_frame_regions.put (key, frame_reg);
  return frame_reg;
}
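
/* Sketch of the index bookkeeping above: for a call chain
   main -> foo -> bar (hypothetical `mgr`, `fun_main`, `fun_foo` and
   `fun_bar`), the frames get consecutive stack depths:

     const frame_region *f0 = mgr.get_frame_region (NULL, fun_main); // index 0
     const frame_region *f1 = mgr.get_frame_region (f0, fun_foo);    // index 1
     const frame_region *f2 = mgr.get_frame_region (f1, fun_bar);    // index 2  */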

/* Return the region that describes dereferencing SVAL, creating it
   if necessary.  */

const region *
region_model_manager::get_symbolic_region (const svalue *sval)
{
  symbolic_region::key_t key (&m_root_region, sval);
  if (symbolic_region *reg = m_symbolic_regions.get (key))
    return reg;

  symbolic_region *symbolic_reg
    = new symbolic_region (alloc_symbol_id (), &m_root_region, sval);
  m_symbolic_regions.put (key, symbolic_reg);
  return symbolic_reg;
}
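
/* For example (hypothetical `mgr` and pointer svalue `ptr_sval`),
   dereferencing the same symbolic pointer twice yields the same region:

     const region *d1 = mgr.get_symbolic_region (ptr_sval);
     const region *d2 = mgr.get_symbolic_region (ptr_sval);
     gcc_assert (d1 == d2);  */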

/* Return the region that describes accessing STRING_CST, creating it
   if necessary.  */

const string_region *
region_model_manager::get_region_for_string (tree string_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  string_region **slot = m_string_map.get (string_cst);
  if (slot)
    return *slot;
  string_region *reg
    = new string_region (alloc_symbol_id (), &m_root_region, string_cst);
  m_string_map.put (string_cst, reg);
  return reg;
}

/* Return the region that describes accessing BITS within PARENT as TYPE,
   creating it if necessary.  */

const region *
region_model_manager::get_bit_range (const region *parent, tree type,
				     const bit_range &bits)
{
  gcc_assert (parent);

  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  bit_range_region::key_t key (parent, type, bits);
  if (bit_range_region *reg = m_bit_range_regions.get (key))
    return reg;

  bit_range_region *bit_range_reg
    = new bit_range_region (alloc_symbol_id (), parent, type, bits);
  m_bit_range_regions.put (key, bit_range_reg);
  return bit_range_reg;
}
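
/* Hedged sketch: a bit_range is (start bit offset, size in bits), so a
   4-bit field starting at bit 3 of hypothetical `parent_reg`, viewed as
   hypothetical type `bf_type`, would be:

     bit_range bits (3, 4);
     const region *bf_reg = mgr.get_bit_range (parent_reg, bf_type, bits);  */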

/* Return the region that describes accessing the IDX-th variadic argument
   within PARENT_FRAME, creating it if necessary.  */

const var_arg_region *
region_model_manager::get_var_arg_region (const frame_region *parent_frame,
					   unsigned idx)
{
  gcc_assert (parent_frame);

  var_arg_region::key_t key (parent_frame, idx);
  if (var_arg_region *reg = m_var_arg_regions.get (key))
    return reg;

  var_arg_region *var_arg_reg
    = new var_arg_region (alloc_symbol_id (), parent_frame, idx);
  m_var_arg_regions.put (key, var_arg_reg);
  return var_arg_reg;
}

/* If we see a tree code we don't know how to handle, then rather than
   ICEing or generating bogus results, create a dummy region, and notify
   CTXT so that it can mark the new state as being not properly
   modelled.  The exploded graph can then stop exploring that path,
   since any diagnostics we might issue will have questionable
   validity.  */

const region *
region_model_manager::
get_region_for_unexpected_tree_code (region_model_context *ctxt,
				     tree t,
				     const dump_location_t &loc)
{
  tree type = TYPE_P (t) ? t : TREE_TYPE (t);
  region *new_reg
    = new unknown_region (alloc_symbol_id (), &m_root_region, type);
  if (ctxt)
    ctxt->on_unexpected_tree_code (t, loc);
  return new_reg;
}

/* Return a region describing a heap-allocated block of memory.
   Reuse an existing heap_allocated_region if its id is not within
   BASE_REGS_IN_USE.  */

const region *
region_model_manager::
get_or_create_region_for_heap_alloc (const bitmap &base_regs_in_use)
{
  /* Try to reuse an existing region, if it's unreferenced in the
     client state.  */
  for (auto existing_reg : m_managed_dynamic_regions)
    if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ()))
      if (existing_reg->get_kind () == RK_HEAP_ALLOCATED)
	return existing_reg;

  /* All existing ones (if any) are in use; create a new one.  */
  region *reg
    = new heap_allocated_region (alloc_symbol_id (), &m_heap_region);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}
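
/* Sketch of the reuse policy (hypothetical `mgr` and bitmap `in_use`
   holding the base-region ids referenced by the caller's state):

     const region *r1 = mgr.get_or_create_region_for_heap_alloc (in_use);
     // With r1's id absent from `in_use`, a second call returns r1
     // itself; once the caller marks r1's id in `in_use`, a second
     // call must allocate a fresh heap_allocated_region instead.

   Recycling unreferenced regions this way keeps repeated allocations
   along a path from creating unboundedly many distinct regions.  */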

/* Return a new region describing a block of memory allocated within FRAME.  */

const region *
region_model_manager::create_region_for_alloca (const frame_region *frame)
{
  gcc_assert (frame);
  region *reg = new alloca_region (alloc_symbol_id (), frame);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

/* Log OBJ to LOGGER.  */

template <typename T>
static void
log_managed_object (logger *logger, const T *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "    ");
  obj->dump_to_pp (pp, true);
  logger->end_log_line ();
}

/* Specialization for frame_region, which also logs the count of locals
   managed by the frame_region.  */

template <>
void
log_managed_object (logger *logger, const frame_region *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "    ");
  obj->dump_to_pp (pp, true);
  pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
  logger->end_log_line ();
}

/* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const hash_map<K, T*> &uniq_map)
{
  logger->log ("  # %s: %li", title, (long)uniq_map.elements ());
  if (!show_objs)
    return;

  auto_vec<const T *> vec_objs (uniq_map.elements ());
  for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
       iter != uniq_map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}
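
/* With SHOW_OBJS false, the call above emits a single summary line per
   map, of the form (illustrative):

     # constant_svalue: 42

   With SHOW_OBJS true, it is followed by one dump_to_pp line per object,
   sorted via cmp_ptr_ptr so that logs are stable across runs.  */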

/* Dump the number of objects that were managed by MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const consolidation_map<T> &map)
{
  logger->log ("  # %s: %li", title, (long)map.elements ());
  if (!show_objs)
    return;

  auto_vec<const T *> vec_objs (map.elements ());
  for (typename consolidation_map<T>::iterator iter = map.begin ();
       iter != map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("call string consolidation");
  m_empty_call_string.recursive_log (logger);
  logger->log ("next symbol id: %i", m_next_symbol_id);
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
		m_const_fn_result_values_map);

  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  logger->log ("region consolidation");
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
  logger->log ("  # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.
   This is here so it can use log_uniq_map.  */

void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}

/* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
   (using -fdump-analyzer-untracked).  */

static void
dump_untracked_region (const decl_region *decl_reg)
{
  tree decl = decl_reg->get_decl ();
  if (TREE_CODE (decl) != VAR_DECL)
    return;
  /* For now, don't emit the status of decls in the constant pool, to avoid
     differences in DejaGnu test results between targets that use these vs
     those that don't.
     (Eventually these decls should probably be untracked and we should test
     for that, but that's not stage 4 material).  */
  if (DECL_IN_CONSTANT_POOL (decl))
    return;
  warning_at (DECL_SOURCE_LOCATION (decl), 0,
	      "track %qD: %s",
	      decl, (decl_reg->tracked_p () ? "yes" : "no"));
}
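
/* The emitted diagnostic has the form (illustrative):

     warning: track 'v': no

   which DejaGnu tests run with -fdump-analyzer-untracked can match
   with dg-warning directives.  */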

/* Implementation of -fdump-analyzer-untracked.  */

void
region_model_manager::dump_untracked_regions () const
{
  for (auto iter : m_globals_map)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
  for (auto frame_iter : m_frame_regions)
    {
      const frame_region *frame_reg = frame_iter.second;
      frame_reg->dump_untracked_regions ();
    }
}

/* Dump the tracked_p status of each local managed by this frame,
   for -fdump-analyzer-untracked.  */

void
frame_region::dump_untracked_regions () const
{
  for (auto iter : m_locals)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */