libcpp, c, middle-end: Optimize initializers using #embed in C
[official-gcc.git] / gcc / analyzer / store.h
blobaf9ea4172a2f902d5292fba8e6b79faccce96948
1 /* Classes for modeling the state of memory.
2 Copyright (C) 2020-2024 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #ifndef GCC_ANALYZER_STORE_H
22 #define GCC_ANALYZER_STORE_H
24 #include "text-art/tree-widget.h"
26 /* Implementation of the region-based ternary model described in:
27 "A Memory Model for Static Analysis of C Programs"
28 (Zhongxing Xu, Ted Kremenek, and Jian Zhang)
29 http://lcs.ios.ac.cn/~xuzb/canalyze/memmodel.pdf */
31 /* The store models memory as a collection of "clusters", where regions
32 are partitioned into clusters via their base region.
34 For example, given:
35 int a, b, c;
36 struct coord { double x; double y; } verts[3];
37 then "verts[0].y" and "verts[1].x" both have "verts" as their base region.
38 Each of a, b, c, and verts will have their own clusters, so that we
39    know that writes to e.g. "verts[1].x" don't affect e.g. "a".
41 Within each cluster we store a map of bindings to values, where the
42 binding keys can be either concrete or symbolic.
44 Concrete bindings affect a specific range of bits relative to the start
45 of the base region of the cluster, whereas symbolic bindings affect
46 a specific subregion within the cluster.
48 Consider (from the symbolic-1.c testcase):
50 char arr[1024];
51 arr[2] = a; (1)
52 arr[3] = b; (2)
53 After (1) and (2), the cluster for "arr" has concrete bindings
54 for bits 16-23 and for bits 24-31, with svalues "INIT_VAL(a)"
55 and "INIT_VAL(b)" respectively:
56 cluster: {bits 16-23: "INIT_VAL(a)",
57 bits 24-31: "INIT_VAL(b)";
58 flags: {}}
59 Attempting to query unbound subregions e.g. arr[4] will
60 return "UNINITIALIZED".
61 "a" and "b" are each in their own clusters, with no explicit
62 bindings, and thus implicitly have value INIT_VAL(a) and INIT_VAL(b).
64 arr[3] = c; (3)
65 After (3), the concrete binding for bits 24-31 is replaced with the
66 svalue "INIT_VAL(c)":
67 cluster: {bits 16-23: "INIT_VAL(a)", (from before)
68 bits 24-31: "INIT_VAL(c)"; (updated)
69 flags: {}}
71 arr[i] = d; (4)
72 After (4), we lose the concrete bindings and replace them with a
73 symbolic binding for "arr[i]", with svalue "INIT_VAL(d)". We also
74 mark the cluster as having been "symbolically touched": future
75 attempts to query the values of subregions other than "arr[i]",
76 such as "arr[3]" are "UNKNOWN", since we don't know if the write
77 to arr[i] affected them.
78 cluster: {symbolic_key(arr[i]): "INIT_VAL(d)";
79 flags: {TOUCHED}}
81 arr[j] = e; (5)
82 After (5), we lose the symbolic binding for "arr[i]" since we could
83 have overwritten it, and add a symbolic binding for "arr[j]".
84 cluster: {symbolic_key(arr[j]): "INIT_VAL(d)"; (different symbolic
85 flags: {TOUCHED}} binding)
87 arr[3] = f; (6)
88 After (6), we lose the symbolic binding for "arr[j]" since we could
89 have overwritten it, and gain a concrete binding for bits 24-31
90    again, this time with svalue "INIT_VAL(f)":
91      cluster: {bits 24-31: "INIT_VAL(f)";
92 flags: {TOUCHED}}
93 The cluster is still flagged as touched, so that we know that
94 accesses to other elements are "UNKNOWN" rather than
95 "UNINITIALIZED".
97 Handling symbolic regions requires us to handle aliasing.
99 In the first example above, each of a, b, c and verts are non-symbolic
100 base regions and so their clusters are "concrete clusters", whereas given:
101 struct coord *p, *q;
102 then "*p" and "*q" are symbolic base regions, and thus "*p" and "*q"
103 have "symbolic clusters".
105 In the above, "verts[i].x" will have a symbolic *binding* within a
106 concrete cluster for "verts", whereas "*p" is a symbolic *cluster*.
108 Writes to concrete clusters can't affect other concrete clusters,
109 but can affect symbolic clusters; e.g. after:
110 verts[0].x = 42;
111 we bind 42 in the cluster for "verts", but the clusters for "b" and "c"
112 can't be affected. Any symbolic clusters for *p and for *q can be
113   affected, since *p and *q could alias verts.
115 Writes to a symbolic cluster can affect other clusters, both
116 concrete and symbolic; e.g. after:
117 p->x = 17;
118 we bind 17 within the cluster for "*p". The concrete clusters for a, b,
119 c, and verts could be affected, depending on whether *p aliases them.
120 Similarly, the symbolic cluster to *q could be affected. */
122 namespace ana {
124 /* A class for keeping track of aspects of a program_state that we don't
125 know about, to avoid false positives about leaks.
127 Consider:
129 p->field = malloc (1024);
130 q->field = NULL;
132 where we don't know whether or not p and q point to the same memory,
133 and:
135 p->field = malloc (1024);
136 unknown_fn (p);
138 In both cases, the svalue for the address of the allocated buffer
139 goes from being bound to p->field to not having anything explicitly bound
140 to it.
142 Given that we conservatively discard bindings due to possible aliasing or
143 calls to unknown function, the store loses references to svalues,
144 but these svalues could still be live. We don't want to warn about
145 them leaking - they're effectively in a "maybe live" state.
147 This "maybe live" information is somewhat transient.
149 We don't want to store this "maybe live" information in the program_state,
150 region_model, or store, since we don't want to bloat these objects (and
151 potentially bloat the exploded_graph with more nodes).
152 However, we can't store it in the region_model_context, as these context
153 objects sometimes don't last long enough to be around when comparing the
154 old vs the new state.
156 This class is a way to track a set of such svalues, so that we can
157 temporarily capture that they are in a "maybe live" state whilst
158 comparing old and new states. */
160 class uncertainty_t
162 public:
163 typedef hash_set<const svalue *>::iterator iterator;
165 void on_maybe_bound_sval (const svalue *sval)
167 m_maybe_bound_svals.add (sval);
169 void on_mutable_sval_at_unknown_call (const svalue *sval)
171 m_mutable_at_unknown_call_svals.add (sval);
174 bool unknown_sm_state_p (const svalue *sval)
176 return (m_maybe_bound_svals.contains (sval)
177 || m_mutable_at_unknown_call_svals.contains (sval));
180 void dump_to_pp (pretty_printer *pp, bool simple) const;
181 void dump (bool simple) const;
183 iterator begin_maybe_bound_svals () const
185 return m_maybe_bound_svals.begin ();
187 iterator end_maybe_bound_svals () const
189 return m_maybe_bound_svals.end ();
192 private:
194 /* svalues that might or might not still be bound. */
195 hash_set<const svalue *> m_maybe_bound_svals;
197 /* svalues that have mutable sm-state at unknown calls. */
198 hash_set<const svalue *> m_mutable_at_unknown_call_svals;
201 class byte_range;
202 class concrete_binding;
203 class symbolic_binding;
205 /* Abstract base class for describing ranges of bits within a binding_map
206 that can have svalues bound to them. */
208 class binding_key
210 public:
211 virtual ~binding_key () {}
212 virtual bool concrete_p () const = 0;
213 bool symbolic_p () const { return !concrete_p (); }
215 static const binding_key *make (store_manager *mgr, const region *r);
217 virtual void dump_to_pp (pretty_printer *pp, bool simple) const = 0;
218 void dump (bool simple) const;
219 label_text get_desc (bool simple=true) const;
221 static int cmp_ptrs (const void *, const void *);
222 static int cmp (const binding_key *, const binding_key *);
224 virtual const concrete_binding *dyn_cast_concrete_binding () const
225 { return NULL; }
226 virtual const symbolic_binding *dyn_cast_symbolic_binding () const
227 { return NULL; }
230 /* A concrete range of bits. */
232 struct bit_range
234 bit_range (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
235 : m_start_bit_offset (start_bit_offset),
236 m_size_in_bits (size_in_bits)
239 void dump_to_pp (pretty_printer *pp) const;
240 void dump () const;
242 json::object *to_json () const;
244 bool empty_p () const
246 return m_size_in_bits == 0;
249 bit_offset_t get_start_bit_offset () const
251 return m_start_bit_offset;
253 bit_offset_t get_next_bit_offset () const
255 return m_start_bit_offset + m_size_in_bits;
257 bit_offset_t get_last_bit_offset () const
259 gcc_assert (!empty_p ());
260 return get_next_bit_offset () - 1;
263 bool contains_p (bit_offset_t offset) const
265 return (offset >= get_start_bit_offset ()
266 && offset < get_next_bit_offset ());
269 bool contains_p (const bit_range &other, bit_range *out) const;
271 bool operator== (const bit_range &other) const
273 return (m_start_bit_offset == other.m_start_bit_offset
274 && m_size_in_bits == other.m_size_in_bits);
277 bool intersects_p (const bit_range &other) const
279 return (get_start_bit_offset () < other.get_next_bit_offset ()
280 && other.get_start_bit_offset () < get_next_bit_offset ());
282 bool intersects_p (const bit_range &other,
283 bit_size_t *out_num_overlap_bits) const;
284 bool intersects_p (const bit_range &other,
285 bit_range *out_this,
286 bit_range *out_other) const;
288 bool exceeds_p (const bit_range &other,
289 bit_range *out_overhanging_bit_range) const;
291 bool falls_short_of_p (bit_offset_t offset,
292 bit_range *out_fall_short_bits) const;
294 static int cmp (const bit_range &br1, const bit_range &br2);
296 bit_range operator- (bit_offset_t offset) const;
298 static bool from_mask (unsigned HOST_WIDE_INT mask, bit_range *out);
300 bool as_byte_range (byte_range *out) const;
302 bit_offset_t m_start_bit_offset;
303 bit_size_t m_size_in_bits;
306 /* A concrete range of bytes. */
308 struct byte_range
310 byte_range (byte_offset_t start_byte_offset, byte_size_t size_in_bytes)
311 : m_start_byte_offset (start_byte_offset),
312 m_size_in_bytes (size_in_bytes)
315 void dump_to_pp (pretty_printer *pp) const;
316 void dump () const;
318 json::object *to_json () const;
320 bool empty_p () const
322 return m_size_in_bytes == 0;
325 bool contains_p (byte_offset_t offset) const
327 return (offset >= get_start_byte_offset ()
328 && offset < get_next_byte_offset ());
330 bool contains_p (const byte_range &other, byte_range *out) const;
332 bool operator== (const byte_range &other) const
334 return (m_start_byte_offset == other.m_start_byte_offset
335 && m_size_in_bytes == other.m_size_in_bytes);
338 byte_offset_t get_start_byte_offset () const
340 return m_start_byte_offset;
342 byte_offset_t get_next_byte_offset () const
344 return m_start_byte_offset + m_size_in_bytes;
346 byte_offset_t get_last_byte_offset () const
348 gcc_assert (!empty_p ());
349 return m_start_byte_offset + m_size_in_bytes - 1;
352 bit_range as_bit_range () const
354 return bit_range (m_start_byte_offset * BITS_PER_UNIT,
355 m_size_in_bytes * BITS_PER_UNIT);
358 bit_offset_t get_start_bit_offset () const
360 return m_start_byte_offset * BITS_PER_UNIT;
362 bit_offset_t get_next_bit_offset () const
364 return get_next_byte_offset () * BITS_PER_UNIT;
367 static int cmp (const byte_range &br1, const byte_range &br2);
369 byte_offset_t m_start_byte_offset;
370 byte_size_t m_size_in_bytes;
373 /* Concrete subclass of binding_key, for describing a non-empty
374 concrete range of bits within the binding_map (e.g. "bits 8-15"). */
376 class concrete_binding : public binding_key
378 public:
379 /* This class is its own key for the purposes of consolidation. */
380 typedef concrete_binding key_t;
382 concrete_binding (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
383 : m_bit_range (start_bit_offset, size_in_bits)
385 gcc_assert (m_bit_range.m_size_in_bits > 0);
387 bool concrete_p () const final override { return true; }
389 hashval_t hash () const
391 inchash::hash hstate;
392 hstate.add_wide_int (m_bit_range.m_start_bit_offset);
393 hstate.add_wide_int (m_bit_range.m_size_in_bits);
394 return hstate.end ();
396 bool operator== (const concrete_binding &other) const
398 return m_bit_range == other.m_bit_range;
401 void dump_to_pp (pretty_printer *pp, bool simple) const final override;
403 const concrete_binding *dyn_cast_concrete_binding () const final override
404 { return this; }
406 const bit_range &get_bit_range () const { return m_bit_range; }
407 bool get_byte_range (byte_range *out) const;
409 bit_offset_t get_start_bit_offset () const
411 return m_bit_range.m_start_bit_offset;
413 bit_size_t get_size_in_bits () const
415 return m_bit_range.m_size_in_bits;
417 /* Return the next bit offset after the end of this binding. */
418 bit_offset_t get_next_bit_offset () const
420 return m_bit_range.get_next_bit_offset ();
423 bool overlaps_p (const concrete_binding &other) const;
425 static int cmp_ptr_ptr (const void *, const void *);
427 void mark_deleted () { m_bit_range.m_size_in_bits = -1; }
428 void mark_empty () { m_bit_range.m_size_in_bits = -2; }
429 bool is_deleted () const { return m_bit_range.m_size_in_bits == -1; }
430 bool is_empty () const { return m_bit_range.m_size_in_bits == -2; }
432 private:
433 bit_range m_bit_range;
436 } // namespace ana
438 template <>
439 template <>
440 inline bool
441 is_a_helper <const ana::concrete_binding *>::test (const ana::binding_key *key)
443 return key->concrete_p ();
446 template <> struct default_hash_traits<ana::concrete_binding>
447 : public member_function_hash_traits<ana::concrete_binding>
449 static const bool empty_zero_p = false;
452 namespace ana {
454 /* Concrete subclass of binding_key, for describing a symbolic set of
455 bits within the binding_map in terms of a region (e.g. "arr[i]"). */
457 class symbolic_binding : public binding_key
459 public:
460 /* This class is its own key for the purposes of consolidation. */
461 typedef symbolic_binding key_t;
463 symbolic_binding (const region *region) : m_region (region) {}
464 bool concrete_p () const final override { return false; }
466 hashval_t hash () const
468 return (intptr_t)m_region;
470 bool operator== (const symbolic_binding &other) const
472 return m_region == other.m_region;
475 void dump_to_pp (pretty_printer *pp, bool simple) const final override;
477 const symbolic_binding *dyn_cast_symbolic_binding () const final override
478 { return this; }
480 const region *get_region () const { return m_region; }
482 static int cmp_ptr_ptr (const void *, const void *);
484 void mark_deleted () { m_region = reinterpret_cast<const region *> (1); }
485 void mark_empty () { m_region = NULL; }
486 bool is_deleted () const
487 { return m_region == reinterpret_cast<const region *> (1); }
488 bool is_empty () const { return m_region == NULL; }
490 private:
491 const region *m_region;
494 } // namespace ana
496 template <> struct default_hash_traits<ana::symbolic_binding>
497 : public member_function_hash_traits<ana::symbolic_binding>
499 static const bool empty_zero_p = true;
502 namespace ana {
504 /* A mapping from binding_keys to svalues, for use by binding_cluster
505 and compound_svalue. */
507 class binding_map
509 public:
510 typedef hash_map <const binding_key *, const svalue *> map_t;
511 typedef map_t::iterator iterator_t;
513 binding_map () : m_map () {}
514 binding_map (const binding_map &other);
515 binding_map& operator=(const binding_map &other);
517 bool operator== (const binding_map &other) const;
518 bool operator!= (const binding_map &other) const
520 return !(*this == other);
523 hashval_t hash () const;
525 const svalue *get (const binding_key *key) const
527 const svalue **slot = const_cast<map_t &> (m_map).get (key);
528 if (slot)
529 return *slot;
530 else
531 return NULL;
533 bool put (const binding_key *k, const svalue *v)
535 gcc_assert (v);
536 return m_map.put (k, v);
539 void remove (const binding_key *k) { m_map.remove (k); }
540 void empty () { m_map.empty (); }
542 iterator_t begin () const { return m_map.begin (); }
543 iterator_t end () const { return m_map.end (); }
544 size_t elements () const { return m_map.elements (); }
546 void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
547 void dump (bool simple) const;
549 json::object *to_json () const;
551 void add_to_tree_widget (text_art::tree_widget &parent_widget,
552 const text_art::dump_widget_info &dwi) const;
554 bool apply_ctor_to_region (const region *parent_reg, tree ctor,
555 region_model_manager *mgr);
557 static int cmp (const binding_map &map1, const binding_map &map2);
559 void remove_overlapping_bindings (store_manager *mgr,
560 const binding_key *drop_key,
561 uncertainty_t *uncertainty,
562 svalue_set *maybe_live_values,
563 bool always_overlap);
565 private:
566 void get_overlapping_bindings (const binding_key *key,
567 auto_vec<const binding_key *> *out);
568 bool apply_ctor_val_to_range (const region *parent_reg,
569 region_model_manager *mgr,
570 tree min_index, tree max_index,
571 tree val);
572 bool apply_ctor_pair_to_child_region (const region *parent_reg,
573 region_model_manager *mgr,
574 tree index, tree val);
576 map_t m_map;
579 /* Concept: BindingVisitor, for use by binding_cluster::for_each_binding
580 and store::for_each_binding.
582 Should implement:
583 void on_binding (const binding_key *key, const svalue *&sval);
586 /* All of the bindings within a store for regions that share the same
587 base region. */
589 class binding_cluster
591 public:
592 friend class store;
594 typedef hash_map <const binding_key *, const svalue *> map_t;
595 typedef map_t::iterator iterator_t;
597 binding_cluster (const region *base_region);
598 binding_cluster (const binding_cluster &other);
599 binding_cluster& operator=(const binding_cluster &other);
601 bool operator== (const binding_cluster &other) const;
602 bool operator!= (const binding_cluster &other) const
604 return !(*this == other);
607 hashval_t hash () const;
609 bool symbolic_p () const;
611 const region *get_base_region () const { return m_base_region; }
613 void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
614 void dump (bool simple) const;
616 void validate () const;
618 json::object *to_json () const;
620 std::unique_ptr<text_art::tree_widget>
621 make_dump_widget (const text_art::dump_widget_info &dwi,
622 store_manager *mgr) const;
624 void bind (store_manager *mgr, const region *, const svalue *);
626 void clobber_region (store_manager *mgr, const region *reg);
627 void purge_region (store_manager *mgr, const region *reg);
628 void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
629 void zero_fill_region (store_manager *mgr, const region *reg);
630 void mark_region_as_unknown (store_manager *mgr,
631 const region *reg_to_bind,
632 const region *reg_for_overlap,
633 uncertainty_t *uncertainty,
634 svalue_set *maybe_live_values);
635 void purge_state_involving (const svalue *sval,
636 region_model_manager *sval_mgr);
638 const svalue *get_binding (store_manager *mgr, const region *reg) const;
639 const svalue *get_binding_recursive (store_manager *mgr,
640 const region *reg) const;
641 const svalue *get_any_binding (store_manager *mgr,
642 const region *reg) const;
643 const svalue *maybe_get_compound_binding (store_manager *mgr,
644 const region *reg) const;
646 void remove_overlapping_bindings (store_manager *mgr, const region *reg,
647 uncertainty_t *uncertainty,
648 svalue_set *maybe_live_values);
650 template <typename T>
651 void for_each_value (void (*cb) (const svalue *sval, T user_data),
652 T user_data) const
654 for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
655 cb ((*iter).second, user_data);
658 static bool can_merge_p (const binding_cluster *cluster_a,
659 const binding_cluster *cluster_b,
660 binding_cluster *out_cluster,
661 store *out_store,
662 store_manager *mgr,
663 model_merger *merger);
664 void make_unknown_relative_to (const binding_cluster *other_cluster,
665 store *out_store,
666 store_manager *mgr);
668 void mark_as_escaped ();
669 void on_unknown_fncall (const gcall *call, store_manager *mgr,
670 const conjured_purge &p);
671 void on_asm (const gasm *stmt, store_manager *mgr,
672 const conjured_purge &p);
674 bool escaped_p () const;
675 bool touched_p () const { return m_touched; }
677 bool redundant_p () const;
678 bool empty_p () const { return m_map.elements () == 0; }
680 void get_representative_path_vars (const region_model *model,
681 svalue_set *visited,
682 const region *base_reg,
683 const svalue *sval,
684 logger *logger,
685 auto_vec<path_var> *out_pvs) const;
687 const svalue *maybe_get_simple_value (store_manager *mgr) const;
689 template <typename BindingVisitor>
690 void for_each_binding (BindingVisitor &v) const
692 for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
694 const binding_key *key = (*iter).first;
695 const svalue *&sval = (*iter).second;
696 v.on_binding (key, sval);
700 iterator_t begin () const { return m_map.begin (); }
701 iterator_t end () const { return m_map.end (); }
703 const binding_map &get_map () const { return m_map; }
705 private:
706 const svalue *get_any_value (const binding_key *key) const;
707 void bind_compound_sval (store_manager *mgr,
708 const region *reg,
709 const compound_svalue *compound_sval);
710 void bind_key (const binding_key *key, const svalue *sval);
712 const region *m_base_region;
714 binding_map m_map;
716 /* Has a pointer to this cluster "escaped" into a part of the program
717 we don't know about (via a call to a function with an unknown body,
718 or by being passed in as a pointer param of a "top-level" function call).
719 Such regions could be overwritten when other such functions are called,
720 even if the region is no longer reachable by pointers that we are
721 tracking. */
722 bool m_escaped;
724 /* Has this cluster been written to via a symbolic binding?
725 If so, then we don't know anything about unbound subregions,
726 so we can't use initial_svalue, treat them as uninitialized, or
727 inherit values from a parent region. */
728 bool m_touched;
731 /* The mapping from regions to svalues.
732 This is actually expressed by subdividing into clusters, to better
733 handle aliasing. */
735 class store
737 public:
738 typedef hash_map <const region *, binding_cluster *> cluster_map_t;
740 store ();
741 store (const store &other);
742 ~store ();
744 store &operator= (const store &other);
746 bool operator== (const store &other) const;
747 bool operator!= (const store &other) const
749 return !(*this == other);
752 hashval_t hash () const;
754 void dump_to_pp (pretty_printer *pp, bool summarize, bool multiline,
755 store_manager *mgr) const;
756 void dump (bool simple) const;
757 void summarize_to_pp (pretty_printer *pp, bool simple) const;
759 void validate () const;
761 json::object *to_json () const;
763 std::unique_ptr<text_art::tree_widget>
764 make_dump_widget (const text_art::dump_widget_info &dwi,
765 store_manager *mgr) const;
767 const svalue *get_any_binding (store_manager *mgr, const region *reg) const;
769 bool called_unknown_fn_p () const { return m_called_unknown_fn; }
771 void set_value (store_manager *mgr, const region *lhs_reg,
772 const svalue *rhs_sval,
773 uncertainty_t *uncertainty);
774 void clobber_region (store_manager *mgr, const region *reg);
775 void purge_region (store_manager *mgr, const region *reg);
776 void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
777 void zero_fill_region (store_manager *mgr, const region *reg);
778 void mark_region_as_unknown (store_manager *mgr, const region *reg,
779 uncertainty_t *uncertainty,
780 svalue_set *maybe_live_values);
781 void purge_state_involving (const svalue *sval,
782 region_model_manager *sval_mgr);
784 const binding_cluster *get_cluster (const region *base_reg) const;
785 binding_cluster *get_cluster (const region *base_reg);
786 binding_cluster *get_or_create_cluster (const region *base_reg);
787 void purge_cluster (const region *base_reg);
789 template <typename T>
790 void for_each_cluster (void (*cb) (const region *base_reg, T user_data),
791 T user_data) const
793 for (cluster_map_t::iterator iter = m_cluster_map.begin ();
794 iter != m_cluster_map.end (); ++iter)
795 cb ((*iter).first, user_data);
798 static bool can_merge_p (const store *store_a, const store *store_b,
799 store *out_store, store_manager *mgr,
800 model_merger *merger);
802 void mark_as_escaped (const region *base_reg);
803 void on_unknown_fncall (const gcall *call, store_manager *mgr,
804 const conjured_purge &p);
805 bool escaped_p (const region *reg) const;
807 void get_representative_path_vars (const region_model *model,
808 svalue_set *visited,
809 const svalue *sval,
810 logger *logger,
811 auto_vec<path_var> *out_pvs) const;
813 cluster_map_t::iterator begin () const { return m_cluster_map.begin (); }
814 cluster_map_t::iterator end () const { return m_cluster_map.end (); }
816 tristate eval_alias (const region *base_reg_a,
817 const region *base_reg_b) const;
819 template <typename BindingVisitor>
820 void for_each_binding (BindingVisitor &v)
822 for (cluster_map_t::iterator iter = m_cluster_map.begin ();
823 iter != m_cluster_map.end (); ++iter)
824 (*iter).second->for_each_binding (v);
827 void canonicalize (store_manager *mgr);
828 void loop_replay_fixup (const store *other_store,
829 region_model_manager *mgr);
831 void replay_call_summary (call_summary_replay &r,
832 const store &summary);
833 void replay_call_summary_cluster (call_summary_replay &r,
834 const store &summary,
835 const region *base_reg);
836 void on_maybe_live_values (const svalue_set &maybe_live_values);
838 private:
839 void remove_overlapping_bindings (store_manager *mgr, const region *reg,
840 uncertainty_t *uncertainty);
841 tristate eval_alias_1 (const region *base_reg_a,
842 const region *base_reg_b) const;
844 cluster_map_t m_cluster_map;
846 /* If this is true, then unknown code has been called, and so
847 any global variable that isn't currently modelled by the store
848 has unknown state, rather than being in an "initial state".
849 This is to avoid having to mark (and thus explicitly track)
850 every global when an unknown function is called; instead, they
851 can be tracked implicitly. */
852 bool m_called_unknown_fn;
855 /* A class responsible for owning and consolidating binding keys
856 (both concrete and symbolic).
857 Key instances are immutable as far as clients are concerned, so they
858 are provided as "const" ptrs. */
860 class store_manager
862 public:
863 store_manager (region_model_manager *mgr) : m_mgr (mgr) {}
865 logger *get_logger () const;
867 /* binding consolidation. */
868 const concrete_binding *
869 get_concrete_binding (bit_offset_t start_bit_offset,
870 bit_offset_t size_in_bits);
871 const concrete_binding *
872 get_concrete_binding (const bit_range &bits)
874 return get_concrete_binding (bits.get_start_bit_offset (),
875 bits.m_size_in_bits);
877 const concrete_binding *
878 get_concrete_binding (const byte_range &bytes)
880 bit_range bits = bytes.as_bit_range ();
881 return get_concrete_binding (bits);
883 const symbolic_binding *
884 get_symbolic_binding (const region *region);
886 region_model_manager *get_svalue_manager () const
888 return m_mgr;
891 void log_stats (logger *logger, bool show_objs) const;
893 private:
894 region_model_manager *m_mgr;
895 consolidation_map<concrete_binding> m_concrete_binding_key_mgr;
896 consolidation_map<symbolic_binding> m_symbolic_binding_key_mgr;
899 } // namespace ana
901 #endif /* GCC_ANALYZER_STORE_H */