/* A state machine for detecting misuses of the malloc/free API.
   Copyright (C) 2019-2024 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#define INCLUDE_MEMORY
#define INCLUDE_VECTOR
#include "system.h"
#include "coretypes.h"
#include "make-unique.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "diagnostic-path.h"
#include "analyzer/analyzer.h"
#include "diagnostic-event-id.h"
#include "analyzer/analyzer-logging.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/call-details.h"
#include "stringpool.h"
#include "analyzer/function-set.h"
#include "analyzer/program-state.h"
#include "analyzer/checker-event.h"
#include "analyzer/exploded-graph.h"
#include "analyzer/inlining-iterator.h"

#if ENABLE_ANALYZER

namespace ana {

namespace {
/* This state machine and its various support classes track allocations
   and deallocations.

   It has a few standard allocation/deallocation pairs (e.g. new/delete),
   and also supports user-defined ones via
   __attribute__ ((malloc(DEALLOCATOR))).

   There can be more than one valid deallocator for a given allocator,
   for example:
     __attribute__ ((malloc (fclose)))
     __attribute__ ((malloc (freopen, 3)))
     FILE* fopen (const char*, const char*);
   A deallocator_set represents a particular set of valid deallocators.

   We track the expected deallocator_set for a value, but not the allocation
   function - there could be more than one allocator per deallocator_set.
   For example, there could be dozens of allocators for "free" beyond just
   malloc e.g. calloc, xstrdup, etc.  We don't want to explode the number
   of states by tracking individual allocators in the exploded graph;
   we merely want to track "this value expects to have 'free' called on it".
   Perhaps we can reconstruct which allocator was used later, when emitting
   the path, if it's necessary for precision of wording of diagnostics.  */
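
/* Illustrative sketch (not part of the original sources): a user-defined
   allocator/deallocator pair such as

     void my_release (void *p);
     __attribute__ ((malloc (my_release))) void *my_acquire (size_t sz);

   gives the return value of my_acquire the "unchecked" state for the
   deallocator_set {my_release}; calling my_release on it moves the value
   to "freed", and a second call would be reported as a double-'my_release'.
   The names my_acquire/my_release are hypothetical and used only for
   illustration.  */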
class deallocator_set;
class malloc_state_machine;

/* An enum for discriminating between different kinds of allocation_state.  */

enum resource_state
{
  /* States that are independent of allocator/deallocator.  */

  /* The start state.  */
  RS_START,

  /* State for a pointer that's been unconditionally dereferenced.  */
  RS_ASSUMED_NON_NULL,

  /* State for a pointer that's known to be NULL.  */
  RS_NULL,

  /* State for a pointer that's known to not be on the heap (e.g. to a local
     or global).  */
  RS_NON_HEAP,

  /* Stop state, for pointers we don't want to track any more.  */
  RS_STOP,

  /* States that relate to a specific deallocator_set.  */

  /* State for a pointer returned from an allocator that hasn't
     been checked for NULL.
     It could be a pointer to heap-allocated memory, or could be NULL.  */
  RS_UNCHECKED,

  /* State for a pointer returned from an allocator,
     known to be non-NULL.  */
  RS_NONNULL,

  /* State for a pointer passed to a deallocator.  */
  RS_FREED
};

/* Custom state subclass, which can optionally refer to a
   deallocator_set and a deallocator.  */

struct allocation_state : public state_machine::state
{
  allocation_state (const char *name, unsigned id,
		    enum resource_state rs,
		    const deallocator_set *deallocators,
		    const deallocator *deallocator)
  : state (name, id), m_rs (rs),
    m_deallocators (deallocators),
    m_deallocator (deallocator)
  {}

  void dump_to_pp (pretty_printer *pp) const override;

  const allocation_state *get_nonnull () const;

  enum resource_state m_rs;
  const deallocator_set *m_deallocators;
  const deallocator *m_deallocator;
};

/* Custom state subclass, for the "assumed-non-null" state
   where the assumption happens in a particular frame.  */

struct assumed_non_null_state : public allocation_state
{
  assumed_non_null_state (const char *name, unsigned id,
			  const frame_region *frame)
  : allocation_state (name, id, RS_ASSUMED_NON_NULL,
		      NULL, NULL),
    m_frame (frame)
  {
    gcc_assert (m_frame);
  }

  void dump_to_pp (pretty_printer *pp) const final override;

  const frame_region *m_frame;
};

/* An enum for choosing which wording to use in various diagnostics
   when describing deallocations.  */

enum wording
{
  WORDING_FREED,
  WORDING_DELETED,
  WORDING_DEALLOCATED,
  WORDING_REALLOCATED
};
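
/* For instance, a use-after-free of a pointer that was passed to "delete"
   is worded as "deleted here" rather than "freed here" in the resulting
   diagnostic path (see the use_after_free class below).  */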
/* Base class representing a deallocation function,
   either a built-in one we know about, or one exposed via
   __attribute__((malloc(DEALLOCATOR))).  */

struct deallocator
{
  hashval_t hash () const;
  void dump_to_pp (pretty_printer *pp) const;
  static int cmp (const deallocator *a, const deallocator *b);
  static int cmp_ptr_ptr (const void *, const void *);

  /* Name to use in diagnostics.  */
  const char *m_name;

  /* Which wording to use in diagnostics.  */
  enum wording m_wording;

  /* State for a value passed to one of the deallocators.  */
  state_machine::state_t m_freed;

protected:
  deallocator (malloc_state_machine *sm,
	       const char *name,
	       enum wording wording);
};

/* Subclass representing a predefined deallocator,
   e.g. "delete []", without needing a specific FUNCTION_DECL
   ahead of time.  */

struct standard_deallocator : public deallocator
{
  standard_deallocator (malloc_state_machine *sm,
			const char *name,
			enum wording wording);
};

/* Subclass representing a user-defined deallocator
   via __attribute__((malloc(DEALLOCATOR))) given
   a specific FUNCTION_DECL.  */

struct custom_deallocator : public deallocator
{
  custom_deallocator (malloc_state_machine *sm,
		      tree deallocator_fndecl,
		      enum wording wording)
  : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
		 wording)
  {
  }
};

/* Base class representing a set of possible deallocators.
   Often this will be just a single deallocator, but some
   allocators have multiple valid deallocators (e.g. the result of
   "fopen" can be closed by either "fclose" or "freopen").  */

struct deallocator_set
{
  deallocator_set (malloc_state_machine *sm,
		   enum wording wording);
  virtual ~deallocator_set () {}

  virtual bool contains_p (const deallocator *d) const = 0;
  virtual const deallocator *maybe_get_single () const = 0;
  virtual void dump_to_pp (pretty_printer *pp) const = 0;
  void dump () const;

  /* Which wording to use in diagnostics.  */
  enum wording m_wording;

  /* Pointers to states.
     These states are owned by the state_machine base class.  */

  /* State for an unchecked result from an allocator using this set.  */
  state_machine::state_t m_unchecked;

  /* State for a known non-NULL result from such an allocator.  */
  state_machine::state_t m_nonnull;
};

/* Subclass of deallocator_set representing a set of deallocators
   defined by one or more __attribute__((malloc(DEALLOCATOR))).  */

struct custom_deallocator_set : public deallocator_set
{
  typedef const auto_vec <const deallocator *> *key_t;

  custom_deallocator_set (malloc_state_machine *sm,
			  const auto_vec <const deallocator *> *vec,
			  //const char *dealloc_funcname,
			  enum wording wording);

  bool contains_p (const deallocator *d) const final override;
  const deallocator *maybe_get_single () const final override;
  void dump_to_pp (pretty_printer *pp) const final override;

  auto_vec <const deallocator *> m_deallocator_vec;
};

/* Subclass of deallocator_set representing a set of deallocators
   with a single standard_deallocator, e.g. "delete []".  */

struct standard_deallocator_set : public deallocator_set
{
  standard_deallocator_set (malloc_state_machine *sm,
			    const char *name,
			    enum wording wording);

  bool contains_p (const deallocator *d) const final override;
  const deallocator *maybe_get_single () const final override;
  void dump_to_pp (pretty_printer *pp) const final override;

  standard_deallocator m_deallocator;
};

/* Traits class for ensuring uniqueness of deallocator_sets within
   malloc_state_machine.  */

struct deallocator_set_map_traits
{
  typedef custom_deallocator_set::key_t key_type;
  typedef custom_deallocator_set *value_type;
  typedef custom_deallocator_set *compare_type;

  static inline hashval_t hash (const key_type &k)
  {
    gcc_assert (k != NULL);
    gcc_assert (k != reinterpret_cast<key_type> (1));

    hashval_t result = 0;
    unsigned i;
    const deallocator *d;
    FOR_EACH_VEC_ELT (*k, i, d)
      result ^= d->hash ();
    return result;
  }
  static inline bool equal_keys (const key_type &k1, const key_type &k2)
  {
    if (k1->length () != k2->length ())
      return false;

    for (unsigned i = 0; i < k1->length (); i++)
      if ((*k1)[i] != (*k2)[i])
	return false;

    return true;
  }
  template <typename T>
  static inline void remove (T &)
  {
    /* empty; the nodes are handled elsewhere.  */
  }
  template <typename T>
  static inline void mark_deleted (T &entry)
  {
    entry.m_key = reinterpret_cast<key_type> (1);
  }
  template <typename T>
  static inline void mark_empty (T &entry)
  {
    entry.m_key = NULL;
  }
  template <typename T>
  static inline bool is_deleted (const T &entry)
  {
    return entry.m_key == reinterpret_cast<key_type> (1);
  }
  template <typename T>
  static inline bool is_empty (const T &entry)
  {
    return entry.m_key == NULL;
  }
  static const bool empty_zero_p = false;
};
/* A state machine for detecting misuses of the malloc/free API.

   See sm-malloc.dot for an overview (keep this in-sync with that file).  */

class malloc_state_machine : public state_machine
{
public:
  typedef allocation_state custom_data_t;

  malloc_state_machine (logger *logger);
  ~malloc_state_machine ();

  state_t
  add_state (const char *name, enum resource_state rs,
	     const deallocator_set *deallocators,
	     const deallocator *deallocator);

  bool inherited_state_p () const final override { return false; }

  state_machine::state_t
  get_default_state (const svalue *sval) const final override
  {
    if (tree cst = sval->maybe_get_constant ())
      {
	if (zerop (cst))
	  return m_null;
      }
    if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
      {
	const region *reg = ptr->get_pointee ();
	switch (reg->get_memory_space ())
	  {
	  default:
	    break;
	  case MEMSPACE_CODE:
	  case MEMSPACE_GLOBALS:
	  case MEMSPACE_STACK:
	  case MEMSPACE_READONLY_DATA:
	    return m_non_heap;
	  }
      }
    return m_start;
  }
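
  /* Illustrative note: with the mapping above, a pointer such as &buf for
     a local "char buf[64]" lies in MEMSPACE_STACK and therefore starts in
     the "non-heap" state, so a later "free (buf)" is reported via the
     free_of_non_heap diagnostic below rather than being tracked as a heap
     allocation.  (The variable name "buf" is just an example.)  */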
  bool on_stmt (sm_context &sm_ctxt,
		const supernode *node,
		const gimple *stmt) const final override;

  void on_phi (sm_context &sm_ctxt,
	       const supernode *node,
	       const gphi *phi,
	       tree rhs) const final override;

  void on_condition (sm_context &sm_ctxt,
		     const supernode *node,
		     const gimple *stmt,
		     const svalue *lhs,
		     enum tree_code op,
		     const svalue *rhs) const final override;

  void on_pop_frame (sm_state_map *smap,
		     const frame_region *) const final override;

  bool can_purge_p (state_t s) const final override;
  std::unique_ptr<pending_diagnostic> on_leak (tree var) const final override;

  bool reset_when_passed_to_unknown_fn_p (state_t s,
					  bool is_mutable) const final override;

  state_t
  maybe_get_merged_states_nonequal (state_t state_a,
				    state_t state_b) const final override;

  static bool unaffected_by_call_p (tree fndecl);

  void maybe_assume_non_null (sm_context &sm_ctxt,
			      tree ptr,
			      const gimple *stmt) const;

  void on_realloc_with_move (region_model *model,
			     sm_state_map *smap,
			     const svalue *old_ptr_sval,
			     const svalue *new_ptr_sval,
			     const extrinsic_state &ext_state) const;

  void transition_ptr_sval_non_null (region_model *model,
				     sm_state_map *smap,
				     const svalue *new_ptr_sval,
				     const extrinsic_state &ext_state) const;

  standard_deallocator_set m_free;
  standard_deallocator_set m_scalar_delete;
  standard_deallocator_set m_vector_delete;

  standard_deallocator m_realloc;

  /* States that are independent of api.  */

  /* States for a pointer that's been unconditionally dereferenced
     in a particular stack frame.  */
  hash_map <const frame_region *, state_t> m_assumed_non_null;

  /* State for a pointer that's known to be NULL.  */
  state_t m_null;

  /* State for a pointer that's known to not be on the heap (e.g. to a local
     or global).  */
  state_t m_non_heap; // TODO: or should this be a different state machine?
		      // or do we need child values etc?

  /* Stop state, for pointers we don't want to track any more.  */
  state_t m_stop;

private:
  const custom_deallocator_set *
  get_or_create_custom_deallocator_set (tree allocator_fndecl);
  custom_deallocator_set *
  maybe_create_custom_deallocator_set (tree allocator_fndecl);
  const deallocator *
  get_or_create_deallocator (tree deallocator_fndecl);

  state_t
  get_or_create_assumed_non_null_state_for_frame (const frame_region *frame);

  void
  maybe_complain_about_deref_before_check (sm_context &sm_ctxt,
					   const supernode *node,
					   const gimple *stmt,
					   const assumed_non_null_state *,
					   tree ptr) const;

  void on_allocator_call (sm_context &sm_ctxt,
			  const gcall *call,
			  const deallocator_set *deallocators,
			  bool returns_nonnull = false) const;
  void handle_free_of_non_heap (sm_context &sm_ctxt,
				const supernode *node,
				const gcall *call,
				tree arg,
				const deallocator *d) const;
  void on_deallocator_call (sm_context &sm_ctxt,
			    const supernode *node,
			    const gcall *call,
			    const deallocator *d,
			    unsigned argno) const;
  void on_realloc_call (sm_context &sm_ctxt,
			const supernode *node,
			const gcall *call) const;
  void on_zero_assignment (sm_context &sm_ctxt,
			   const gimple *stmt,
			   tree lhs) const;

  /* A map for consolidating deallocators so that they are
     unique per deallocator FUNCTION_DECL.  */
  typedef hash_map<tree, deallocator *> deallocator_map_t;
  deallocator_map_t m_deallocator_map;

  /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *.  */
  typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
  deallocator_set_cache_t m_custom_deallocator_set_cache;

  /* A map for consolidating custom_deallocator_set instances.  */
  typedef hash_map<custom_deallocator_set::key_t,
		   custom_deallocator_set *,
		   deallocator_set_map_traits> custom_deallocator_set_map_t;
  custom_deallocator_set_map_t m_custom_deallocator_set_map;

  /* Record of dynamically-allocated objects, for cleanup.  */
  auto_vec <custom_deallocator_set *> m_dynamic_sets;
  auto_vec <custom_deallocator *> m_dynamic_deallocators;
};
/* struct deallocator.  */

deallocator::deallocator (malloc_state_machine *sm,
			  const char *name,
			  enum wording wording)
: m_name (name),
  m_wording (wording),
  m_freed (sm->add_state ("freed", RS_FREED, NULL, this))
{
}

hashval_t
deallocator::hash () const
{
  return (hashval_t)m_freed->get_id ();
}

void
deallocator::dump_to_pp (pretty_printer *pp) const
{
  pp_printf (pp, "%qs", m_name);
}

int
deallocator::cmp (const deallocator *a, const deallocator *b)
{
  return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
}

int
deallocator::cmp_ptr_ptr (const void *a, const void *b)
{
  return cmp (*(const deallocator * const *)a,
	      *(const deallocator * const *)b);
}

/* struct standard_deallocator : public deallocator.  */

standard_deallocator::standard_deallocator (malloc_state_machine *sm,
					    const char *name,
					    enum wording wording)
: deallocator (sm, name, wording)
{
}

/* struct deallocator_set.  */

deallocator_set::deallocator_set (malloc_state_machine *sm,
				  enum wording wording)
: m_wording (wording),
  m_unchecked (sm->add_state ("unchecked", RS_UNCHECKED, this, NULL)),
  m_nonnull (sm->add_state ("nonnull", RS_NONNULL, this, NULL))
{
}

/* Dump a description of this deallocator_set to stderr.  */

DEBUG_FUNCTION void
deallocator_set::dump () const
{
  tree_dump_pretty_printer pp (stderr);
  dump_to_pp (&pp);
  pp_newline (&pp);
}
/* struct custom_deallocator_set : public deallocator_set.  */

custom_deallocator_set::
custom_deallocator_set (malloc_state_machine *sm,
			const auto_vec <const deallocator *> *vec,
			enum wording wording)
: deallocator_set (sm, wording),
  m_deallocator_vec (vec->length ())
{
  unsigned i;
  const deallocator *d;
  FOR_EACH_VEC_ELT (*vec, i, d)
    m_deallocator_vec.safe_push (d);
}

bool
custom_deallocator_set::contains_p (const deallocator *d) const
{
  unsigned i;
  const deallocator *cd;
  FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
    if (cd == d)
      return true;
  return false;
}

const deallocator *
custom_deallocator_set::maybe_get_single () const
{
  if (m_deallocator_vec.length () == 1)
    return m_deallocator_vec[0];
  return NULL;
}

void
custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
{
  pp_character (pp, '{');
  unsigned i;
  const deallocator *d;
  FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
    {
      if (i > 0)
	pp_string (pp, ", ");
      d->dump_to_pp (pp);
    }
  pp_character (pp, '}');
}

/* struct standard_deallocator_set : public deallocator_set.  */

standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
						    const char *name,
						    enum wording wording)
: deallocator_set (sm, wording),
  m_deallocator (sm, name, wording)
{
}

bool
standard_deallocator_set::contains_p (const deallocator *d) const
{
  return d == &m_deallocator;
}

const deallocator *
standard_deallocator_set::maybe_get_single () const
{
  return &m_deallocator;
}

void
standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
{
  pp_character (pp, '{');
  pp_string (pp, m_deallocator.m_name);
  pp_character (pp, '}');
}
/* Return STATE cast to the custom state subclass, or NULL for the start state.
   Everything should be an allocation_state apart from the start state.  */

static const allocation_state *
dyn_cast_allocation_state (state_machine::state_t state)
{
  if (state->get_id () == 0)
    return NULL;
  return static_cast <const allocation_state *> (state);
}

/* Return STATE cast to the custom state subclass, for a state that is
   already known to not be the start state.  */

static const allocation_state *
as_a_allocation_state (state_machine::state_t state)
{
  gcc_assert (state->get_id () != 0);
  return static_cast <const allocation_state *> (state);
}

/* Get the resource_state for STATE.  */

static enum resource_state
get_rs (state_machine::state_t state)
{
  if (const allocation_state *astate = dyn_cast_allocation_state (state))
    return astate->m_rs;
  else
    return RS_START;
}

/* Return true if STATE is the start state.  */

static bool
start_p (state_machine::state_t state)
{
  return get_rs (state) == RS_START;
}

/* Return true if STATE is an unchecked result from an allocator.  */

static bool
unchecked_p (state_machine::state_t state)
{
  return get_rs (state) == RS_UNCHECKED;
}

/* Return true if STATE is a non-null result from an allocator.  */

static bool
nonnull_p (state_machine::state_t state)
{
  return get_rs (state) == RS_NONNULL;
}

/* Return true if STATE is a value that has been passed to a deallocator.  */

static bool
freed_p (state_machine::state_t state)
{
  return get_rs (state) == RS_FREED;
}

/* Return true if STATE is a value that has been assumed to be non-NULL.  */

static bool
assumed_non_null_p (state_machine::state_t state)
{
  return get_rs (state) == RS_ASSUMED_NON_NULL;
}
/* Class for diagnostics relating to malloc_state_machine.  */

class malloc_diagnostic : public pending_diagnostic
{
public:
  malloc_diagnostic (const malloc_state_machine &sm, tree arg)
  : m_sm (sm), m_arg (arg)
  {}

  bool subclass_equal_p (const pending_diagnostic &base_other) const override
  {
    return same_tree_p (m_arg, ((const malloc_diagnostic &)base_other).m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& (unchecked_p (change.m_new_state) || nonnull_p (change.m_new_state)))
      // TODO: verify that it's the allocation stmt, not a copy
      return label_text::borrow ("allocated here");
    if (unchecked_p (change.m_old_state)
	&& nonnull_p (change.m_new_state))
      {
	if (change.m_expr)
	  return change.formatted_print ("assuming %qE is non-NULL",
					 change.m_expr);
	else
	  return change.formatted_print ("assuming %qs is non-NULL",
					 "<unknown>");
      }
    if (change.m_new_state == m_sm.m_null)
      {
	if (unchecked_p (change.m_old_state))
	  {
	    if (change.m_expr)
	      return change.formatted_print ("assuming %qE is NULL",
					     change.m_expr);
	    else
	      return change.formatted_print ("assuming %qs is NULL",
					     "<unknown>");
	  }
	else
	  {
	    if (change.m_expr)
	      return change.formatted_print ("%qE is NULL",
					     change.m_expr);
	    else
	      return change.formatted_print ("%qs is NULL",
					     "<unknown>");
	  }
      }

    return label_text ();
  }

  diagnostic_event::meaning
  get_meaning_for_state_change (const evdesc::state_change &change)
    const final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (change.m_new_state))
      return diagnostic_event::meaning (diagnostic_event::VERB_acquire,
					diagnostic_event::NOUN_memory);
    if (freed_p (change.m_new_state))
      return diagnostic_event::meaning (diagnostic_event::VERB_release,
					diagnostic_event::NOUN_memory);
    return diagnostic_event::meaning ();
  }

protected:
  const malloc_state_machine &m_sm;
  tree m_arg;
};
/* Concrete subclass for reporting mismatching allocator/deallocator
   diagnostics.  */

class mismatching_deallocation : public malloc_diagnostic
{
public:
  mismatching_deallocation (const malloc_state_machine &sm, tree arg,
			    const deallocator_set *expected_deallocators,
			    const deallocator *actual_dealloc)
  : malloc_diagnostic (sm, arg),
    m_expected_deallocators (expected_deallocators),
    m_actual_dealloc (actual_dealloc)
  {}

  const char *get_kind () const final override
  {
    return "mismatching_deallocation";
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_mismatching_deallocation;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    auto_diagnostic_group d;
    ctxt.add_cwe (762); /* CWE-762: Mismatched Memory Management Routines.  */
    if (const deallocator *expected_dealloc
	  = m_expected_deallocators->maybe_get_single ())
      return ctxt.warn ("%qE should have been deallocated with %qs"
			" but was deallocated with %qs",
			m_arg, expected_dealloc->m_name,
			m_actual_dealloc->m_name);
    else
      return ctxt.warn ("%qs called on %qE returned from a mismatched"
			" allocation function",
			m_actual_dealloc->m_name, m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (unchecked_p (change.m_new_state))
      {
	m_alloc_event = change.m_event_id;
	if (const deallocator *expected_dealloc
	      = m_expected_deallocators->maybe_get_single ())
	  return change.formatted_print ("allocated here"
					 " (expects deallocation with %qs)",
					 expected_dealloc->m_name);
	else
	  return change.formatted_print ("allocated here");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_alloc_event.known_p ())
      {
	if (const deallocator *expected_dealloc
	      = m_expected_deallocators->maybe_get_single ())
	  return ev.formatted_print
	    ("deallocated with %qs here;"
	     " allocation at %@ expects deallocation with %qs",
	     m_actual_dealloc->m_name, &m_alloc_event,
	     expected_dealloc->m_name);
	else
	  return ev.formatted_print
	    ("deallocated with %qs here;"
	     " allocated at %@",
	     m_actual_dealloc->m_name, &m_alloc_event);
      }
    return ev.formatted_print ("deallocated with %qs here",
			       m_actual_dealloc->m_name);
  }

private:
  diagnostic_event_id_t m_alloc_event;
  const deallocator_set *m_expected_deallocators;
  const deallocator *m_actual_dealloc;
};
/* Concrete subclass for reporting double-free diagnostics.  */

class double_free : public malloc_diagnostic
{
public:
  double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
  : malloc_diagnostic (sm, arg), m_funcname (funcname)
  {}

  const char *get_kind () const final override { return "double_free"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_double_free;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    auto_diagnostic_group d;
    ctxt.add_cwe (415); /* CWE-415: Double Free.  */
    return ctxt.warn ("double-%qs of %qE", m_funcname, m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (freed_p (change.m_new_state))
      {
	m_first_free_event = change.m_event_id;
	return change.formatted_print ("first %qs here", m_funcname);
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_call_with_state (const evdesc::call_with_state &info)
    final override
  {
    if (freed_p (info.m_state))
      return info.formatted_print
	("passing freed pointer %qE in call to %qE from %qE",
	 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
    return label_text ();
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_first_free_event.known_p ())
      return ev.formatted_print ("second %qs here; first %qs was at %@",
				 m_funcname, m_funcname,
				 &m_first_free_event);
    return ev.formatted_print ("second %qs here", m_funcname);
  }

private:
  diagnostic_event_id_t m_first_free_event;
  const char *m_funcname;
};
/* Abstract subclass for describing possible bad uses of NULL.
   Responsible for describing the call that could return NULL.  */

class possible_null : public malloc_diagnostic
{
public:
  possible_null (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg)
  {}

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (change.m_new_state))
      {
	m_origin_of_unchecked_event = change.m_event_id;
	return label_text::borrow ("this call could return NULL");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_return_of_state (const evdesc::return_of_state &info)
    final override
  {
    if (unchecked_p (info.m_state))
      return info.formatted_print ("possible return of NULL to %qE from %qE",
				   info.m_caller_fndecl, info.m_callee_fndecl);
    return label_text ();
  }

protected:
  diagnostic_event_id_t m_origin_of_unchecked_event;
};

/* Concrete subclass for describing dereference of a possible NULL
   value.  */

class possible_null_deref : public possible_null
{
public:
  possible_null_deref (const malloc_state_machine &sm, tree arg)
  : possible_null (sm, arg)
  {}

  const char *get_kind () const final override { return "possible_null_deref"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_possible_null_dereference;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-690: Unchecked Return Value to NULL Pointer Dereference.  */
    ctxt.add_cwe (690);
    return ctxt.warn ("dereference of possibly-NULL %qE", m_arg);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_origin_of_unchecked_event.known_p ())
      return ev.formatted_print ("%qE could be NULL: unchecked value from %@",
				 ev.m_expr,
				 &m_origin_of_unchecked_event);
    else
      return ev.formatted_print ("%qE could be NULL", ev.m_expr);
  }
};
/* Return true if FNDECL is a C++ method.  */

static bool
method_p (tree fndecl)
{
  return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
}

/* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
   Compare with %P in the C++ FE (implemented in cp/error.cc: parm_to_string
   as called from cp_printer).  */

static label_text
describe_argument_index (tree fndecl, int arg_idx)
{
  if (method_p (fndecl))
    if (arg_idx == 0)
      return label_text::borrow ("'this'");
  pretty_printer pp;
  pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
  return label_text::take (xstrdup (pp_formatted_text (&pp)));
}
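
/* For example (illustrative only): for "void f (void *p, void *q)",
   arg_idx 1 is described as "2"; for a C++ method, arg_idx 0 is "'this'"
   and arg_idx 1 is "1", matching the 1-based wording of the
   "argument %s of %qD must be non-null" note issued below.  */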
/* Subroutine for use by possible_null_arg::emit and null_arg::emit.
   Issue a note informing that the pertinent argument must be non-NULL.  */

static void
inform_nonnull_attribute (tree fndecl, int arg_idx)
{
  label_text arg_desc = describe_argument_index (fndecl, arg_idx);
  inform (DECL_SOURCE_LOCATION (fndecl),
	  "argument %s of %qD must be non-null",
	  arg_desc.get (), fndecl);
  /* Ideally we would use the location of the parm and underline the
     attribute also - but we don't have the location_t values at this point
     in the middle-end.
     For reference, the C and C++ FEs have get_fndecl_argument_location.  */
}

/* Concrete subclass for describing passing a possibly-NULL value to a
   function marked with __attribute__((nonnull)).  */

class possible_null_arg : public possible_null
{
public:
  possible_null_arg (const malloc_state_machine &sm, tree arg,
		     tree fndecl, int arg_idx)
  : possible_null (sm, arg),
    m_fndecl (fndecl), m_arg_idx (arg_idx)
  {}

  const char *get_kind () const final override { return "possible_null_arg"; }

  bool subclass_equal_p (const pending_diagnostic &base_other)
    const final override
  {
    const possible_null_arg &sub_other
      = (const possible_null_arg &)base_other;
    return (same_tree_p (m_arg, sub_other.m_arg)
	    && m_fndecl == sub_other.m_fndecl
	    && m_arg_idx == sub_other.m_arg_idx);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_possible_null_argument;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-690: Unchecked Return Value to NULL Pointer Dereference.  */
    auto_diagnostic_group d;
    ctxt.add_cwe (690);
    bool warned
      = ctxt.warn ("use of possibly-NULL %qE where non-null expected",
		   m_arg);
    if (warned)
      inform_nonnull_attribute (m_fndecl, m_arg_idx);
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
    label_text result;
    if (m_origin_of_unchecked_event.known_p ())
      result = ev.formatted_print ("argument %s (%qE) from %@ could be NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr,
				   &m_origin_of_unchecked_event);
    else
      result = ev.formatted_print ("argument %s (%qE) could be NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr);
    return result;
  }

private:
  tree m_fndecl;
  int m_arg_idx;
};
/* Concrete subclass for describing a dereference of a NULL value.  */

class null_deref : public malloc_diagnostic
{
public:
  null_deref (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg) {}

  const char *get_kind () const final override { return "null_deref"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_null_dereference;
  }

  bool terminate_path_p () const final override { return true; }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-476: NULL Pointer Dereference.  */
    ctxt.add_cwe (476);
    return ctxt.warn ("dereference of NULL %qE", m_arg);
  }

  label_text describe_return_of_state (const evdesc::return_of_state &info)
    final override
  {
    if (info.m_state == m_sm.m_null)
      return info.formatted_print ("return of NULL to %qE from %qE",
				   info.m_caller_fndecl, info.m_callee_fndecl);
    return label_text ();
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("dereference of NULL %qE", ev.m_expr);
  }

  /* Implementation of pending_diagnostic::supercedes_p for
     null-deref.

     We want null-deref to supercede use-of-uninitialized-value,
     so that if we have these at the same stmt, we don't emit
     a use-of-uninitialized, just the null-deref.  */
  bool supercedes_p (const pending_diagnostic &other) const final override
  {
    if (other.use_of_uninit_p ())
      return true;

    return false;
  }
};

/* Concrete subclass for describing passing a NULL value to a
   function marked with __attribute__((nonnull)).  */

class null_arg : public malloc_diagnostic
{
public:
  null_arg (const malloc_state_machine &sm, tree arg,
	    tree fndecl, int arg_idx)
  : malloc_diagnostic (sm, arg),
    m_fndecl (fndecl), m_arg_idx (arg_idx)
  {}

  const char *get_kind () const final override { return "null_arg"; }

  bool subclass_equal_p (const pending_diagnostic &base_other)
    const final override
  {
    const null_arg &sub_other
      = (const null_arg &)base_other;
    return (same_tree_p (m_arg, sub_other.m_arg)
	    && m_fndecl == sub_other.m_fndecl
	    && m_arg_idx == sub_other.m_arg_idx);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_null_argument;
  }

  bool terminate_path_p () const final override { return true; }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-476: NULL Pointer Dereference.  */
    auto_diagnostic_group d;
    ctxt.add_cwe (476);

    bool warned;
    if (zerop (m_arg))
      warned = ctxt.warn ("use of NULL where non-null expected");
    else
      warned = ctxt.warn ("use of NULL %qE where non-null expected",
			  m_arg);
    if (warned)
      inform_nonnull_attribute (m_fndecl, m_arg_idx);
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
    label_text result;
    if (zerop (ev.m_expr))
      result = ev.formatted_print ("argument %s NULL where non-null expected",
				   arg_desc.get ());
    else
      result = ev.formatted_print ("argument %s (%qE) NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr);
    return result;
  }

private:
  tree m_fndecl;
  int m_arg_idx;
};
/* Concrete subclass for describing a use after deallocation.  */

class use_after_free : public malloc_diagnostic
{
public:
  use_after_free (const malloc_state_machine &sm, tree arg,
		  const deallocator *deallocator)
  : malloc_diagnostic (sm, arg),
    m_deallocator (deallocator)
  {
    gcc_assert (deallocator);
  }

  const char *get_kind () const final override { return "use_after_free"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_use_after_free;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-416: Use After Free.  */
    ctxt.add_cwe (416);
    return ctxt.warn ("use after %<%s%> of %qE",
		      m_deallocator->m_name, m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (freed_p (change.m_new_state))
      {
	m_free_event = change.m_event_id;
	switch (m_deallocator->m_wording)
	  {
	  default:
	  case WORDING_REALLOCATED:
	    gcc_unreachable ();
	  case WORDING_FREED:
	    return label_text::borrow ("freed here");
	  case WORDING_DELETED:
	    return label_text::borrow ("deleted here");
	  case WORDING_DEALLOCATED:
	    return label_text::borrow ("deallocated here");
	  }
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    const char *funcname = m_deallocator->m_name;
    if (m_free_event.known_p ())
      switch (m_deallocator->m_wording)
	{
	default:
	case WORDING_REALLOCATED:
	  gcc_unreachable ();
	case WORDING_FREED:
	  return ev.formatted_print ("use after %<%s%> of %qE; freed at %@",
				     funcname, ev.m_expr, &m_free_event);
	case WORDING_DELETED:
	  return ev.formatted_print ("use after %<%s%> of %qE; deleted at %@",
				     funcname, ev.m_expr, &m_free_event);
	case WORDING_DEALLOCATED:
	  return ev.formatted_print ("use after %<%s%> of %qE;"
				     " deallocated at %@",
				     funcname, ev.m_expr, &m_free_event);
	}
    else
      return ev.formatted_print ("use after %<%s%> of %qE",
				 funcname, ev.m_expr);
  }

  /* Implementation of pending_diagnostic::supercedes_p for
     use_after_free.

     We want use-after-free to supercede use-of-uninitialized-value,
     so that if we have these at the same stmt, we don't emit
     a use-of-uninitialized, just the use-after-free.
     (this is because we fully purge information about freed
     buffers when we free them to avoid state explosions, so
     that if they are accessed after the free, it looks like
     they are uninitialized).  */
  bool supercedes_p (const pending_diagnostic &other) const final override
  {
    if (other.use_of_uninit_p ())
      return true;

    return false;
  }

private:
  diagnostic_event_id_t m_free_event;
  const deallocator *m_deallocator;
};
/* Concrete subclass for describing a leak of an allocated value.  */

class malloc_leak : public malloc_diagnostic
{
public:
  malloc_leak (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg) {}

  const char *get_kind () const final override { return "malloc_leak"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_malloc_leak;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* "CWE-401: Missing Release of Memory after Effective Lifetime".  */
    ctxt.add_cwe (401);
    if (m_arg)
      return ctxt.warn ("leak of %qE", m_arg);
    else
      return ctxt.warn ("leak of %qs", "<unknown>");
  }

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (unchecked_p (change.m_new_state)
	|| (start_p (change.m_old_state) && nonnull_p (change.m_new_state)))
      {
	m_alloc_event = change.m_event_id;
	return label_text::borrow ("allocated here");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (ev.m_expr)
      {
	if (m_alloc_event.known_p ())
	  return ev.formatted_print ("%qE leaks here; was allocated at %@",
				     ev.m_expr, &m_alloc_event);
	else
	  return ev.formatted_print ("%qE leaks here", ev.m_expr);
      }
    else
      {
	if (m_alloc_event.known_p ())
	  return ev.formatted_print ("%qs leaks here; was allocated at %@",
				     "<unknown>", &m_alloc_event);
	else
	  return ev.formatted_print ("%qs leaks here", "<unknown>");
      }
  }

private:
  diagnostic_event_id_t m_alloc_event;
};
/* Concrete subclass for describing a deallocation of a non-heap pointer.  */

class free_of_non_heap : public malloc_diagnostic
{
public:
  free_of_non_heap (const malloc_state_machine &sm, tree arg,
		    const region *freed_reg,
		    const char *funcname)
  : malloc_diagnostic (sm, arg), m_freed_reg (freed_reg), m_funcname (funcname)
  {
  }

  const char *get_kind () const final override { return "free_of_non_heap"; }

  bool subclass_equal_p (const pending_diagnostic &base_other) const
    final override
  {
    const free_of_non_heap &other = (const free_of_non_heap &)base_other;
    return (same_tree_p (m_arg, other.m_arg)
	    && m_freed_reg == other.m_freed_reg);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_free_of_non_heap;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    auto_diagnostic_group d;
    ctxt.add_cwe (590); /* CWE-590: Free of Memory not on the Heap.  */
    switch (get_memory_space ())
      {
      default:
      case MEMSPACE_HEAP:
	gcc_unreachable ();
      case MEMSPACE_UNKNOWN:
      case MEMSPACE_CODE:
      case MEMSPACE_GLOBALS:
      case MEMSPACE_READONLY_DATA:
	return ctxt.warn ("%<%s%> of %qE which points to memory"
			  " not on the heap",
			  m_funcname, m_arg);
      case MEMSPACE_STACK:
	return ctxt.warn ("%<%s%> of %qE which points to memory"
			  " on the stack",
			  m_funcname, m_arg);
      }
  }

  label_text describe_state_change (const evdesc::state_change &)
    final override
  {
    return label_text::borrow ("pointer is from here");
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("call to %qs here", m_funcname);
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_freed_reg)
      interest->add_region_creation (m_freed_reg);
  }

private:
  enum memory_space get_memory_space () const
  {
    if (m_freed_reg)
      return m_freed_reg->get_memory_space ();
    else
      return MEMSPACE_UNKNOWN;
  }

  const region *m_freed_reg;
  const char *m_funcname;
};
/* Concrete pending_diagnostic subclass for -Wanalyzer-deref-before-check.  */

class deref_before_check : public malloc_diagnostic
{
public:
  deref_before_check (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg),
    m_deref_enode (NULL),
    m_deref_expr (NULL),
    m_check_enode (NULL)
  {
    gcc_assert (arg);
  }

  const char *get_kind () const final override { return "deref_before_check"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_deref_before_check;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* Don't emit the warning if we can't show where the deref
       and the check occur.  */
    if (!m_deref_enode)
      return false;
    if (!m_check_enode)
      return false;
    /* Only emit the warning for intraprocedural cases.  */
    const program_point &deref_point = m_deref_enode->get_point ();
    const program_point &check_point = m_check_enode->get_point ();

    if (!program_point::effectively_intraprocedural_p (deref_point,
							check_point))
      return false;

    /* Reject the warning if the check occurs within a macro definition.
       This avoids false positives for such code as:

	#define throw_error \
	   do {             \
	     if (p)         \
	       cleanup (p); \
	     return;        \
	   } while (0)

	if (p->idx >= n)
	  throw_error ();

       where the usage of "throw_error" implicitly adds a check
       on 'p'.

       We do warn when the check is in a macro expansion if we can get
       at the location of the condition and it isn't part of the
       definition, so that we warn for checks such as:
	   if (words[0][0] == '@')
	     return;
	   g_assert(words[0] != NULL); <--- here
       Unfortunately we don't have locations for individual gimple
       arguments, so in:
	   g_assert (ptr);
       we merely have a gimple_cond
	   if (ptr_5(D) == 0B)
       with no way of getting at the location of the condition separately
       from that of the gimple_cond (where the "if" is within the macro
       definition).  We reject the warning for such cases.

       We do warn when the *deref* occurs in a macro, since this can be
       a source of real bugs; see e.g. PR 77425.  */
    location_t check_loc = m_check_enode->get_point ().get_location ();
    if (linemap_location_from_macro_definition_p (line_table, check_loc))
      return false;

    /* Reject warning if the check is in a loop header within a
       macro expansion.  This rejects cases like:
       |  deref of x;
       |  [...snip...]
       |  FOR_EACH(x) {
       |    [...snip...]
       |  }
       where the FOR_EACH macro tests for non-nullness of x, since
       the user is hoping to encapsulate the details of iteration
       in the macro, and the extra check on the first iteration
       would just be noise if we reported it.  */
    if (loop_header_p (m_check_enode->get_point ())
	&& linemap_location_from_macro_expansion_p (line_table, check_loc))
      return false;

    /* Reject if m_deref_expr is sufficiently different from m_arg
       for cases where the dereference is spelled differently from
       the check, which is probably two different ways to get the
       same svalue, and thus not worth reporting.  */
    if (!m_deref_expr)
      return false;
    if (!sufficiently_similar_p (m_deref_expr, m_arg))
      return false;

    /* Reject the warning if the deref's BB doesn't dominate that
       of the check, so that we don't warn e.g. for shared cleanup
       code that checks a pointer for NULL, when that code is sometimes
       used before a deref and sometimes after.
       Using the dominance code requires setting cfun.  */
    auto_cfun sentinel (m_deref_enode->get_function ());
    calculate_dominance_info (CDI_DOMINATORS);
    if (!dominated_by_p (CDI_DOMINATORS,
			 m_check_enode->get_supernode ()->m_bb,
			 m_deref_enode->get_supernode ()->m_bb))
      return false;

    return ctxt.warn ("check of %qE for NULL after already"
		      " dereferencing it",
		      m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& assumed_non_null_p (change.m_new_state))
      {
	m_first_deref_event = change.m_event_id;
	m_deref_enode = change.m_event.get_exploded_node ();
	m_deref_expr = change.m_expr;
	return change.formatted_print ("pointer %qE is dereferenced here",
				       m_arg);
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    m_check_enode = ev.m_event.get_exploded_node ();
    if (m_first_deref_event.known_p ())
      return ev.formatted_print ("pointer %qE is checked for NULL here but"
				 " it was already dereferenced at %@",
				 m_arg, &m_first_deref_event);
    else
      return ev.formatted_print ("pointer %qE is checked for NULL here but"
				 " it was already dereferenced",
				 m_arg);
  }

private:
  static bool loop_header_p (const program_point &point)
  {
    const supernode *snode = point.get_supernode ();
    if (!snode)
      return false;
    for (auto &in_edge : snode->m_preds)
      {
	if (const cfg_superedge *cfg_in_edge
	      = in_edge->dyn_cast_cfg_superedge ())
	  if (cfg_in_edge->back_edge_p ())
	    return true;
      }
    return false;
  }

  static bool sufficiently_similar_p (tree expr_a, tree expr_b)
  {
    pretty_printer *pp_a = global_dc->m_printer->clone ();
    pretty_printer *pp_b = global_dc->m_printer->clone ();
    pp_printf (pp_a, "%qE", expr_a);
    pp_printf (pp_b, "%qE", expr_b);
    bool result = (strcmp (pp_formatted_text (pp_a), pp_formatted_text (pp_b))
		   == 0);
    delete pp_a;
    delete pp_b;
    return result;
  }

  diagnostic_event_id_t m_first_deref_event;
  const exploded_node *m_deref_enode;
  tree m_deref_expr;
  const exploded_node *m_check_enode;
};
/* struct allocation_state : public state_machine::state.  */

/* Implementation of state_machine::state::dump_to_pp vfunc
   for allocation_state: append the API that this allocation is
   associated with.  */

void
allocation_state::dump_to_pp (pretty_printer *pp) const
{
  state_machine::state::dump_to_pp (pp);
  if (m_deallocators)
    {
      pp_string (pp, " (");
      m_deallocators->dump_to_pp (pp);
      pp_character (pp, ')');
    }
}

/* Given an allocation_state for a deallocator_set, get the "nonnull" state
   for the corresponding allocator(s).  */

const allocation_state *
allocation_state::get_nonnull () const
{
  gcc_assert (m_deallocators);
  return as_a_allocation_state (m_deallocators->m_nonnull);
}

/* struct assumed_non_null_state : public allocation_state.  */

void
assumed_non_null_state::dump_to_pp (pretty_printer *pp) const
{
  allocation_state::dump_to_pp (pp);
  pp_string (pp, " (in ");
  m_frame->dump_to_pp (pp, true);
  pp_character (pp, ')');
}
/* malloc_state_machine's ctor.  */

malloc_state_machine::malloc_state_machine (logger *logger)
: state_machine ("malloc", logger),
  m_free (this, "free", WORDING_FREED),
  m_scalar_delete (this, "delete", WORDING_DELETED),
  m_vector_delete (this, "delete[]", WORDING_DELETED),
  m_realloc (this, "realloc", WORDING_REALLOCATED)
{
  gcc_assert (m_start->get_id () == 0);
  m_null = add_state ("null", RS_FREED, NULL, NULL);
  m_non_heap = add_state ("non-heap", RS_NON_HEAP, NULL, NULL);
  m_stop = add_state ("stop", RS_STOP, NULL, NULL);
}
malloc_state_machine::~malloc_state_machine ()
{
  unsigned i;
  custom_deallocator_set *set;
  FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
    delete set;
  custom_deallocator *d;
  FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
    delete d;
}

state_machine::state_t
malloc_state_machine::add_state (const char *name, enum resource_state rs,
				 const deallocator_set *deallocators,
				 const deallocator *deallocator)
{
  return add_custom_state (new allocation_state (name, alloc_state_id (),
						 rs, deallocators,
						 deallocator));
}

/* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
   return a custom_deallocator_set for them, consolidating them
   to ensure uniqueness of the sets.

   Return NULL if it has no such attributes.  */

const custom_deallocator_set *
malloc_state_machine::
get_or_create_custom_deallocator_set (tree allocator_fndecl)
{
  /* Early rejection of decls without attributes.  */
  tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
  if (!attrs)
    return NULL;

  /* Otherwise, call maybe_create_custom_deallocator_set,
     memoizing the result.  */
  if (custom_deallocator_set **slot
	= m_custom_deallocator_set_cache.get (allocator_fndecl))
    return *slot;
  custom_deallocator_set *set
    = maybe_create_custom_deallocator_set (allocator_fndecl);
  m_custom_deallocator_set_cache.put (allocator_fndecl, set);
  return set;
}
/* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
   look for any "__attribute__((malloc(FOO)))" and return a
   custom_deallocator_set for them, consolidating them
   to ensure uniqueness of the sets.

   Return NULL if it has no such attributes.

   Subroutine of get_or_create_custom_deallocator_set which
   memoizes the result.  */

custom_deallocator_set *
malloc_state_machine::
maybe_create_custom_deallocator_set (tree allocator_fndecl)
{
  tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
  gcc_assert (attrs);

  /* Look for instances of __attribute__((malloc(FOO))).  */
  auto_vec <const deallocator *> deallocator_vec;
  for (tree allocs = attrs;
       (allocs = lookup_attribute ("malloc", allocs));
       allocs = TREE_CHAIN (allocs))
    {
      tree args = TREE_VALUE (allocs);
      if (!args)
	continue;
      if (TREE_VALUE (args))
	{
	  const deallocator *d
	    = get_or_create_deallocator (TREE_VALUE (args));
	  deallocator_vec.safe_push (d);
	}
    }

  /* If there weren't any deallocators, bail.  */
  if (deallocator_vec.length () == 0)
    return NULL;

  /* Consolidate, so that we reuse existing deallocator_set
     instances.  */
  deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
  custom_deallocator_set **slot
    = m_custom_deallocator_set_map.get (&deallocator_vec);
  if (slot)
    return *slot;

  custom_deallocator_set *set
    = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
  m_custom_deallocator_set_map.put (&set->m_deallocator_vec, set);
  m_dynamic_sets.safe_push (set);
  return set;
}
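
/* Illustrative sketch: given the declaration from the header comment above,
     __attribute__ ((malloc (fclose)))
     __attribute__ ((malloc (freopen, 3)))
     FILE* fopen (const char*, const char*);
   both attributes are collected into a single sorted vector, so repeated
   lookups for declarations naming {fclose, freopen} reuse one
   custom_deallocator_set rather than creating duplicates.  */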
/* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary.  */

const deallocator *
malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
{
  deallocator **slot = m_deallocator_map.get (deallocator_fndecl);
  if (slot)
    return *slot;

  deallocator *d;

  if (is_named_call_p (deallocator_fndecl, "free")
      || is_std_named_call_p (deallocator_fndecl, "free")
      || is_named_call_p (deallocator_fndecl, "__builtin_free"))
    d = &m_free.m_deallocator;
  else
    {
      custom_deallocator *cd
	= new custom_deallocator (this, deallocator_fndecl,
				  WORDING_DEALLOCATED);
      m_dynamic_deallocators.safe_push (cd);
      d = cd;
    }
  m_deallocator_map.put (deallocator_fndecl, d);

  return d;
}

/* Get the "assumed-non-null" state for assumptions made within FRAME,
   creating it if necessary.  */

state_machine::state_t
malloc_state_machine::
get_or_create_assumed_non_null_state_for_frame (const frame_region *frame)
{
  if (state_t *slot = m_assumed_non_null.get (frame))
    return *slot;
  state_machine::state *new_state
    = new assumed_non_null_state ("assumed-non-null", alloc_state_id (), frame);
  add_custom_state (new_state);
  m_assumed_non_null.put (frame, new_state);
  return new_state;
}
/* Try to identify the function declaration either by name or as a known
   malloc builtin.  */

static bool
known_allocator_p (const_tree fndecl, const gcall *call)
{
  /* Either it is a function we know by name and number of arguments... */
  if (is_named_call_p (fndecl, "malloc", call, 1)
      || is_named_call_p (fndecl, "calloc", call, 2)
      || is_std_named_call_p (fndecl, "malloc", call, 1)
      || is_std_named_call_p (fndecl, "calloc", call, 2)
      || is_named_call_p (fndecl, "strdup", call, 1)
      || is_named_call_p (fndecl, "strndup", call, 2))
    return true;

  /* ... or it is a builtin allocator that allocates objects freed with
     __builtin_free.  */
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_MALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
	return true;
      default:
	break;
      }

  return false;
}
/* If PTR's nullness is not known, transition it to the "assumed-non-null"
   state for the current frame.  */

void
malloc_state_machine::maybe_assume_non_null (sm_context &sm_ctxt,
					     tree ptr,
					     const gimple *stmt) const
{
  const region_model *old_model = sm_ctxt.get_old_region_model ();
  if (!old_model)
    return;

  tree null_ptr_cst = build_int_cst (TREE_TYPE (ptr), 0);
  tristate known_non_null
    = old_model->eval_condition (ptr, NE_EXPR, null_ptr_cst, NULL);
  if (known_non_null.is_unknown ())
    {
      /* Cast away const-ness for cache-like operations.  */
      malloc_state_machine *mut_this
	= const_cast <malloc_state_machine *> (this);
      state_t next_state
	= mut_this->get_or_create_assumed_non_null_state_for_frame
	    (old_model->get_current_frame ());
      sm_ctxt.set_next_state (stmt, ptr, next_state);
    }
}
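
/* Illustrative example (not from this file): after
     n = p->count;   // p's nullness unknown, so p becomes "assumed-non-null"
   a later explicit test such as
     if (!p) return;
   in the same frame is flagged by the deref_before_check diagnostic above,
   since the check contradicts the earlier dereference.  */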
1923 /* Implementation of state_machine::on_stmt vfunc for malloc_state_machine. */
1926 malloc_state_machine::on_stmt (sm_context
&sm_ctxt
,
1927 const supernode
*node
,
1928 const gimple
*stmt
) const
1930 if (const gcall
*call
= dyn_cast
<const gcall
*> (stmt
))
1931 if (tree callee_fndecl
= sm_ctxt
.get_fndecl_for_call (call
))
1933 if (known_allocator_p (callee_fndecl
, call
))
1935 on_allocator_call (sm_ctxt
, call
, &m_free
);
1939 if (!is_placement_new_p (call
))
1941 bool returns_nonnull
= !TREE_NOTHROW (callee_fndecl
)
1943 if (is_named_call_p (callee_fndecl
, "operator new"))
1944 on_allocator_call (sm_ctxt
, call
,
1945 &m_scalar_delete
, returns_nonnull
);
1946 else if (is_named_call_p (callee_fndecl
, "operator new []"))
1947 on_allocator_call (sm_ctxt
, call
,
1948 &m_vector_delete
, returns_nonnull
);
1951 if (is_named_call_p (callee_fndecl
, "operator delete", call
, 1)
1952 || is_named_call_p (callee_fndecl
, "operator delete", call
, 2))
1954 on_deallocator_call (sm_ctxt
, node
, call
,
1955 &m_scalar_delete
.m_deallocator
, 0);
1958 else if (is_named_call_p (callee_fndecl
, "operator delete []", call
, 1))
1960 on_deallocator_call (sm_ctxt
, node
, call
,
1961 &m_vector_delete
.m_deallocator
, 0);
1965 if (is_named_call_p (callee_fndecl
, "alloca", call
, 1)
1966 || is_named_call_p (callee_fndecl
, "__builtin_alloca", call
, 1))
1968 tree lhs
= gimple_call_lhs (call
);
1970 sm_ctxt
.on_transition (node
, stmt
, lhs
, m_start
, m_non_heap
);
1974 if (is_named_call_p (callee_fndecl
, "free", call
, 1)
1975 || is_std_named_call_p (callee_fndecl
, "free", call
, 1)
1976 || is_named_call_p (callee_fndecl
, "__builtin_free", call
, 1))
1978 on_deallocator_call (sm_ctxt
, node
, call
,
1979 &m_free
.m_deallocator
, 0);
1983 if (is_named_call_p (callee_fndecl
, "realloc", call
, 2)
1984 || is_std_named_call_p (callee_fndecl
, "realloc", call
, 2)
1985 || is_named_call_p (callee_fndecl
, "__builtin_realloc", call
, 2))
1987 on_realloc_call (sm_ctxt
, node
, call
);
1991 if (unaffected_by_call_p (callee_fndecl
))
1994 /* Cast away const-ness for cache-like operations. */
1995 malloc_state_machine
*mutable_this
1996 = const_cast <malloc_state_machine
*> (this);
1998 /* Handle interesting attributes of the callee_fndecl,
1999 or prioritize those of the builtin that callee_fndecl is expected
2001 Might want this to be controlled by a flag. */
2003 tree fndecl
= callee_fndecl
;
2004 /* If call is recognized as a builtin known_function, use that
2005 builtin's function_decl. */
2006 if (const region_model
*old_model
= sm_ctxt
.get_old_region_model ())
2007 if (const builtin_known_function
*builtin_kf
2008 = old_model
->get_builtin_kf (call
))
2009 fndecl
= builtin_kf
->builtin_decl ();
2011 /* Handle "__attribute__((malloc(FOO)))". */
2012 if (const deallocator_set
*deallocators
2013 = mutable_this
->get_or_create_custom_deallocator_set
2016 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (fndecl
));
2017 bool returns_nonnull
2018 = lookup_attribute ("returns_nonnull", attrs
);
2019 on_allocator_call (sm_ctxt
, call
, deallocators
, returns_nonnull
);
2023 /* Handle "__attribute__((nonnull))". */
2024 tree fntype
= TREE_TYPE (fndecl
);
2025 bitmap nonnull_args
= get_nonnull_args (fntype
);
2028 for (unsigned i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2030 tree arg
= gimple_call_arg (stmt
, i
);
2031 if (TREE_CODE (TREE_TYPE (arg
)) != POINTER_TYPE
)
2033 /* If we have a nonnull-args, and either all pointers, or
2034 just the specified pointers. */
2035 if (bitmap_empty_p (nonnull_args
)
2036 || bitmap_bit_p (nonnull_args
, i
))
2038 state_t state
= sm_ctxt
.get_state (stmt
, arg
);
2039 /* Can't use a switch as the states are non-const. */
2040 /* Do use the fndecl that caused the warning so that the
2041 misused attributes are printed and the user not
2043 if (unchecked_p (state
))
2045 tree diag_arg
= sm_ctxt
.get_diagnostic_tree (arg
);
2046 sm_ctxt
.warn (node
, stmt
, arg
,
2047 make_unique
<possible_null_arg
>
2048 (*this, diag_arg
, fndecl
, i
));
2049 const allocation_state
*astate
2050 = as_a_allocation_state (state
);
2051 sm_ctxt
.set_next_state (stmt
, arg
,
2052 astate
->get_nonnull ());
2054 else if (state
== m_null
)
2056 tree diag_arg
= sm_ctxt
.get_diagnostic_tree (arg
);
2057 sm_ctxt
.warn (node
, stmt
, arg
,
2058 make_unique
<null_arg
>
2059 (*this, diag_arg
, fndecl
, i
));
2060 sm_ctxt
.set_next_state (stmt
, arg
, m_stop
);
2062 else if (state
== m_start
)
2063 maybe_assume_non_null (sm_ctxt
, arg
, stmt
);
2066 BITMAP_FREE (nonnull_args
);
2070 /* Check for this after nonnull, so that if we have both
2071 then we transition to "freed", rather than "checked". */
2072 unsigned dealloc_argno
= fndecl_dealloc_argno (fndecl
);
2073 if (dealloc_argno
!= UINT_MAX
)
2075 const deallocator
*d
2076 = mutable_this
->get_or_create_deallocator (fndecl
);
2077 on_deallocator_call (sm_ctxt
, node
, call
, d
, dealloc_argno
);
2082 /* Look for pointers explicitly being compared against zero
2083 that are in state assumed_non_null i.e. we already defererenced
2085 We have to do this check here, rather than in on_condition
2086 because we add a constraint that the pointer is non-null when
2087 dereferencing it, and this makes the apply_constraints_for_gcond
2088 find known-true and known-false conditions; on_condition is only
2089 called when adding new constraints. */
2090 if (const gcond
*cond_stmt
= dyn_cast
<const gcond
*> (stmt
))
2092 enum tree_code op
= gimple_cond_code (cond_stmt
);
2093 if (op
== EQ_EXPR
|| op
== NE_EXPR
)
2095 tree lhs
= gimple_cond_lhs (cond_stmt
);
2096 tree rhs
= gimple_cond_rhs (cond_stmt
);
2097 if (any_pointer_p (lhs
)
2098 && any_pointer_p (rhs
)
2101 state_t state
= sm_ctxt
.get_state (stmt
, lhs
);
2102 if (assumed_non_null_p (state
))
2103 maybe_complain_about_deref_before_check
2106 (const assumed_non_null_state
*)state
,
2112 if (tree lhs
= sm_ctxt
.is_zero_assignment (stmt
))
2113 if (any_pointer_p (lhs
))
2114 on_zero_assignment (sm_ctxt
, stmt
,lhs
);
  /* Handle dereferences.  */
  for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
    {
      tree op = gimple_op (stmt, i);
      if (!op)
        continue;
      if (TREE_CODE (op) == COMPONENT_REF)
        op = TREE_OPERAND (op, 0);
      if (TREE_CODE (op) == MEM_REF)
        {
          tree arg = TREE_OPERAND (op, 0);

          state_t state = sm_ctxt.get_state (stmt, arg);
          if (state == m_start)
            maybe_assume_non_null (sm_ctxt, arg, stmt);
          else if (unchecked_p (state))
            {
              tree diag_arg = sm_ctxt.get_diagnostic_tree (arg);
              sm_ctxt.warn (node, stmt, arg,
                            make_unique<possible_null_deref> (*this,
                                                              diag_arg));
              const allocation_state *astate = as_a_allocation_state (state);
              sm_ctxt.set_next_state (stmt, arg, astate->get_nonnull ());
            }
          else if (state == m_null)
            {
              tree diag_arg = sm_ctxt.get_diagnostic_tree (arg);
              sm_ctxt.warn (node, stmt, arg,
                            make_unique<null_deref> (*this, diag_arg));
              sm_ctxt.set_next_state (stmt, arg, m_stop);
            }
          else if (freed_p (state))
            {
              tree diag_arg = sm_ctxt.get_diagnostic_tree (arg);
              const allocation_state *astate = as_a_allocation_state (state);
              sm_ctxt.warn (node, stmt, arg,
                            make_unique<use_after_free>
                              (*this, diag_arg, astate->m_deallocator));
              sm_ctxt.set_next_state (stmt, arg, m_stop);
            }
        }
    }

  return false;
}
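
/* Illustrative example (not part of the checker itself): the dereference
   handling above covers MEM_REFs of tracked pointers, e.g. a read through
   a pointer that has already been freed:

     #include <stdlib.h>

     int test (int *p)
     {
       free (p);
       return *p;  // use after 'free' of 'p'
     }

   Here "p" is in the freed state at the dereference, so a use_after_free
   diagnostic is raised and "p" transitions to the stop state.  */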
/* Given a check against null of PTR in assumed-non-null state STATE,
   potentially add a deref_before_check warning to SM_CTXT.  */

void
malloc_state_machine::
maybe_complain_about_deref_before_check (sm_context &sm_ctxt,
                                         const supernode *node,
                                         const gimple *stmt,
                                         const assumed_non_null_state *state,
                                         tree ptr) const
{
  const region_model *model = sm_ctxt.get_old_region_model ();
  if (!model)
    return;

  /* Don't complain if the current frame (where the check is occurring) is
     deeper than the frame in which the "not null" assumption was made.
     This suppresses false positives for cases like:

       void foo (struct s *p)
       {
         int val = p->some_field; // deref here
         shared_helper (p);
       }

     where "shared_helper" has:

       void shared_helper (struct s *p)
       {
         if (!p) // check here
           return;
         // etc.
       }

     since the check in "shared_helper" is OK.  */
  const frame_region *checked_in_frame = model->get_current_frame ();
  const frame_region *assumed_nonnull_in_frame = state->m_frame;
  if (checked_in_frame->get_index () > assumed_nonnull_in_frame->get_index ())
    return;

  /* Don't complain if STMT was inlined from another function, to avoid
     similar false positives involving shared helper functions.  */
  {
    inlining_info info (stmt->location);
    if (info.get_extra_frames () > 0)
      return;
  }

  tree diag_ptr = sm_ctxt.get_diagnostic_tree (ptr);
  if (diag_ptr)
    sm_ctxt.warn (node, stmt, ptr,
                  make_unique<deref_before_check> (*this, diag_ptr));
  sm_ctxt.set_next_state (stmt, ptr, m_stop);
}
/* Handle a call to an allocator.
   RETURNS_NONNULL is true if CALL is to a fndecl known to have
   __attribute__((returns_nonnull)).  */

void
malloc_state_machine::on_allocator_call (sm_context &sm_ctxt,
                                         const gcall *call,
                                         const deallocator_set *deallocators,
                                         bool returns_nonnull) const
{
  tree lhs = gimple_call_lhs (call);
  if (lhs)
    {
      if (sm_ctxt.get_state (call, lhs) == m_start)
        sm_ctxt.set_next_state (call, lhs,
                                (returns_nonnull
                                 ? deallocators->m_nonnull
                                 : deallocators->m_unchecked));
    }
  else
    {
      /* TODO: report leak.  */
    }
}
/* Handle deallocations of non-heap pointers.
   non-heap -> stop, with warning.  */

void
malloc_state_machine::handle_free_of_non_heap (sm_context &sm_ctxt,
                                               const supernode *node,
                                               const gcall *call,
                                               tree arg,
                                               const deallocator *d) const
{
  tree diag_arg = sm_ctxt.get_diagnostic_tree (arg);
  const region *freed_reg = NULL;
  if (const program_state *old_state = sm_ctxt.get_old_program_state ())
    {
      const region_model *old_model = old_state->m_region_model;
      const svalue *ptr_sval = old_model->get_rvalue (arg, NULL);
      freed_reg = old_model->deref_rvalue (ptr_sval, arg, NULL);
    }
  sm_ctxt.warn (node, call, arg,
                make_unique<free_of_non_heap>
                  (*this, diag_arg, freed_reg, d->m_name));
  sm_ctxt.set_next_state (call, arg, m_stop);
}
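
/* Illustrative example (not part of the checker itself): freeing something
   that isn't heap-allocated, such as the address of a local, is the case
   this handler diagnoses:

     #include <stdlib.h>

     void test (void)
     {
       int x;
       free (&x);  // 'free' of '&x' which points to memory on the stack
     }

   The pointed-to region (here the stack region for "x") is looked up from
   the old program state so the diagnostic can describe what was freed.  */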
void
malloc_state_machine::on_deallocator_call (sm_context &sm_ctxt,
                                           const supernode *node,
                                           const gcall *call,
                                           const deallocator *d,
                                           unsigned argno) const
{
  if (argno >= gimple_call_num_args (call))
    return;
  tree arg = gimple_call_arg (call, argno);

  state_t state = sm_ctxt.get_state (call, arg);

  /* start/assumed_non_null/unchecked/nonnull -> freed.  */
  if (state == m_start || assumed_non_null_p (state))
    sm_ctxt.set_next_state (call, arg, d->m_freed);
  else if (unchecked_p (state) || nonnull_p (state))
    {
      const allocation_state *astate = as_a_allocation_state (state);
      gcc_assert (astate->m_deallocators);
      if (!astate->m_deallocators->contains_p (d))
        {
          /* Wrong allocator.  */
          tree diag_arg = sm_ctxt.get_diagnostic_tree (arg);
          sm_ctxt.warn (node, call, arg,
                        make_unique<mismatching_deallocation>
                          (*this, diag_arg,
                           astate->m_deallocators,
                           d));
        }
      sm_ctxt.set_next_state (call, arg, d->m_freed);
    }

  /* Keep state "null" as-is, rather than transitioning to "freed";
     we don't want to complain about double-free of NULL.  */
  else if (state == d->m_freed)
    {
      /* freed -> stop, with warning.  */
      tree diag_arg = sm_ctxt.get_diagnostic_tree (arg);
      sm_ctxt.warn (node, call, arg,
                    make_unique<double_free> (*this, diag_arg, d->m_name));
      sm_ctxt.set_next_state (call, arg, m_stop);
    }
  else if (state == m_non_heap)
    {
      /* non-heap -> stop, with warning.  */
      handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
    }
}
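
/* Illustrative example (not part of the checker itself): the
   freed -> stop transition above corresponds to the classic double-free
   pattern:

     #include <stdlib.h>

     void test (void *p)
     {
       free (p);
       free (p);  // double-'free' of 'p'
     }

   Note that a pointer in the null state is deliberately left as-is, so
   calling free (NULL) repeatedly is not reported.  */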
/* Handle a call to "realloc".
   Check for free of non-heap or mismatching allocators,
   transitioning to the "stop" state for such cases.

   Otherwise, kf_realloc::impl_call_post will later
   get called (which will handle other sm-state transitions
   when the state is bifurcated).  */

void
malloc_state_machine::on_realloc_call (sm_context &sm_ctxt,
                                       const supernode *node,
                                       const gcall *call) const
{
  const unsigned argno = 0;
  const deallocator *d = &m_realloc;

  tree arg = gimple_call_arg (call, argno);

  state_t state = sm_ctxt.get_state (call, arg);

  if (unchecked_p (state) || nonnull_p (state))
    {
      const allocation_state *astate = as_a_allocation_state (state);
      gcc_assert (astate->m_deallocators);
      if (!astate->m_deallocators->contains_p (&m_free.m_deallocator))
        {
          /* Wrong allocator.  */
          tree diag_arg = sm_ctxt.get_diagnostic_tree (arg);
          sm_ctxt.warn (node, call, arg,
                        make_unique<mismatching_deallocation>
                          (*this, diag_arg,
                           astate->m_deallocators, d));
          sm_ctxt.set_next_state (call, arg, m_stop);
          if (path_context *path_ctxt = sm_ctxt.get_path_context ())
            path_ctxt->terminate_path ();
        }
    }
  else if (state == m_free.m_deallocator.m_freed)
    {
      /* freed -> stop, with warning.  */
      tree diag_arg = sm_ctxt.get_diagnostic_tree (arg);
      sm_ctxt.warn (node, call, arg,
                    make_unique<double_free> (*this, diag_arg, "free"));
      sm_ctxt.set_next_state (call, arg, m_stop);
      if (path_context *path_ctxt = sm_ctxt.get_path_context ())
        path_ctxt->terminate_path ();
    }
  else if (state == m_non_heap)
    {
      /* non-heap -> stop, with warning.  */
      handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
      if (path_context *path_ctxt = sm_ctxt.get_path_context ())
        path_ctxt->terminate_path ();
    }
}
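
/* Illustrative example (not part of the checker itself): the checks above
   fire when realloc is given a pointer that "free" could not legitimately
   act on, e.g. one that has already been freed:

     #include <stdlib.h>

     void *test (void *p, size_t n)
     {
       free (p);
       return realloc (p, n);  // 'p' was already freed
     }

   The success/failure bifurcation for well-formed realloc calls is handled
   separately by kf_realloc::impl_call_post.  */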
/* Implementation of state_machine::on_phi vfunc for malloc_state_machine.  */

void
malloc_state_machine::on_phi (sm_context &sm_ctxt,
                              const supernode *node ATTRIBUTE_UNUSED,
                              const gphi *phi,
                              tree rhs) const
{
  if (zerop (rhs))
    {
      tree lhs = gimple_phi_result (phi);
      on_zero_assignment (sm_ctxt, phi, lhs);
    }
}
/* Implementation of state_machine::on_condition vfunc for malloc_state_machine.
   Potentially transition state 'unchecked' to 'nonnull' or to 'null'.  */

void
malloc_state_machine::on_condition (sm_context &sm_ctxt,
                                    const supernode *node ATTRIBUTE_UNUSED,
                                    const gimple *stmt,
                                    const svalue *lhs,
                                    enum tree_code op,
                                    const svalue *rhs) const
{
  if (!rhs->all_zeroes_p ())
    return;

  if (!any_pointer_p (lhs))
    return;
  if (!any_pointer_p (rhs))
    return;

  if (op == NE_EXPR)
    {
      log ("got 'ARG != 0' match");
      state_t s = sm_ctxt.get_state (stmt, lhs);
      if (unchecked_p (s))
        {
          const allocation_state *astate = as_a_allocation_state (s);
          sm_ctxt.set_next_state (stmt, lhs, astate->get_nonnull ());
        }
    }
  else if (op == EQ_EXPR)
    {
      log ("got 'ARG == 0' match");
      state_t s = sm_ctxt.get_state (stmt, lhs);
      if (unchecked_p (s))
        sm_ctxt.set_next_state (stmt, lhs, m_null);
    }
}
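
/* Illustrative example (not part of the checker itself): these transitions
   are what make an explicit NULL check "launder" an unchecked allocation:

     #include <stdlib.h>

     char *test (void)
     {
       char *p = malloc (16);  // "p": unchecked
       if (p == NULL)          // on this path "p" becomes null...
         return NULL;
       p[0] = 'a';             // ...on the other it becomes nonnull,
       return p;               // so no warning is expected here
     }
   */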
/* Implementation of state_machine::on_pop_frame vfunc for malloc_state_machine.
   Clear any "assumed-non-null" state where the assumption happened in
   FRAME_REG.  */

void
malloc_state_machine::on_pop_frame (sm_state_map *smap,
                                    const frame_region *frame_reg) const
{
  hash_set<const svalue *> svals_to_clear;
  for (auto kv : *smap)
    {
      const svalue *sval = kv.first;
      state_t state = kv.second.m_state;
      if (assumed_non_null_p (state))
        {
          const assumed_non_null_state *assumed_state
            = (const assumed_non_null_state *)state;
          if (frame_reg == assumed_state->m_frame)
            svals_to_clear.add (sval);
        }
    }
  for (auto sval : svals_to_clear)
    smap->clear_any_state (sval);
}
/* Implementation of state_machine::can_purge_p vfunc for malloc_state_machine.
   Don't allow purging of pointers in state 'unchecked' or 'nonnull'
   (to avoid false leak reports).  */

bool
malloc_state_machine::can_purge_p (state_t s) const
{
  enum resource_state rs = get_rs (s);
  return rs != RS_UNCHECKED && rs != RS_NONNULL;
}
/* Implementation of state_machine::on_leak vfunc for malloc_state_machine
   (for complaining about leaks of pointers in state 'unchecked' and
   'nonnull').  */

std::unique_ptr<pending_diagnostic>
malloc_state_machine::on_leak (tree var) const
{
  return make_unique<malloc_leak> (*this, var);
}
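
/* Illustrative example (not part of the checker itself): on_leak is what
   reports pointers still in the unchecked or nonnull states when they
   become unreachable:

     #include <stdlib.h>

     void test (void)
     {
       void *p = malloc (16);
     }  // leak of 'p' reported here

   can_purge_p above keeps such pointers alive in the state map long enough
   for this to be detected.  */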
/* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
   for malloc_state_machine.  */

bool
malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
                                                         bool is_mutable) const
{
  /* An on-stack ptr doesn't stop being stack-allocated when passed to an
     unknown fn.  */
  if (s == m_non_heap)
    return false;

  /* Otherwise, pointers passed as non-const can be freed.  */
  return is_mutable;
}
/* Implementation of state_machine::maybe_get_merged_states_nonequal vfunc
   for malloc_state_machine.

   Support discarding "assumed-non-null" states when merging with the
   start state.  */

state_machine::state_t
malloc_state_machine::maybe_get_merged_states_nonequal (state_t state_a,
                                                        state_t state_b) const
{
  if (assumed_non_null_p (state_a) && state_b == m_start)
    return m_start;
  if (state_a == m_start && assumed_non_null_p (state_b))
    return m_start;
  return NULL;
}
/* Return true if calls to FNDECL are known to not affect this sm-state.  */

bool
malloc_state_machine::unaffected_by_call_p (tree fndecl)
{
  /* A set of functions that are known to not affect allocation
     status, even if we haven't fully modelled the rest of their
     behavior yet.  */
  static const char * const funcnames[] = {
    /* This array must be kept sorted.  */
    "strsep",
  };
  const size_t count = ARRAY_SIZE (funcnames);
  function_set fs (funcnames, count);

  if (fs.contains_decl_p (fndecl))
    return true;

  return false;
}
/* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
   assign zero to LHS.  */

void
malloc_state_machine::on_zero_assignment (sm_context &sm_ctxt,
                                          const gimple *stmt,
                                          tree lhs) const
{
  state_t s = sm_ctxt.get_state (stmt, lhs);
  enum resource_state rs = get_rs (s);
  if (rs == RS_START
      || rs == RS_UNCHECKED
      || rs == RS_NONNULL
      || rs == RS_FREED)
    sm_ctxt.set_next_state (stmt, lhs, m_null);
}
/* Special-case hook for handling realloc, for the "success with move to
   a new buffer" case, marking OLD_PTR_SVAL as freed and NEW_PTR_SVAL as
   non-null.

   This is similar to on_deallocator_call and on_allocator_call,
   but the checks happen in on_realloc_call, and by splitting the states.  */

void
malloc_state_machine::
on_realloc_with_move (region_model *model,
                      sm_state_map *smap,
                      const svalue *old_ptr_sval,
                      const svalue *new_ptr_sval,
                      const extrinsic_state &ext_state) const
{
  smap->set_state (model, old_ptr_sval,
                   m_free.m_deallocator.m_freed,
                   NULL, ext_state);

  smap->set_state (model, new_ptr_sval,
                   m_free.m_nonnull,
                   NULL, ext_state);
}
/* Hook for get_or_create_region_for_heap_alloc for the case when we want
   ptr_sval to mark a newly created region as assumed non null on malloc SM.  */

void
malloc_state_machine::transition_ptr_sval_non_null (region_model *model,
                                                    sm_state_map *smap,
                                                    const svalue *new_ptr_sval,
                                                    const extrinsic_state &ext_state) const
{
  smap->set_state (model, new_ptr_sval, m_free.m_nonnull, NULL, ext_state);
}
2579 } // anonymous namespace
/* Internal interface to this file.  */

state_machine *
make_malloc_state_machine (logger *logger)
{
  return new malloc_state_machine (logger);
}
/* Special-case hook for handling realloc, for use by
   kf_realloc::impl_call_post::success_with_move::update_model.  */

void
region_model::on_realloc_with_move (const call_details &cd,
                                    const svalue *old_ptr_sval,
                                    const svalue *new_ptr_sval)
{
  region_model_context *ctxt = cd.get_ctxt ();
  if (!ctxt)
    return;
  const extrinsic_state *ext_state = ctxt->get_ext_state ();
  if (!ext_state)
    return;

  sm_state_map *smap;
  const state_machine *sm;
  unsigned sm_idx;
  if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
    return;

  gcc_assert (smap);
  gcc_assert (sm);

  const malloc_state_machine &malloc_sm
    = (const malloc_state_machine &)*sm;

  malloc_sm.on_realloc_with_move (this,
                                  smap,
                                  old_ptr_sval,
                                  new_ptr_sval,
                                  *ext_state);
}
/* Moves ptr_sval from start to assumed non-null, for use by
   region_model::get_or_create_region_for_heap_alloc.  */

void
region_model::transition_ptr_sval_non_null (region_model_context *ctxt,
                                            const svalue *ptr_sval)
{
  if (!ctxt)
    return;
  const extrinsic_state *ext_state = ctxt->get_ext_state ();
  if (!ext_state)
    return;

  sm_state_map *smap;
  const state_machine *sm;
  unsigned sm_idx;
  if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
    return;

  gcc_assert (smap);
  gcc_assert (sm);

  const malloc_state_machine &malloc_sm = (const malloc_state_machine &)*sm;

  malloc_sm.transition_ptr_sval_non_null (this, smap, ptr_sval, *ext_state);
}
2651 #endif /* #if ENABLE_ANALYZER */