// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This clang plugin checks various invariants of the Blink garbage
// collection infrastructure.
//
// Errors are described at:
// http://www.chromium.org/developers/blink-gc-plugin-errors

#include "JsonWriter.h"
#include "RecordInfo.h"

#include "clang/AST/AST.h"
#include "clang/AST/ASTConsumer.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/Frontend/CompilerInstance.h"
#include "clang/Frontend/FrontendPluginRegistry.h"
#include "clang/Sema/Sema.h"

using namespace clang;

const char kClassMustLeftMostlyDeriveGC[] =
    "[blink-gc] Class %0 must derive its GC base in the left-most position.";

const char kClassRequiresTraceMethod[] =
    "[blink-gc] Class %0 requires a trace method.";

const char kBaseRequiresTracing[] =
    "[blink-gc] Base class %0 of derived class %1 requires tracing.";

const char kBaseRequiresTracingNote[] =
    "[blink-gc] Untraced base class %0 declared here:";

const char kFieldsRequireTracing[] =
    "[blink-gc] Class %0 has untraced fields that require tracing.";

const char kFieldRequiresTracingNote[] =
    "[blink-gc] Untraced field %0 declared here:";

const char kClassContainsInvalidFields[] =
    "[blink-gc] Class %0 contains invalid fields.";

const char kClassContainsGCRoot[] =
    "[blink-gc] Class %0 contains GC root in field %1.";

const char kClassRequiresFinalization[] =
    "[blink-gc] Class %0 requires finalization.";

const char kClassDoesNotRequireFinalization[] =
    "[blink-gc] Class %0 may not require finalization.";

const char kFinalizerAccessesFinalizedField[] =
    "[blink-gc] Finalizer %0 accesses potentially finalized field %1.";

const char kFinalizerAccessesEagerlyFinalizedField[] =
    "[blink-gc] Finalizer %0 accesses eagerly finalized field %1.";

const char kRawPtrToGCManagedClassNote[] =
    "[blink-gc] Raw pointer field %0 to a GC managed class declared here:";

const char kRefPtrToGCManagedClassNote[] =
    "[blink-gc] RefPtr field %0 to a GC managed class declared here:";

const char kOwnPtrToGCManagedClassNote[] =
    "[blink-gc] OwnPtr field %0 to a GC managed class declared here:";

const char kMemberToGCUnmanagedClassNote[] =
    "[blink-gc] Member field %0 to non-GC managed class declared here:";

const char kStackAllocatedFieldNote[] =
    "[blink-gc] Stack-allocated field %0 declared here:";

const char kMemberInUnmanagedClassNote[] =
    "[blink-gc] Member field %0 in unmanaged class declared here:";

const char kPartObjectToGCDerivedClassNote[] =
    "[blink-gc] Part-object field %0 to a GC derived class declared here:";

const char kPartObjectContainsGCRootNote[] =
    "[blink-gc] Field %0 with embedded GC root in %1 declared here:";

const char kFieldContainsGCRootNote[] =
    "[blink-gc] Field %0 defining a GC root declared here:";

const char kOverriddenNonVirtualTrace[] =
    "[blink-gc] Class %0 overrides non-virtual trace of base class %1.";

const char kOverriddenNonVirtualTraceNote[] =
    "[blink-gc] Non-virtual trace method declared here:";

const char kMissingTraceDispatchMethod[] =
    "[blink-gc] Class %0 is missing manual trace dispatch.";

const char kMissingFinalizeDispatchMethod[] =
    "[blink-gc] Class %0 is missing manual finalize dispatch.";

const char kVirtualAndManualDispatch[] =
    "[blink-gc] Class %0 contains or inherits virtual methods"
    " but implements manual dispatching.";

const char kMissingTraceDispatch[] =
    "[blink-gc] Missing dispatch to class %0 in manual trace dispatch.";

const char kMissingFinalizeDispatch[] =
    "[blink-gc] Missing dispatch to class %0 in manual finalize dispatch.";

const char kFinalizedFieldNote[] =
    "[blink-gc] Potentially finalized field %0 declared here:";

const char kEagerlyFinalizedFieldNote[] =
    "[blink-gc] Field %0 having eagerly finalized value, declared here:";

const char kUserDeclaredDestructorNote[] =
    "[blink-gc] User-declared destructor declared here:";

const char kUserDeclaredFinalizerNote[] =
    "[blink-gc] User-declared finalizer declared here:";

const char kBaseRequiresFinalizationNote[] =
    "[blink-gc] Base class %0 requiring finalization declared here:";

const char kFieldRequiresFinalizationNote[] =
    "[blink-gc] Field %0 requiring finalization declared here:";

const char kManualDispatchMethodNote[] =
    "[blink-gc] Manual dispatch %0 declared here:";

const char kDerivesNonStackAllocated[] =
    "[blink-gc] Stack-allocated class %0 derives class %1"
    " which is not stack allocated.";

const char kClassOverridesNew[] =
    "[blink-gc] Garbage collected class %0"
    " is not permitted to override its new operator.";

const char kClassDeclaresPureVirtualTrace[] =
    "[blink-gc] Garbage collected class %0"
    " is not permitted to declare a pure-virtual trace method.";

const char kLeftMostBaseMustBePolymorphic[] =
    "[blink-gc] Left-most base class %0 of derived class %1"
    " must be polymorphic.";

const char kBaseClassMustDeclareVirtualTrace[] =
    "[blink-gc] Left-most base class %0 of derived class %1"
    " must define a virtual trace method.";

const char kClassMustDeclareGCMixinTraceMethod[] =
    "[blink-gc] Class %0 which inherits from GarbageCollectedMixin must"
    " locally declare and override trace(Visitor*)";
// Use a local RAV implementation to simply collect all FunctionDecls marked for
// late template parsing. This happens with the flag -fdelayed-template-parsing,
// which is on by default in MSVC-compatible mode.
std::set<FunctionDecl*> GetLateParsedFunctionDecls(TranslationUnitDecl* decl) {
  struct Visitor : public RecursiveASTVisitor<Visitor> {
    bool VisitFunctionDecl(FunctionDecl* function_decl) {
      if (function_decl->isLateTemplateParsed())
        late_parsed_decls.insert(function_decl);
      return true;
    }

    std::set<FunctionDecl*> late_parsed_decls;
  } v;
  v.TraverseDecl(decl);
  return v.late_parsed_decls;
}
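
// GetLateParsedFunctionDecls() is used by
// BlinkGCPluginConsumer::ParseFunctionTemplates() below to force parsing of
// trace method bodies that -fdelayed-template-parsing would otherwise leave
// unparsed, and therefore invisible to the checks in this file.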
struct BlinkGCPluginOptions {
  BlinkGCPluginOptions()
      : enable_oilpan(false)
      , dump_graph(false)
      , warn_raw_ptr(false)
      , warn_unneeded_finalizer(false) {}
  bool enable_oilpan;
  bool dump_graph;
  bool warn_raw_ptr;
  bool warn_unneeded_finalizer;
  std::set<std::string> ignored_classes;
  std::set<std::string> checked_namespaces;
  std::vector<std::string> ignored_directories;
};

typedef std::vector<CXXRecordDecl*> RecordVector;
typedef std::vector<CXXMethodDecl*> MethodVector;
// Test if a template specialization is an instantiation.
static bool IsTemplateInstantiation(CXXRecordDecl* record) {
  ClassTemplateSpecializationDecl* spec =
      dyn_cast<ClassTemplateSpecializationDecl>(record);
  if (!spec)
    return false;
  switch (spec->getTemplateSpecializationKind()) {
    case TSK_ImplicitInstantiation:
    case TSK_ExplicitInstantiationDefinition:
      return true;
    case TSK_ExplicitSpecialization:
      return false;
    // TODO: unsupported cases.
    case TSK_ExplicitInstantiationDeclaration:
      return false;
  }
  assert(false && "Unknown template specialization kind");
  return false;
}
// This visitor collects the entry points for the checker.
class CollectVisitor : public RecursiveASTVisitor<CollectVisitor> {
 public:
  RecordVector& record_decls() { return record_decls_; }
  MethodVector& trace_decls() { return trace_decls_; }

  bool shouldVisitTemplateInstantiations() { return false; }

  // Collect record declarations, including nested declarations.
  bool VisitCXXRecordDecl(CXXRecordDecl* record) {
    if (record->hasDefinition() && record->isCompleteDefinition())
      record_decls_.push_back(record);
    return true;
  }

  // Collect tracing method definitions, but don't traverse method bodies.
  bool TraverseCXXMethodDecl(CXXMethodDecl* method) {
    if (method->isThisDeclarationADefinition() && Config::IsTraceMethod(method))
      trace_decls_.push_back(method);
    return true;
  }

 private:
  RecordVector record_decls_;
  MethodVector trace_decls_;
};
// This visitor checks that a finalizer method does not have invalid access to
// fields that are potentially finalized. A potentially finalized field is
// either a Member, a heap-allocated collection or an off-heap collection that
// contains Members. Invalid uses are currently identified as passing the field
// as the argument of a procedure call or using the -> or [] operators on it.
class CheckFinalizerVisitor
    : public RecursiveASTVisitor<CheckFinalizerVisitor> {
  // Simple visitor to determine if the content of a field might be collected
  // during finalization.
  class MightBeCollectedVisitor : public EdgeVisitor {
    MightBeCollectedVisitor(bool is_eagerly_finalized)
        : might_be_collected_(false)
        , is_eagerly_finalized_(is_eagerly_finalized)
        , as_eagerly_finalized_(false) {}
    bool might_be_collected() { return might_be_collected_; }
    bool as_eagerly_finalized() { return as_eagerly_finalized_; }
    void VisitMember(Member* edge) override {
      if (is_eagerly_finalized_) {
        if (edge->ptr()->IsValue()) {
          Value* member = static_cast<Value*>(edge->ptr());
          if (member->value()->IsEagerlyFinalized()) {
            might_be_collected_ = true;
            as_eagerly_finalized_ = true;
      might_be_collected_ = true;
    void VisitCollection(Collection* edge) override {
      if (edge->on_heap() && !is_eagerly_finalized_) {
        might_be_collected_ = !edge->is_root();
        edge->AcceptMembers(this);
    bool might_be_collected_;
    bool is_eagerly_finalized_;
    bool as_eagerly_finalized_;

    Error(MemberExpr* member,
          bool as_eagerly_finalized,
        , as_eagerly_finalized_(as_eagerly_finalized)
    bool as_eagerly_finalized_;

  typedef std::vector<Error> Errors;

  CheckFinalizerVisitor(RecordCache* cache, bool is_eagerly_finalized)
      : blacklist_context_(false)
      , is_eagerly_finalized_(is_eagerly_finalized) {}

  Errors& finalized_fields() { return finalized_fields_; }

  bool WalkUpFromCXXOperatorCallExpr(CXXOperatorCallExpr* expr) {
    // Only continue the walk-up if the operator is a blacklisted one.
    switch (expr->getOperator()) {
        this->WalkUpFromCallExpr(expr);

  // We consider all non-operator calls to be blacklisted contexts.
  bool WalkUpFromCallExpr(CallExpr* expr) {
    bool prev_blacklist_context = blacklist_context_;
    blacklist_context_ = true;
    for (size_t i = 0; i < expr->getNumArgs(); ++i)
      this->TraverseStmt(expr->getArg(i));
    blacklist_context_ = prev_blacklist_context;

  bool VisitMemberExpr(MemberExpr* member) {
    FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl());
    RecordInfo* info = cache_->Lookup(field->getParent());
    RecordInfo::Fields::iterator it = info->GetFields().find(field);
    if (it == info->GetFields().end())
    if (seen_members_.find(member) != seen_members_.end())
    bool as_eagerly_finalized = false;
    if (blacklist_context_ &&
        MightBeCollected(&it->second, as_eagerly_finalized)) {
      finalized_fields_.push_back(
          Error(member, as_eagerly_finalized, &it->second));
      seen_members_.insert(member);

  bool MightBeCollected(FieldPoint* point, bool& as_eagerly_finalized) {
    MightBeCollectedVisitor visitor(is_eagerly_finalized_);
    point->edge()->Accept(&visitor);
    as_eagerly_finalized = visitor.as_eagerly_finalized();
    return visitor.might_be_collected();

  bool blacklist_context_;
  Errors finalized_fields_;
  std::set<MemberExpr*> seen_members_;
  bool is_eagerly_finalized_;
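
// Illustrative sketch (assumed Blink-style client code, not defined here) of
// the pattern CheckFinalizerVisitor flags: a destructor of a finalized GC class
// that dereferences a Member<> field, which may already have been swept when
// the finalizer runs. Class and field names are hypothetical.
//
//   class Widget : public GarbageCollectedFinalized<Widget> {
//    public:
//     ~Widget() { m_child->detach(); }   // kFinalizerAccessesFinalizedField
//     void trace(Visitor* visitor) { visitor->trace(m_child); }
//    private:
//     Member<Node> m_child;
//   };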
// This visitor checks that a method contains, within its body, a call to a
// method on the provided receiver class. This is used to check manual
// dispatching for trace and finalize methods.
class CheckDispatchVisitor : public RecursiveASTVisitor<CheckDispatchVisitor> {
  CheckDispatchVisitor(RecordInfo* receiver)
      : receiver_(receiver), dispatched_to_receiver_(false) {}

  bool dispatched_to_receiver() { return dispatched_to_receiver_; }

  bool VisitMemberExpr(MemberExpr* member) {
    if (CXXMethodDecl* fn = dyn_cast<CXXMethodDecl>(member->getMemberDecl())) {
      if (fn->getParent() == receiver_->record())
        dispatched_to_receiver_ = true;

  bool VisitUnresolvedMemberExpr(UnresolvedMemberExpr* member) {
    for (Decl* decl : member->decls()) {
      if (CXXMethodDecl* method = dyn_cast<CXXMethodDecl>(decl)) {
        if (method->getParent() == receiver_->record() &&
            Config::GetTraceMethodType(method) ==
                Config::TRACE_AFTER_DISPATCH_METHOD) {
          dispatched_to_receiver_ = true;

  RecordInfo* receiver_;
  bool dispatched_to_receiver_;
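
// Illustrative sketch (assumed Blink-style client code) of the manual dispatch
// pattern these checks cover: a base class without a vtable switches on a type
// tag and forwards to traceAfterDispatch() on each concrete subclass. Class
// names are hypothetical.
//
//   void Shape::trace(Visitor* visitor) {
//     switch (m_type) {
//       case kCircle:
//         static_cast<Circle*>(this)->traceAfterDispatch(visitor);  // dispatch to receiver
//         break;
//       // A subclass with no case here is reported via kMissingTraceDispatch.
//     }
//   }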
// This visitor checks a tracing method by traversing its body.
// - A member field is considered traced if it is referenced in the body.
// - A base is traced if a base-qualified call to a trace method is found.
class CheckTraceVisitor : public RecursiveASTVisitor<CheckTraceVisitor> {
  CheckTraceVisitor(CXXMethodDecl* trace, RecordInfo* info, RecordCache* cache)
        delegates_to_traceimpl_(false) {

  bool delegates_to_traceimpl() const { return delegates_to_traceimpl_; }

  bool VisitMemberExpr(MemberExpr* member) {
    // In weak callbacks, consider any occurrence as a correct usage.
    // TODO: We really want to require that isAlive is checked on manually
    // processed weak fields.
    if (IsWeakCallback()) {
      if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl()))

  bool VisitCallExpr(CallExpr* call) {
    // In weak callbacks we don't check calls (see VisitMemberExpr).
    if (IsWeakCallback())

    Expr* callee = call->getCallee();

    // Trace calls from a templated derived class result in a
    // DependentScopeMemberExpr because the concrete trace call depends on the
    // instantiation of any shared template parameters. In this case the call
    // is "unresolved" and we resort to comparing the syntactic type names.
    if (CXXDependentScopeMemberExpr* expr =
            dyn_cast<CXXDependentScopeMemberExpr>(callee)) {
      CheckCXXDependentScopeMemberExpr(call, expr);

    // A tracing call will have either a |visitor| or a |m_field| argument.
    // A registerWeakMembers call will have a |this| argument.
    if (call->getNumArgs() != 1)

    Expr* arg = call->getArg(0);

    if (UnresolvedMemberExpr* expr = dyn_cast<UnresolvedMemberExpr>(callee)) {
      // This could be a trace call of a base class, as explained in the
      // comments of CheckTraceBaseCall().
      if (CheckTraceBaseCall(call))

      if (expr->getMemberName().getAsString() == kRegisterWeakMembersName)
        MarkAllWeakMembersTraced();

      QualType base = expr->getBaseType();
      if (!base->isPointerType())

      CXXRecordDecl* decl = base->getPointeeType()->getAsCXXRecordDecl();
        CheckTraceFieldCall(expr->getMemberName().getAsString(), decl, arg);
      if (Config::IsTraceImplName(expr->getMemberName().getAsString()))
        delegates_to_traceimpl_ = true;

    if (CXXMemberCallExpr* expr = dyn_cast<CXXMemberCallExpr>(call)) {
      if (CheckTraceFieldCall(expr) || CheckRegisterWeakMembers(expr))

      if (Config::IsTraceImplName(expr->getMethodDecl()->getNameAsString())) {
        delegates_to_traceimpl_ = true;

    CheckTraceBaseCall(call);

  bool IsTraceCallName(const std::string& name) {
    if (trace_->getName() == kTraceImplName)
      return name == kTraceName;
    if (trace_->getName() == kTraceAfterDispatchImplName)
      return name == kTraceAfterDispatchName;
    // Currently, a manually dispatched class cannot have mixin bases (having
    // one would add a vtable which we explicitly check against). This means
    // that we can only make calls to a trace method of the same name. Revisit
    // this if our mixin/vtable assumption changes.
    return name == trace_->getName();

  CXXRecordDecl* GetDependentTemplatedDecl(CXXDependentScopeMemberExpr* expr) {
    NestedNameSpecifier* qual = expr->getQualifier();

    const Type* type = qual->getAsType();

    return RecordInfo::GetDependentTemplatedDecl(*type);
  void CheckCXXDependentScopeMemberExpr(CallExpr* call,
                                        CXXDependentScopeMemberExpr* expr) {
    string fn_name = expr->getMember().getAsString();

    // Check for VisitorDispatcher::trace(field) and
    // VisitorDispatcher::registerWeakMembers.
    if (!expr->isImplicitAccess()) {
      if (clang::DeclRefExpr* base_decl =
              clang::dyn_cast<clang::DeclRefExpr>(expr->getBase())) {
        if (Config::IsVisitorDispatcherType(base_decl->getType())) {
          if (call->getNumArgs() == 1 && fn_name == kTraceName) {
            FindFieldVisitor finder;
            finder.TraverseStmt(call->getArg(0));
              FoundField(finder.field());
          } else if (call->getNumArgs() == 1 &&
                     fn_name == kRegisterWeakMembersName) {
            MarkAllWeakMembersTraced();

    CXXRecordDecl* tmpl = GetDependentTemplatedDecl(expr);

    // Check for Super<T>::trace(visitor)
    if (call->getNumArgs() == 1 && IsTraceCallName(fn_name)) {
      RecordInfo::Bases::iterator it = info_->GetBases().begin();
      for (; it != info_->GetBases().end(); ++it) {
        if (it->first->getName() == tmpl->getName())
          it->second.MarkTraced();

    // Check for TraceIfNeeded<T>::trace(visitor, &field)
    if (call->getNumArgs() == 2 && fn_name == kTraceName &&
        tmpl->getName() == kTraceIfNeededName) {
      FindFieldVisitor finder;
      finder.TraverseStmt(call->getArg(1));
        FoundField(finder.field());
  bool CheckTraceBaseCall(CallExpr* call) {
    // Checks for "Base::trace(visitor)"-like calls.

    // Checking code for these two variables is shared among the MemberExpr*
    // case and the UnresolvedMemberExpr* case below.
    //
    // For example, if we've got "Base::trace(visitor)" as |call|,
    // callee_record will be "Base", and func_name will be "trace".
    CXXRecordDecl* callee_record = nullptr;
    std::string func_name;

    if (MemberExpr* callee = dyn_cast<MemberExpr>(call->getCallee())) {
      if (!callee->hasQualifier())

      FunctionDecl* trace_decl =
          dyn_cast<FunctionDecl>(callee->getMemberDecl());
      if (!trace_decl || !Config::IsTraceMethod(trace_decl))

      const Type* type = callee->getQualifier()->getAsType();

      callee_record = type->getAsCXXRecordDecl();
      func_name = trace_decl->getName();
    } else if (UnresolvedMemberExpr* callee =
                   dyn_cast<UnresolvedMemberExpr>(call->getCallee())) {
      // Callee part may become unresolved if the type of the argument
      // ("visitor") is a template parameter and the called function is
      // overloaded (i.e. trace(Visitor*) and
      // trace(InlinedGlobalMarkingVisitor)).
      //
      // Here, we try to find a function that looks like trace() from the
      // candidate overloaded functions, and if we find one, we assume it is

      CXXMethodDecl* trace_decl = nullptr;
      for (NamedDecl* named_decl : callee->decls()) {
        if (CXXMethodDecl* method_decl = dyn_cast<CXXMethodDecl>(named_decl)) {
          if (Config::IsTraceMethod(method_decl)) {
            trace_decl = method_decl;

      // Check if the passed argument is named "visitor".
      if (call->getNumArgs() != 1)
      DeclRefExpr* arg = dyn_cast<DeclRefExpr>(call->getArg(0));
      if (!arg || arg->getNameInfo().getAsString() != kVisitorVarName)

      callee_record = trace_decl->getParent();
      func_name = callee->getMemberName().getAsString();

    if (!IsTraceCallName(func_name))

    for (auto& base : info_->GetBases()) {
      // We want to deal with omitted trace() function in an intermediary
      // class in the class hierarchy, e.g.:
      //   class A : public GarbageCollected<A> { trace() { ... } };
      //   class B : public A { /* No trace(); have nothing to trace. */ };
      //   class C : public B { trace() { B::trace(visitor); } }
      // where, B::trace() is actually A::trace(), and in some cases we get
      // A as |callee_record| instead of B. We somehow need to mark B as
      // traced if we find A::trace() call.
      //
      // To solve this, here we keep going up the class hierarchy as long as
      // they are not required to have a trace method. The implementation is
      // a simple DFS, where |base_records| represents the set of base classes
      std::vector<CXXRecordDecl*> base_records;
      base_records.push_back(base.first);

      while (!base_records.empty()) {
        CXXRecordDecl* base_record = base_records.back();
        base_records.pop_back();

        if (base_record == callee_record) {
          // If we find a matching trace method, pretend the user has written
          // a correct trace() method of the base; in the example above, we
          // find A::trace() here and mark B as correctly traced.
          base.second.MarkTraced();

        if (RecordInfo* base_info = cache_->Lookup(base_record)) {
          if (!base_info->RequiresTraceMethod()) {
            // If this base class is not required to have a trace method, then
            // the actual trace method may be defined in an ancestor.
            for (auto& inner_base : base_info->GetBases())
              base_records.push_back(inner_base.first);
  bool CheckTraceFieldCall(CXXMemberCallExpr* call) {
    return CheckTraceFieldCall(call->getMethodDecl()->getNameAsString(),
                               call->getRecordDecl(),

  bool CheckTraceFieldCall(string name, CXXRecordDecl* callee, Expr* arg) {
    if (name != kTraceName || !Config::IsVisitor(callee->getName()))

    FindFieldVisitor finder;
    finder.TraverseStmt(arg);
      FoundField(finder.field());

  bool CheckRegisterWeakMembers(CXXMemberCallExpr* call) {
    CXXMethodDecl* fn = call->getMethodDecl();
    if (fn->getName() != kRegisterWeakMembersName)

    if (fn->isTemplateInstantiation()) {
      const TemplateArgumentList& args =
          *fn->getTemplateSpecializationInfo()->TemplateArguments;
      // The second template argument is the callback method.
      if (args.size() > 1 &&
          args[1].getKind() == TemplateArgument::Declaration) {
        if (FunctionDecl* callback =
                dyn_cast<FunctionDecl>(args[1].getAsDecl())) {
          if (callback->hasBody()) {
            CheckTraceVisitor nested_visitor(info_);
            nested_visitor.TraverseStmt(callback->getBody());

  class FindFieldVisitor : public RecursiveASTVisitor<FindFieldVisitor> {
    FindFieldVisitor() : member_(0), field_(0) {}
    MemberExpr* member() const { return member_; }
    FieldDecl* field() const { return field_; }
    bool TraverseMemberExpr(MemberExpr* member) {
      if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl())) {
  // Nested checking for weak callbacks.
  CheckTraceVisitor(RecordInfo* info)
      : trace_(nullptr), info_(info), cache_(nullptr) {}

  bool IsWeakCallback() { return !trace_; }

  void MarkTraced(RecordInfo::Fields::iterator it) {
    // In a weak callback we can't mark strong fields as traced.
    if (IsWeakCallback() && !it->second.edge()->IsWeakMember())
      return;
    it->second.MarkTraced();

  void FoundField(FieldDecl* field) {
    if (IsTemplateInstantiation(info_->record())) {
      // Pointer equality on fields does not work for template instantiations.
      // The trace method refers to fields of the template definition which
      // are different from the instantiated fields that need to be traced.
      const string& name = field->getNameAsString();
      for (RecordInfo::Fields::iterator it = info_->GetFields().begin();
           it != info_->GetFields().end();
        if (it->first->getNameAsString() == name) {
      RecordInfo::Fields::iterator it = info_->GetFields().find(field);
      if (it != info_->GetFields().end())

  void MarkAllWeakMembersTraced() {
    // If we find a call to registerWeakMembers which is unresolved we
    // unsoundly consider all weak members as traced.
    // TODO: Find out how to validate weak member tracing for unresolved call.
    for (auto& field : info_->GetFields()) {
      if (field.second.edge()->IsWeakMember())
        field.second.MarkTraced();

  CXXMethodDecl* trace_;
  bool delegates_to_traceimpl_;
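
// Illustrative sketch (assumed Blink-style client code) of a trace method that
// CheckTraceVisitor accepts: each Member<> field is passed to the visitor and
// the base class is traced with a base-qualified call. Names are hypothetical.
//
//   void Derived::trace(Visitor* visitor) {
//     visitor->trace(m_child);   // marks field |m_child| as traced
//     Base::trace(visitor);      // marks base |Base| as traced
//   }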
// This visitor checks that the fields of a class and the fields of
// its part objects don't define GC roots.
class CheckGCRootsVisitor : public RecursiveEdgeVisitor {
  typedef std::vector<FieldPoint*> RootPath;
  typedef std::set<RecordInfo*> VisitingSet;
  typedef std::vector<RootPath> Errors;

  CheckGCRootsVisitor() {}

  Errors& gc_roots() { return gc_roots_; }

  bool ContainsGCRoots(RecordInfo* info) {
    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
      current_.push_back(&it->second);
      it->second.edge()->Accept(this);
    return !gc_roots_.empty();

  void VisitValue(Value* edge) override {
    // TODO: what should we do to check unions?
    if (edge->value()->record()->isUnion())

    // Prevent infinite regress for cyclic part objects.
    if (visiting_set_.find(edge->value()) != visiting_set_.end())

    visiting_set_.insert(edge->value());
    // If the value is a part object, then continue checking for roots.
    for (Context::iterator it = context().begin();
         it != context().end();
      if (!(*it)->IsCollection())
    ContainsGCRoots(edge->value());
    visiting_set_.erase(edge->value());

  void VisitPersistent(Persistent* edge) override {
    gc_roots_.push_back(current_);

  void AtCollection(Collection* edge) override {
      gc_roots_.push_back(current_);

  VisitingSet visiting_set_;
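
// Illustrative sketch (assumed Blink-style client code) of what this visitor
// reports: a GC root embedded in a GC managed class. Names are hypothetical.
//
//   class Registry : public GarbageCollected<Registry> {
//     Persistent<Node> m_root;   // kClassContainsGCRoot: field defines a GC root
//   };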
// This visitor checks that the fields of a class are "well formed".
// - OwnPtr, RefPtr and RawPtr must not point to GC derived types.
// - Part objects must not be GC derived types.
// - An on-heap class must never contain GC roots.
// - Only stack-allocated types may point to stack-allocated types.
class CheckFieldsVisitor : public RecursiveEdgeVisitor {
    kRawPtrToGCManagedWarning,
    kMemberToGCUnmanaged,

  typedef std::vector<std::pair<FieldPoint*, Error> > Errors;

  CheckFieldsVisitor(const BlinkGCPluginOptions& options)
      : options_(options), current_(0), stack_allocated_host_(false) {}

  Errors& invalid_fields() { return invalid_fields_; }

  bool ContainsInvalidFields(RecordInfo* info) {
    stack_allocated_host_ = info->IsStackAllocated();
    managed_host_ = stack_allocated_host_ ||
                    info->IsGCAllocated() ||
                    info->IsNonNewable() ||
                    info->IsOnlyPlacementNewable();
    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
      current_ = &it->second;
      current_->edge()->Accept(this);
    return !invalid_fields_.empty();

  void AtMember(Member* edge) override {
    // A member is allowed to appear in the context of a root.
    for (Context::iterator it = context().begin();
         it != context().end();
      if ((*it)->Kind() == Edge::kRoot)
    invalid_fields_.push_back(std::make_pair(current_, kMemberInUnmanaged));

  void AtValue(Value* edge) override {
    // TODO: what should we do to check unions?
    if (edge->value()->record()->isUnion())

    if (!stack_allocated_host_ && edge->value()->IsStackAllocated()) {
      invalid_fields_.push_back(std::make_pair(current_, kPtrFromHeapToStack));

        edge->value()->IsGCDerived() &&
        !edge->value()->IsGCMixin()) {
      invalid_fields_.push_back(std::make_pair(current_, kGCDerivedPartObject));

    // If in a stack allocated context, be fairly insistent that T in Member<T>
    // is GC allocated, as stack allocated objects do not have a trace()
    // that separately verifies the validity of Member<T>.
    //
    // Notice that an error is only reported if T's definition is in scope;
    // we do not require that it must be brought into scope as that would
    // prevent declarations of mutually dependent class types.
    //
    // (Note: Member<>'s constructor will at run-time verify that the
    // pointer it wraps is indeed heap allocated.)
    if (stack_allocated_host_ && Parent() && Parent()->IsMember() &&
        edge->value()->HasDefinition() && !edge->value()->IsGCAllocated()) {
      invalid_fields_.push_back(std::make_pair(current_,
                                               kMemberToGCUnmanaged));

    if (!Parent() || !edge->value()->IsGCAllocated())

    // In transition mode, disallow OwnPtr<T>, RawPtr<T> to GC allocated T's,
    // also disallow T* in stack-allocated types.
    if (options_.enable_oilpan) {
      if (Parent()->IsOwnPtr() ||
          Parent()->IsRawPtrClass() ||
          (stack_allocated_host_ && Parent()->IsRawPtr())) {
        invalid_fields_.push_back(std::make_pair(
            current_, InvalidSmartPtr(Parent())));
      if (options_.warn_raw_ptr && Parent()->IsRawPtr()) {
        invalid_fields_.push_back(std::make_pair(
            current_, kRawPtrToGCManagedWarning));

    if (Parent()->IsRawPtr() || Parent()->IsRefPtr() || Parent()->IsOwnPtr()) {
      invalid_fields_.push_back(std::make_pair(
          current_, InvalidSmartPtr(Parent())));

  void AtCollection(Collection* edge) override {
    if (edge->on_heap() && Parent() && Parent()->IsOwnPtr())
      invalid_fields_.push_back(std::make_pair(current_, kOwnPtrToGCManaged));

  Error InvalidSmartPtr(Edge* ptr) {
      return kRawPtrToGCManaged;
      return kRefPtrToGCManaged;
      return kOwnPtrToGCManaged;
    assert(false && "Unknown smart pointer kind");

  const BlinkGCPluginOptions& options_;
  FieldPoint* current_;
  bool stack_allocated_host_;
  Errors invalid_fields_;
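
// Illustrative sketch (assumed Blink-style client code) of the field errors
// collected above. Class and field names are hypothetical.
//
//   class Container : public GarbageCollected<Container> {
//     OwnPtr<Node> m_owned;   // kOwnPtrToGCManaged: OwnPtr to a GC managed class
//     Node* m_raw;            // kRawPtrToGCManaged / kRawPtrToGCManagedWarning
//     RefPtr<Node> m_ref;     // kRefPtrToGCManaged
//   };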
class EmptyStmtVisitor
    : public RecursiveASTVisitor<EmptyStmtVisitor> {
  static bool isEmpty(Stmt* stmt) {
    EmptyStmtVisitor visitor;
    visitor.TraverseStmt(stmt);
    return visitor.empty_;

  bool WalkUpFromCompoundStmt(CompoundStmt* stmt) {
    empty_ = stmt->body_empty();
  bool VisitStmt(Stmt*) {

  EmptyStmtVisitor() : empty_(true) {}
// Main class containing checks for various invariants of the Blink
// garbage collection infrastructure.
class BlinkGCPluginConsumer : public ASTConsumer {
  BlinkGCPluginConsumer(CompilerInstance& instance,
                        const BlinkGCPluginOptions& options)
      : instance_(instance),
        diagnostic_(instance.getDiagnostics()),

    // Only check structures in the blink and WebKit namespaces.
    options_.checked_namespaces.insert("blink");

    // Ignore GC implementation files.
    options_.ignored_directories.push_back("/heap/");

    // Register warning/error messages.
    diag_class_must_left_mostly_derive_gc_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kClassMustLeftMostlyDeriveGC);
    diag_class_requires_trace_method_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kClassRequiresTraceMethod);
    diag_base_requires_tracing_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kBaseRequiresTracing);
    diag_fields_require_tracing_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kFieldsRequireTracing);
    diag_class_contains_invalid_fields_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kClassContainsInvalidFields);
    diag_class_contains_invalid_fields_warning_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Warning, kClassContainsInvalidFields);
    diag_class_contains_gc_root_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kClassContainsGCRoot);
    diag_class_requires_finalization_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kClassRequiresFinalization);
    diag_class_does_not_require_finalization_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Warning, kClassDoesNotRequireFinalization);
    diag_finalizer_accesses_finalized_field_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kFinalizerAccessesFinalizedField);
    diag_finalizer_eagerly_finalized_field_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kFinalizerAccessesEagerlyFinalizedField);
    diag_overridden_non_virtual_trace_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kOverriddenNonVirtualTrace);
    diag_missing_trace_dispatch_method_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kMissingTraceDispatchMethod);
    diag_missing_finalize_dispatch_method_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kMissingFinalizeDispatchMethod);
    diag_virtual_and_manual_dispatch_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kVirtualAndManualDispatch);
    diag_missing_trace_dispatch_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kMissingTraceDispatch);
    diag_missing_finalize_dispatch_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kMissingFinalizeDispatch);
    diag_derives_non_stack_allocated_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kDerivesNonStackAllocated);
    diag_class_overrides_new_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kClassOverridesNew);
    diag_class_declares_pure_virtual_trace_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kClassDeclaresPureVirtualTrace);
    diag_left_most_base_must_be_polymorphic_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kLeftMostBaseMustBePolymorphic);
    diag_base_class_must_declare_virtual_trace_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kBaseClassMustDeclareVirtualTrace);
    diag_class_must_declare_gc_mixin_trace_method_ =
        diagnostic_.getCustomDiagID(getErrorLevel(),
                                    kClassMustDeclareGCMixinTraceMethod);

    // Register note messages.
    diag_base_requires_tracing_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kBaseRequiresTracingNote);
    diag_field_requires_tracing_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kFieldRequiresTracingNote);
    diag_raw_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kRawPtrToGCManagedClassNote);
    diag_ref_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kRefPtrToGCManagedClassNote);
    diag_own_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kOwnPtrToGCManagedClassNote);
    diag_member_to_gc_unmanaged_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kMemberToGCUnmanagedClassNote);
    diag_stack_allocated_field_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kStackAllocatedFieldNote);
    diag_member_in_unmanaged_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kMemberInUnmanagedClassNote);
    diag_part_object_to_gc_derived_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kPartObjectToGCDerivedClassNote);
    diag_part_object_contains_gc_root_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kPartObjectContainsGCRootNote);
    diag_field_contains_gc_root_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kFieldContainsGCRootNote);
    diag_finalized_field_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kFinalizedFieldNote);
    diag_eagerly_finalized_field_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kEagerlyFinalizedFieldNote);
    diag_user_declared_destructor_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kUserDeclaredDestructorNote);
    diag_user_declared_finalizer_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kUserDeclaredFinalizerNote);
    diag_base_requires_finalization_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kBaseRequiresFinalizationNote);
    diag_field_requires_finalization_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kFieldRequiresFinalizationNote);
    diag_overridden_non_virtual_trace_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kOverriddenNonVirtualTraceNote);
    diag_manual_dispatch_method_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kManualDispatchMethodNote);
  void HandleTranslationUnit(ASTContext& context) override {
    // Don't run the plugin if the compilation unit is already invalid.
    if (diagnostic_.hasErrorOccurred())

    ParseFunctionTemplates(context.getTranslationUnitDecl());

    CollectVisitor visitor;
    visitor.TraverseDecl(context.getTranslationUnitDecl());

    if (options_.dump_graph) {
      std::error_code err;
      // TODO: Make createDefaultOutputFile or a shorter createOutputFile work.
      json_ = JsonWriter::from(instance_.createOutputFile(
          true,  // RemoveFileOnSignal
          instance_.getFrontendOpts().OutputFile,  // BaseInput
          "graph.json",  // Extension
          false,  // UseTemporary
          false,  // CreateMissingDirectories
          0,  // ResultPathName
          0));  // TempPathName
      if (!err && json_) {
            << "Failed to create an output file for the object graph.\n";

    for (RecordVector::iterator it = visitor.record_decls().begin();
         it != visitor.record_decls().end();
      CheckRecord(cache_.Lookup(*it));

    for (MethodVector::iterator it = visitor.trace_decls().begin();
         it != visitor.trace_decls().end();
      CheckTracingMethod(*it);
  void ParseFunctionTemplates(TranslationUnitDecl* decl) {
    if (!instance_.getLangOpts().DelayedTemplateParsing)
      return;  // Nothing to do.

    std::set<FunctionDecl*> late_parsed_decls =
        GetLateParsedFunctionDecls(decl);
    clang::Sema& sema = instance_.getSema();

    for (const FunctionDecl* fd : late_parsed_decls) {
      assert(fd->isLateTemplateParsed());

      if (!Config::IsTraceMethod(fd))

      if (instance_.getSourceManager().isInSystemHeader(
              instance_.getSourceManager().getSpellingLoc(fd->getLocation())))

      // Force parsing and AST building of the yet-uninstantiated function
      // template trace method bodies.
      clang::LateParsedTemplate* lpt = sema.LateParsedTemplateMap[fd];
      sema.LateTemplateParser(sema.OpaqueParser, *lpt);
  // Main entry for checking a record declaration.
  void CheckRecord(RecordInfo* info) {
    if (IsIgnored(info))

    CXXRecordDecl* record = info->record();

    // TODO: what should we do to check unions?
    if (record->isUnion())

    // If this is the primary template declaration, check its specializations.
    if (record->isThisDeclarationADefinition() &&
        record->getDescribedClassTemplate()) {
      ClassTemplateDecl* tmpl = record->getDescribedClassTemplate();
      for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin();
           it != tmpl->spec_end();
        CheckClass(cache_.Lookup(*it));
  // Check a class-like object (eg, class, specialization, instantiation).
  void CheckClass(RecordInfo* info) {
    // Check consistency of stack-allocated hierarchies.
    if (info->IsStackAllocated()) {
      for (RecordInfo::Bases::iterator it = info->GetBases().begin();
           it != info->GetBases().end();
        if (!it->second.info()->IsStackAllocated())
          ReportDerivesNonStackAllocated(info, &it->second);

    if (CXXMethodDecl* trace = info->GetTraceMethod()) {
      if (trace->isPure())
        ReportClassDeclaresPureVirtualTrace(info, trace);
    } else if (info->RequiresTraceMethod()) {
      ReportClassRequiresTraceMethod(info);

    // Check polymorphic classes that are GC-derived or have a trace method.
    if (info->record()->hasDefinition() && info->record()->isPolymorphic()) {
      // TODO: Check classes that inherit a trace method.
      CXXMethodDecl* trace = info->GetTraceMethod();
      if (trace || info->IsGCDerived())
        CheckPolymorphicClass(info, trace);

    CheckFieldsVisitor visitor(options_);
    if (visitor.ContainsInvalidFields(info))
      ReportClassContainsInvalidFields(info, &visitor.invalid_fields());

    if (info->IsGCDerived()) {
      if (!info->IsGCMixin()) {
        CheckLeftMostDerived(info);
        CheckDispatch(info);
        if (CXXMethodDecl* newop = info->DeclaresNewOperator())
          if (!Config::IsIgnoreAnnotated(newop))
            ReportClassOverridesNew(info, newop);
        if (info->IsGCMixinInstance()) {
          // Require that declared GCMixin implementations
          // also provide a trace() override.
          if (info->DeclaresGCMixinMethods()
              && !info->DeclaresLocalTraceMethod())
            ReportClassMustDeclareGCMixinTraceMethod(info);

      CheckGCRootsVisitor visitor;
      if (visitor.ContainsGCRoots(info))
        ReportClassContainsGCRoots(info, &visitor.gc_roots());

      if (info->NeedsFinalization())
        CheckFinalization(info);

      if (options_.warn_unneeded_finalizer && info->IsGCFinalized())
        CheckUnneededFinalization(info);
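
  // Illustrative sketch (assumed Blink-style client code) of the GCMixin rule
  // enforced above: a class that mixes in a GarbageCollectedMixin and declares
  // mixin methods must also locally declare and override trace(Visitor*).
  // Class names are hypothetical.
  //
  //   class MixinUser : public GarbageCollected<MixinUser>,
  //                     public SomeMixin {  // SomeMixin : GarbageCollectedMixin
  //     // Declaring mixin methods here without a local
  //     // "void trace(Visitor*) override;" triggers
  //     // kClassMustDeclareGCMixinTraceMethod.
  //   };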
  CXXRecordDecl* GetDependentTemplatedDecl(const Type& type) {
    const TemplateSpecializationType* tmpl_type =
        type.getAs<TemplateSpecializationType>();

    TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();

    return dyn_cast<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
  // The GC infrastructure assumes that if the vtable of a polymorphic
  // base-class is not initialized for a given object (ie, it is partially
  // initialized) then the object does not need to be traced. Thus, we must
  // ensure that any polymorphic class with a trace method does not have any
  // tractable fields that are initialized before we are sure that the vtable
  // and the trace method are both defined. There are two cases that need to
  // hold to satisfy that assumption:
  //
  // 1. If trace is virtual, then it must be defined in the left-most base.
  // This ensures that if the vtable is initialized then it contains a pointer
  // to the trace method.
  //
  // 2. If trace is non-virtual, then the trace method is defined and we must
  // ensure that the left-most base defines a vtable. This ensures that the
  // first thing to be initialized when constructing the object is the vtable
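  //
  // Illustrative sketch (hypothetical class names) of the two conditions:
  //
  //   class A : public GarbageCollected<A> { virtual void trace(Visitor*); };
  //   class B : public A { ... };           // (1) OK: virtual trace is defined
  //                                         //     in the left-most base A.
  //
  //   class C { virtual void foo(); };      // polymorphic, no trace
  //   class D : public C, public GarbageCollected<D> {
  //     void trace(Visitor*);               // (2) OK only because the left-most
  //   };                                    //     base C already has a vtable.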
  void CheckPolymorphicClass(RecordInfo* info, CXXMethodDecl* trace) {
    CXXRecordDecl* left_most = info->record();
    CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
    CXXRecordDecl* left_most_base = 0;
    while (it != left_most->bases_end()) {
      left_most_base = it->getType()->getAsCXXRecordDecl();
      if (!left_most_base && it->getType()->isDependentType())
        left_most_base = RecordInfo::GetDependentTemplatedDecl(*it->getType());

      // TODO: Find a way to correctly check actual instantiations
      // for dependent types. The escape below will be hit, eg, when
      // we have a primary template with no definition and
      // specializations for each case (such as SupplementBase) in
      // which case we don't succeed in checking the required
      if (!left_most_base || !left_most_base->hasDefinition())

      StringRef name = left_most_base->getName();
      // We know GCMixin base defines virtual trace.
      if (Config::IsGCMixinBase(name))

      // Stop with the left-most prior to a safe polymorphic base (a safe base
      // is non-polymorphic and contains no fields).
      if (Config::IsSafePolymorphicBase(name))

      left_most = left_most_base;
      it = left_most->bases_begin();

    if (RecordInfo* left_most_info = cache_.Lookup(left_most)) {
      // Check condition (1):
      if (trace && trace->isVirtual()) {
        if (CXXMethodDecl* trace = left_most_info->GetTraceMethod()) {
          if (trace->isVirtual())
        ReportBaseClassMustDeclareVirtualTrace(info, left_most);

      // Check condition (2):
      if (DeclaresVirtualMethods(left_most))
      if (left_most_base) {
        // Get the base next to the "safe polymorphic base"
        if (it != left_most->bases_end())
        if (it != left_most->bases_end()) {
          if (CXXRecordDecl* next_base = it->getType()->getAsCXXRecordDecl()) {
            if (CXXRecordDecl* next_left_most = GetLeftMostBase(next_base)) {
              if (DeclaresVirtualMethods(next_left_most))
              ReportLeftMostBaseMustBePolymorphic(info, next_left_most);

      ReportLeftMostBaseMustBePolymorphic(info, left_most);
  CXXRecordDecl* GetLeftMostBase(CXXRecordDecl* left_most) {
    CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
    while (it != left_most->bases_end()) {
      if (it->getType()->isDependentType())
        left_most = RecordInfo::GetDependentTemplatedDecl(*it->getType());
        left_most = it->getType()->getAsCXXRecordDecl();
      if (!left_most || !left_most->hasDefinition())
      it = left_most->bases_begin();

  bool DeclaresVirtualMethods(CXXRecordDecl* decl) {
    CXXRecordDecl::method_iterator it = decl->method_begin();
    for (; it != decl->method_end(); ++it)
      if (it->isVirtual() && !it->isPure())
  void CheckLeftMostDerived(RecordInfo* info) {
    CXXRecordDecl* left_most = GetLeftMostBase(info->record());
    if (!Config::IsGCBase(left_most->getName()))
      ReportClassMustLeftMostlyDeriveGC(info);
  void CheckDispatch(RecordInfo* info) {
    bool finalized = info->IsGCFinalized();
    CXXMethodDecl* trace_dispatch = info->GetTraceDispatchMethod();
    CXXMethodDecl* finalize_dispatch = info->GetFinalizeDispatchMethod();
    if (!trace_dispatch && !finalize_dispatch)

    CXXRecordDecl* base = trace_dispatch ? trace_dispatch->getParent()
                                         : finalize_dispatch->getParent();

    // Check that dispatch methods are defined at the base.
    if (base == info->record()) {
      if (!trace_dispatch)
        ReportMissingTraceDispatchMethod(info);
      if (finalized && !finalize_dispatch)
        ReportMissingFinalizeDispatchMethod(info);
      if (!finalized && finalize_dispatch) {
        ReportClassRequiresFinalization(info);
        NoteUserDeclaredFinalizer(finalize_dispatch);

    // Check that classes implementing manual dispatch do not have vtables.
    if (info->record()->isPolymorphic())
      ReportVirtualAndManualDispatch(
          info, trace_dispatch ? trace_dispatch : finalize_dispatch);

    // If this is a non-abstract class check that it is dispatched to.
    // TODO: Create a global variant of this local check. We can only check if
    // the dispatch body is known in this compilation unit.
    if (info->IsConsideredAbstract())

    const FunctionDecl* defn;

    if (trace_dispatch && trace_dispatch->isDefined(defn)) {
      CheckDispatchVisitor visitor(info);
      visitor.TraverseStmt(defn->getBody());
      if (!visitor.dispatched_to_receiver())
        ReportMissingTraceDispatch(defn, info);

    if (finalized && finalize_dispatch && finalize_dispatch->isDefined(defn)) {
      CheckDispatchVisitor visitor(info);
      visitor.TraverseStmt(defn->getBody());
      if (!visitor.dispatched_to_receiver())
        ReportMissingFinalizeDispatch(defn, info);
  // TODO: Should we collect destructors similar to trace methods?
  void CheckFinalization(RecordInfo* info) {
    CXXDestructorDecl* dtor = info->record()->getDestructor();

    // For finalized classes, check the finalization method if possible.
    if (info->IsGCFinalized()) {
      if (dtor && dtor->hasBody()) {
        CheckFinalizerVisitor visitor(&cache_, info->IsEagerlyFinalized());
        visitor.TraverseCXXMethodDecl(dtor);
        if (!visitor.finalized_fields().empty()) {
          ReportFinalizerAccessesFinalizedFields(
              dtor, &visitor.finalized_fields());

    // Don't require finalization of a mixin that has not yet been "mixed in".
    if (info->IsGCMixin())

    // Report the finalization error, and proceed to print possible causes for
    // the finalization requirement.
    ReportClassRequiresFinalization(info);

    if (dtor && dtor->isUserProvided())
      NoteUserDeclaredDestructor(dtor);

    for (RecordInfo::Bases::iterator it = info->GetBases().begin();
         it != info->GetBases().end();
      if (it->second.info()->NeedsFinalization())
        NoteBaseRequiresFinalization(&it->second);

    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
      if (it->second.edge()->NeedsFinalization())
        NoteField(&it->second, diag_field_requires_finalization_note_);
  void CheckUnneededFinalization(RecordInfo* info) {
    if (!HasNonEmptyFinalizer(info))
      ReportClassDoesNotRequireFinalization(info);

  bool HasNonEmptyFinalizer(RecordInfo* info) {
    CXXDestructorDecl* dtor = info->record()->getDestructor();
    if (dtor && dtor->isUserProvided()) {
      if (!dtor->hasBody() || !EmptyStmtVisitor::isEmpty(dtor->getBody()))

    for (RecordInfo::Bases::iterator it = info->GetBases().begin();
         it != info->GetBases().end();
      if (HasNonEmptyFinalizer(it->second.info()))

    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
      if (it->second.edge()->NeedsFinalization())
  // This is the main entry for tracing method definitions.
  void CheckTracingMethod(CXXMethodDecl* method) {
    RecordInfo* parent = cache_.Lookup(method->getParent());
    if (IsIgnored(parent))

    // Check templated tracing methods by checking the template instantiations.
    // Specialized templates are handled as ordinary classes.
    if (ClassTemplateDecl* tmpl =
            parent->record()->getDescribedClassTemplate()) {
      for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin();
           it != tmpl->spec_end();
        // Check trace using each template instantiation as the holder.
        if (IsTemplateInstantiation(*it))
          CheckTraceOrDispatchMethod(cache_.Lookup(*it), method);

    CheckTraceOrDispatchMethod(parent, method);
  // Determine what type of tracing method this is (dispatch or trace).
  void CheckTraceOrDispatchMethod(RecordInfo* parent, CXXMethodDecl* method) {
    Config::TraceMethodType trace_type = Config::GetTraceMethodType(method);
    if (trace_type == Config::TRACE_AFTER_DISPATCH_METHOD ||
        trace_type == Config::TRACE_AFTER_DISPATCH_IMPL_METHOD ||
        !parent->GetTraceDispatchMethod()) {
      CheckTraceMethod(parent, method, trace_type);
    // Dispatch methods are checked when we identify subclasses.
  // Check an actual trace method.
  void CheckTraceMethod(RecordInfo* parent,
                        CXXMethodDecl* trace,
                        Config::TraceMethodType trace_type) {
    // A trace method must not override any non-virtual trace methods.
    if (trace_type == Config::TRACE_METHOD) {
      for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
           it != parent->GetBases().end();
        RecordInfo* base = it->second.info();
        if (CXXMethodDecl* other = base->InheritsNonVirtualTrace())
          ReportOverriddenNonVirtualTrace(parent, trace, other);

    CheckTraceVisitor visitor(trace, parent, &cache_);
    visitor.TraverseCXXMethodDecl(trace);

    // Skip reporting if this trace method is just a delegate to the traceImpl
    // (or traceAfterDispatchImpl) method; we report on the traceImpl method
    // when CheckTraceMethod is run on it.
    if (visitor.delegates_to_traceimpl())

    for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
         it != parent->GetBases().end();
      if (!it->second.IsProperlyTraced())
        ReportBaseRequiresTracing(parent, trace, it->first);

    for (RecordInfo::Fields::iterator it = parent->GetFields().begin();
         it != parent->GetFields().end();
      if (!it->second.IsProperlyTraced()) {
        // Discontinue once an untraced-field error is found.
        ReportFieldsRequireTracing(parent, trace);
  void DumpClass(RecordInfo* info) {
    json_->OpenObject();
    json_->Write("name", info->record()->getQualifiedNameAsString());
    json_->Write("loc", GetLocString(info->record()->getLocStart()));
    json_->CloseObject();

    class DumpEdgeVisitor : public RecursiveEdgeVisitor {
      DumpEdgeVisitor(JsonWriter* json) : json_(json) {}
      void DumpEdge(RecordInfo* src,
                    const Edge::LivenessKind& kind,
                    const string& loc) {
        json_->OpenObject();
        json_->Write("src", src->record()->getQualifiedNameAsString());
        json_->Write("dst", dst->record()->getQualifiedNameAsString());
        json_->Write("lbl", lbl);
        json_->Write("kind", kind);
        json_->Write("loc", loc);
                     Parent()->IsRawPtr() ? "raw" :
                     Parent()->IsRefPtr() ? "ref" :
                     Parent()->IsOwnPtr() ? "own" :
                     (Parent()->IsMember() ||
                      Parent()->IsWeakMember()) ? "mem" :
        json_->CloseObject();

      void DumpField(RecordInfo* src, FieldPoint* point, const string& loc) {
        point_->edge()->Accept(this);

      void AtValue(Value* e) override {
        // The liveness kind of a path from the point to this value
        // is given by the innermost place that is non-strong.
        Edge::LivenessKind kind = Edge::kStrong;
        if (Config::IsIgnoreCycleAnnotated(point_->field())) {
        for (Context::iterator it = context().begin();
             it != context().end();
          Edge::LivenessKind pointer_kind = (*it)->Kind();
          if (pointer_kind != Edge::kStrong) {
            kind = pointer_kind;

            src_, e->value(), point_->field()->getNameAsString(), kind, loc_);

    DumpEdgeVisitor visitor(json_);

    RecordInfo::Bases& bases = info->GetBases();
    for (RecordInfo::Bases::iterator it = bases.begin();
      visitor.DumpEdge(info,
                       GetLocString(it->second.spec().getLocStart()));

    RecordInfo::Fields& fields = info->GetFields();
    for (RecordInfo::Fields::iterator it = fields.begin();
      visitor.DumpField(info,
                        GetLocString(it->second.field()->getLocStart()));
  // Adds either a warning or error, based on the current handling of -Werror.
  DiagnosticsEngine::Level getErrorLevel() {
    return diagnostic_.getWarningsAsErrors() ? DiagnosticsEngine::Error
                                             : DiagnosticsEngine::Warning;

  const string GetLocString(SourceLocation loc) {
    const SourceManager& source_manager = instance_.getSourceManager();
    PresumedLoc ploc = source_manager.getPresumedLoc(loc);
    if (ploc.isInvalid())

    llvm::raw_string_ostream OS(loc_str);
    OS << ploc.getFilename()
       << ":" << ploc.getLine()
       << ":" << ploc.getColumn();
  bool IsIgnored(RecordInfo* record) {
           !InCheckedNamespace(record) ||
           IsIgnoredClass(record) ||
           InIgnoredDirectory(record);

  bool IsIgnoredClass(RecordInfo* info) {
    // Ignore any class prefixed by SameSizeAs. These are used in
    // Blink to verify class sizes and don't need checking.
    const string SameSizeAs = "SameSizeAs";
    if (info->name().compare(0, SameSizeAs.size(), SameSizeAs) == 0)
    return options_.ignored_classes.find(info->name()) !=
           options_.ignored_classes.end();

  bool InIgnoredDirectory(RecordInfo* info) {
    if (!GetFilename(info->record()->getLocStart(), &filename))
      return false;  // TODO: should we ignore non-existing file locations?
#if defined(LLVM_ON_WIN32)
    std::replace(filename.begin(), filename.end(), '\\', '/');
    std::vector<string>::iterator it = options_.ignored_directories.begin();
    for (; it != options_.ignored_directories.end(); ++it)
      if (filename.find(*it) != string::npos)

  bool InCheckedNamespace(RecordInfo* info) {
    for (DeclContext* context = info->record()->getDeclContext();
         !context->isTranslationUnit();
         context = context->getParent()) {
      if (NamespaceDecl* decl = dyn_cast<NamespaceDecl>(context)) {
        if (decl->isAnonymousNamespace())
        if (options_.checked_namespaces.find(decl->getNameAsString()) !=
            options_.checked_namespaces.end()) {

  bool GetFilename(SourceLocation loc, string* filename) {
    const SourceManager& source_manager = instance_.getSourceManager();
    SourceLocation spelling_location = source_manager.getSpellingLoc(loc);
    PresumedLoc ploc = source_manager.getPresumedLoc(spelling_location);
    if (ploc.isInvalid()) {
      // If we're in an invalid location, we're looking at things that aren't
      // actually stated in the source.
    *filename = ploc.getFilename();
  void ReportClassMustLeftMostlyDeriveGC(RecordInfo* info) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_must_left_mostly_derive_gc_)

  void ReportClassRequiresTraceMethod(RecordInfo* info) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_requires_trace_method_)

    for (RecordInfo::Bases::iterator it = info->GetBases().begin();
         it != info->GetBases().end();
      if (it->second.NeedsTracing().IsNeeded())
        NoteBaseRequiresTracing(&it->second);

    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
      if (!it->second.IsProperlyTraced())
        NoteFieldRequiresTracing(info, it->first);
  void ReportBaseRequiresTracing(RecordInfo* derived,
                                 CXXMethodDecl* trace,
                                 CXXRecordDecl* base) {
    SourceLocation loc = trace->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_base_requires_tracing_)
        << base << derived->record();
  }

  void ReportFieldsRequireTracing(RecordInfo* info, CXXMethodDecl* trace) {
    SourceLocation loc = trace->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_fields_require_tracing_)
        << info->record();
    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
         ++it) {
      if (!it->second.IsProperlyTraced())
        NoteFieldRequiresTracing(info, it->first);
    }
  }

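  // If the "warn-raw-ptr" option is set and every reported field error is of
  // kind kRawPtrToGCManagedWarning, the class-level diagnostic below is
  // emitted as the warning variant rather than the error variant.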
  void ReportClassContainsInvalidFields(RecordInfo* info,
                                        CheckFieldsVisitor::Errors* errors) {
    SourceLocation loc = info->record()->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    bool only_warnings = options_.warn_raw_ptr;
    for (CheckFieldsVisitor::Errors::iterator it = errors->begin();
         only_warnings && it != errors->end();
         ++it) {
      if (it->second != CheckFieldsVisitor::kRawPtrToGCManagedWarning)
        only_warnings = false;
    }
    diagnostic_.Report(full_loc, only_warnings ?
                       diag_class_contains_invalid_fields_warning_ :
                       diag_class_contains_invalid_fields_)
        << info->record();
    for (CheckFieldsVisitor::Errors::iterator it = errors->begin();
         it != errors->end();
         ++it) {
      unsigned error;
      if (it->second == CheckFieldsVisitor::kRawPtrToGCManaged ||
          it->second == CheckFieldsVisitor::kRawPtrToGCManagedWarning) {
        error = diag_raw_ptr_to_gc_managed_class_note_;
      } else if (it->second == CheckFieldsVisitor::kRefPtrToGCManaged) {
        error = diag_ref_ptr_to_gc_managed_class_note_;
      } else if (it->second == CheckFieldsVisitor::kOwnPtrToGCManaged) {
        error = diag_own_ptr_to_gc_managed_class_note_;
      } else if (it->second == CheckFieldsVisitor::kMemberToGCUnmanaged) {
        error = diag_member_to_gc_unmanaged_class_note_;
      } else if (it->second == CheckFieldsVisitor::kMemberInUnmanaged) {
        error = diag_member_in_unmanaged_class_note_;
      } else if (it->second == CheckFieldsVisitor::kPtrFromHeapToStack) {
        error = diag_stack_allocated_field_note_;
      } else if (it->second == CheckFieldsVisitor::kGCDerivedPartObject) {
        error = diag_part_object_to_gc_derived_class_note_;
      } else {
        assert(false && "Unknown field error");
      }
      NoteField(it->first, error);
    }
  }

  void ReportClassContainsGCRoots(RecordInfo* info,
                                  CheckGCRootsVisitor::Errors* errors) {
    SourceLocation loc = info->record()->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    for (CheckGCRootsVisitor::Errors::iterator it = errors->begin();
         it != errors->end();
         ++it) {
      CheckGCRootsVisitor::RootPath::iterator path = it->begin();
      FieldPoint* point = *path;
      diagnostic_.Report(full_loc, diag_class_contains_gc_root_)
          << info->record() << point->field();
      while (++path != it->end()) {
        NotePartObjectContainsGCRoot(point);
        point = *path;
      }
      NoteFieldContainsGCRoot(point);
    }
  }

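  // Accesses to eagerly finalized fields get a dedicated diagnostic and note;
  // all other finalized-field accesses in the destructor use the generic
  // finalized-field diagnostic and note.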
  void ReportFinalizerAccessesFinalizedFields(
      CXXMethodDecl* dtor,
      CheckFinalizerVisitor::Errors* fields) {
    for (CheckFinalizerVisitor::Errors::iterator it = fields->begin();
         it != fields->end();
         ++it) {
      SourceLocation loc = it->member_->getLocStart();
      SourceManager& manager = instance_.getSourceManager();
      bool as_eagerly_finalized = it->as_eagerly_finalized_;
      unsigned diag_error = as_eagerly_finalized ?
                            diag_finalizer_eagerly_finalized_field_ :
                            diag_finalizer_accesses_finalized_field_;
      unsigned diag_note = as_eagerly_finalized ?
                           diag_eagerly_finalized_field_note_ :
                           diag_finalized_field_note_;
      FullSourceLoc full_loc(loc, manager);
      diagnostic_.Report(full_loc, diag_error)
          << dtor << it->field_->field();
      NoteField(it->field_, diag_note);
    }
  }

  void ReportClassRequiresFinalization(RecordInfo* info) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_requires_finalization_)
        << info->record();
  }

  void ReportClassDoesNotRequireFinalization(RecordInfo* info) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_does_not_require_finalization_)
        << info->record();
  }

  void ReportClassMustDeclareGCMixinTraceMethod(RecordInfo* info) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(
        full_loc, diag_class_must_declare_gc_mixin_trace_method_)
        << info->record();
  }

  void ReportOverriddenNonVirtualTrace(RecordInfo* info,
                                       CXXMethodDecl* trace,
                                       CXXMethodDecl* overridden) {
    SourceLocation loc = trace->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_)
        << info->record() << overridden->getParent();
    NoteOverriddenNonVirtualTrace(overridden);
  }

  void ReportMissingTraceDispatchMethod(RecordInfo* info) {
    ReportMissingDispatchMethod(info, diag_missing_trace_dispatch_method_);
  }

  void ReportMissingFinalizeDispatchMethod(RecordInfo* info) {
    ReportMissingDispatchMethod(info, diag_missing_finalize_dispatch_method_);
  }

  void ReportMissingDispatchMethod(RecordInfo* info, unsigned error) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, error) << info->record();
  }

  void ReportVirtualAndManualDispatch(RecordInfo* info,
                                      CXXMethodDecl* dispatch) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_virtual_and_manual_dispatch_)
        << info->record();
    NoteManualDispatchMethod(dispatch);
  }

  void ReportMissingTraceDispatch(const FunctionDecl* dispatch,
                                  RecordInfo* receiver) {
    ReportMissingDispatch(dispatch, receiver, diag_missing_trace_dispatch_);
  }

  void ReportMissingFinalizeDispatch(const FunctionDecl* dispatch,
                                     RecordInfo* receiver) {
    ReportMissingDispatch(dispatch, receiver, diag_missing_finalize_dispatch_);
  }

  void ReportMissingDispatch(const FunctionDecl* dispatch,
                             RecordInfo* receiver,
                             unsigned error) {
    SourceLocation loc = dispatch->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, error) << receiver->record();
  }

  void ReportDerivesNonStackAllocated(RecordInfo* info, BasePoint* base) {
    SourceLocation loc = base->spec().getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_derives_non_stack_allocated_)
        << info->record() << base->info()->record();
  }

  void ReportClassOverridesNew(RecordInfo* info, CXXMethodDecl* newop) {
    SourceLocation loc = newop->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_overrides_new_) << info->record();
  }

  void ReportClassDeclaresPureVirtualTrace(RecordInfo* info,
                                           CXXMethodDecl* trace) {
    SourceLocation loc = trace->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_declares_pure_virtual_trace_)
        << info->record();
  }

  void ReportLeftMostBaseMustBePolymorphic(RecordInfo* derived,
                                           CXXRecordDecl* base) {
    SourceLocation loc = base->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_left_most_base_must_be_polymorphic_)
        << base << derived->record();
  }

  void ReportBaseClassMustDeclareVirtualTrace(RecordInfo* derived,
                                              CXXRecordDecl* base) {
    SourceLocation loc = base->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_base_class_must_declare_virtual_trace_)
        << base << derived->record();
  }

  void NoteManualDispatchMethod(CXXMethodDecl* dispatch) {
    SourceLocation loc = dispatch->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_manual_dispatch_method_note_) << dispatch;
  }

  void NoteBaseRequiresTracing(BasePoint* base) {
    SourceLocation loc = base->spec().getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_base_requires_tracing_note_)
        << base->info()->record();
  }

  void NoteFieldRequiresTracing(RecordInfo* holder, FieldDecl* field) {
    NoteField(field, diag_field_requires_tracing_note_);
  }

  void NotePartObjectContainsGCRoot(FieldPoint* point) {
    FieldDecl* field = point->field();
    SourceLocation loc = field->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_part_object_contains_gc_root_note_)
        << field << field->getParent();
  }

  void NoteFieldContainsGCRoot(FieldPoint* point) {
    NoteField(point, diag_field_contains_gc_root_note_);
  }

  void NoteUserDeclaredDestructor(CXXMethodDecl* dtor) {
    SourceLocation loc = dtor->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_user_declared_destructor_note_);
  }

  void NoteUserDeclaredFinalizer(CXXMethodDecl* dtor) {
    SourceLocation loc = dtor->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_user_declared_finalizer_note_);
  }

  void NoteBaseRequiresFinalization(BasePoint* base) {
    SourceLocation loc = base->spec().getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_base_requires_finalization_note_)
        << base->info()->record();
  }

  void NoteField(FieldPoint* point, unsigned note) {
    NoteField(point->field(), note);
  }

  void NoteField(FieldDecl* field, unsigned note) {
    SourceLocation loc = field->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, note) << field;
  }

  void NoteOverriddenNonVirtualTrace(CXXMethodDecl* overridden) {
    SourceLocation loc = overridden->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_note_)
        << overridden;
  }

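  // The diagnostic IDs below are custom IDs of the kind produced by
  // DiagnosticsEngine::getCustomDiagID(); a minimal sketch of how one would
  // typically be registered (the registration itself is not part of this
  // section):
  //   diag_class_requires_trace_method_ =
  //       diagnostic_.getCustomDiagID(getErrorLevel(),
  //                                   kClassRequiresTraceMethod);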
  unsigned diag_class_must_left_mostly_derive_gc_;
  unsigned diag_class_requires_trace_method_;
  unsigned diag_base_requires_tracing_;
  unsigned diag_fields_require_tracing_;
  unsigned diag_class_contains_invalid_fields_;
  unsigned diag_class_contains_invalid_fields_warning_;
  unsigned diag_class_contains_gc_root_;
  unsigned diag_class_requires_finalization_;
  unsigned diag_class_does_not_require_finalization_;
  unsigned diag_finalizer_accesses_finalized_field_;
  unsigned diag_finalizer_eagerly_finalized_field_;
  unsigned diag_overridden_non_virtual_trace_;
  unsigned diag_missing_trace_dispatch_method_;
  unsigned diag_missing_finalize_dispatch_method_;
  unsigned diag_virtual_and_manual_dispatch_;
  unsigned diag_missing_trace_dispatch_;
  unsigned diag_missing_finalize_dispatch_;
  unsigned diag_derives_non_stack_allocated_;
  unsigned diag_class_overrides_new_;
  unsigned diag_class_declares_pure_virtual_trace_;
  unsigned diag_left_most_base_must_be_polymorphic_;
  unsigned diag_base_class_must_declare_virtual_trace_;
  unsigned diag_class_must_declare_gc_mixin_trace_method_;

  unsigned diag_base_requires_tracing_note_;
  unsigned diag_field_requires_tracing_note_;
  unsigned diag_raw_ptr_to_gc_managed_class_note_;
  unsigned diag_ref_ptr_to_gc_managed_class_note_;
  unsigned diag_own_ptr_to_gc_managed_class_note_;
  unsigned diag_member_to_gc_unmanaged_class_note_;
  unsigned diag_stack_allocated_field_note_;
  unsigned diag_member_in_unmanaged_class_note_;
  unsigned diag_part_object_to_gc_derived_class_note_;
  unsigned diag_part_object_contains_gc_root_note_;
  unsigned diag_field_contains_gc_root_note_;
  unsigned diag_finalized_field_note_;
  unsigned diag_eagerly_finalized_field_note_;
  unsigned diag_user_declared_destructor_note_;
  unsigned diag_user_declared_finalizer_note_;
  unsigned diag_base_requires_finalization_note_;
  unsigned diag_field_requires_finalization_note_;
  unsigned diag_overridden_non_virtual_trace_note_;
  unsigned diag_manual_dispatch_method_note_;

  CompilerInstance& instance_;
  DiagnosticsEngine& diagnostic_;
  BlinkGCPluginOptions options_;
};

class BlinkGCPluginAction : public PluginASTAction {
 public:
  BlinkGCPluginAction() {}

 protected:
  // Overridden from PluginASTAction:
  virtual std::unique_ptr<ASTConsumer> CreateASTConsumer(
      CompilerInstance& instance,
      llvm::StringRef ref) {
    return llvm::make_unique<BlinkGCPluginConsumer>(instance, options_);
  }

  virtual bool ParseArgs(const CompilerInstance& instance,
                         const std::vector<string>& args) {
    bool parsed = true;

    for (size_t i = 0; i < args.size() && parsed; ++i) {
      if (args[i] == "enable-oilpan") {
        options_.enable_oilpan = true;
      } else if (args[i] == "dump-graph") {
        options_.dump_graph = true;
      } else if (args[i] == "warn-raw-ptr") {
        options_.warn_raw_ptr = true;
      } else if (args[i] == "warn-unneeded-finalizer") {
        options_.warn_unneeded_finalizer = true;
      } else {
        parsed = false;
        llvm::errs() << "Unknown blink-gc-plugin argument: " << args[i] << "\n";
      }
    }

    return parsed;
  }

 private:
  BlinkGCPluginOptions options_;
};

static FrontendPluginRegistry::Add<BlinkGCPluginAction> X(
    "blink-gc-plugin",
    "Check Blink GC invariants");