// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This clang plugin checks various invariants of the Blink garbage
// collection infrastructure.
//
// Errors are described at:
// http://www.chromium.org/developers/blink-gc-plugin-errors

#include "JsonWriter.h"
#include "RecordInfo.h"

#include "clang/AST/AST.h"
#include "clang/AST/ASTConsumer.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/Frontend/CompilerInstance.h"
#include "clang/Frontend/FrontendPluginRegistry.h"
#include "clang/Sema/Sema.h"

using namespace clang;
using std::string;
const char kClassMustLeftMostlyDeriveGC[] =
    "[blink-gc] Class %0 must derive its GC base in the left-most position.";

const char kClassRequiresTraceMethod[] =
    "[blink-gc] Class %0 requires a trace method.";

const char kBaseRequiresTracing[] =
    "[blink-gc] Base class %0 of derived class %1 requires tracing.";

const char kBaseRequiresTracingNote[] =
    "[blink-gc] Untraced base class %0 declared here:";

const char kFieldsRequireTracing[] =
    "[blink-gc] Class %0 has untraced fields that require tracing.";

const char kFieldRequiresTracingNote[] =
    "[blink-gc] Untraced field %0 declared here:";

const char kClassContainsInvalidFields[] =
    "[blink-gc] Class %0 contains invalid fields.";

const char kClassContainsGCRoot[] =
    "[blink-gc] Class %0 contains GC root in field %1.";

const char kClassRequiresFinalization[] =
    "[blink-gc] Class %0 requires finalization.";

const char kClassDoesNotRequireFinalization[] =
    "[blink-gc] Class %0 may not require finalization.";

const char kFinalizerAccessesFinalizedField[] =
    "[blink-gc] Finalizer %0 accesses potentially finalized field %1.";

const char kFinalizerAccessesEagerlyFinalizedField[] =
    "[blink-gc] Finalizer %0 accesses eagerly finalized field %1.";

const char kRawPtrToGCManagedClassNote[] =
    "[blink-gc] Raw pointer field %0 to a GC managed class declared here:";

const char kRefPtrToGCManagedClassNote[] =
    "[blink-gc] RefPtr field %0 to a GC managed class declared here:";

const char kReferencePtrToGCManagedClassNote[] =
    "[blink-gc] Reference pointer field %0 to a GC managed class"
    " declared here:";

const char kOwnPtrToGCManagedClassNote[] =
    "[blink-gc] OwnPtr field %0 to a GC managed class declared here:";

const char kMemberToGCUnmanagedClassNote[] =
    "[blink-gc] Member field %0 to non-GC managed class declared here:";

const char kStackAllocatedFieldNote[] =
    "[blink-gc] Stack-allocated field %0 declared here:";

const char kMemberInUnmanagedClassNote[] =
    "[blink-gc] Member field %0 in unmanaged class declared here:";

const char kPartObjectToGCDerivedClassNote[] =
    "[blink-gc] Part-object field %0 to a GC derived class declared here:";

const char kPartObjectContainsGCRootNote[] =
    "[blink-gc] Field %0 with embedded GC root in %1 declared here:";

const char kFieldContainsGCRootNote[] =
    "[blink-gc] Field %0 defining a GC root declared here:";

const char kOverriddenNonVirtualTrace[] =
    "[blink-gc] Class %0 overrides non-virtual trace of base class %1.";

const char kOverriddenNonVirtualTraceNote[] =
    "[blink-gc] Non-virtual trace method declared here:";

const char kMissingTraceDispatchMethod[] =
    "[blink-gc] Class %0 is missing manual trace dispatch.";

const char kMissingFinalizeDispatchMethod[] =
    "[blink-gc] Class %0 is missing manual finalize dispatch.";

const char kVirtualAndManualDispatch[] =
    "[blink-gc] Class %0 contains or inherits virtual methods"
    " but implements manual dispatching.";

const char kMissingTraceDispatch[] =
    "[blink-gc] Missing dispatch to class %0 in manual trace dispatch.";

const char kMissingFinalizeDispatch[] =
    "[blink-gc] Missing dispatch to class %0 in manual finalize dispatch.";

const char kFinalizedFieldNote[] =
    "[blink-gc] Potentially finalized field %0 declared here:";

const char kEagerlyFinalizedFieldNote[] =
    "[blink-gc] Field %0 having eagerly finalized value, declared here:";

const char kUserDeclaredDestructorNote[] =
    "[blink-gc] User-declared destructor declared here:";

const char kUserDeclaredFinalizerNote[] =
    "[blink-gc] User-declared finalizer declared here:";

const char kBaseRequiresFinalizationNote[] =
    "[blink-gc] Base class %0 requiring finalization declared here:";

const char kFieldRequiresFinalizationNote[] =
    "[blink-gc] Field %0 requiring finalization declared here:";

const char kManualDispatchMethodNote[] =
    "[blink-gc] Manual dispatch %0 declared here:";

const char kDerivesNonStackAllocated[] =
    "[blink-gc] Stack-allocated class %0 derives class %1"
    " which is not stack allocated.";

const char kClassOverridesNew[] =
    "[blink-gc] Garbage collected class %0"
    " is not permitted to override its new operator.";

const char kClassDeclaresPureVirtualTrace[] =
    "[blink-gc] Garbage collected class %0"
    " is not permitted to declare a pure-virtual trace method.";

const char kLeftMostBaseMustBePolymorphic[] =
    "[blink-gc] Left-most base class %0 of derived class %1"
    " must be polymorphic.";

const char kBaseClassMustDeclareVirtualTrace[] =
    "[blink-gc] Left-most base class %0 of derived class %1"
    " must define a virtual trace method.";

const char kClassMustDeclareGCMixinTraceMethod[] =
    "[blink-gc] Class %0 which inherits from GarbageCollectedMixin must"
    " locally declare and override trace(Visitor*)";
// Use a local RAV implementation to simply collect all FunctionDecls marked
// for late template parsing. This happens with the flag
// -fdelayed-template-parsing, which is on by default in MSVC-compatible mode.
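// For example, under -fdelayed-template-parsing the body of a templated trace
// method such as the following (hypothetical class, not from Blink)
//
//   template <typename T>
//   void HeapHolder<T>::trace(Visitor* visitor) { visitor->trace(m_field); }
//
// is skipped by the parser. BlinkGCPluginConsumer::ParseFunctionTemplates
// later forces these bodies to be parsed so the checks can see them.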
std::set<FunctionDecl*> GetLateParsedFunctionDecls(TranslationUnitDecl* decl) {
  struct Visitor : public RecursiveASTVisitor<Visitor> {
    bool VisitFunctionDecl(FunctionDecl* function_decl) {
      if (function_decl->isLateTemplateParsed())
        late_parsed_decls.insert(function_decl);
      return true;
    }

    std::set<FunctionDecl*> late_parsed_decls;
  } v;

  v.TraverseDecl(decl);
  return v.late_parsed_decls;
}
struct BlinkGCPluginOptions {
  BlinkGCPluginOptions()
      : enable_oilpan(false)
      , dump_graph(false)
      , warn_raw_ptr(false)
      , warn_unneeded_finalizer(false) {}
  bool enable_oilpan;
  bool dump_graph;
  bool warn_raw_ptr;
  bool warn_unneeded_finalizer;
  std::set<std::string> ignored_classes;
  std::set<std::string> checked_namespaces;
  std::vector<std::string> ignored_directories;
};
typedef std::vector<CXXRecordDecl*> RecordVector;
typedef std::vector<CXXMethodDecl*> MethodVector;
// Test if a template specialization is an instantiation.
static bool IsTemplateInstantiation(CXXRecordDecl* record) {
  ClassTemplateSpecializationDecl* spec =
      dyn_cast<ClassTemplateSpecializationDecl>(record);
  if (!spec)
    return false;
  switch (spec->getTemplateSpecializationKind()) {
    case TSK_ImplicitInstantiation:
    case TSK_ExplicitInstantiationDefinition:
      return true;
    case TSK_Undeclared:
    case TSK_ExplicitSpecialization:
      return false;
    // TODO: unsupported cases.
    case TSK_ExplicitInstantiationDeclaration:
      return false;
  }
  assert(false && "Unknown template specialization kind");
}
// This visitor collects the entry points for the checker.
class CollectVisitor : public RecursiveASTVisitor<CollectVisitor> {
 public:
  CollectVisitor() {}

  RecordVector& record_decls() { return record_decls_; }
  MethodVector& trace_decls() { return trace_decls_; }

  bool shouldVisitTemplateInstantiations() { return false; }

  // Collect record declarations, including nested declarations.
  bool VisitCXXRecordDecl(CXXRecordDecl* record) {
    if (record->hasDefinition() && record->isCompleteDefinition())
      record_decls_.push_back(record);
    return true;
  }

  // Collect tracing method definitions, but don't traverse method bodies.
  bool TraverseCXXMethodDecl(CXXMethodDecl* method) {
    if (method->isThisDeclarationADefinition() && Config::IsTraceMethod(method))
      trace_decls_.push_back(method);
    return true;
  }

 private:
  RecordVector record_decls_;
  MethodVector trace_decls_;
};
// This visitor checks that a finalizer method does not have invalid access to
// fields that are potentially finalized. A potentially finalized field is
// either a Member, a heap-allocated collection or an off-heap collection that
// contains Members. Invalid uses are currently identified as passing the
// field as the argument of a procedure call or using the -> or [] operators
// on it.
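// A hypothetical example of the pattern this rejects (class names are
// illustrative only, not taken from Blink):
//
//   class Widget : public GarbageCollectedFinalized<Widget> {
//    public:
//     ~Widget() { m_child->detach(); }  // error: |m_child| is a Member and
//    private:                           // may already have been finalized.
//     Member<Node> m_child;
//   };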
class CheckFinalizerVisitor
    : public RecursiveASTVisitor<CheckFinalizerVisitor> {
 private:
  // Simple visitor to determine if the content of a field might be collected
  // during finalization.
  class MightBeCollectedVisitor : public EdgeVisitor {
   public:
    MightBeCollectedVisitor(bool is_eagerly_finalized)
        : might_be_collected_(false)
        , is_eagerly_finalized_(is_eagerly_finalized)
        , as_eagerly_finalized_(false) {}
    bool might_be_collected() { return might_be_collected_; }
    bool as_eagerly_finalized() { return as_eagerly_finalized_; }
    void VisitMember(Member* edge) override {
      if (is_eagerly_finalized_) {
        if (edge->ptr()->IsValue()) {
          Value* member = static_cast<Value*>(edge->ptr());
          if (member->value()->IsEagerlyFinalized()) {
            might_be_collected_ = true;
            as_eagerly_finalized_ = true;
          }
        }
        return;
      }
      might_be_collected_ = true;
    }
    void VisitCollection(Collection* edge) override {
      if (edge->on_heap() && !is_eagerly_finalized_) {
        might_be_collected_ = !edge->is_root();
      } else {
        edge->AcceptMembers(this);
      }
    }

   private:
    bool might_be_collected_;
    bool is_eagerly_finalized_;
    bool as_eagerly_finalized_;
  };

 public:
  struct Error {
    Error(MemberExpr* member,
          bool as_eagerly_finalized,
          FieldPoint* field)
        : member_(member)
        , as_eagerly_finalized_(as_eagerly_finalized)
        , field_(field) {}

    MemberExpr* member_;
    bool as_eagerly_finalized_;
    FieldPoint* field_;
  };

  typedef std::vector<Error> Errors;
  CheckFinalizerVisitor(RecordCache* cache, bool is_eagerly_finalized)
      : blacklist_context_(false)
      , cache_(cache)
      , is_eagerly_finalized_(is_eagerly_finalized) {}

  Errors& finalized_fields() { return finalized_fields_; }

  bool WalkUpFromCXXOperatorCallExpr(CXXOperatorCallExpr* expr) {
    // Only continue the walk-up if the operator is a blacklisted one.
    switch (expr->getOperator()) {
      case OO_Arrow:
      case OO_Subscript:
        this->WalkUpFromCallExpr(expr);
      default:
        return true;
    }
  }

  // We consider all non-operator calls to be blacklisted contexts.
  bool WalkUpFromCallExpr(CallExpr* expr) {
    bool prev_blacklist_context = blacklist_context_;
    blacklist_context_ = true;
    for (size_t i = 0; i < expr->getNumArgs(); ++i)
      this->TraverseStmt(expr->getArg(i));
    blacklist_context_ = prev_blacklist_context;
    return true;
  }

  bool VisitMemberExpr(MemberExpr* member) {
    FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl());
    if (!field)
      return true;

    RecordInfo* info = cache_->Lookup(field->getParent());
    if (!info)
      return true;

    RecordInfo::Fields::iterator it = info->GetFields().find(field);
    if (it == info->GetFields().end())
      return true;

    if (seen_members_.find(member) != seen_members_.end())
      return true;

    bool as_eagerly_finalized = false;
    if (blacklist_context_ &&
        MightBeCollected(&it->second, as_eagerly_finalized)) {
      finalized_fields_.push_back(
          Error(member, as_eagerly_finalized, &it->second));
      seen_members_.insert(member);
    }
    return true;
  }

  bool MightBeCollected(FieldPoint* point, bool& as_eagerly_finalized) {
    MightBeCollectedVisitor visitor(is_eagerly_finalized_);
    point->edge()->Accept(&visitor);
    as_eagerly_finalized = visitor.as_eagerly_finalized();
    return visitor.might_be_collected();
  }

 private:
  bool blacklist_context_;
  Errors finalized_fields_;
  std::set<MemberExpr*> seen_members_;
  RecordCache* cache_;
  bool is_eagerly_finalized_;
};
// This visitor checks that a method contains, within its body, a call to a
// method on the provided receiver class. This is used to check manual
// dispatching for trace and finalize methods.
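// A hypothetical manually dispatched trace (names are illustrative):
//
//   void Shape::trace(Visitor* visitor) {
//     if (m_isCircle)
//       static_cast<Circle*>(this)->traceAfterDispatch(visitor);
//     else
//       static_cast<Square*>(this)->traceAfterDispatch(visitor);
//   }
//
// With Circle as the receiver, this visitor looks for a call to some Circle
// method (here traceAfterDispatch) inside the dispatch body.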
class CheckDispatchVisitor : public RecursiveASTVisitor<CheckDispatchVisitor> {
 public:
  CheckDispatchVisitor(RecordInfo* receiver)
      : receiver_(receiver), dispatched_to_receiver_(false) {}

  bool dispatched_to_receiver() { return dispatched_to_receiver_; }

  bool VisitMemberExpr(MemberExpr* member) {
    if (CXXMethodDecl* fn = dyn_cast<CXXMethodDecl>(member->getMemberDecl())) {
      if (fn->getParent() == receiver_->record())
        dispatched_to_receiver_ = true;
    }
    return true;
  }

  bool VisitUnresolvedMemberExpr(UnresolvedMemberExpr* member) {
    for (Decl* decl : member->decls()) {
      if (CXXMethodDecl* method = dyn_cast<CXXMethodDecl>(decl)) {
        if (method->getParent() == receiver_->record() &&
            Config::GetTraceMethodType(method) ==
            Config::TRACE_AFTER_DISPATCH_METHOD) {
          dispatched_to_receiver_ = true;
          return true;
        }
      }
    }
    return true;
  }

 private:
  RecordInfo* receiver_;
  bool dispatched_to_receiver_;
};
// This visitor checks a tracing method by traversing its body.
// - A member field is considered traced if it is referenced in the body.
// - A base is traced if a base-qualified call to a trace method is found.
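// A hypothetical trace method of the shape this visitor analyzes (class and
// field names are illustrative):
//
//   void Container::trace(Visitor* visitor) {
//     visitor->trace(m_first);     // field |m_first| is marked as traced.
//     SomeBase::trace(visitor);    // base SomeBase is marked as traced.
//   }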
class CheckTraceVisitor : public RecursiveASTVisitor<CheckTraceVisitor> {
 public:
  CheckTraceVisitor(CXXMethodDecl* trace, RecordInfo* info, RecordCache* cache)
      : trace_(trace),
        info_(info),
        cache_(cache),
        delegates_to_traceimpl_(false) {
  }

  bool delegates_to_traceimpl() const { return delegates_to_traceimpl_; }

  bool VisitMemberExpr(MemberExpr* member) {
    // In weak callbacks, consider any occurrence as a correct usage.
    // TODO: We really want to require that isAlive is checked on manually
    // processed weak fields.
    if (IsWeakCallback()) {
      if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl()))
        FoundField(field);
    }
    return true;
  }

  bool VisitCallExpr(CallExpr* call) {
    // In weak callbacks we don't check calls (see VisitMemberExpr).
    if (IsWeakCallback())
      return true;

    Expr* callee = call->getCallee();

    // Trace calls from a templated derived class result in a
    // DependentScopeMemberExpr because the concrete trace call depends on the
    // instantiation of any shared template parameters. In this case the call
    // is "unresolved" and we resort to comparing the syntactic type names.
    if (CXXDependentScopeMemberExpr* expr =
            dyn_cast<CXXDependentScopeMemberExpr>(callee)) {
      CheckCXXDependentScopeMemberExpr(call, expr);
      return true;
    }

    // A tracing call will have either a |visitor| or a |m_field| argument.
    // A registerWeakMembers call will have a |this| argument.
    if (call->getNumArgs() != 1)
      return true;

    Expr* arg = call->getArg(0);

    if (UnresolvedMemberExpr* expr = dyn_cast<UnresolvedMemberExpr>(callee)) {
      // This could be a trace call of a base class, as explained in the
      // comments of CheckTraceBaseCall().
      if (CheckTraceBaseCall(call))
        return true;

      if (expr->getMemberName().getAsString() == kRegisterWeakMembersName)
        MarkAllWeakMembersTraced();

      QualType base = expr->getBaseType();
      if (!base->isPointerType())
        return true;
      CXXRecordDecl* decl = base->getPointeeType()->getAsCXXRecordDecl();
      if (decl)
        CheckTraceFieldCall(expr->getMemberName().getAsString(), decl, arg);
      if (Config::IsTraceImplName(expr->getMemberName().getAsString()))
        delegates_to_traceimpl_ = true;
      return true;
    }

    if (CXXMemberCallExpr* expr = dyn_cast<CXXMemberCallExpr>(call)) {
      if (CheckTraceFieldCall(expr) || CheckRegisterWeakMembers(expr))
        return true;

      if (Config::IsTraceImplName(expr->getMethodDecl()->getNameAsString())) {
        delegates_to_traceimpl_ = true;
        return true;
      }
    }

    CheckTraceBaseCall(call);
    return true;
  }

 private:
  bool IsTraceCallName(const std::string& name) {
    if (trace_->getName() == kTraceImplName)
      return name == kTraceName;
    if (trace_->getName() == kTraceAfterDispatchImplName)
      return name == kTraceAfterDispatchName;
    // Currently, a manually dispatched class cannot have mixin bases (having
    // one would add a vtable which we explicitly check against). This means
    // that we can only make calls to a trace method of the same name. Revisit
    // this if our mixin/vtable assumption changes.
    return name == trace_->getName();
  }
  CXXRecordDecl* GetDependentTemplatedDecl(CXXDependentScopeMemberExpr* expr) {
    NestedNameSpecifier* qual = expr->getQualifier();
    if (!qual)
      return 0;
    const Type* type = qual->getAsType();
    if (!type)
      return 0;
    return RecordInfo::GetDependentTemplatedDecl(*type);
  }
  void CheckCXXDependentScopeMemberExpr(CallExpr* call,
                                        CXXDependentScopeMemberExpr* expr) {
    string fn_name = expr->getMember().getAsString();

    // Check for VisitorDispatcher::trace(field) and
    // VisitorDispatcher::registerWeakMembers.
    if (!expr->isImplicitAccess()) {
      if (clang::DeclRefExpr* base_decl =
              clang::dyn_cast<clang::DeclRefExpr>(expr->getBase())) {
        if (Config::IsVisitorDispatcherType(base_decl->getType())) {
          if (call->getNumArgs() == 1 && fn_name == kTraceName) {
            FindFieldVisitor finder;
            finder.TraverseStmt(call->getArg(0));
            if (finder.field())
              FoundField(finder.field());
            return;
          } else if (call->getNumArgs() == 1 &&
                     fn_name == kRegisterWeakMembersName) {
            MarkAllWeakMembersTraced();
            return;
          }
        }
      }
    }

    CXXRecordDecl* tmpl = GetDependentTemplatedDecl(expr);
    if (!tmpl)
      return;

    // Check for Super<T>::trace(visitor)
    if (call->getNumArgs() == 1 && IsTraceCallName(fn_name)) {
      RecordInfo::Bases::iterator it = info_->GetBases().begin();
      for (; it != info_->GetBases().end(); ++it) {
        if (it->first->getName() == tmpl->getName())
          it->second.MarkTraced();
      }
    }

    // Check for TraceIfNeeded<T>::trace(visitor, &field)
    if (call->getNumArgs() == 2 && fn_name == kTraceName &&
        tmpl->getName() == kTraceIfNeededName) {
      FindFieldVisitor finder;
      finder.TraverseStmt(call->getArg(1));
      if (finder.field())
        FoundField(finder.field());
    }
  }
  bool CheckTraceBaseCall(CallExpr* call) {
    // Checks for "Base::trace(visitor)"-like calls.
    //
    // Checking code for these two variables is shared among the MemberExpr*
    // case and the UnresolvedMemberExpr* case below.
    //
    // For example, if we've got "Base::trace(visitor)" as |call|,
    // callee_record will be "Base", and func_name will be "trace".
    CXXRecordDecl* callee_record = nullptr;
    std::string func_name;

    if (MemberExpr* callee = dyn_cast<MemberExpr>(call->getCallee())) {
      if (!callee->hasQualifier())
        return false;

      FunctionDecl* trace_decl =
          dyn_cast<FunctionDecl>(callee->getMemberDecl());
      if (!trace_decl || !Config::IsTraceMethod(trace_decl))
        return false;

      const Type* type = callee->getQualifier()->getAsType();
      if (!type)
        return false;

      callee_record = type->getAsCXXRecordDecl();
      func_name = trace_decl->getName();
    } else if (UnresolvedMemberExpr* callee =
                   dyn_cast<UnresolvedMemberExpr>(call->getCallee())) {
      // Callee part may become unresolved if the type of the argument
      // ("visitor") is a template parameter and the called function is
      // overloaded (i.e. trace(Visitor*) and
      // trace(InlinedGlobalMarkingVisitor)).
      //
      // Here, we try to find a function that looks like trace() from the
      // candidate overloaded functions, and if we find one, we assume it is
      // trace().
      CXXMethodDecl* trace_decl = nullptr;
      for (NamedDecl* named_decl : callee->decls()) {
        if (CXXMethodDecl* method_decl = dyn_cast<CXXMethodDecl>(named_decl)) {
          if (Config::IsTraceMethod(method_decl)) {
            trace_decl = method_decl;
            break;
          }
        }
      }
      if (!trace_decl)
        return false;

      // Check if the passed argument is named "visitor".
      if (call->getNumArgs() != 1)
        return false;
      DeclRefExpr* arg = dyn_cast<DeclRefExpr>(call->getArg(0));
      if (!arg || arg->getNameInfo().getAsString() != kVisitorVarName)
        return false;

      callee_record = trace_decl->getParent();
      func_name = callee->getMemberName().getAsString();
    }

    if (!callee_record)
      return false;

    if (!IsTraceCallName(func_name))
      return false;

    for (auto& base : info_->GetBases()) {
      // We want to deal with an omitted trace() function in an intermediary
      // class in the class hierarchy, e.g.:
      //     class A : public GarbageCollected<A> { trace() { ... } };
      //     class B : public A { /* No trace(); have nothing to trace. */ };
      //     class C : public B { trace() { B::trace(visitor); } }
      // where B::trace() is actually A::trace(), and in some cases we get
      // A as |callee_record| instead of B. We somehow need to mark B as
      // traced if we find an A::trace() call.
      //
      // To solve this, here we keep going up the class hierarchy as long as
      // they are not required to have a trace method. The implementation is
      // a simple DFS, where |base_records| represents the set of base classes
      // we still need to visit.
      std::vector<CXXRecordDecl*> base_records;
      base_records.push_back(base.first);

      while (!base_records.empty()) {
        CXXRecordDecl* base_record = base_records.back();
        base_records.pop_back();

        if (base_record == callee_record) {
          // If we find a matching trace method, pretend the user has written
          // a correct trace() method of the base; in the example above, we
          // find A::trace() here and mark B as correctly traced.
          base.second.MarkTraced();
          return true;
        }

        if (RecordInfo* base_info = cache_->Lookup(base_record)) {
          if (!base_info->RequiresTraceMethod()) {
            // If this base class is not required to have a trace method, then
            // the actual trace method may be defined in an ancestor.
            for (auto& inner_base : base_info->GetBases())
              base_records.push_back(inner_base.first);
          }
        }
      }
    }

    return false;
  }
  bool CheckTraceFieldCall(CXXMemberCallExpr* call) {
    return CheckTraceFieldCall(call->getMethodDecl()->getNameAsString(),
                               call->getRecordDecl(),
                               call->getArg(0));
  }

  bool CheckTraceFieldCall(string name, CXXRecordDecl* callee, Expr* arg) {
    if (name != kTraceName || !Config::IsVisitor(callee->getName()))
      return false;

    FindFieldVisitor finder;
    finder.TraverseStmt(arg);
    if (finder.field())
      FoundField(finder.field());
    return true;
  }
  bool CheckRegisterWeakMembers(CXXMemberCallExpr* call) {
    CXXMethodDecl* fn = call->getMethodDecl();
    if (fn->getName() != kRegisterWeakMembersName)
      return false;

    if (fn->isTemplateInstantiation()) {
      const TemplateArgumentList& args =
          *fn->getTemplateSpecializationInfo()->TemplateArguments;
      // The second template argument is the callback method.
      if (args.size() > 1 &&
          args[1].getKind() == TemplateArgument::Declaration) {
        if (FunctionDecl* callback =
                dyn_cast<FunctionDecl>(args[1].getAsDecl())) {
          if (callback->hasBody()) {
            CheckTraceVisitor nested_visitor(info_);
            nested_visitor.TraverseStmt(callback->getBody());
          }
        }
      }
    }
    return true;
  }
  class FindFieldVisitor : public RecursiveASTVisitor<FindFieldVisitor> {
   public:
    FindFieldVisitor() : member_(0), field_(0) {}
    MemberExpr* member() const { return member_; }
    FieldDecl* field() const { return field_; }
    bool TraverseMemberExpr(MemberExpr* member) {
      if (FieldDecl* field = dyn_cast<FieldDecl>(member->getMemberDecl())) {
        member_ = member;
        field_ = field;
        return false;
      }
      return true;
    }

   private:
    MemberExpr* member_;
    FieldDecl* field_;
  };
  // Nested checking for weak callbacks.
  CheckTraceVisitor(RecordInfo* info)
      : trace_(nullptr), info_(info), cache_(nullptr) {}

  bool IsWeakCallback() { return !trace_; }

  void MarkTraced(RecordInfo::Fields::iterator it) {
    // In a weak callback we can't mark strong fields as traced.
    if (IsWeakCallback() && !it->second.edge()->IsWeakMember())
      return;
    it->second.MarkTraced();
  }

  void FoundField(FieldDecl* field) {
    if (IsTemplateInstantiation(info_->record())) {
      // Pointer equality on fields does not work for template instantiations.
      // The trace method refers to fields of the template definition which
      // are different from the instantiated fields that need to be traced.
      const string& name = field->getNameAsString();
      for (RecordInfo::Fields::iterator it = info_->GetFields().begin();
           it != info_->GetFields().end();
           ++it) {
        if (it->first->getNameAsString() == name) {
          MarkTraced(it);
          break;
        }
      }
    } else {
      RecordInfo::Fields::iterator it = info_->GetFields().find(field);
      if (it != info_->GetFields().end())
        MarkTraced(it);
    }
  }

  void MarkAllWeakMembersTraced() {
    // If we find a call to registerWeakMembers which is unresolved we
    // unsoundly consider all weak members as traced.
    // TODO: Find out how to validate weak member tracing for unresolved call.
    for (auto& field : info_->GetFields()) {
      if (field.second.edge()->IsWeakMember())
        field.second.MarkTraced();
    }
  }

  CXXMethodDecl* trace_;
  RecordInfo* info_;
  RecordCache* cache_;
  bool delegates_to_traceimpl_;
};
// This visitor checks that the fields of a class and the fields of
// its part objects don't define GC roots.
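// A hypothetical violation (class names are illustrative):
//
//   class Registry : public GarbageCollected<Registry> {
//    private:
//     Persistent<Node> m_node;  // error: a GC root embedded in a GC managed
//   };                          // class; flagged as kClassContainsGCRoot.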
class CheckGCRootsVisitor : public RecursiveEdgeVisitor {
 public:
  typedef std::vector<FieldPoint*> RootPath;
  typedef std::set<RecordInfo*> VisitingSet;
  typedef std::vector<RootPath> Errors;

  CheckGCRootsVisitor() {}

  Errors& gc_roots() { return gc_roots_; }

  bool ContainsGCRoots(RecordInfo* info) {
    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
         ++it) {
      current_.push_back(&it->second);
      it->second.edge()->Accept(this);
      current_.pop_back();
    }
    return !gc_roots_.empty();
  }

  void VisitValue(Value* edge) override {
    // TODO: what should we do to check unions?
    if (edge->value()->record()->isUnion())
      return;

    // Prevent infinite regress for cyclic part objects.
    if (visiting_set_.find(edge->value()) != visiting_set_.end())
      return;

    visiting_set_.insert(edge->value());
    // If the value is a part object, then continue checking for roots.
    for (Context::iterator it = context().begin();
         it != context().end();
         ++it) {
      if (!(*it)->IsCollection())
        return;
    }
    ContainsGCRoots(edge->value());
    visiting_set_.erase(edge->value());
  }

  void VisitPersistent(Persistent* edge) override {
    gc_roots_.push_back(current_);
  }

  void AtCollection(Collection* edge) override {
    if (edge->is_root())
      gc_roots_.push_back(current_);
  }

 protected:
  RootPath current_;
  VisitingSet visiting_set_;
  Errors gc_roots_;
};
// This visitor checks that the fields of a class are "well formed".
// - OwnPtr, RefPtr and RawPtr must not point to GC derived types.
// - Part objects must not be GC derived types.
// - An on-heap class must never contain GC roots.
// - Only stack-allocated types may point to stack-allocated types.
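// Hypothetical examples of invalid fields, assuming HeapObject derives from
// GarbageCollected<HeapObject> (names are illustrative):
//
//   class Holder {
//     RefPtr<HeapObject> m_a;   // error: RefPtr to a GC managed class.
//     OwnPtr<HeapObject> m_b;   // error: OwnPtr to a GC managed class.
//     HeapObject m_c;           // error: part object of a GC derived class.
//   };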
class CheckFieldsVisitor : public RecursiveEdgeVisitor {
 public:
  enum Error {
    kRawPtrToGCManaged,
    kRawPtrToGCManagedWarning,
    kRefPtrToGCManaged,
    kReferencePtrToGCManaged,
    kReferencePtrToGCManagedWarning,
    kOwnPtrToGCManaged,
    kMemberToGCUnmanaged,
    kMemberInUnmanaged,
    kPtrFromHeapToStack,
    kGCDerivedPartObject
  };

  typedef std::vector<std::pair<FieldPoint*, Error> > Errors;
  CheckFieldsVisitor(const BlinkGCPluginOptions& options)
      : options_(options), current_(0), stack_allocated_host_(false) {}

  Errors& invalid_fields() { return invalid_fields_; }

  bool ContainsInvalidFields(RecordInfo* info) {
    stack_allocated_host_ = info->IsStackAllocated();
    managed_host_ = stack_allocated_host_ ||
                    info->IsGCAllocated() ||
                    info->IsNonNewable() ||
                    info->IsOnlyPlacementNewable();
    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
         ++it) {
      current_ = &it->second;
      current_->edge()->Accept(this);
    }
    return !invalid_fields_.empty();
  }

  void AtMember(Member* edge) override {
    if (managed_host_)
      return;
    // A member is allowed to appear in the context of a root.
    for (Context::iterator it = context().begin();
         it != context().end();
         ++it) {
      if ((*it)->Kind() == Edge::kRoot)
        return;
    }
    invalid_fields_.push_back(std::make_pair(current_, kMemberInUnmanaged));
  }

  void AtValue(Value* edge) override {
    // TODO: what should we do to check unions?
    if (edge->value()->record()->isUnion())
      return;

    if (!stack_allocated_host_ && edge->value()->IsStackAllocated()) {
      invalid_fields_.push_back(std::make_pair(current_, kPtrFromHeapToStack));
      return;
    }

    if (!Parent() &&
        edge->value()->IsGCDerived() &&
        !edge->value()->IsGCMixin()) {
      invalid_fields_.push_back(std::make_pair(current_, kGCDerivedPartObject));
      return;
    }

    // If in a stack allocated context, be fairly insistent that T in Member<T>
    // is GC allocated, as stack allocated objects do not have a trace()
    // that separately verifies the validity of Member<T>.
    //
    // Notice that an error is only reported if T's definition is in scope;
    // we do not require that it must be brought into scope as that would
    // prevent declarations of mutually dependent class types.
    //
    // (Note: Member<>'s constructor will at run-time verify that the
    // pointer it wraps is indeed heap allocated.)
    if (stack_allocated_host_ && Parent() && Parent()->IsMember() &&
        edge->value()->HasDefinition() && !edge->value()->IsGCAllocated()) {
      invalid_fields_.push_back(std::make_pair(current_,
                                               kMemberToGCUnmanaged));
      return;
    }

    if (!Parent() || !edge->value()->IsGCAllocated())
      return;

    // In transition mode, disallow OwnPtr<T>, RawPtr<T> to GC allocated T's,
    // also disallow T* in stack-allocated types.
    if (options_.enable_oilpan) {
      if (Parent()->IsOwnPtr() ||
          Parent()->IsRawPtrClass() ||
          (stack_allocated_host_ && Parent()->IsRawPtr())) {
        invalid_fields_.push_back(std::make_pair(
            current_, InvalidSmartPtr(Parent())));
        return;
      }
      if (options_.warn_raw_ptr && Parent()->IsRawPtr()) {
        if (static_cast<RawPtr*>(Parent())->HasReferenceType()) {
          invalid_fields_.push_back(std::make_pair(
              current_, kReferencePtrToGCManagedWarning));
        } else {
          invalid_fields_.push_back(std::make_pair(
              current_, kRawPtrToGCManagedWarning));
        }
      }
      return;
    }

    if (Parent()->IsRawPtr() || Parent()->IsRefPtr() || Parent()->IsOwnPtr()) {
      invalid_fields_.push_back(std::make_pair(
          current_, InvalidSmartPtr(Parent())));
      return;
    }
  }

  void AtCollection(Collection* edge) override {
    if (edge->on_heap() && Parent() && Parent()->IsOwnPtr())
      invalid_fields_.push_back(std::make_pair(current_, kOwnPtrToGCManaged));
  }

  static bool IsWarning(Error error) {
    if (error == kRawPtrToGCManagedWarning)
      return true;
    if (error == kReferencePtrToGCManagedWarning)
      return true;
    return false;
  }

  static bool IsRawPtrError(Error error) {
    return error == kRawPtrToGCManaged ||
           error == kRawPtrToGCManagedWarning;
  }

  static bool IsReferencePtrError(Error error) {
    return error == kReferencePtrToGCManaged ||
           error == kReferencePtrToGCManagedWarning;
  }

 private:
  Error InvalidSmartPtr(Edge* ptr) {
    if (ptr->IsRawPtr()) {
      if (static_cast<RawPtr*>(ptr)->HasReferenceType())
        return kReferencePtrToGCManaged;
      return kRawPtrToGCManaged;
    }
    if (ptr->IsRefPtr())
      return kRefPtrToGCManaged;
    if (ptr->IsOwnPtr())
      return kOwnPtrToGCManaged;
    assert(false && "Unknown smart pointer kind");
  }

  const BlinkGCPluginOptions& options_;
  FieldPoint* current_;
  bool stack_allocated_host_;
  bool managed_host_;
  Errors invalid_fields_;
};
class EmptyStmtVisitor
    : public RecursiveASTVisitor<EmptyStmtVisitor> {
 public:
  static bool isEmpty(Stmt* stmt) {
    EmptyStmtVisitor visitor;
    visitor.TraverseStmt(stmt);
    return visitor.empty_;
  }

  bool WalkUpFromCompoundStmt(CompoundStmt* stmt) {
    empty_ = stmt->body_empty();
    return false;
  }
  bool VisitStmt(Stmt*) {
    empty_ = false;
    return false;
  }

 private:
  EmptyStmtVisitor() : empty_(true) {}
  bool empty_;
};
// Main class containing checks for various invariants of the Blink
// garbage collection infrastructure.
class BlinkGCPluginConsumer : public ASTConsumer {
 public:
  BlinkGCPluginConsumer(CompilerInstance& instance,
                        const BlinkGCPluginOptions& options)
      : instance_(instance),
        diagnostic_(instance.getDiagnostics()),
        options_(options),
        json_(0) {
    // Only check structures in the blink and WebKit namespaces.
    options_.checked_namespaces.insert("blink");
    options_.checked_namespaces.insert("WebKit");

    // Ignore GC implementation files.
    options_.ignored_directories.push_back("/heap/");

    // Register warning/error messages.
    diag_class_must_left_mostly_derive_gc_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kClassMustLeftMostlyDeriveGC);
    diag_class_requires_trace_method_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kClassRequiresTraceMethod);
    diag_base_requires_tracing_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kBaseRequiresTracing);
    diag_fields_require_tracing_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kFieldsRequireTracing);
    diag_class_contains_invalid_fields_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kClassContainsInvalidFields);
    diag_class_contains_invalid_fields_warning_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Warning, kClassContainsInvalidFields);
    diag_class_contains_gc_root_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kClassContainsGCRoot);
    diag_class_requires_finalization_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kClassRequiresFinalization);
    diag_class_does_not_require_finalization_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Warning, kClassDoesNotRequireFinalization);
    diag_finalizer_accesses_finalized_field_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kFinalizerAccessesFinalizedField);
    diag_finalizer_eagerly_finalized_field_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kFinalizerAccessesEagerlyFinalizedField);
    diag_overridden_non_virtual_trace_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kOverriddenNonVirtualTrace);
    diag_missing_trace_dispatch_method_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kMissingTraceDispatchMethod);
    diag_missing_finalize_dispatch_method_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kMissingFinalizeDispatchMethod);
    diag_virtual_and_manual_dispatch_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kVirtualAndManualDispatch);
    diag_missing_trace_dispatch_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kMissingTraceDispatch);
    diag_missing_finalize_dispatch_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kMissingFinalizeDispatch);
    diag_derives_non_stack_allocated_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kDerivesNonStackAllocated);
    diag_class_overrides_new_ =
        diagnostic_.getCustomDiagID(getErrorLevel(), kClassOverridesNew);
    diag_class_declares_pure_virtual_trace_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kClassDeclaresPureVirtualTrace);
    diag_left_most_base_must_be_polymorphic_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kLeftMostBaseMustBePolymorphic);
    diag_base_class_must_declare_virtual_trace_ = diagnostic_.getCustomDiagID(
        getErrorLevel(), kBaseClassMustDeclareVirtualTrace);
    diag_class_must_declare_gc_mixin_trace_method_ =
        diagnostic_.getCustomDiagID(getErrorLevel(),
                                    kClassMustDeclareGCMixinTraceMethod);

    // Register note messages.
    diag_base_requires_tracing_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kBaseRequiresTracingNote);
    diag_field_requires_tracing_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kFieldRequiresTracingNote);
    diag_raw_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kRawPtrToGCManagedClassNote);
    diag_ref_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kRefPtrToGCManagedClassNote);
    diag_reference_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kReferencePtrToGCManagedClassNote);
    diag_own_ptr_to_gc_managed_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kOwnPtrToGCManagedClassNote);
    diag_member_to_gc_unmanaged_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kMemberToGCUnmanagedClassNote);
    diag_stack_allocated_field_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kStackAllocatedFieldNote);
    diag_member_in_unmanaged_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kMemberInUnmanagedClassNote);
    diag_part_object_to_gc_derived_class_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kPartObjectToGCDerivedClassNote);
    diag_part_object_contains_gc_root_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kPartObjectContainsGCRootNote);
    diag_field_contains_gc_root_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kFieldContainsGCRootNote);
    diag_finalized_field_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kFinalizedFieldNote);
    diag_eagerly_finalized_field_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kEagerlyFinalizedFieldNote);
    diag_user_declared_destructor_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kUserDeclaredDestructorNote);
    diag_user_declared_finalizer_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kUserDeclaredFinalizerNote);
    diag_base_requires_finalization_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kBaseRequiresFinalizationNote);
    diag_field_requires_finalization_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kFieldRequiresFinalizationNote);
    diag_overridden_non_virtual_trace_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kOverriddenNonVirtualTraceNote);
    diag_manual_dispatch_method_note_ = diagnostic_.getCustomDiagID(
        DiagnosticsEngine::Note, kManualDispatchMethodNote);
  }
  void HandleTranslationUnit(ASTContext& context) override {
    // Don't run the plugin if the compilation unit is already invalid.
    if (diagnostic_.hasErrorOccurred())
      return;

    ParseFunctionTemplates(context.getTranslationUnitDecl());

    CollectVisitor visitor;
    visitor.TraverseDecl(context.getTranslationUnitDecl());

    if (options_.dump_graph) {
      std::error_code err;
      // TODO: Make createDefaultOutputFile or a shorter createOutputFile work.
      json_ = JsonWriter::from(instance_.createOutputFile(
          "",                                      // OutputPath
          err,                                     // Error
          true,                                    // Binary
          true,                                    // RemoveFileOnSignal
          instance_.getFrontendOpts().OutputFile,  // BaseInput
          "graph.json",                            // Extension
          false,                                   // UseTemporary
          false,                                   // CreateMissingDirectories
          0,                                       // ResultPathName
          0));                                     // TempPathName
      if (!err && json_) {
        json_->OpenList();
      } else {
        json_ = 0;
        llvm::errs()
            << "Failed to create an output file for the object graph.\n";
      }
    }

    for (RecordVector::iterator it = visitor.record_decls().begin();
         it != visitor.record_decls().end();
         ++it) {
      CheckRecord(cache_.Lookup(*it));
    }

    for (MethodVector::iterator it = visitor.trace_decls().begin();
         it != visitor.trace_decls().end();
         ++it) {
      CheckTracingMethod(*it);
    }

    if (json_) {
      json_->CloseList();
      delete json_;
      json_ = 0;
    }
  }
  void ParseFunctionTemplates(TranslationUnitDecl* decl) {
    if (!instance_.getLangOpts().DelayedTemplateParsing)
      return;  // Nothing to do.

    std::set<FunctionDecl*> late_parsed_decls =
        GetLateParsedFunctionDecls(decl);
    clang::Sema& sema = instance_.getSema();

    for (const FunctionDecl* fd : late_parsed_decls) {
      assert(fd->isLateTemplateParsed());

      if (!Config::IsTraceMethod(fd))
        continue;

      if (instance_.getSourceManager().isInSystemHeader(
              instance_.getSourceManager().getSpellingLoc(fd->getLocation())))
        continue;

      // Force parsing and AST building of the yet-uninstantiated function
      // template trace method bodies.
      clang::LateParsedTemplate* lpt = sema.LateParsedTemplateMap[fd];
      sema.LateTemplateParser(sema.OpaqueParser, *lpt);
    }
  }
  // Main entry for checking a record declaration.
  void CheckRecord(RecordInfo* info) {
    if (IsIgnored(info))
      return;

    CXXRecordDecl* record = info->record();

    // TODO: what should we do to check unions?
    if (record->isUnion())
      return;

    // If this is the primary template declaration, check its specializations.
    if (record->isThisDeclarationADefinition() &&
        record->getDescribedClassTemplate()) {
      ClassTemplateDecl* tmpl = record->getDescribedClassTemplate();
      for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin();
           it != tmpl->spec_end();
           ++it) {
        CheckClass(cache_.Lookup(*it));
      }
      return;
    }

    CheckClass(info);
  }
  // Check a class-like object (eg, class, specialization, instantiation).
  void CheckClass(RecordInfo* info) {
    if (!info)
      return;

    // Check consistency of stack-allocated hierarchies.
    if (info->IsStackAllocated()) {
      for (RecordInfo::Bases::iterator it = info->GetBases().begin();
           it != info->GetBases().end();
           ++it) {
        if (!it->second.info()->IsStackAllocated())
          ReportDerivesNonStackAllocated(info, &it->second);
      }
    }

    if (CXXMethodDecl* trace = info->GetTraceMethod()) {
      if (trace->isPure())
        ReportClassDeclaresPureVirtualTrace(info, trace);
    } else if (info->RequiresTraceMethod()) {
      ReportClassRequiresTraceMethod(info);
    }

    // Check polymorphic classes that are GC-derived or have a trace method.
    if (info->record()->hasDefinition() && info->record()->isPolymorphic()) {
      // TODO: Check classes that inherit a trace method.
      CXXMethodDecl* trace = info->GetTraceMethod();
      if (trace || info->IsGCDerived())
        CheckPolymorphicClass(info, trace);
    }

    {
      CheckFieldsVisitor visitor(options_);
      if (visitor.ContainsInvalidFields(info))
        ReportClassContainsInvalidFields(info, &visitor.invalid_fields());
    }

    if (info->IsGCDerived()) {
      if (!info->IsGCMixin()) {
        CheckLeftMostDerived(info);
        CheckDispatch(info);
        if (CXXMethodDecl* newop = info->DeclaresNewOperator())
          if (!Config::IsIgnoreAnnotated(newop))
            ReportClassOverridesNew(info, newop);
        if (info->IsGCMixinInstance()) {
          // Require that declared GCMixin implementations
          // also provide a trace() override.
          if (info->DeclaresGCMixinMethods()
              && !info->DeclaresLocalTraceMethod())
            ReportClassMustDeclareGCMixinTraceMethod(info);
        }
      }

      {
        CheckGCRootsVisitor visitor;
        if (visitor.ContainsGCRoots(info))
          ReportClassContainsGCRoots(info, &visitor.gc_roots());
      }

      if (info->NeedsFinalization())
        CheckFinalization(info);

      if (options_.warn_unneeded_finalizer && info->IsGCFinalized())
        CheckUnneededFinalization(info);
    }

    DumpClass(info);
  }
  CXXRecordDecl* GetDependentTemplatedDecl(const Type& type) {
    const TemplateSpecializationType* tmpl_type =
        type.getAs<TemplateSpecializationType>();
    if (!tmpl_type)
      return 0;
    TemplateDecl* tmpl_decl = tmpl_type->getTemplateName().getAsTemplateDecl();
    if (!tmpl_decl)
      return 0;
    return dyn_cast<CXXRecordDecl>(tmpl_decl->getTemplatedDecl());
  }
  // The GC infrastructure assumes that if the vtable of a polymorphic
  // base-class is not initialized for a given object (ie, it is partially
  // initialized) then the object does not need to be traced. Thus, we must
  // ensure that any polymorphic class with a trace method does not have any
  // traceable fields that are initialized before we are sure that the vtable
  // and the trace method are both defined. There are two cases that need to
  // hold to satisfy that assumption:
  //
  // 1. If trace is virtual, then it must be defined in the left-most base.
  // This ensures that if the vtable is initialized then it contains a pointer
  // to the trace method.
  //
  // 2. If trace is non-virtual, then the trace method is defined and we must
  // ensure that the left-most base defines a vtable. This ensures that the
  // first thing to be initialized when constructing the object is the vtable
  // pointer.
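  // As an illustration (hypothetical class names, not taken from Blink):
  //
  //   class Helper { int m_data; };                  // non-polymorphic base.
  //   class Node : public Helper,
  //                public GarbageCollected<Node> {
  //    public:
  //     virtual void trace(Visitor*);
  //   };
  //
  // Node is polymorphic and GC derived, but its left-most base Helper neither
  // is polymorphic nor declares a virtual trace, so condition (1) fails and
  // kBaseClassMustDeclareVirtualTrace is reported (with a non-virtual trace,
  // condition (2) would instead report kLeftMostBaseMustBePolymorphic).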
  void CheckPolymorphicClass(RecordInfo* info, CXXMethodDecl* trace) {
    CXXRecordDecl* left_most = info->record();
    CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
    CXXRecordDecl* left_most_base = 0;
    while (it != left_most->bases_end()) {
      left_most_base = it->getType()->getAsCXXRecordDecl();
      if (!left_most_base && it->getType()->isDependentType())
        left_most_base = RecordInfo::GetDependentTemplatedDecl(*it->getType());

      // TODO: Find a way to correctly check actual instantiations
      // for dependent types. The escape below will be hit, eg, when
      // we have a primary template with no definition and
      // specializations for each case (such as SupplementBase) in
      // which case we don't succeed in checking the required
      // invariants.
      if (!left_most_base || !left_most_base->hasDefinition())
        return;

      StringRef name = left_most_base->getName();
      // We know GCMixin base defines virtual trace.
      if (Config::IsGCMixinBase(name))
        return;

      // Stop with the left-most prior to a safe polymorphic base (a safe base
      // is non-polymorphic and contains no fields).
      if (Config::IsSafePolymorphicBase(name))
        break;

      left_most = left_most_base;
      it = left_most->bases_begin();
    }

    if (RecordInfo* left_most_info = cache_.Lookup(left_most)) {
      // Check condition (1):
      if (trace && trace->isVirtual()) {
        if (CXXMethodDecl* trace = left_most_info->GetTraceMethod()) {
          if (trace->isVirtual())
            return;
        }
        ReportBaseClassMustDeclareVirtualTrace(info, left_most);
        return;
      }

      // Check condition (2):
      if (DeclaresVirtualMethods(left_most))
        return;
      if (left_most_base) {
        // Get the base next to the "safe polymorphic base".
        if (it != left_most->bases_end())
          ++it;
        if (it != left_most->bases_end()) {
          if (CXXRecordDecl* next_base = it->getType()->getAsCXXRecordDecl()) {
            if (CXXRecordDecl* next_left_most = GetLeftMostBase(next_base)) {
              if (DeclaresVirtualMethods(next_left_most))
                return;
              ReportLeftMostBaseMustBePolymorphic(info, next_left_most);
              return;
            }
          }
        }
      }
      ReportLeftMostBaseMustBePolymorphic(info, left_most);
    }
  }
  CXXRecordDecl* GetLeftMostBase(CXXRecordDecl* left_most) {
    CXXRecordDecl::base_class_iterator it = left_most->bases_begin();
    while (it != left_most->bases_end()) {
      if (it->getType()->isDependentType())
        left_most = RecordInfo::GetDependentTemplatedDecl(*it->getType());
      else
        left_most = it->getType()->getAsCXXRecordDecl();
      if (!left_most || !left_most->hasDefinition())
        return 0;
      it = left_most->bases_begin();
    }
    return left_most;
  }

  bool DeclaresVirtualMethods(CXXRecordDecl* decl) {
    CXXRecordDecl::method_iterator it = decl->method_begin();
    for (; it != decl->method_end(); ++it)
      if (it->isVirtual() && !it->isPure())
        return true;
    return false;
  }
  void CheckLeftMostDerived(RecordInfo* info) {
    CXXRecordDecl* left_most = GetLeftMostBase(info->record());
    if (!left_most)
      return;
    if (!Config::IsGCBase(left_most->getName()))
      ReportClassMustLeftMostlyDeriveGC(info);
  }
  void CheckDispatch(RecordInfo* info) {
    bool finalized = info->IsGCFinalized();
    CXXMethodDecl* trace_dispatch = info->GetTraceDispatchMethod();
    CXXMethodDecl* finalize_dispatch = info->GetFinalizeDispatchMethod();
    if (!trace_dispatch && !finalize_dispatch)
      return;

    CXXRecordDecl* base = trace_dispatch ? trace_dispatch->getParent()
                                         : finalize_dispatch->getParent();

    // Check that dispatch methods are defined at the base.
    if (base == info->record()) {
      if (!trace_dispatch)
        ReportMissingTraceDispatchMethod(info);
      if (finalized && !finalize_dispatch)
        ReportMissingFinalizeDispatchMethod(info);
      if (!finalized && finalize_dispatch) {
        ReportClassRequiresFinalization(info);
        NoteUserDeclaredFinalizer(finalize_dispatch);
      }
    }

    // Check that classes implementing manual dispatch do not have vtables.
    if (info->record()->isPolymorphic())
      ReportVirtualAndManualDispatch(
          info, trace_dispatch ? trace_dispatch : finalize_dispatch);

    // If this is a non-abstract class check that it is dispatched to.
    // TODO: Create a global variant of this local check. We can only check if
    // the dispatch body is known in this compilation unit.
    if (info->IsConsideredAbstract())
      return;

    const FunctionDecl* defn;

    if (trace_dispatch && trace_dispatch->isDefined(defn)) {
      CheckDispatchVisitor visitor(info);
      visitor.TraverseStmt(defn->getBody());
      if (!visitor.dispatched_to_receiver())
        ReportMissingTraceDispatch(defn, info);
    }

    if (finalized && finalize_dispatch && finalize_dispatch->isDefined(defn)) {
      CheckDispatchVisitor visitor(info);
      visitor.TraverseStmt(defn->getBody());
      if (!visitor.dispatched_to_receiver())
        ReportMissingFinalizeDispatch(defn, info);
    }
  }
  // TODO: Should we collect destructors similar to trace methods?
  void CheckFinalization(RecordInfo* info) {
    CXXDestructorDecl* dtor = info->record()->getDestructor();

    // For finalized classes, check the finalization method if possible.
    if (info->IsGCFinalized()) {
      if (dtor && dtor->hasBody()) {
        CheckFinalizerVisitor visitor(&cache_, info->IsEagerlyFinalized());
        visitor.TraverseCXXMethodDecl(dtor);
        if (!visitor.finalized_fields().empty()) {
          ReportFinalizerAccessesFinalizedFields(
              dtor, &visitor.finalized_fields());
        }
      }
      return;
    }

    // Don't require finalization of a mixin that has not yet been "mixed in".
    if (info->IsGCMixin())
      return;

    // Report the finalization error, and proceed to print possible causes for
    // the finalization requirement.
    ReportClassRequiresFinalization(info);

    if (dtor && dtor->isUserProvided())
      NoteUserDeclaredDestructor(dtor);

    for (RecordInfo::Bases::iterator it = info->GetBases().begin();
         it != info->GetBases().end();
         ++it) {
      if (it->second.info()->NeedsFinalization())
        NoteBaseRequiresFinalization(&it->second);
    }

    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
         ++it) {
      if (it->second.edge()->NeedsFinalization())
        NoteField(&it->second, diag_field_requires_finalization_note_);
    }
  }
  void CheckUnneededFinalization(RecordInfo* info) {
    if (!HasNonEmptyFinalizer(info))
      ReportClassDoesNotRequireFinalization(info);
  }

  bool HasNonEmptyFinalizer(RecordInfo* info) {
    CXXDestructorDecl* dtor = info->record()->getDestructor();
    if (dtor && dtor->isUserProvided()) {
      if (!dtor->hasBody() || !EmptyStmtVisitor::isEmpty(dtor->getBody()))
        return true;
    }
    for (RecordInfo::Bases::iterator it = info->GetBases().begin();
         it != info->GetBases().end();
         ++it) {
      if (HasNonEmptyFinalizer(it->second.info()))
        return true;
    }
    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
         ++it) {
      if (it->second.edge()->NeedsFinalization())
        return true;
    }
    return false;
  }
  // This is the main entry for tracing method definitions.
  void CheckTracingMethod(CXXMethodDecl* method) {
    RecordInfo* parent = cache_.Lookup(method->getParent());
    if (IsIgnored(parent))
      return;

    // Check templated tracing methods by checking the template instantiations.
    // Specialized templates are handled as ordinary classes.
    if (ClassTemplateDecl* tmpl =
            parent->record()->getDescribedClassTemplate()) {
      for (ClassTemplateDecl::spec_iterator it = tmpl->spec_begin();
           it != tmpl->spec_end();
           ++it) {
        // Check trace using each template instantiation as the holder.
        if (IsTemplateInstantiation(*it))
          CheckTraceOrDispatchMethod(cache_.Lookup(*it), method);
      }
      return;
    }

    CheckTraceOrDispatchMethod(parent, method);
  }
  // Determine what type of tracing method this is (dispatch or trace).
  void CheckTraceOrDispatchMethod(RecordInfo* parent, CXXMethodDecl* method) {
    Config::TraceMethodType trace_type = Config::GetTraceMethodType(method);
    if (trace_type == Config::TRACE_AFTER_DISPATCH_METHOD ||
        trace_type == Config::TRACE_AFTER_DISPATCH_IMPL_METHOD ||
        !parent->GetTraceDispatchMethod()) {
      CheckTraceMethod(parent, method, trace_type);
    }
    // Dispatch methods are checked when we identify subclasses.
  }
  // Check an actual trace method.
  void CheckTraceMethod(RecordInfo* parent,
                        CXXMethodDecl* trace,
                        Config::TraceMethodType trace_type) {
    // A trace method must not override any non-virtual trace methods.
    if (trace_type == Config::TRACE_METHOD) {
      for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
           it != parent->GetBases().end();
           ++it) {
        RecordInfo* base = it->second.info();
        if (CXXMethodDecl* other = base->InheritsNonVirtualTrace())
          ReportOverriddenNonVirtualTrace(parent, trace, other);
      }
    }

    CheckTraceVisitor visitor(trace, parent, &cache_);
    visitor.TraverseCXXMethodDecl(trace);

    // Skip reporting if this trace method is just a delegate to the traceImpl
    // (or traceAfterDispatchImpl) method. We will report when CheckTraceMethod
    // is run on the traceImpl method instead.
    if (visitor.delegates_to_traceimpl())
      return;

    for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
         it != parent->GetBases().end();
         ++it) {
      if (!it->second.IsProperlyTraced())
        ReportBaseRequiresTracing(parent, trace, it->first);
    }

    for (RecordInfo::Fields::iterator it = parent->GetFields().begin();
         it != parent->GetFields().end();
         ++it) {
      if (!it->second.IsProperlyTraced()) {
        // Discontinue once an untraced-field error is found.
        ReportFieldsRequireTracing(parent, trace);
        break;
      }
    }
  }
  void DumpClass(RecordInfo* info) {
    if (!json_)
      return;

    json_->OpenObject();
    json_->Write("name", info->record()->getQualifiedNameAsString());
    json_->Write("loc", GetLocString(info->record()->getLocStart()));
    json_->CloseObject();

    class DumpEdgeVisitor : public RecursiveEdgeVisitor {
     public:
      DumpEdgeVisitor(JsonWriter* json) : json_(json) {}
      void DumpEdge(RecordInfo* src,
                    RecordInfo* dst,
                    const string& lbl,
                    const Edge::LivenessKind& kind,
                    const string& loc) {
        json_->OpenObject();
        json_->Write("src", src->record()->getQualifiedNameAsString());
        json_->Write("dst", dst->record()->getQualifiedNameAsString());
        json_->Write("lbl", lbl);
        json_->Write("kind", kind);
        json_->Write("loc", loc);
        json_->Write("ptr",
                     !Parent() ? "val" :
                     Parent()->IsRawPtr() ?
                         (static_cast<RawPtr*>(Parent())->HasReferenceType() ?
                              "reference" : "raw") :
                     Parent()->IsRefPtr() ? "ref" :
                     Parent()->IsOwnPtr() ? "own" :
                     (Parent()->IsMember() ||
                      Parent()->IsWeakMember()) ? "mem" :
                     "val");
        json_->CloseObject();
      }

      void DumpField(RecordInfo* src, FieldPoint* point, const string& loc) {
        src_ = src;
        point_ = point;
        loc_ = loc;
        point_->edge()->Accept(this);
      }

      void AtValue(Value* e) override {
        // The liveness kind of a path from the point to this value
        // is given by the innermost place that is non-strong.
        Edge::LivenessKind kind = Edge::kStrong;
        if (Config::IsIgnoreCycleAnnotated(point_->field())) {
          kind = Edge::kWeak;
        }
        for (Context::iterator it = context().begin();
             it != context().end();
             ++it) {
          Edge::LivenessKind pointer_kind = (*it)->Kind();
          if (pointer_kind != Edge::kStrong) {
            kind = pointer_kind;
            break;
          }
        }
        DumpEdge(
            src_, e->value(), point_->field()->getNameAsString(), kind, loc_);
      }

     private:
      JsonWriter* json_;
      RecordInfo* src_;
      FieldPoint* point_;
      string loc_;
    };

    DumpEdgeVisitor visitor(json_);

    RecordInfo::Bases& bases = info->GetBases();
    for (RecordInfo::Bases::iterator it = bases.begin();
         it != bases.end();
         ++it) {
      visitor.DumpEdge(info,
                       it->second.info(),
                       "base",
                       Edge::kStrong,
                       GetLocString(it->second.spec().getLocStart()));
    }

    RecordInfo::Fields& fields = info->GetFields();
    for (RecordInfo::Fields::iterator it = fields.begin();
         it != fields.end();
         ++it) {
      visitor.DumpField(info,
                        &it->second,
                        GetLocString(it->second.field()->getLocStart()));
    }
  }
  // Adds either a warning or error, based on the current handling of -Werror.
  DiagnosticsEngine::Level getErrorLevel() {
    return diagnostic_.getWarningsAsErrors() ? DiagnosticsEngine::Error
                                             : DiagnosticsEngine::Warning;
  }
  const string GetLocString(SourceLocation loc) {
    const SourceManager& source_manager = instance_.getSourceManager();
    PresumedLoc ploc = source_manager.getPresumedLoc(loc);
    if (ploc.isInvalid())
      return "";
    string loc_str;
    llvm::raw_string_ostream OS(loc_str);
    OS << ploc.getFilename()
       << ":" << ploc.getLine()
       << ":" << ploc.getColumn();
    return OS.str();
  }
  bool IsIgnored(RecordInfo* record) {
    return !record ||
           !InCheckedNamespace(record) ||
           IsIgnoredClass(record) ||
           InIgnoredDirectory(record);
  }
  bool IsIgnoredClass(RecordInfo* info) {
    // Ignore any class prefixed by SameSizeAs. These are used in
    // Blink to verify class sizes and don't need checking.
    const string SameSizeAs = "SameSizeAs";
    if (info->name().compare(0, SameSizeAs.size(), SameSizeAs) == 0)
      return true;
    return options_.ignored_classes.find(info->name()) !=
           options_.ignored_classes.end();
  }
  bool InIgnoredDirectory(RecordInfo* info) {
    string filename;
    if (!GetFilename(info->record()->getLocStart(), &filename))
      return false;  // TODO: should we ignore non-existing file locations?
#if defined(LLVM_ON_WIN32)
    std::replace(filename.begin(), filename.end(), '\\', '/');
#endif
    std::vector<string>::iterator it = options_.ignored_directories.begin();
    for (; it != options_.ignored_directories.end(); ++it)
      if (filename.find(*it) != string::npos)
        return true;
    return false;
  }
  bool InCheckedNamespace(RecordInfo* info) {
    if (!info)
      return false;
    for (DeclContext* context = info->record()->getDeclContext();
         !context->isTranslationUnit();
         context = context->getParent()) {
      if (NamespaceDecl* decl = dyn_cast<NamespaceDecl>(context)) {
        if (decl->isAnonymousNamespace())
          return true;
        if (options_.checked_namespaces.find(decl->getNameAsString()) !=
            options_.checked_namespaces.end()) {
          return true;
        }
      }
    }
    return false;
  }
  bool GetFilename(SourceLocation loc, string* filename) {
    const SourceManager& source_manager = instance_.getSourceManager();
    SourceLocation spelling_location = source_manager.getSpellingLoc(loc);
    PresumedLoc ploc = source_manager.getPresumedLoc(spelling_location);
    if (ploc.isInvalid()) {
      // If we're in an invalid location, we're looking at things that aren't
      // actually stated in the source.
      return false;
    }
    *filename = ploc.getFilename();
    return true;
  }
  void ReportClassMustLeftMostlyDeriveGC(RecordInfo* info) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_must_left_mostly_derive_gc_)
        << info->record();
  }

  void ReportClassRequiresTraceMethod(RecordInfo* info) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_requires_trace_method_)
        << info->record();

    for (RecordInfo::Bases::iterator it = info->GetBases().begin();
         it != info->GetBases().end();
         ++it) {
      if (it->second.NeedsTracing().IsNeeded())
        NoteBaseRequiresTracing(&it->second);
    }

    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
         ++it) {
      if (!it->second.IsProperlyTraced())
        NoteFieldRequiresTracing(info, it->first);
    }
  }
  void ReportBaseRequiresTracing(RecordInfo* derived,
                                 CXXMethodDecl* trace,
                                 CXXRecordDecl* base) {
    SourceLocation loc = trace->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_base_requires_tracing_)
        << base << derived->record();
  }

  void ReportFieldsRequireTracing(RecordInfo* info, CXXMethodDecl* trace) {
    SourceLocation loc = trace->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_fields_require_tracing_)
        << info->record();
    for (RecordInfo::Fields::iterator it = info->GetFields().begin();
         it != info->GetFields().end();
         ++it) {
      if (!it->second.IsProperlyTraced())
        NoteFieldRequiresTracing(info, it->first);
    }
  }
  void ReportClassContainsInvalidFields(RecordInfo* info,
                                        CheckFieldsVisitor::Errors* errors) {
    SourceLocation loc = info->record()->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);

    bool only_warnings = options_.warn_raw_ptr;
    for (CheckFieldsVisitor::Errors::iterator it = errors->begin();
         only_warnings && it != errors->end();
         ++it) {
      if (!CheckFieldsVisitor::IsWarning(it->second))
        only_warnings = false;
    }

    diagnostic_.Report(full_loc, only_warnings ?
                       diag_class_contains_invalid_fields_warning_ :
                       diag_class_contains_invalid_fields_)
        << info->record();

    for (CheckFieldsVisitor::Errors::iterator it = errors->begin();
         it != errors->end();
         ++it) {
      unsigned error;
      if (CheckFieldsVisitor::IsRawPtrError(it->second)) {
        error = diag_raw_ptr_to_gc_managed_class_note_;
      } else if (CheckFieldsVisitor::IsReferencePtrError(it->second)) {
        error = diag_reference_ptr_to_gc_managed_class_note_;
      } else if (it->second == CheckFieldsVisitor::kRefPtrToGCManaged) {
        error = diag_ref_ptr_to_gc_managed_class_note_;
      } else if (it->second == CheckFieldsVisitor::kOwnPtrToGCManaged) {
        error = diag_own_ptr_to_gc_managed_class_note_;
      } else if (it->second == CheckFieldsVisitor::kMemberToGCUnmanaged) {
        error = diag_member_to_gc_unmanaged_class_note_;
      } else if (it->second == CheckFieldsVisitor::kMemberInUnmanaged) {
        error = diag_member_in_unmanaged_class_note_;
      } else if (it->second == CheckFieldsVisitor::kPtrFromHeapToStack) {
        error = diag_stack_allocated_field_note_;
      } else if (it->second == CheckFieldsVisitor::kGCDerivedPartObject) {
        error = diag_part_object_to_gc_derived_class_note_;
      } else {
        assert(false && "Unknown field error");
        continue;  // Avoid using an uninitialized note in release builds.
      }
      NoteField(it->first, error);
    }
  }
  void ReportClassContainsGCRoots(RecordInfo* info,
                                  CheckGCRootsVisitor::Errors* errors) {
    SourceLocation loc = info->record()->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    for (CheckGCRootsVisitor::Errors::iterator it = errors->begin();
         it != errors->end();
         ++it) {
      CheckGCRootsVisitor::RootPath::iterator path = it->begin();
      FieldPoint* point = *path;
      diagnostic_.Report(full_loc, diag_class_contains_gc_root_)
          << info->record() << point->field();
      while (++path != it->end()) {
        NotePartObjectContainsGCRoot(point);
        point = *path;
      }
      NoteFieldContainsGCRoot(point);
    }
  }
  void ReportFinalizerAccessesFinalizedFields(
      CXXMethodDecl* dtor,
      CheckFinalizerVisitor::Errors* fields) {
    for (CheckFinalizerVisitor::Errors::iterator it = fields->begin();
         it != fields->end();
         ++it) {
      SourceLocation loc = it->member_->getLocStart();
      SourceManager& manager = instance_.getSourceManager();
      bool as_eagerly_finalized = it->as_eagerly_finalized_;
      unsigned diag_error = as_eagerly_finalized ?
                            diag_finalizer_eagerly_finalized_field_ :
                            diag_finalizer_accesses_finalized_field_;
      unsigned diag_note = as_eagerly_finalized ?
                           diag_eagerly_finalized_field_note_ :
                           diag_finalized_field_note_;
      FullSourceLoc full_loc(loc, manager);
      diagnostic_.Report(full_loc, diag_error)
          << dtor << it->field_->field();
      NoteField(it->field_, diag_note);
    }
  }
  void ReportClassRequiresFinalization(RecordInfo* info) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_requires_finalization_)
        << info->record();
  }

  void ReportClassDoesNotRequireFinalization(RecordInfo* info) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_does_not_require_finalization_)
        << info->record();
  }

  void ReportClassMustDeclareGCMixinTraceMethod(RecordInfo* info) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(
        full_loc, diag_class_must_declare_gc_mixin_trace_method_)
        << info->record();
  }
  void ReportOverriddenNonVirtualTrace(RecordInfo* info,
                                       CXXMethodDecl* trace,
                                       CXXMethodDecl* overridden) {
    SourceLocation loc = trace->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_)
        << info->record() << overridden->getParent();
    NoteOverriddenNonVirtualTrace(overridden);
  }

  void ReportMissingTraceDispatchMethod(RecordInfo* info) {
    ReportMissingDispatchMethod(info, diag_missing_trace_dispatch_method_);
  }

  void ReportMissingFinalizeDispatchMethod(RecordInfo* info) {
    ReportMissingDispatchMethod(info, diag_missing_finalize_dispatch_method_);
  }

  void ReportMissingDispatchMethod(RecordInfo* info, unsigned error) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, error) << info->record();
  }
  void ReportVirtualAndManualDispatch(RecordInfo* info,
                                      CXXMethodDecl* dispatch) {
    SourceLocation loc = info->record()->getInnerLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_virtual_and_manual_dispatch_)
        << info->record();
    NoteManualDispatchMethod(dispatch);
  }

  void ReportMissingTraceDispatch(const FunctionDecl* dispatch,
                                  RecordInfo* receiver) {
    ReportMissingDispatch(dispatch, receiver, diag_missing_trace_dispatch_);
  }

  void ReportMissingFinalizeDispatch(const FunctionDecl* dispatch,
                                     RecordInfo* receiver) {
    ReportMissingDispatch(dispatch, receiver, diag_missing_finalize_dispatch_);
  }

  void ReportMissingDispatch(const FunctionDecl* dispatch,
                             RecordInfo* receiver,
                             unsigned error) {
    SourceLocation loc = dispatch->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, error) << receiver->record();
  }
  void ReportDerivesNonStackAllocated(RecordInfo* info, BasePoint* base) {
    SourceLocation loc = base->spec().getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_derives_non_stack_allocated_)
        << info->record() << base->info()->record();
  }

  void ReportClassOverridesNew(RecordInfo* info, CXXMethodDecl* newop) {
    SourceLocation loc = newop->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_overrides_new_) << info->record();
  }

  void ReportClassDeclaresPureVirtualTrace(RecordInfo* info,
                                           CXXMethodDecl* trace) {
    SourceLocation loc = trace->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_class_declares_pure_virtual_trace_)
        << info->record();
  }
  void ReportLeftMostBaseMustBePolymorphic(RecordInfo* derived,
                                           CXXRecordDecl* base) {
    SourceLocation loc = base->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_left_most_base_must_be_polymorphic_)
        << base << derived->record();
  }

  void ReportBaseClassMustDeclareVirtualTrace(RecordInfo* derived,
                                              CXXRecordDecl* base) {
    SourceLocation loc = base->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_base_class_must_declare_virtual_trace_)
        << base << derived->record();
  }
  void NoteManualDispatchMethod(CXXMethodDecl* dispatch) {
    SourceLocation loc = dispatch->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_manual_dispatch_method_note_)
        << dispatch;
  }

  void NoteBaseRequiresTracing(BasePoint* base) {
    SourceLocation loc = base->spec().getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_base_requires_tracing_note_)
        << base->info()->record();
  }

  void NoteFieldRequiresTracing(RecordInfo* holder, FieldDecl* field) {
    NoteField(field, diag_field_requires_tracing_note_);
  }
  void NotePartObjectContainsGCRoot(FieldPoint* point) {
    FieldDecl* field = point->field();
    SourceLocation loc = field->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_part_object_contains_gc_root_note_)
        << field << field->getParent();
  }

  void NoteFieldContainsGCRoot(FieldPoint* point) {
    NoteField(point, diag_field_contains_gc_root_note_);
  }

  void NoteUserDeclaredDestructor(CXXMethodDecl* dtor) {
    SourceLocation loc = dtor->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_user_declared_destructor_note_);
  }
  void NoteUserDeclaredFinalizer(CXXMethodDecl* dtor) {
    SourceLocation loc = dtor->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_user_declared_finalizer_note_);
  }

  void NoteBaseRequiresFinalization(BasePoint* base) {
    SourceLocation loc = base->spec().getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_base_requires_finalization_note_)
        << base->info()->record();
  }

  void NoteField(FieldPoint* point, unsigned note) {
    NoteField(point->field(), note);
  }
  void NoteField(FieldDecl* field, unsigned note) {
    SourceLocation loc = field->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, note) << field;
  }

  void NoteOverriddenNonVirtualTrace(CXXMethodDecl* overridden) {
    SourceLocation loc = overridden->getLocStart();
    SourceManager& manager = instance_.getSourceManager();
    FullSourceLoc full_loc(loc, manager);
    diagnostic_.Report(full_loc, diag_overridden_non_virtual_trace_note_)
        << overridden;
  }
  unsigned diag_class_must_left_mostly_derive_gc_;
  unsigned diag_class_requires_trace_method_;
  unsigned diag_base_requires_tracing_;
  unsigned diag_fields_require_tracing_;
  unsigned diag_class_contains_invalid_fields_;
  unsigned diag_class_contains_invalid_fields_warning_;
  unsigned diag_class_contains_gc_root_;
  unsigned diag_class_requires_finalization_;
  unsigned diag_class_does_not_require_finalization_;
  unsigned diag_finalizer_accesses_finalized_field_;
  unsigned diag_finalizer_eagerly_finalized_field_;
  unsigned diag_overridden_non_virtual_trace_;
  unsigned diag_missing_trace_dispatch_method_;
  unsigned diag_missing_finalize_dispatch_method_;
  unsigned diag_virtual_and_manual_dispatch_;
  unsigned diag_missing_trace_dispatch_;
  unsigned diag_missing_finalize_dispatch_;
  unsigned diag_derives_non_stack_allocated_;
  unsigned diag_class_overrides_new_;
  unsigned diag_class_declares_pure_virtual_trace_;
  unsigned diag_left_most_base_must_be_polymorphic_;
  unsigned diag_base_class_must_declare_virtual_trace_;
  unsigned diag_class_must_declare_gc_mixin_trace_method_;

  unsigned diag_base_requires_tracing_note_;
  unsigned diag_field_requires_tracing_note_;
  unsigned diag_raw_ptr_to_gc_managed_class_note_;
  unsigned diag_ref_ptr_to_gc_managed_class_note_;
  unsigned diag_reference_ptr_to_gc_managed_class_note_;
  unsigned diag_own_ptr_to_gc_managed_class_note_;
  unsigned diag_member_to_gc_unmanaged_class_note_;
  unsigned diag_stack_allocated_field_note_;
  unsigned diag_member_in_unmanaged_class_note_;
  unsigned diag_part_object_to_gc_derived_class_note_;
  unsigned diag_part_object_contains_gc_root_note_;
  unsigned diag_field_contains_gc_root_note_;
  unsigned diag_finalized_field_note_;
  unsigned diag_eagerly_finalized_field_note_;
  unsigned diag_user_declared_destructor_note_;
  unsigned diag_user_declared_finalizer_note_;
  unsigned diag_base_requires_finalization_note_;
  unsigned diag_field_requires_finalization_note_;
  unsigned diag_overridden_non_virtual_trace_note_;
  unsigned diag_manual_dispatch_method_note_;

  CompilerInstance& instance_;
  DiagnosticsEngine& diagnostic_;
  BlinkGCPluginOptions options_;
  JsonWriter* json_;
};
class BlinkGCPluginAction : public PluginASTAction {
 public:
  BlinkGCPluginAction() {}

 protected:
  // Overridden from PluginASTAction:
  virtual std::unique_ptr<ASTConsumer> CreateASTConsumer(
      CompilerInstance& instance,
      llvm::StringRef ref) {
    return llvm::make_unique<BlinkGCPluginConsumer>(instance, options_);
  }
  virtual bool ParseArgs(const CompilerInstance& instance,
                         const std::vector<string>& args) {
    bool parsed = true;

    for (size_t i = 0; i < args.size() && parsed; ++i) {
      if (args[i] == "enable-oilpan") {
        options_.enable_oilpan = true;
      } else if (args[i] == "dump-graph") {
        options_.dump_graph = true;
      } else if (args[i] == "warn-raw-ptr") {
        options_.warn_raw_ptr = true;
      } else if (args[i] == "warn-unneeded-finalizer") {
        options_.warn_unneeded_finalizer = true;
      } else {
        parsed = false;
        llvm::errs() << "Unknown blink-gc-plugin argument: " << args[i]
                     << "\n";
      }
    }

    return parsed;
  }

 private:
  BlinkGCPluginOptions options_;
};
}  // namespace

static FrontendPluginRegistry::Add<BlinkGCPluginAction> X(
    "blink-gc-plugin",
    "Check Blink GC invariants");