Updated to Clang 3.5a.

This pulls lib/Sema/AnalysisBasedWarnings.cpp up to the upstream Clang 3.5
sources: the unreachable-code handler now distinguishes break, return and
loop-increment cases and offers a fix-it to silence the warning, a new
analysis diagnoses infinite self-recursion, the thread-safety callbacks take
a capability-kind string (plus an incorrect-unlock-kind callback), virtual
handler overrides are marked 'override', and the local sorter functors are
replaced with lambdas.

Change-Id: I8127eb568f674c2e72635b639a3295381fe8af82
diff --git a/lib/Sema/AnalysisBasedWarnings.cpp b/lib/Sema/AnalysisBasedWarnings.cpp
index 93e3ecf..3e40485 100644
--- a/lib/Sema/AnalysisBasedWarnings.cpp
+++ b/lib/Sema/AnalysisBasedWarnings.cpp
@@ -65,16 +65,157 @@
public:
UnreachableCodeHandler(Sema &s) : S(s) {}
- void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
- S.Diag(L, diag::warn_unreachable) << R1 << R2;
+ void HandleUnreachable(reachable_code::UnreachableKind UK,
+ SourceLocation L,
+ SourceRange SilenceableCondVal,
+ SourceRange R1,
+ SourceRange R2) override {
+ unsigned diag = diag::warn_unreachable;
+ switch (UK) {
+ case reachable_code::UK_Break:
+ diag = diag::warn_unreachable_break;
+ break;
+ case reachable_code::UK_Return:
+ diag = diag::warn_unreachable_return;
+ break;
+ case reachable_code::UK_Loop_Increment:
+ diag = diag::warn_unreachable_loop_increment;
+ break;
+ case reachable_code::UK_Other:
+ break;
+ }
+
+ S.Diag(L, diag) << R1 << R2;
+
+ SourceLocation Open = SilenceableCondVal.getBegin();
+ if (Open.isValid()) {
+ SourceLocation Close = SilenceableCondVal.getEnd();
+ Close = S.PP.getLocForEndOfToken(Close);
+ if (Close.isValid()) {
+ S.Diag(Open, diag::note_unreachable_silence)
+ << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
+ << FixItHint::CreateInsertion(Close, ")");
+ }
+ }
}
};
}
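// Illustrative sketch (not part of this patch): the new SilenceableCondVal
// range drives a fix-it that rewrites the controlling constant so the
// warning can be silenced explicitly. For a dead branch such as
//
//   if (0) {
//     doSomething();   // warn_unreachable fires here
//   }
//
// the suggested rewrite inserts markers around the condition:
//
//   if (/* DISABLES CODE */ (0)) {
//     doSomething();
//   }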
/// CheckUnreachable - Check for unreachable code.
static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
+ // As a heuristic, prune all diagnostics not in the main file. Currently
+ // the majority of warnings in headers are false positives. These
+ // are largely caused by configuration state, e.g. preprocessor
+ // defined code, etc.
+ //
+ // Note that this is also a performance optimization. Analyzing
+ // headers many times can be expensive.
+ if (!S.getSourceManager().isInMainFile(AC.getDecl()->getLocStart()))
+ return;
+
UnreachableCodeHandler UC(S);
- reachable_code::FindUnreachableCode(AC, UC);
+ reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
+}
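// Illustrative sketch (not part of this patch) of the configuration-dependent
// header code the main-file heuristic above is meant to skip. Assuming a
// hypothetical header:
//
//   // widget.h
//   inline unsigned widgetSize() {
//   #ifdef SMALL_WIDGETS
//     return 4;
//   #endif
//     return 16;   // dead code only in TUs that define SMALL_WIDGETS
//   }
//
// Warning on this from every translation unit that includes the header would
// mostly be noise, so declarations whose start location lies outside the main
// file are not analyzed.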
+
+//===----------------------------------------------------------------------===//
+// Check for infinite self-recursion in functions
+//===----------------------------------------------------------------------===//
+
+// All blocks are in one of three states. States are ordered so that blocks
+// can only move to higher states.
+enum RecursiveState {
+ FoundNoPath,
+ FoundPath,
+ FoundPathWithNoRecursiveCall
+};
+
+static void checkForFunctionCall(Sema &S, const FunctionDecl *FD,
+ CFGBlock &Block, unsigned ExitID,
+ llvm::SmallVectorImpl<RecursiveState> &States,
+ RecursiveState State) {
+ unsigned ID = Block.getBlockID();
+
+ // A block's state can only move to a higher state.
+ if (States[ID] >= State)
+ return;
+
+ States[ID] = State;
+
+ // Found a path to the exit node without a recursive call.
+ if (ID == ExitID && State == FoundPathWithNoRecursiveCall)
+ return;
+
+ if (State == FoundPathWithNoRecursiveCall) {
+ // If the current state is FoundPathWithNoRecursiveCall, the successors
+ // will be either FoundPathWithNoRecursiveCall or FoundPath. To determine
+ // which, process all the Stmts in this block to find any recursive calls.
+ for (CFGBlock::iterator I = Block.begin(), E = Block.end(); I != E; ++I) {
+ if (I->getKind() != CFGElement::Statement)
+ continue;
+
+ const CallExpr *CE = dyn_cast<CallExpr>(I->getAs<CFGStmt>()->getStmt());
+ if (CE && CE->getCalleeDecl() &&
+ CE->getCalleeDecl()->getCanonicalDecl() == FD) {
+
+ // Skip function calls which are qualified with a templated class.
+ if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(
+ CE->getCallee()->IgnoreParenImpCasts())) {
+ if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
+ if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
+ isa<TemplateSpecializationType>(NNS->getAsType())) {
+ continue;
+ }
+ }
+ }
+
+ if (const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE)) {
+ if (isa<CXXThisExpr>(MCE->getImplicitObjectArgument()) ||
+ !MCE->getMethodDecl()->isVirtual()) {
+ State = FoundPath;
+ break;
+ }
+ } else {
+ State = FoundPath;
+ break;
+ }
+ }
+ }
+ }
+
+ for (CFGBlock::succ_iterator I = Block.succ_begin(), E = Block.succ_end();
+ I != E; ++I)
+ if (*I)
+ checkForFunctionCall(S, FD, **I, ExitID, States, State);
+}
+
+static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
+ const Stmt *Body,
+ AnalysisDeclContext &AC) {
+ FD = FD->getCanonicalDecl();
+
+ // Only run on non-templated functions and non-templated members of
+ // templated classes.
+ if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
+ FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
+ return;
+
+ CFG *cfg = AC.getCFG();
+ if (cfg == 0) return;
+
+ // If the exit block is unreachable, skip processing the function.
+ if (cfg->getExit().pred_empty())
+ return;
+
+ // Mark all nodes as FoundNoPath, then begin processing the entry block.
+ llvm::SmallVector<RecursiveState, 16> states(cfg->getNumBlockIDs(),
+ FoundNoPath);
+ checkForFunctionCall(S, FD, cfg->getEntry(), cfg->getExit().getBlockID(),
+ states, FoundPathWithNoRecursiveCall);
+
+ // Check that the exit block is reachable. This prevents triggering the
+ // warning on functions that do not terminate.
+ if (states[cfg->getExit().getBlockID()] == FoundPath)
+ S.Diag(Body->getLocStart(), diag::warn_infinite_recursive_function);
}
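// Illustrative sketch (not part of this patch) of what the new check reports.
// Here every path from the entry block to the exit block goes through a
// recursive call, so the exit block ends in state FoundPath and the
// warn_infinite_recursive_function diagnostic (-Winfinite-recursion) fires:
//
//   int countdown(int n) {
//     return countdown(n - 1);   // all paths recurse
//   }
//
// A guarded recursion keeps a call-free path to the exit block, which then
// ends in FoundPathWithNoRecursiveCall, so no warning is emitted:
//
//   int countdown(int n) {
//     if (n == 0)
//       return 0;
//     return countdown(n - 1);
//   }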
//===----------------------------------------------------------------------===//
@@ -330,18 +471,18 @@
bool HasNoReturn = false;
if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
- ReturnsVoid = FD->getResultType()->isVoidType();
+ ReturnsVoid = FD->getReturnType()->isVoidType();
HasNoReturn = FD->isNoReturn();
}
else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
- ReturnsVoid = MD->getResultType()->isVoidType();
+ ReturnsVoid = MD->getReturnType()->isVoidType();
HasNoReturn = MD->hasAttr<NoReturnAttr>();
}
else if (isa<BlockDecl>(D)) {
QualType BlockTy = blkExpr->getType();
if (const FunctionType *FT =
BlockTy->getPointeeType()->getAs<FunctionType>()) {
- if (FT->getResultType()->isVoidType())
+ if (FT->getReturnType()->isVoidType())
ReturnsVoid = true;
if (FT->getNoReturnAttr())
HasNoReturn = true;
@@ -776,6 +917,7 @@
while (!BlockQueue.empty()) {
const CFGBlock *P = BlockQueue.front();
BlockQueue.pop_front();
+ if (!P) continue;
const Stmt *Term = P->getTerminator();
if (Term && isa<SwitchStmt>(Term))
@@ -977,24 +1119,6 @@
}
-namespace {
-typedef std::pair<const Stmt *,
- sema::FunctionScopeInfo::WeakObjectUseMap::const_iterator>
- StmtUsesPair;
-
-class StmtUseSorter {
- const SourceManager &SM;
-
-public:
- explicit StmtUseSorter(const SourceManager &SM) : SM(SM) { }
-
- bool operator()(const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
- return SM.isBeforeInTranslationUnit(LHS.first->getLocStart(),
- RHS.first->getLocStart());
- }
-};
-}
-
static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
const Stmt *S) {
assert(S);
@@ -1029,6 +1153,8 @@
typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
+ typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
+ StmtUsesPair;
ASTContext &Ctx = S.getASTContext();
@@ -1087,8 +1213,12 @@
return;
// Sort by first use so that we emit the warnings in a deterministic order.
+ SourceManager &SM = S.getSourceManager();
std::sort(UsesByStmt.begin(), UsesByStmt.end(),
- StmtUseSorter(S.getSourceManager()));
+ [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
+ return SM.isBeforeInTranslationUnit(LHS.first->getLocStart(),
+ RHS.first->getLocStart());
+ });
// Classify the current code body for better warning text.
// This enum should stay in sync with the cases in
@@ -1169,19 +1299,7 @@
}
}
-
namespace {
-struct SLocSort {
- bool operator()(const UninitUse &a, const UninitUse &b) {
- // Prefer a more confident report over a less confident one.
- if (a.getKind() != b.getKind())
- return a.getKind() > b.getKind();
- SourceLocation aLoc = a.getUser()->getLocStart();
- SourceLocation bLoc = b.getUser()->getLocStart();
- return aLoc.getRawEncoding() < bLoc.getRawEncoding();
- }
-};
-
class UninitValsDiagReporter : public UninitVariablesHandler {
Sema &S;
typedef SmallVector<UninitUse, 2> UsesVec;
@@ -1208,12 +1326,13 @@
return V;
}
-
- void handleUseOfUninitVariable(const VarDecl *vd, const UninitUse &use) {
+
+ void handleUseOfUninitVariable(const VarDecl *vd,
+ const UninitUse &use) override {
getUses(vd).getPointer()->push_back(use);
}
- void handleSelfInit(const VarDecl *vd) {
+ void handleSelfInit(const VarDecl *vd) override {
getUses(vd).setInt(true);
}
@@ -1240,8 +1359,14 @@
// Sort the uses by their SourceLocations. While not strictly
// guaranteed to produce them in line/column order, this will provide
// a stable ordering.
- std::sort(vec->begin(), vec->end(), SLocSort());
-
+ std::sort(vec->begin(), vec->end(),
+ [](const UninitUse &a, const UninitUse &b) {
+ // Prefer a more confident report over a less confident one.
+ if (a.getKind() != b.getKind())
+ return a.getKind() > b.getKind();
+ return a.getUser()->getLocStart() < b.getUser()->getLocStart();
+ });
+
for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
++vi) {
// If we have self-init, downgrade all uses to 'may be uninitialized'.
@@ -1304,12 +1429,13 @@
SourceLocation FunLocation, FunEndLocation;
// Helper functions
- void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
+ void warnLockMismatch(unsigned DiagID, StringRef Kind, Name LockName,
+ SourceLocation Loc) {
// Gracefully handle rare cases when the analysis can't get a more
// precise source location.
if (!Loc.isValid())
Loc = FunLocation;
- PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << LockName);
+ PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind << LockName);
Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
}
@@ -1332,22 +1458,33 @@
}
}
- void handleInvalidLockExp(SourceLocation Loc) {
- PartialDiagnosticAt Warning(Loc,
- S.PDiag(diag::warn_cannot_resolve_lock) << Loc);
+ void handleInvalidLockExp(StringRef Kind, SourceLocation Loc) override {
+ PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
+ << Loc);
Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
}
- void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) {
- warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc);
+ void handleUnmatchedUnlock(StringRef Kind, Name LockName,
+ SourceLocation Loc) override {
+ warnLockMismatch(diag::warn_unlock_but_no_lock, Kind, LockName, Loc);
+ }
+ void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
+ LockKind Expected, LockKind Received,
+ SourceLocation Loc) override {
+ if (Loc.isInvalid())
+ Loc = FunLocation;
+ PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_kind_mismatch)
+ << Kind << LockName << Received
+ << Expected);
+ Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
+ }
+ void handleDoubleLock(StringRef Kind, Name LockName,
+ SourceLocation Loc) override {
+ warnLockMismatch(diag::warn_double_lock, Kind, LockName, Loc);
}
- void handleDoubleLock(Name LockName, SourceLocation Loc) {
- warnLockMismatch(diag::warn_double_lock, LockName, Loc);
- }
-
- void handleMutexHeldEndOfScope(Name LockName, SourceLocation LocLocked,
+ void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
+ SourceLocation LocLocked,
SourceLocation LocEndOfScope,
- LockErrorKind LEK){
+ LockErrorKind LEK) override {
unsigned DiagID = 0;
switch (LEK) {
case LEK_LockedSomePredecessors:
@@ -1366,29 +1503,33 @@
if (LocEndOfScope.isInvalid())
LocEndOfScope = FunEndLocation;
- PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << LockName);
+ PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
+ << LockName);
if (LocLocked.isValid()) {
- PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here));
+ PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here)
+ << Kind);
Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
return;
}
Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
}
-
- void handleExclusiveAndShared(Name LockName, SourceLocation Loc1,
- SourceLocation Loc2) {
- PartialDiagnosticAt Warning(
- Loc1, S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName);
- PartialDiagnosticAt Note(
- Loc2, S.PDiag(diag::note_lock_exclusive_and_shared) << LockName);
+ void handleExclusiveAndShared(StringRef Kind, Name LockName,
+ SourceLocation Loc1,
+ SourceLocation Loc2) override {
+ PartialDiagnosticAt Warning(Loc1,
+ S.PDiag(diag::warn_lock_exclusive_and_shared)
+ << Kind << LockName);
+ PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
+ << Kind << LockName);
Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
}
- void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
- AccessKind AK, SourceLocation Loc) {
- assert((POK == POK_VarAccess || POK == POK_VarDereference)
- && "Only works for variables");
+ void handleNoMutexHeld(StringRef Kind, const NamedDecl *D,
+ ProtectedOperationKind POK, AccessKind AK,
+ SourceLocation Loc) override {
+ assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
+ "Only works for variables");
unsigned DiagID = POK == POK_VarAccess?
diag::warn_variable_requires_any_lock:
diag::warn_var_deref_requires_any_lock;
@@ -1397,9 +1538,10 @@
Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
}
- void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK,
- Name LockName, LockKind LK, SourceLocation Loc,
- Name *PossibleMatch) {
+ void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
+ ProtectedOperationKind POK, Name LockName,
+ LockKind LK, SourceLocation Loc,
+ Name *PossibleMatch) override {
unsigned DiagID = 0;
if (PossibleMatch) {
switch (POK) {
@@ -1413,10 +1555,11 @@
DiagID = diag::warn_fun_requires_lock_precise;
break;
}
- PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
- << D->getNameAsString() << LockName << LK);
+ PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
+ << D->getNameAsString()
+ << LockName << LK);
PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
- << *PossibleMatch);
+ << *PossibleMatch);
Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
} else {
switch (POK) {
@@ -1430,15 +1573,17 @@
DiagID = diag::warn_fun_requires_lock;
break;
}
- PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
- << D->getNameAsString() << LockName << LK);
+ PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
+ << D->getNameAsString()
+ << LockName << LK);
Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
}
}
- void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) {
- PartialDiagnosticAt Warning(Loc,
- S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName);
+ void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
+ SourceLocation Loc) override {
+ PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
+ << Kind << FunName << LockName);
Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
}
};
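// Illustrative sketch (not part of this patch): the StringRef Kind parameter
// threaded through these callbacks carries the capability kind ("mutex",
// "role", ...) so the diagnostic text can name it instead of hard-coding
// "mutex". Assuming the long-standing thread-safety attribute spellings and
// -Wthread-safety enabled, a double acquisition exercises the new
// handleDoubleLock signature:
//
//   struct __attribute__((lockable)) Mutex {
//     void Lock()   __attribute__((exclusive_lock_function()));
//     void Unlock() __attribute__((unlock_function()));
//   };
//
//   Mutex mu;
//   void f() {
//     mu.Lock();
//     mu.Lock();    // handleDoubleLock("mutex", "mu", Loc) -> warn_double_lock
//     mu.Unlock();
//   }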
@@ -1461,8 +1606,8 @@
public:
ConsumedWarningsHandler(Sema &S) : S(S) {}
-
- void emitDiagnostics() {
+
+ void emitDiagnostics() override {
Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
for (DiagList::iterator I = Warnings.begin(), E = Warnings.end();
@@ -1476,8 +1621,9 @@
}
}
}
-
- void warnLoopStateMismatch(SourceLocation Loc, StringRef VariableName) {
+
+ void warnLoopStateMismatch(SourceLocation Loc,
+ StringRef VariableName) override {
PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
VariableName);
@@ -1487,7 +1633,7 @@
void warnParamReturnTypestateMismatch(SourceLocation Loc,
StringRef VariableName,
StringRef ExpectedState,
- StringRef ObservedState) {
+ StringRef ObservedState) override {
PartialDiagnosticAt Warning(Loc, S.PDiag(
diag::warn_param_return_typestate_mismatch) << VariableName <<
@@ -1497,7 +1643,7 @@
}
void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
- StringRef ObservedState) {
+ StringRef ObservedState) override {
PartialDiagnosticAt Warning(Loc, S.PDiag(
diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
@@ -1506,7 +1652,7 @@
}
void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
- StringRef TypeName) {
+ StringRef TypeName) override {
PartialDiagnosticAt Warning(Loc, S.PDiag(
diag::warn_return_typestate_for_unconsumable_type) << TypeName);
@@ -1514,7 +1660,7 @@
}
void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
- StringRef ObservedState) {
+ StringRef ObservedState) override {
PartialDiagnosticAt Warning(Loc, S.PDiag(
diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
@@ -1523,7 +1669,7 @@
}
void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
- SourceLocation Loc) {
+ SourceLocation Loc) override {
PartialDiagnosticAt Warning(Loc, S.PDiag(
diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
@@ -1532,7 +1678,7 @@
}
void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
- StringRef State, SourceLocation Loc) {
+ StringRef State, SourceLocation Loc) override {
PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
MethodName << VariableName << State);
@@ -1554,6 +1700,11 @@
enableConsumedAnalysis = 0;
}
+static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
+ return (unsigned) D.getDiagnosticLevel(diag, SourceLocation()) !=
+ DiagnosticsEngine::Ignored;
+}
+
clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
: S(s),
NumFunctionsAnalyzed(0),
@@ -1565,16 +1716,21 @@
MaxUninitAnalysisVariablesPerFunction(0),
NumUninitAnalysisBlockVisits(0),
MaxUninitAnalysisBlockVisitsPerFunction(0) {
+
+ using namespace diag;
DiagnosticsEngine &D = S.getDiagnostics();
- DefaultPolicy.enableCheckUnreachable = (unsigned)
- (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
- DiagnosticsEngine::Ignored);
- DefaultPolicy.enableThreadSafetyAnalysis = (unsigned)
- (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) !=
- DiagnosticsEngine::Ignored);
- DefaultPolicy.enableConsumedAnalysis = (unsigned)
- (D.getDiagnosticLevel(diag::warn_use_in_invalid_state, SourceLocation()) !=
- DiagnosticsEngine::Ignored);
+
+ DefaultPolicy.enableCheckUnreachable =
+ isEnabled(D, warn_unreachable) ||
+ isEnabled(D, warn_unreachable_break) ||
+ isEnabled(D, warn_unreachable_return) ||
+ isEnabled(D, warn_unreachable_loop_increment);
+
+ DefaultPolicy.enableThreadSafetyAnalysis =
+ isEnabled(D, warn_double_lock);
+
+ DefaultPolicy.enableConsumedAnalysis =
+ isEnabled(D, warn_use_in_invalid_state);
}
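// Illustrative note (not part of this patch): enableCheckUnreachable is now
// an OR over the whole unreachable-code family, so enabling any one of the
// sub-diagnostics is enough to run the analysis. Assuming the usual flag
// spellings for these warnings, e.g.
//
//   clang -fsyntax-only -Wunreachable-code-loop-increment foo.c
//
// runs the reachability analysis even though plain -Wunreachable-code
// remains off.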
static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
@@ -1629,6 +1785,7 @@
AC.getCFGBuildOptions().AddInitializers = true;
AC.getCFGBuildOptions().AddImplicitDtors = true;
AC.getCFGBuildOptions().AddTemporaryDtors = true;
+ AC.getCFGBuildOptions().AddCXXNewAllocator = false;
// Force that certain expressions appear as CFGElements in the CFG. This
// is used to speed up various analyses.
@@ -1789,6 +1946,16 @@
D->getLocStart()) != DiagnosticsEngine::Ignored)
diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());
+
+ // Check for infinite self-recursion in functions
+ if (Diags.getDiagnosticLevel(diag::warn_infinite_recursive_function,
+ D->getLocStart())
+ != DiagnosticsEngine::Ignored) {
+ if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
+ checkRecursiveFunction(S, FD, Body, AC);
+ }
+ }
+
// Collect statistics about the CFG if it was built.
if (S.CollectStats && AC.isCFGBuilt()) {
++NumFunctionsAnalyzed;