//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/Analysis/Analyses/PostOrderCFGView.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/SaveAndRestore.h"
#include <utility>

using namespace clang;

#define DEBUG_LOGGING 0

static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}
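
// Illustrative example (names here are hypothetical, not from this file): only
// simple local scalars and vectors declared directly in 'dc' are tracked, e.g.
//
//   void f() {
//     int x;               // tracked: local scalar declared directly in f
//     static int y;        // not tracked: has global storage
//     struct S { int a; } s; // not tracked: not a scalar or vector type
//   }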

//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return llvm::Optional<unsigned>();
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };

static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}
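
// Because the values form the 2-bit encoding above, the bitwise OR used by
// CFGBlockValues::mergeIntoScratch() computes the merge directly, for example:
//
//   Unknown       (00) | Initialized      (01) == Initialized      (01)
//   Initialized   (01) | Uninitialized    (10) == MayUninitialized (11)
//   Uninitialized (10) | MayUninitialized (11) == MayUninitialized (11)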

namespace {

typedef llvm::PackedVector<Value, 2, llvm::SmallBitVector> ValueVector;

class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;
public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};
} // end anonymous namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (unsigned i = 0; i < n; ++i)
    vals[i].resize(decls);
}

#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
//====------------------------------------------------------------------------//

namespace {
class DataflowWorklist {
  PostOrderCFGView::iterator PO_I, PO_E;
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg, PostOrderCFGView &view)
    : PO_I(view.begin()), PO_E(view.end()),
      enqueuedBlocks(cfg.getNumBlockIDs(), true) {
    // Treat the first block as already analyzed.
    if (PO_I != PO_E) {
      assert(*PO_I == &cfg.getEntry());
      enqueuedBlocks[(*PO_I)->getBlockID()] = false;
      ++PO_I;
    }
  }

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}

void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
}

const CFGBlock *DataflowWorklist::dequeue() {
  const CFGBlock *B = 0;

  // First dequeue from the worklist. This can represent
  // updates along backedges that we want propagated as quickly as possible.
  if (!worklist.empty()) {
    B = worklist.back();
    worklist.pop_back();
  }
  // Next dequeue from the initial reverse post order. This is the
  // theoretical ideal in the presence of no back edges.
  else if (PO_I != PO_E) {
    B = *PO_I;
    ++PO_I;
  }
  else {
    return 0;
  }

  assert(enqueuedBlocks[B->getBlockID()] == true);
  enqueuedBlocks[B->getBlockID()] = false;
  return B;
}
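
// Taken together with enqueueSuccessors(), the effect is roughly: blocks are
// first processed in the initial reverse post order, and the explicit worklist
// re-propagates changes along back edges until the block values stop changing.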

//------------------------------------------------------------------------====//
// Classification of DeclRefExprs as use or initialization.
//====------------------------------------------------------------------------//

namespace {
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

/// If E is an expression comprising a reference to a single variable, find that
/// variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const DeclRefExpr *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(0, 0);
}

/// \brief Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    Ignore
  };

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr*, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);

  void operator()(Stmt *S) { Visit(S); }

  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};
}

static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (Expr *Init = VD->getInit()) {
    const DeclRefExpr *DRE
      = dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return 0;
}

void ClassifyRefs::classify(const Expr *E, Class C) {
  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}

void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment does not affect
  // whether a variable is uninitialized, and there's no point counting it as a
  // use.
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign)
    classify(BO->getLHS(), Ignore);
}

void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}

void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // If a value is passed by const reference to a function, we should not assume
  // that it is initialized by the call, and we conservatively do not assume
  // that it is used.
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I)
    if ((*I)->getType().isConstQualified() && (*I)->isGLValue())
      classify(*I, Ignore);
}

void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (CStyleCastExpr *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}
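
// A rough, illustrative summary of how these visitors classify references in
// ordinary code ('use' is a hypothetical function, not from this file):
//
//   int x;
//   x = 1;        // 'x' on the LHS of '='  -> Ignore (treated as an init by
//                 //                            TransferFunctions)
//   x += 1;       // compound assignment    -> Use
//   use(x);       // lvalue-to-rvalue load  -> Use
//   (void)x;      // C-style cast to void   -> Ignore
//   int y = y;    // self-initialization    -> SelfInit
//
// When several classifications apply to the same DeclRefExpr, classify() keeps
// the numerically largest one, so Ignore wins over Use.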

//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
//====------------------------------------------------------------------------//

namespace {
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler &handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
    : vals(vals), cfg(cfg), block(block), ac(ac),
      classification(classification), objCNoRet(ac.getASTContext()),
      handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    SmallVector<const CFGBlock*, 32> Queue;
    SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.back();
      Queue.pop_back();
      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (vals.getValue(Pred, B, vd) == Initialized)
          // This block initializes the variable.
          continue;

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};
}

void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isUninitialized(v))
    handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (DeclStmt *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
       e = bd->capture_end(); i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}

void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  if (Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // After a call to a function like setjmp or vfork, any variable which is
      // initialized anywhere within this function may now be initialized. For
      // now, just assume such a call initializes all variables. FIXME: Only
      // mark variables as initialized if they have an initializer which is
      // reachable from here.
      vals.setAllScratchValues(Initialized);
    }
    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled like "analyzer_noreturn" are often used to denote
      // "panic" functions that in special debug situations can still return,
      // but for the most part should not be treated as returning. This is a
      // useful annotation borrowed from the static analyzer that is useful for
      // suppressing branch-specific false positives when we call one of these
      // functions but keep pretending the path continues (when in reality the
      // user doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  }
}

void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}

void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}

//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//

static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler &handler) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
    }
  }
  return vals.updateValueVectorWithScratch(block);
}
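
// Note: runOnBlock() is the per-block step of a conventional forward dataflow
// analysis: merge the values flowing out of already-analyzed predecessors,
// apply the transfer function to each statement, and report whether the
// block's values changed so the driver below knows to re-enqueue successors.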

/// PruneBlocksHandler is a special UninitVariablesHandler that is used
/// to detect when a CFGBlock has any *potential* use of an uninitialized
/// variable.  It is mainly used to prune out work during the final
/// reporting pass.
namespace {
struct PruneBlocksHandler : public UninitVariablesHandler {
  PruneBlocksHandler(unsigned numBlocks)
    : hadUse(numBlocks, false), hadAnyUse(false),
      currentBlock(0) {}

  virtual ~PruneBlocksHandler() {}

  /// Records if a CFGBlock had a potential use of an uninitialized variable.
  llvm::BitVector hadUse;

  /// Records if any CFGBlock had a potential use of an uninitialized variable.
  bool hadAnyUse;

  /// The current block to scribble use information.
  unsigned currentBlock;

  virtual void handleUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  /// Called when the uninitialized variable analysis detects the
  /// idiom 'int x = x'.  All other uses of 'x' within the initializer
  /// are handled by handleUseOfUninitVariable.
  virtual void handleSelfInit(const VarDecl *vd) {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }
};
}

void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg, *ac.getAnalysis<PostOrderCFGView>());
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  PruneBlocksHandler PBH(cfg.getNumBlockIDs());

  while (const CFGBlock *block = worklist.dequeue()) {
    PBH.currentBlock = block->getBlockID();

    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  if (!PBH.hadAnyUse)
    return;

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (PBH.hadUse[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
      ++stats.NumBlockVisits;
    }
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}