blob: 824f76d952d8db908260600c1776cab50bf2c28a [file] [log] [blame]
Ted Kremenek294fd0a2011-08-20 06:00:03 +00001//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file defines ExprEngine's support for calls and returns.
11//
12//===----------------------------------------------------------------------===//
13
Anna Zakse90d3f82012-08-09 00:21:33 +000014#define DEBUG_TYPE "ExprEngine"
15
Ted Kremenek294fd0a2011-08-20 06:00:03 +000016#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
Jordan Rose48314cf2012-10-03 01:08:35 +000017#include "clang/AST/CXXInheritance.h"
Ted Kremenek294fd0a2011-08-20 06:00:03 +000018#include "clang/AST/DeclCXX.h"
Jordan Rose6fe4dfb2012-08-27 18:39:22 +000019#include "clang/AST/ParentMap.h"
Chandler Carruth55fc8732012-12-04 09:13:33 +000020#include "clang/Analysis/Analyses/LiveVariables.h"
21#include "clang/StaticAnalyzer/Core/CheckerManager.h"
22#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
Benjamin Kramer4a5f7242012-04-01 19:30:51 +000023#include "llvm/ADT/SmallSet.h"
Anna Zakse90d3f82012-08-09 00:21:33 +000024#include "llvm/ADT/Statistic.h"
Benjamin Kramer4a5f7242012-04-01 19:30:51 +000025#include "llvm/Support/SaveAndRestore.h"
Ted Kremenek294fd0a2011-08-20 06:00:03 +000026
27using namespace clang;
28using namespace ento;
29
Anna Zakse90d3f82012-08-09 00:21:33 +000030STATISTIC(NumOfDynamicDispatchPathSplits,
31 "The # of times we split the path due to imprecise dynamic dispatch info");
32
Anna Zaks210f5a22012-08-27 18:38:32 +000033STATISTIC(NumInlinedCalls,
34 "The # of times we inlined a call");
35
Anna Zaks79596712012-12-17 20:08:51 +000036STATISTIC(NumReachedInlineCountMax,
37 "The # of times we reached inline count maximum");
38
Ted Kremenek3070e132012-01-07 01:03:17 +000039void ExprEngine::processCallEnter(CallEnter CE, ExplodedNode *Pred) {
40 // Get the entry block in the CFG of the callee.
Ted Kremenek0849ade2012-01-12 19:25:46 +000041 const StackFrameContext *calleeCtx = CE.getCalleeContext();
42 const CFG *CalleeCFG = calleeCtx->getCFG();
Ted Kremenek3070e132012-01-07 01:03:17 +000043 const CFGBlock *Entry = &(CalleeCFG->getEntry());
44
45 // Validate the CFG.
46 assert(Entry->empty());
47 assert(Entry->succ_size() == 1);
48
49 // Get the solitary sucessor.
50 const CFGBlock *Succ = *(Entry->succ_begin());
51
52 // Construct an edge representing the starting location in the callee.
Ted Kremenek0849ade2012-01-12 19:25:46 +000053 BlockEdge Loc(Entry, Succ, calleeCtx);
Ted Kremenek3070e132012-01-07 01:03:17 +000054
Jordan Rosee54cfc72012-07-10 22:07:57 +000055 ProgramStateRef state = Pred->getState();
Ted Kremenek3070e132012-01-07 01:03:17 +000056
57 // Construct a new node and add it to the worklist.
58 bool isNew;
59 ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
60 Node->addPredecessor(Pred, G);
61 if (isNew)
62 Engine.getWorkList()->enqueue(Node);
Ted Kremenek294fd0a2011-08-20 06:00:03 +000063}
64
/// Find the last statement on the path to the exploded node and the
/// corresponding Block.
///
/// Walks backwards through \p Node's predecessor chain, only inspecting
/// program points that belong to the node's own stack frame, until it finds
/// a StmtPoint (or a CallExitEnd whose call site carries a statement).
/// Either or both components of the returned pair may be null when no such
/// statement exists on the path.
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = 0;
  const CFGBlock *Blk = 0;
  const StackFrameContext *SF =
          Node->getLocation().getLocationContext()->getCurrentStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getLocationContext()->getCurrentStackFrame() == SF) {
      if (const StmtPoint *SP = dyn_cast<StmtPoint>(&PP)) {
        S = SP->getStmt();
        break;
      } else if (const CallExitEnd *CEE = dyn_cast<CallExitEnd>(&PP)) {
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        const CallEnter *CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      } else if (const BlockEdge *BE = dyn_cast<BlockEdge>(&PP)) {
        // Remember the most recent source block seen in this frame; it is
        // reported alongside whatever statement we eventually find.
        Blk = BE->getSrc();
      }
    } else if (const CallEnter *CE = dyn_cast<CallEnter>(&PP)) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    // Ran out of predecessors without finding anything: report nothing.
    if (Node->pred_empty())
      return std::pair<const Stmt*, const CFGBlock*>((Stmt*)0, (CFGBlock*)0);

    Node = *Node->pred_begin();
  }

  return std::pair<const Stmt*, const CFGBlock*>(S, Blk);
}
115
Jordan Rose48314cf2012-10-03 01:08:35 +0000116/// Adjusts a return value when the called function's return type does not
117/// match the caller's expression type. This can happen when a dynamic call
118/// is devirtualized, and the overridding method has a covariant (more specific)
119/// return type than the parent's method. For C++ objects, this means we need
120/// to add base casts.
121static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
122 StoreManager &StoreMgr) {
123 // For now, the only adjustments we handle apply only to locations.
124 if (!isa<Loc>(V))
125 return V;
126
127 // If the types already match, don't do any unnecessary work.
Anna Zakse7ad14e2012-11-12 22:06:24 +0000128 ExpectedTy = ExpectedTy.getCanonicalType();
129 ActualTy = ActualTy.getCanonicalType();
Jordan Rose48314cf2012-10-03 01:08:35 +0000130 if (ExpectedTy == ActualTy)
131 return V;
132
133 // No adjustment is needed between Objective-C pointer types.
134 if (ExpectedTy->isObjCObjectPointerType() &&
135 ActualTy->isObjCObjectPointerType())
136 return V;
137
138 // C++ object pointers may need "derived-to-base" casts.
139 const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
140 const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
141 if (ExpectedClass && ActualClass) {
142 CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
143 /*DetectVirtual=*/false);
144 if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
145 !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
146 return StoreMgr.evalDerivedToBase(V, Paths.front());
147 }
148 }
149
150 // Unfortunately, Objective-C does not enforce that overridden methods have
151 // covariant return types, so we can't assert that that never happens.
152 // Be safe and return UnknownVal().
153 return UnknownVal();
154}
155
Anna Zaks8501b7a2012-11-03 02:54:20 +0000156void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
157 ExplodedNode *Pred,
158 ExplodedNodeSet &Dst) {
Anna Zaks8501b7a2012-11-03 02:54:20 +0000159 // Find the last statement in the function and the corresponding basic block.
160 const Stmt *LastSt = 0;
161 const CFGBlock *Blk = 0;
162 llvm::tie(LastSt, Blk) = getLastStmt(Pred);
163 if (!Blk || !LastSt) {
Jordan Rose84c48452012-11-15 19:11:27 +0000164 Dst.Add(Pred);
Anna Zaks8501b7a2012-11-03 02:54:20 +0000165 return;
166 }
Anna Zaks8501b7a2012-11-03 02:54:20 +0000167
Jordan Rose63bc1862012-11-15 19:11:43 +0000168 // Here, we destroy the current location context. We use the current
169 // function's entire body as a diagnostic statement, with which the program
170 // point will be associated. However, we only want to use LastStmt as a
171 // reference for what to clean up if it's a ReturnStmt; otherwise, everything
172 // is dead.
Jordan Rose84c48452012-11-15 19:11:27 +0000173 SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
Jordan Rose63bc1862012-11-15 19:11:43 +0000174 const LocationContext *LCtx = Pred->getLocationContext();
175 removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
176 LCtx->getAnalysisDeclContext()->getBody(),
Anna Zaks8501b7a2012-11-03 02:54:20 +0000177 ProgramPoint::PostStmtPurgeDeadSymbolsKind);
Anna Zaks8501b7a2012-11-03 02:54:20 +0000178}
179
Anna Zaksbae930d2012-11-13 00:13:44 +0000180static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
Anna Zaksd51db492012-11-12 23:40:29 +0000181 const StackFrameContext *calleeCtx) {
182 const Decl *RuntimeCallee = calleeCtx->getDecl();
183 const Decl *StaticDecl = Call->getDecl();
Anna Zaksbae930d2012-11-13 00:13:44 +0000184 assert(RuntimeCallee);
185 if (!StaticDecl)
186 return true;
Anna Zaksd51db492012-11-12 23:40:29 +0000187 return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
188}
189
/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of call exit sequence)
/// 2. Bind the return value
/// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd (switch to the caller context)
/// 5. PostStmt<CallExpr>
void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
  // Step 1 CEBNode was generated before the call.

  const StackFrameContext *calleeCtx =
      CEBNode->getLocationContext()->getCurrentStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *callerCtx =
    calleeCtx->getParent()->getCurrentStackFrame();

  // The call site may be null for implicitly-generated calls.
  const Stmt *CE = calleeCtx->getCallSite();
  ProgramStateRef state = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = 0;
  const CFGBlock *Blk = 0;
  llvm::tie(LastSt, Blk) = getLastStmt(CEBNode);

  // Generate a CallEvent /before/ cleaning the state, so that we can get the
  // correct value for 'this' (if necessary).
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);

  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      SVal V = state->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      // This only matters when the call was devirtualized to a different
      // decl than the one named at the call site (covariant returns).
      if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
        QualType ReturnedTy =
          CallEvent::getDeclaredResultType(calleeCtx->getDecl());
        if (!ReturnedTy.isNull()) {
          if (const Expr *Ex = dyn_cast<Expr>(CE)) {
            V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
                                  getStoreManager());
          }
        }
      }

      state = state->BindExpr(CE, callerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      loc::MemRegionVal This =
        svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
      SVal ThisV = state->getSVal(This);

      // If the constructed object is a prvalue, get its bindings.
      // Note that we have to be careful here because constructors embedded
      // in DeclStmts are not marked as lvalues.
      if (!CCE->isGLValue())
        if (const MemRegion *MR = ThisV.getAsRegion())
          if (isa<CXXTempObjectRegion>(MR))
            ThisV = state->getSVal(cast<Loc>(ThisV));

      state = state->BindExpr(CCE, callerCtx, ThisV);
    }
  }

  // Step 3: BindedRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report the issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag retValBind("ExprEngine : Bind Return Value");
    PostStmt Loc(LastSt, calleeCtx, &retValBind);
    bool isNew;
    ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
    BindedRetNode->addPredecessor(CEBNode, G);
    // If this (location, state) node already exists, the path has been
    // explored before; stop here.
    if (!isNew)
      return;

    NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
    currBldrCtx = &Ctx;
    // Here, we call the Symbol Reaper with 0 statement and callee location
    // context, telling it to clean up everything in the callee's context
    // (and its children). We use the callee's function body as a diagnostic
    // statement, with which the program point will be associated.
    removeDead(BindedRetNode, CleanedNodes, 0, calleeCtx,
               calleeCtx->getAnalysisDeclContext()->getBody(),
               ProgramPoint::PostStmtPurgeDeadSymbolsKind);
    currBldrCtx = 0;
  } else {
    // Purging is disabled (or we have no reference statement); carry the
    // state forward unmodified.
    CleanedNodes.Add(CEBNode);
  }

  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
                                 E = CleanedNodes.end(); I != E; ++I) {

    // Step 4: Generate the CallExit and leave the callee's context.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(calleeCtx, callerCtx);
    bool isNew;
    ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();
    ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
    CEENode->addPredecessor(*I, G);
    if (!isNew)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList
    NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
    SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
                                                      &Ctx);
    SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());

    // Re-wrap the call event around the cleaned state before notifying
    // checkers.
    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostCall;
    getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
                                               *UpdatedCall, *this,
                                               /*WasInlined=*/true);

    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*WasInlined=*/true);
    } else if (CE) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*WasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
    for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
         PSI != PSE; ++PSI) {
      Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
                                    calleeCtx->getIndex()+1);
    }
  }
}
339
Anna Zaks4ea9b892012-09-10 23:35:11 +0000340void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
Anna Zaks7229d002012-09-10 22:37:19 +0000341 bool &IsRecursive, unsigned &StackDepth) {
342 IsRecursive = false;
343 StackDepth = 0;
Anna Zaks4ea9b892012-09-10 23:35:11 +0000344
Ted Kremenek0849ade2012-01-12 19:25:46 +0000345 while (LCtx) {
Anna Zaks7229d002012-09-10 22:37:19 +0000346 if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
Anna Zaks4ea9b892012-09-10 23:35:11 +0000347 const Decl *DI = SFC->getDecl();
348
349 // Mark recursive (and mutually recursive) functions and always count
350 // them when measuring the stack depth.
351 if (DI == D) {
Anna Zaks7229d002012-09-10 22:37:19 +0000352 IsRecursive = true;
Anna Zaks4ea9b892012-09-10 23:35:11 +0000353 ++StackDepth;
354 LCtx = LCtx->getParent();
355 continue;
356 }
357
358 // Do not count the small functions when determining the stack depth.
359 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
360 const CFG *CalleeCFG = CalleeADC->getCFG();
361 if (CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
362 ++StackDepth;
Anna Zaks7229d002012-09-10 22:37:19 +0000363 }
Ted Kremenek0849ade2012-01-12 19:25:46 +0000364 LCtx = LCtx->getParent();
365 }
Anna Zaks4ea9b892012-09-10 23:35:11 +0000366
Ted Kremenek0849ade2012-01-12 19:25:46 +0000367}
368
Jordan Rose81fb50e2012-09-10 21:27:35 +0000369static bool IsInStdNamespace(const FunctionDecl *FD) {
370 const DeclContext *DC = FD->getEnclosingNamespaceContext();
371 const NamespaceDecl *ND = dyn_cast<NamespaceDecl>(DC);
372 if (!ND)
373 return false;
374
375 while (const DeclContext *Parent = ND->getParent()) {
376 if (!isa<NamespaceDecl>(Parent))
377 break;
378 ND = cast<NamespaceDecl>(Parent);
379 }
380
381 return ND->getName() == "std";
382}
383
Anna Zaks6cc09692012-03-13 22:15:58 +0000384// Determine if we should inline the call.
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000385bool ExprEngine::shouldInlineDecl(const Decl *D, ExplodedNode *Pred) {
386 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
Anna Zaks6cc09692012-03-13 22:15:58 +0000387 const CFG *CalleeCFG = CalleeADC->getCFG();
388
Ted Kremenek01561d12012-04-17 01:36:03 +0000389 // It is possible that the CFG cannot be constructed.
390 // Be safe, and check if the CalleeCFG is valid.
391 if (!CalleeCFG)
392 return false;
393
Anna Zaks7229d002012-09-10 22:37:19 +0000394 bool IsRecursive = false;
395 unsigned StackDepth = 0;
396 examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
397 if ((StackDepth >= AMgr.options.InlineMaxStackDepth) &&
398 ((CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
399 || IsRecursive))
Anna Zaks6cc09692012-03-13 22:15:58 +0000400 return false;
401
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000402 if (Engine.FunctionSummaries->hasReachedMaxBlockCount(D))
Anna Zaks3bbd8cd2012-03-30 05:48:10 +0000403 return false;
404
Anna Zaks6bbe1442013-01-30 19:12:36 +0000405 if (CalleeCFG->getNumBlockIDs() > AMgr.options.getMaxInlinableSize())
Anna Zaks6cc09692012-03-13 22:15:58 +0000406 return false;
407
Ted Kremenek10f77ad2012-06-22 23:55:50 +0000408 // Do not inline variadic calls (for now).
409 if (const BlockDecl *BD = dyn_cast<BlockDecl>(D)) {
410 if (BD->isVariadic())
411 return false;
Anna Zaks5903a372012-03-27 20:02:53 +0000412 }
Ted Kremenek10f77ad2012-06-22 23:55:50 +0000413 else if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
414 if (FD->isVariadic())
415 return false;
416 }
Anna Zaks5903a372012-03-27 20:02:53 +0000417
Jordan Rose81fb50e2012-09-10 21:27:35 +0000418 if (getContext().getLangOpts().CPlusPlus) {
419 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
420 // Conditionally allow the inlining of template functions.
Anna Zaks79596712012-12-17 20:08:51 +0000421 if (!AMgr.options.mayInlineTemplateFunctions())
Jordan Rose81fb50e2012-09-10 21:27:35 +0000422 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
423 return false;
424
425 // Conditionally allow the inlining of C++ standard library functions.
Anna Zaks79596712012-12-17 20:08:51 +0000426 if (!AMgr.options.mayInlineCXXStandardLibrary())
Jordan Rose81fb50e2012-09-10 21:27:35 +0000427 if (getContext().getSourceManager().isInSystemHeader(FD->getLocation()))
428 if (IsInStdNamespace(FD))
429 return false;
430 }
431 }
432
Ted Kremenekd4aeb802012-07-02 20:21:52 +0000433 // It is possible that the live variables analysis cannot be
434 // run. If so, bail out.
435 if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
436 return false;
437
Anna Zaks79596712012-12-17 20:08:51 +0000438 if (Engine.FunctionSummaries->getNumTimesInlined(D) >
439 AMgr.options.getMaxTimesInlineLarge() &&
440 CalleeCFG->getNumBlockIDs() > 13) {
441 NumReachedInlineCountMax++;
442 return false;
443 }
444 Engine.FunctionSummaries->bumpNumTimesInlined(D);
445
Ted Kremenek10f77ad2012-06-22 23:55:50 +0000446 return true;
Anna Zaks5903a372012-03-27 20:02:53 +0000447}
448
// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is the map from the receiver region to a DynamicDispatchMode value
// (stored as 'unsigned'), specifying whether we consider this region's type
// information precise or not along the given path.
namespace {
  enum DynamicDispatchMode {
    // The receiver's dynamic type was trusted and the call was inlined.
    DynamicDispatchModeInlined = 1,
    // The call was evaluated conservatively (not inlined) on this path.
    DynamicDispatchModeConservative
  };
}
REGISTER_TRAIT_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                                 CLANG_ENTO_PROGRAMSTATE_MAP(const MemRegion *,
                                                             unsigned))
Anna Zaks5903a372012-03-27 20:02:53 +0000464
Anna Zakse90d3f82012-08-09 00:21:33 +0000465bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
466 NodeBuilder &Bldr, ExplodedNode *Pred,
467 ProgramStateRef State) {
468 assert(D);
Jordan Roseee158bc2012-07-09 16:54:49 +0000469
Jordan Rosec36b30c2012-07-12 00:16:25 +0000470 const LocationContext *CurLC = Pred->getLocationContext();
471 const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000472 const LocationContext *ParentOfCallee = 0;
Jordan Rose69f87c92012-07-02 19:28:09 +0000473
Ted Kremenek622b6fb2012-10-01 18:28:19 +0000474 AnalyzerOptions &Opts = getAnalysisManager().options;
Jordan Rosede5277f2012-08-31 17:06:49 +0000475
Jordan Roseef158312012-07-31 01:07:55 +0000476 // FIXME: Refactor this check into a hypothetical CallEvent::canInline.
Jordan Rose69f87c92012-07-02 19:28:09 +0000477 switch (Call.getKind()) {
478 case CE_Function:
Jordan Rose2f9c40a2012-07-31 18:22:40 +0000479 break;
Jordan Rose69f87c92012-07-02 19:28:09 +0000480 case CE_CXXMember:
Jordan Rosee54cfc72012-07-10 22:07:57 +0000481 case CE_CXXMemberOperator:
Jordan Rosede5277f2012-08-31 17:06:49 +0000482 if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
Jordan Rose2f9c40a2012-07-31 18:22:40 +0000483 return false;
Jordan Rose69f87c92012-07-02 19:28:09 +0000484 break;
Jordan Roseef158312012-07-31 01:07:55 +0000485 case CE_CXXConstructor: {
Jordan Rosede5277f2012-08-31 17:06:49 +0000486 if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
Jordan Rose2f9c40a2012-07-31 18:22:40 +0000487 return false;
488
Jordan Roseef158312012-07-31 01:07:55 +0000489 const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);
490
491 // FIXME: We don't handle constructors or destructors for arrays properly.
492 const MemRegion *Target = Ctor.getCXXThisVal().getAsRegion();
493 if (Target && isa<ElementRegion>(Target))
494 return false;
495
Jordan Rose6fe4dfb2012-08-27 18:39:22 +0000496 // FIXME: This is a hack. We don't use the correct region for a new
497 // expression, so if we inline the constructor its result will just be
498 // thrown away. This short-term hack is tracked in <rdar://problem/12180598>
499 // and the longer-term possible fix is discussed in PR12014.
500 const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
501 if (const Stmt *Parent = CurLC->getParentMap().getParent(CtorExpr))
502 if (isa<CXXNewExpr>(Parent))
503 return false;
504
Jordan Rosede5277f2012-08-31 17:06:49 +0000505 // Inlining constructors requires including initializers in the CFG.
506 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
507 assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
Jordan Rose9eb214a2012-09-01 19:15:13 +0000508 (void)ADC;
Jordan Rosede5277f2012-08-31 17:06:49 +0000509
Jordan Rosec210cb72012-08-27 17:50:07 +0000510 // If the destructor is trivial, it's always safe to inline the constructor.
511 if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
512 break;
513
Jordan Rosede5277f2012-08-31 17:06:49 +0000514 // For other types, only inline constructors if destructor inlining is
515 // also enabled.
516 if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
Jordan Rosec210cb72012-08-27 17:50:07 +0000517 return false;
518
Jordan Roseef158312012-07-31 01:07:55 +0000519 // FIXME: This is a hack. We don't handle temporary destructors
520 // right now, so we shouldn't inline their constructors.
Jordan Roseef158312012-07-31 01:07:55 +0000521 if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete)
522 if (!Target || !isa<DeclRegion>(Target))
523 return false;
524
525 break;
526 }
Jordan Roseda5fc532012-07-26 20:04:00 +0000527 case CE_CXXDestructor: {
Jordan Rosede5277f2012-08-31 17:06:49 +0000528 if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
Jordan Rose2f9c40a2012-07-31 18:22:40 +0000529 return false;
530
Jordan Rosede5277f2012-08-31 17:06:49 +0000531 // Inlining destructors requires building the CFG correctly.
Jordan Roseda5fc532012-07-26 20:04:00 +0000532 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
Jordan Rosede5277f2012-08-31 17:06:49 +0000533 assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
Jordan Rose9eb214a2012-09-01 19:15:13 +0000534 (void)ADC;
Jordan Rose3a0a9e32012-07-26 20:04:21 +0000535
Jordan Roseef158312012-07-31 01:07:55 +0000536 const CXXDestructorCall &Dtor = cast<CXXDestructorCall>(Call);
537
Jordan Rosee460c462012-07-26 20:04:25 +0000538 // FIXME: We don't handle constructors or destructors for arrays properly.
Jordan Roseef158312012-07-31 01:07:55 +0000539 const MemRegion *Target = Dtor.getCXXThisVal().getAsRegion();
Jordan Rosee460c462012-07-26 20:04:25 +0000540 if (Target && isa<ElementRegion>(Target))
541 return false;
542
Jordan Roseda5fc532012-07-26 20:04:00 +0000543 break;
544 }
Jordan Rose70cbf3c2012-07-02 22:21:47 +0000545 case CE_CXXAllocator:
546 // Do not inline allocators until we model deallocators.
547 // This is unfortunate, but basically necessary for smart pointers and such.
548 return false;
Jordan Rose69f87c92012-07-02 19:28:09 +0000549 case CE_Block: {
550 const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
Jordan Roseee158bc2012-07-09 16:54:49 +0000551 assert(BR && "If we have the block definition we should have its region");
Jordan Rose69f87c92012-07-02 19:28:09 +0000552 AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000553 ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
Jordan Rose69f87c92012-07-02 19:28:09 +0000554 cast<BlockDecl>(D),
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000555 BR);
Jordan Rose69f87c92012-07-02 19:28:09 +0000556 break;
557 }
558 case CE_ObjCMessage:
Anna Zaks57330ee2012-09-10 22:56:41 +0000559 if (!Opts.mayInlineObjCMethod())
560 return false;
Anna Zaks73f05632013-01-24 23:15:25 +0000561 AnalyzerOptions &Options = getAnalysisManager().options;
Anna Zaksbfa9ab82013-01-24 23:15:30 +0000562 if (!(Options.getIPAMode() == IPAK_DynamicDispatch ||
563 Options.getIPAMode() == IPAK_DynamicDispatchBifurcate))
Anna Zakse13056a2012-07-30 20:31:18 +0000564 return false;
Anna Zaks9dc51672012-07-26 00:27:51 +0000565 break;
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000566 }
Jordan Roseee158bc2012-07-09 16:54:49 +0000567
568 if (!shouldInlineDecl(D, Pred))
Ted Kremenek256ef642012-01-11 01:06:27 +0000569 return false;
570
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000571 if (!ParentOfCallee)
572 ParentOfCallee = CallerSFC;
Anna Zaks8235f9c2012-03-02 19:05:03 +0000573
Jordan Rose852aa0d2012-07-10 22:07:52 +0000574 // This may be NULL, but that's fine.
Jordan Rose69f87c92012-07-02 19:28:09 +0000575 const Expr *CallE = Call.getOriginExpr();
Jordan Rose69f87c92012-07-02 19:28:09 +0000576
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000577 // Construct a new stack frame for the callee.
578 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
579 const StackFrameContext *CalleeSFC =
Jordan Rose69f87c92012-07-02 19:28:09 +0000580 CalleeADC->getStackFrame(ParentOfCallee, CallE,
Ted Kremenek66c486f2012-08-22 06:26:15 +0000581 currBldrCtx->getBlock(),
582 currStmtIdx);
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000583
Jordan Rosec36b30c2012-07-12 00:16:25 +0000584 CallEnter Loc(CallE, CalleeSFC, CurLC);
Jordan Rosee54cfc72012-07-10 22:07:57 +0000585
586 // Construct a new state which contains the mapping from actual to
587 // formal arguments.
Anna Zakse90d3f82012-08-09 00:21:33 +0000588 State = State->enterStackFrame(Call, CalleeSFC);
Jordan Rosee54cfc72012-07-10 22:07:57 +0000589
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000590 bool isNew;
Jordan Rosee54cfc72012-07-10 22:07:57 +0000591 if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000592 N->addPredecessor(Pred, G);
593 if (isNew)
594 Engine.getWorkList()->enqueue(N);
Ted Kremenek256ef642012-01-11 01:06:27 +0000595 }
Anna Zakse90d3f82012-08-09 00:21:33 +0000596
597 // If we decided to inline the call, the successor has been manually
598 // added onto the work list so remove it from the node builder.
599 Bldr.takeNodes(Pred);
600
Anna Zaks210f5a22012-08-27 18:38:32 +0000601 NumInlinedCalls++;
602
Anna Zaksfbcb3f12012-08-30 23:42:02 +0000603 // Mark the decl as visited.
604 if (VisitedCallees)
605 VisitedCallees->insert(D);
606
Ted Kremenek7fa9b4f2012-06-01 20:04:04 +0000607 return true;
Ted Kremenek256ef642012-01-11 01:06:27 +0000608}
609
Anna Zakse81ce252012-07-19 23:38:13 +0000610static ProgramStateRef getInlineFailedState(ProgramStateRef State,
Jordan Rose69f87c92012-07-02 19:28:09 +0000611 const Stmt *CallE) {
Jordan Rosebdc691f2013-01-14 18:58:42 +0000612 const void *ReplayState = State->get<ReplayWithoutInlining>();
Anna Zaks5903a372012-03-27 20:02:53 +0000613 if (!ReplayState)
614 return 0;
Jordan Rose28038f32012-07-10 22:07:42 +0000615
Jordan Rosebdc691f2013-01-14 18:58:42 +0000616 assert(ReplayState == CallE && "Backtracked to the wrong call.");
Jordan Rose28038f32012-07-10 22:07:42 +0000617 (void)CallE;
618
Anna Zakse81ce252012-07-19 23:38:13 +0000619 return State->remove<ReplayWithoutInlining>();
Ted Kremenek10520d72012-02-09 21:59:52 +0000620}
621
Ted Kremenek294fd0a2011-08-20 06:00:03 +0000622void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
623 ExplodedNodeSet &dst) {
624 // Perform the previsit of the CallExpr.
625 ExplodedNodeSet dstPreVisit;
626 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
Anna Zaks5903a372012-03-27 20:02:53 +0000627
Jordan Rosed563d3f2012-07-30 20:22:09 +0000628 // Get the call in its initial state. We use this as a template to perform
629 // all the checks.
630 CallEventManager &CEMgr = getStateManager().getCallEventManager();
Jordan Rose645baee2012-08-13 23:46:05 +0000631 CallEventRef<> CallTemplate
Jordan Rosed563d3f2012-07-30 20:22:09 +0000632 = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());
Anna Zaks5903a372012-03-27 20:02:53 +0000633
Jordan Rose69f87c92012-07-02 19:28:09 +0000634 // Evaluate the function call. We try each of the checkers
Ted Kremenek294fd0a2011-08-20 06:00:03 +0000635 // to see if the can evaluate the function call.
636 ExplodedNodeSet dstCallEvaluated;
Jordan Rose69f87c92012-07-02 19:28:09 +0000637 for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
638 I != E; ++I) {
Jordan Rosed563d3f2012-07-30 20:22:09 +0000639 evalCall(dstCallEvaluated, *I, *CallTemplate);
Jordan Rose69f87c92012-07-02 19:28:09 +0000640 }
641
Ted Kremenek294fd0a2011-08-20 06:00:03 +0000642 // Finally, perform the post-condition check of the CallExpr and store
643 // the created nodes in 'Dst'.
Jordan Rose69f87c92012-07-02 19:28:09 +0000644 // Note that if the call was inlined, dstCallEvaluated will be empty.
645 // The post-CallExpr check will occur in processCallExit.
Ted Kremenek294fd0a2011-08-20 06:00:03 +0000646 getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
647 *this);
648}
649
Jordan Rose69f87c92012-07-02 19:28:09 +0000650void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
Jordan Rose645baee2012-08-13 23:46:05 +0000651 const CallEvent &Call) {
Jordan Rosed563d3f2012-07-30 20:22:09 +0000652 // WARNING: At this time, the state attached to 'Call' may be older than the
653 // state in 'Pred'. This is a minor optimization since CheckerManager will
654 // use an updated CallEvent instance when calling checkers, but if 'Call' is
655 // ever used directly in this function all callers should be updated to pass
656 // the most recent state. (It is probably not worth doing the work here since
657 // for some callers this will not be necessary.)
658
Jordan Rose96479da2012-07-02 19:28:16 +0000659 // Run any pre-call checks using the generic call interface.
660 ExplodedNodeSet dstPreVisit;
661 getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, Call, *this);
662
663 // Actually evaluate the function call. We try each of the checkers
664 // to see if the can evaluate the function call, and get a callback at
665 // defaultEvalCall if all of them fail.
666 ExplodedNodeSet dstCallEvaluated;
667 getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
668 Call, *this);
669
670 // Finally, run any post-call checks.
671 getCheckerManager().runCheckersForPostCall(Dst, dstCallEvaluated,
672 Call, *this);
Jordan Rose69f87c92012-07-02 19:28:09 +0000673}
674
Anna Zakse81ce252012-07-19 23:38:13 +0000675ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
676 const LocationContext *LCtx,
677 ProgramStateRef State) {
678 const Expr *E = Call.getOriginExpr();
679 if (!E)
680 return State;
681
682 // Some method families have known return values.
683 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
684 switch (Msg->getMethodFamily()) {
685 default:
686 break;
687 case OMF_autorelease:
688 case OMF_retain:
689 case OMF_self: {
690 // These methods return their receivers.
691 return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
Anna Zakse81ce252012-07-19 23:38:13 +0000692 }
693 }
Jordan Rosee460c462012-07-26 20:04:25 +0000694 } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
Jordan Rose33e83b62013-01-31 18:04:03 +0000695 return State->BindExpr(E, LCtx, C->getCXXThisVal());
Anna Zakse81ce252012-07-19 23:38:13 +0000696 }
697
698 // Conjure a symbol if the return value is unknown.
699 QualType ResultTy = Call.getResultType();
700 SValBuilder &SVB = getSValBuilder();
Ted Kremenek66c486f2012-08-22 06:26:15 +0000701 unsigned Count = currBldrCtx->blockCount();
Ted Kremenek3b1df8b2012-08-22 06:26:06 +0000702 SVal R = SVB.conjureSymbolVal(0, E, LCtx, ResultTy, Count);
Anna Zakse81ce252012-07-19 23:38:13 +0000703 return State->BindExpr(E, LCtx, R);
704}
705
Anna Zakse90d3f82012-08-09 00:21:33 +0000706// Conservatively evaluate call by invalidating regions and binding
707// a conjured return value.
708void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
709 ExplodedNode *Pred, ProgramStateRef State) {
Ted Kremenek66c486f2012-08-22 06:26:15 +0000710 State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
Anna Zakse90d3f82012-08-09 00:21:33 +0000711 State = bindReturnValue(Call, Pred->getLocationContext(), State);
712
713 // And make the result node.
714 Bldr.generateNode(Call.getProgramPoint(), State, Pred);
715}
716
Anna Zakse81ce252012-07-19 23:38:13 +0000717void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
Jordan Rosed563d3f2012-07-30 20:22:09 +0000718 const CallEvent &CallTemplate) {
719 // Make sure we have the most recent state attached to the call.
720 ProgramStateRef State = Pred->getState();
721 CallEventRef<> Call = CallTemplate.cloneWithState(State);
Anna Zakse81ce252012-07-19 23:38:13 +0000722
Anna Zaks75f31c42012-12-07 21:51:47 +0000723 if (HowToInline == Inline_None) {
Anna Zaks5960f4a2012-08-09 18:43:00 +0000724 conservativeEvalCall(*Call, Bldr, Pred, State);
725 return;
726 }
Jordan Rose69f87c92012-07-02 19:28:09 +0000727 // Try to inline the call.
Jordan Rose28038f32012-07-10 22:07:42 +0000728 // The origin expression here is just used as a kind of checksum;
Jordan Rosed563d3f2012-07-30 20:22:09 +0000729 // this should still be safe even for CallEvents that don't come from exprs.
730 const Expr *E = Call->getOriginExpr();
731 ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
732
733 if (InlinedFailedState) {
734 // If we already tried once and failed, make sure we don't retry later.
735 State = InlinedFailedState;
Anna Zaks5960f4a2012-08-09 18:43:00 +0000736 } else {
Ted Kremenekddc0c482012-09-21 06:13:13 +0000737 RuntimeDefinition RD = Call->getRuntimeDefinition();
Anna Zaksfc05dec2012-08-09 02:57:02 +0000738 const Decl *D = RD.getDecl();
Anna Zakse90d3f82012-08-09 00:21:33 +0000739 if (D) {
Jordan Roseb763ede2012-08-15 00:52:00 +0000740 if (RD.mayHaveOtherDefinitions()) {
Anna Zaks73f05632013-01-24 23:15:25 +0000741 AnalyzerOptions &Options = getAnalysisManager().options;
742
Jordan Roseb763ede2012-08-15 00:52:00 +0000743 // Explore with and without inlining the call.
Anna Zaksbfa9ab82013-01-24 23:15:30 +0000744 if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
Jordan Roseb763ede2012-08-15 00:52:00 +0000745 BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
746 return;
747 }
748
749 // Don't inline if we're not in any dynamic dispatch mode.
Anna Zaksbfa9ab82013-01-24 23:15:30 +0000750 if (Options.getIPAMode() != IPAK_DynamicDispatch) {
Jordan Roseda29ac52012-08-15 21:05:15 +0000751 conservativeEvalCall(*Call, Bldr, Pred, State);
Jordan Roseb763ede2012-08-15 00:52:00 +0000752 return;
Jordan Roseda29ac52012-08-15 21:05:15 +0000753 }
Anna Zakse90d3f82012-08-09 00:21:33 +0000754 }
Jordan Roseb763ede2012-08-15 00:52:00 +0000755
Anna Zaks5960f4a2012-08-09 18:43:00 +0000756 // We are not bifurcating and we do have a Decl, so just inline.
757 if (inlineCall(*Call, D, Bldr, Pred, State))
758 return;
Anna Zakse90d3f82012-08-09 00:21:33 +0000759 }
Anna Zakse81ce252012-07-19 23:38:13 +0000760 }
Jordan Rose69f87c92012-07-02 19:28:09 +0000761
762 // If we can't inline it, handle the return value and invalidate the regions.
Anna Zakse90d3f82012-08-09 00:21:33 +0000763 conservativeEvalCall(*Call, Bldr, Pred, State);
Jordan Rose69f87c92012-07-02 19:28:09 +0000764}
765
Anna Zakse90d3f82012-08-09 00:21:33 +0000766void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
767 const CallEvent &Call, const Decl *D,
768 NodeBuilder &Bldr, ExplodedNode *Pred) {
769 assert(BifurReg);
Jordan Roseb763ede2012-08-15 00:52:00 +0000770 BifurReg = BifurReg->StripCasts();
Anna Zakse90d3f82012-08-09 00:21:33 +0000771
772 // Check if we've performed the split already - note, we only want
773 // to split the path once per memory region.
774 ProgramStateRef State = Pred->getState();
Jordan Rose166d5022012-11-02 01:54:06 +0000775 const unsigned *BState =
Anna Zaks6960f6e2012-08-09 21:02:41 +0000776 State->get<DynamicDispatchBifurcationMap>(BifurReg);
Anna Zaks5960f4a2012-08-09 18:43:00 +0000777 if (BState) {
778 // If we are on "inline path", keep inlining if possible.
Anna Zaks6960f6e2012-08-09 21:02:41 +0000779 if (*BState == DynamicDispatchModeInlined)
Anna Zaks5960f4a2012-08-09 18:43:00 +0000780 if (inlineCall(Call, D, Bldr, Pred, State))
781 return;
782 // If inline failed, or we are on the path where we assume we
783 // don't have enough info about the receiver to inline, conjure the
784 // return value and invalidate the regions.
785 conservativeEvalCall(Call, Bldr, Pred, State);
786 return;
Anna Zakse90d3f82012-08-09 00:21:33 +0000787 }
788
789 // If we got here, this is the first time we process a message to this
790 // region, so split the path.
791 ProgramStateRef IState =
Anna Zaks6960f6e2012-08-09 21:02:41 +0000792 State->set<DynamicDispatchBifurcationMap>(BifurReg,
793 DynamicDispatchModeInlined);
Anna Zakse90d3f82012-08-09 00:21:33 +0000794 inlineCall(Call, D, Bldr, Pred, IState);
795
796 ProgramStateRef NoIState =
Anna Zaks6960f6e2012-08-09 21:02:41 +0000797 State->set<DynamicDispatchBifurcationMap>(BifurReg,
798 DynamicDispatchModeConservative);
Anna Zakse90d3f82012-08-09 00:21:33 +0000799 conservativeEvalCall(Call, Bldr, Pred, NoIState);
800
801 NumOfDynamicDispatchPathSplits++;
802 return;
803}
804
805
Ted Kremenek294fd0a2011-08-20 06:00:03 +0000806void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
807 ExplodedNodeSet &Dst) {
Ted Kremenek256ef642012-01-11 01:06:27 +0000808
809 ExplodedNodeSet dstPreVisit;
810 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
811
Ted Kremenek66c486f2012-08-22 06:26:15 +0000812 StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
Ted Kremenek256ef642012-01-11 01:06:27 +0000813
814 if (RS->getRetValue()) {
815 for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
816 ei = dstPreVisit.end(); it != ei; ++it) {
817 B.generateNode(RS, *it, (*it)->getState());
Ted Kremenek294fd0a2011-08-20 06:00:03 +0000818 }
Ted Kremenek294fd0a2011-08-20 06:00:03 +0000819 }
Ted Kremenek294fd0a2011-08-20 06:00:03 +0000820}