//===- InlineFunction.cpp - Code to perform function inlining ------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inlining of a function into a call site, resolving
// parameters and the return value as appropriate.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Utils/Cloning.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CallGraph.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Support/CommandLine.h"
#include <algorithm>

using namespace llvm;

static cl::opt<bool>
EnableNoAliasConversion("enable-noalias-to-md-conversion", cl::init(true),
  cl::Hidden,
  cl::desc("Convert noalias attributes to metadata during inlining."));

static cl::opt<bool>
PreserveAlignmentAssumptions("preserve-alignment-assumptions-during-inlining",
  cl::init(true), cl::Hidden,
  cl::desc("Convert align attributes to assumptions during inlining."));

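// Thin convenience wrappers: inlining through a plain call or an invoke is
// the same operation once the site is wrapped in a CallSite.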
bool llvm::InlineFunction(CallInst *CI, InlineFunctionInfo &IFI,
                          AAResults *CalleeAAR, bool InsertLifetime) {
  return InlineFunction(CallSite(CI), IFI, CalleeAAR, InsertLifetime);
}

bool llvm::InlineFunction(InvokeInst *II, InlineFunctionInfo &IFI,
                          AAResults *CalleeAAR, bool InsertLifetime) {
  return InlineFunction(CallSite(II), IFI, CalleeAAR, InsertLifetime);
}

namespace {
  /// A class for recording information about inlining a landing pad.
  class LandingPadInliningInfo {
    BasicBlock *OuterResumeDest; ///< Destination of the invoke's unwind.
    BasicBlock *InnerResumeDest; ///< Destination for the callee's resume.
    LandingPadInst *CallerLPad;  ///< LandingPadInst associated with the invoke.
    PHINode *InnerEHValuesPHI;   ///< PHI for EH values from landingpad insts.
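    /// Values flowing from the original invoke's block into the unwind
    /// destination's PHI nodes, saved before that edge is removed.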
    SmallVector<Value*, 8> UnwindDestPHIValues;

  public:
    LandingPadInliningInfo(InvokeInst *II)
      : OuterResumeDest(II->getUnwindDest()), InnerResumeDest(nullptr),
        CallerLPad(nullptr), InnerEHValuesPHI(nullptr) {
      // If there are PHI nodes in the unwind destination block, we need to keep
      // track of which values came into them from the invoke before removing
      // the edge from this block.
      llvm::BasicBlock *InvokeBB = II->getParent();
      BasicBlock::iterator I = OuterResumeDest->begin();
      for (; isa<PHINode>(I); ++I) {
        // Save the value to use for this edge.
        PHINode *PHI = cast<PHINode>(I);
        UnwindDestPHIValues.push_back(PHI->getIncomingValueForBlock(InvokeBB));
      }

      CallerLPad = cast<LandingPadInst>(I);
    }

    /// The outer unwind destination is the target of
    /// unwind edges introduced for calls within the inlined function.
    BasicBlock *getOuterResumeDest() const {
      return OuterResumeDest;
    }

    BasicBlock *getInnerResumeDest();

    LandingPadInst *getLandingPadInst() const { return CallerLPad; }

    /// Forward the 'resume' instruction to the caller's landing pad block.
    /// When the landing pad block has only one predecessor, this is
    /// a simple branch. When there is more than one predecessor, we need to
    /// split the landing pad block after the landingpad instruction and jump
    /// to there.
    void forwardResume(ResumeInst *RI,
                       SmallPtrSetImpl<LandingPadInst*> &InlinedLPads);

    /// Add incoming-PHI values to the unwind destination block for the given
    /// basic block, using the values for the original invoke's source block.
    void addIncomingPHIValuesFor(BasicBlock *BB) const {
      addIncomingPHIValuesForInto(BB, OuterResumeDest);
    }

    void addIncomingPHIValuesForInto(BasicBlock *src, BasicBlock *dest) const {
      BasicBlock::iterator I = dest->begin();
      for (unsigned i = 0, e = UnwindDestPHIValues.size(); i != e; ++i, ++I) {
        PHINode *phi = cast<PHINode>(I);
        phi->addIncoming(UnwindDestPHIValues[i], src);
      }
    }
  };
} // anonymous namespace

/// Get or create a target for the branch from ResumeInsts.
BasicBlock *LandingPadInliningInfo::getInnerResumeDest() {
  if (InnerResumeDest) return InnerResumeDest;

  // Split the landing pad.
  BasicBlock::iterator SplitPoint = ++CallerLPad->getIterator();
  InnerResumeDest =
    OuterResumeDest->splitBasicBlock(SplitPoint,
                                     OuterResumeDest->getName() + ".body");

  // The number of incoming edges we expect to the inner landing pad.
  const unsigned PHICapacity = 2;

  // Create corresponding new PHIs for all the PHIs in the outer landing pad.
  Instruction *InsertPoint = &InnerResumeDest->front();
  BasicBlock::iterator I = OuterResumeDest->begin();
  for (unsigned i = 0, e = UnwindDestPHIValues.size(); i != e; ++i, ++I) {
    PHINode *OuterPHI = cast<PHINode>(I);
    PHINode *InnerPHI = PHINode::Create(OuterPHI->getType(), PHICapacity,
                                        OuterPHI->getName() + ".lpad-body",
                                        InsertPoint);
    OuterPHI->replaceAllUsesWith(InnerPHI);
    InnerPHI->addIncoming(OuterPHI, OuterResumeDest);
  }

  // Create a PHI for the exception values.
  InnerEHValuesPHI = PHINode::Create(CallerLPad->getType(), PHICapacity,
                                     "eh.lpad-body", InsertPoint);
  CallerLPad->replaceAllUsesWith(InnerEHValuesPHI);
  InnerEHValuesPHI->addIncoming(CallerLPad, OuterResumeDest);

  // All done.
  return InnerResumeDest;
}

/// Forward the 'resume' instruction to the caller's landing pad block.
/// When the landing pad block has only one predecessor, this is a simple
/// branch. When there is more than one predecessor, we need to split the
/// landing pad block after the landingpad instruction and jump to there.
void LandingPadInliningInfo::forwardResume(
    ResumeInst *RI, SmallPtrSetImpl<LandingPadInst *> &InlinedLPads) {
  BasicBlock *Dest = getInnerResumeDest();
  BasicBlock *Src = RI->getParent();

  BranchInst::Create(Dest, Src);

  // Update the PHIs in the destination. They were inserted in an order which
  // makes this work.
  addIncomingPHIValuesForInto(Src, Dest);

  InnerEHValuesPHI->addIncoming(RI->getOperand(0), Src);
  RI->eraseFromParent();
}

/// Helper for getUnwindDestToken/getUnwindDestTokenHelper.
static Value *getParentPad(Value *EHPad) {
  if (auto *FPI = dyn_cast<FuncletPadInst>(EHPad))
    return FPI->getParentPad();
  return cast<CatchSwitchInst>(EHPad)->getParentPad();
}

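/// Memoizes each EH pad's unwind destination token; a null entry marks a pad
/// that has been visited but for which nothing definitive is known yet.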
typedef DenseMap<Instruction *, Value *> UnwindDestMemoTy;

/// Helper for getUnwindDestToken that does the descendant-ward part of
/// the search.
static Value *getUnwindDestTokenHelper(Instruction *EHPad,
                                       UnwindDestMemoTy &MemoMap) {
  SmallVector<Instruction *, 8> Worklist(1, EHPad);

  while (!Worklist.empty()) {
    Instruction *CurrentPad = Worklist.pop_back_val();
    // We only put pads on the worklist that aren't in the MemoMap. When
    // we find an unwind dest for a pad we may update its ancestors, but
    // the queue only ever contains uncles/great-uncles/etc. of CurrentPad,
    // so they should never get updated while queued on the worklist.
    assert(!MemoMap.count(CurrentPad));
    Value *UnwindDestToken = nullptr;
    if (auto *CatchSwitch = dyn_cast<CatchSwitchInst>(CurrentPad)) {
      if (CatchSwitch->hasUnwindDest()) {
        UnwindDestToken = CatchSwitch->getUnwindDest()->getFirstNonPHI();
      } else {
        // Catchswitch doesn't have a 'nounwind' variant, and one might be
        // annotated as "unwinds to caller" when really it's nounwind (see
        // e.g. SimplifyCFGOpt::SimplifyUnreachable), so we can't infer the
        // parent's unwind dest from this. We can check its catchpads'
        // descendants, since they might include a cleanuppad with an
        // "unwinds to caller" cleanupret, which can be trusted.
        for (auto HI = CatchSwitch->handler_begin(),
                  HE = CatchSwitch->handler_end();
             HI != HE && !UnwindDestToken; ++HI) {
          BasicBlock *HandlerBlock = *HI;
          auto *CatchPad = cast<CatchPadInst>(HandlerBlock->getFirstNonPHI());
          for (User *Child : CatchPad->users()) {
            // Intentionally ignore invokes here -- since the catchswitch is
            // marked "unwind to caller", it would be a verifier error if it
            // contained an invoke which unwinds out of it, so any invoke we'd
            // encounter must unwind to some child of the catch.
            if (!isa<CleanupPadInst>(Child) && !isa<CatchSwitchInst>(Child))
              continue;

            Instruction *ChildPad = cast<Instruction>(Child);
            auto Memo = MemoMap.find(ChildPad);
            if (Memo == MemoMap.end()) {
              // Haven't figured out this child pad yet; queue it.
              Worklist.push_back(ChildPad);
              continue;
            }
            // We've already checked this child, but might have found that
            // it offers no proof either way.
            Value *ChildUnwindDestToken = Memo->second;
            if (!ChildUnwindDestToken)
              continue;
            // We already know the child's unwind dest, which can either
            // be ConstantTokenNone to indicate unwind to caller, or can
            // be another child of the catchpad. Only the former indicates
            // the unwind dest of the catchswitch.
            if (isa<ConstantTokenNone>(ChildUnwindDestToken)) {
              UnwindDestToken = ChildUnwindDestToken;
              break;
            }
            assert(getParentPad(ChildUnwindDestToken) == CatchPad);
          }
        }
      }
    } else {
      auto *CleanupPad = cast<CleanupPadInst>(CurrentPad);
      for (User *U : CleanupPad->users()) {
        if (auto *CleanupRet = dyn_cast<CleanupReturnInst>(U)) {
          if (BasicBlock *RetUnwindDest = CleanupRet->getUnwindDest())
            UnwindDestToken = RetUnwindDest->getFirstNonPHI();
          else
            UnwindDestToken = ConstantTokenNone::get(CleanupPad->getContext());
          break;
        }
        Value *ChildUnwindDestToken;
        if (auto *Invoke = dyn_cast<InvokeInst>(U)) {
          ChildUnwindDestToken = Invoke->getUnwindDest()->getFirstNonPHI();
        } else if (isa<CleanupPadInst>(U) || isa<CatchSwitchInst>(U)) {
          Instruction *ChildPad = cast<Instruction>(U);
          auto Memo = MemoMap.find(ChildPad);
          if (Memo == MemoMap.end()) {
            // Haven't resolved this child yet; queue it and keep searching.
            Worklist.push_back(ChildPad);
            continue;
          }
          // We've checked this child, but still need to ignore it if it
          // had no proof either way.
          ChildUnwindDestToken = Memo->second;
          if (!ChildUnwindDestToken)
            continue;
        } else {
          // Not a relevant user of the cleanuppad.
          continue;
        }
        // In a well-formed program, the child/invoke must either unwind to
        // an(other) child of the cleanup, or exit the cleanup. In the
        // first case, continue searching.
        if (isa<Instruction>(ChildUnwindDestToken) &&
            getParentPad(ChildUnwindDestToken) == CleanupPad)
          continue;
        UnwindDestToken = ChildUnwindDestToken;
        break;
      }
    }
    // If we haven't found an unwind dest for CurrentPad, we may have queued its
    // children, so move on to the next in the worklist.
    if (!UnwindDestToken)
      continue;

    // Now we know that CurrentPad unwinds to UnwindDestToken. It also exits
    // any ancestors of CurrentPad up to but not including UnwindDestToken's
    // parent pad. Record this in the memo map, and check to see if the
    // original EHPad being queried is one of the ones exited.
    Value *UnwindParent;
    if (auto *UnwindPad = dyn_cast<Instruction>(UnwindDestToken))
      UnwindParent = getParentPad(UnwindPad);
    else
      UnwindParent = nullptr;
    bool ExitedOriginalPad = false;
    for (Instruction *ExitedPad = CurrentPad;
         ExitedPad && ExitedPad != UnwindParent;
         ExitedPad = dyn_cast<Instruction>(getParentPad(ExitedPad))) {
      // Skip over catchpads since they just follow their catchswitches.
      if (isa<CatchPadInst>(ExitedPad))
        continue;
      MemoMap[ExitedPad] = UnwindDestToken;
      ExitedOriginalPad |= (ExitedPad == EHPad);
    }

    if (ExitedOriginalPad)
      return UnwindDestToken;

    // Continue the search.
  }

  // No definitive information is contained within this funclet.
  return nullptr;
}

/// Given an EH pad, find where it unwinds. If it unwinds to an EH pad,
/// return that pad instruction. If it unwinds to caller, return
/// ConstantTokenNone. If it does not have a definitive unwind destination,
/// return nullptr.
///
/// This routine gets invoked for calls in funclets in inlinees when inlining
/// an invoke. Since many funclets don't have calls inside them, it's queried
/// on-demand rather than building a map of pads to unwind dests up front.
/// Determining a funclet's unwind dest may require recursively searching its
/// descendants, and also ancestors and cousins if the descendants don't provide
/// an answer. Since most funclets will have their unwind dest immediately
/// available as the unwind dest of a catchswitch or cleanupret, this routine
/// searches top-down from the given pad and then up. To avoid worst-case
/// quadratic run-time given that approach, it uses a memo map to avoid
/// re-processing funclet trees. The callers that rewrite the IR as they go
/// take advantage of this, for correctness, by checking/forcing rewritten
/// pads' entries to match the original callee view.
static Value *getUnwindDestToken(Instruction *EHPad,
                                 UnwindDestMemoTy &MemoMap) {
  // Catchpads unwind to the same place as their catchswitch;
  // redirect any queries on catchpads so the code below can
  // deal with just catchswitches and cleanuppads.
  if (auto *CPI = dyn_cast<CatchPadInst>(EHPad))
    EHPad = CPI->getCatchSwitch();

  // Check if we've already determined the unwind dest for this pad.
  auto Memo = MemoMap.find(EHPad);
  if (Memo != MemoMap.end())
    return Memo->second;

  // Search EHPad and, if necessary, its descendants.
  Value *UnwindDestToken = getUnwindDestTokenHelper(EHPad, MemoMap);
  assert((UnwindDestToken == nullptr) != (MemoMap.count(EHPad) != 0));
  if (UnwindDestToken)
    return UnwindDestToken;

  // No information is available for this EHPad from itself or any of its
  // descendants. An unwind all the way out to a pad in the caller would
  // need also to agree with the unwind dest of the parent funclet, so
  // search up the chain to try to find a funclet with information. Put
  // null entries in the memo map to avoid re-processing as we go up.
  MemoMap[EHPad] = nullptr;
  Instruction *LastUselessPad = EHPad;
  Value *AncestorToken;
  for (AncestorToken = getParentPad(EHPad);
       auto *AncestorPad = dyn_cast<Instruction>(AncestorToken);
       AncestorToken = getParentPad(AncestorToken)) {
    // Skip over catchpads since they just follow their catchswitches.
    if (isa<CatchPadInst>(AncestorPad))
      continue;
    assert(!MemoMap.count(AncestorPad) || MemoMap[AncestorPad]);
    auto AncestorMemo = MemoMap.find(AncestorPad);
    if (AncestorMemo == MemoMap.end()) {
      UnwindDestToken = getUnwindDestTokenHelper(AncestorPad, MemoMap);
    } else {
      UnwindDestToken = AncestorMemo->second;
    }
    if (UnwindDestToken)
      break;
    LastUselessPad = AncestorPad;
  }

  // Since the whole tree under LastUselessPad has no information, it all must
  // match UnwindDestToken; record that to avoid repeating the search.
  SmallVector<Instruction *, 8> Worklist(1, LastUselessPad);
  while (!Worklist.empty()) {
    Instruction *UselessPad = Worklist.pop_back_val();
    assert(!MemoMap.count(UselessPad) || MemoMap[UselessPad] == nullptr);
    MemoMap[UselessPad] = UnwindDestToken;
    if (auto *CatchSwitch = dyn_cast<CatchSwitchInst>(UselessPad)) {
      for (BasicBlock *HandlerBlock : CatchSwitch->handlers())
        for (User *U : HandlerBlock->getFirstNonPHI()->users())
          if (isa<CatchSwitchInst>(U) || isa<CleanupPadInst>(U))
            Worklist.push_back(cast<Instruction>(U));
    } else {
      assert(isa<CleanupPadInst>(UselessPad));
      for (User *U : UselessPad->users())
        if (isa<CatchSwitchInst>(U) || isa<CleanupPadInst>(U))
          Worklist.push_back(cast<Instruction>(U));
    }
  }

  return UnwindDestToken;
}

/// When we inline a basic block into an invoke,
/// we have to turn all of the calls that can throw into invokes.
/// This function analyzes BB to see if there are any calls, and if so,
/// it rewrites them to be invokes that jump to UnwindEdge. It returns the
/// block containing the first rewritten call (so the caller can update any
/// PHI nodes in the unwind destination), or null if no call was rewritten.
static BasicBlock *HandleCallsInBlockInlinedThroughInvoke(
    BasicBlock *BB, BasicBlock *UnwindEdge,
    UnwindDestMemoTy *FuncletUnwindMap = nullptr) {
  for (BasicBlock::iterator BBI = BB->begin(), E = BB->end(); BBI != E; ) {
    Instruction *I = &*BBI++;

    // We only need to check for function calls: inlined invoke
    // instructions require no special handling.
    CallInst *CI = dyn_cast<CallInst>(I);

    if (!CI || CI->doesNotThrow() || isa<InlineAsm>(CI->getCalledValue()))
      continue;

    if (auto FuncletBundle = CI->getOperandBundle(LLVMContext::OB_funclet)) {
      // This call is nested inside a funclet. If that funclet has an unwind
      // destination within the inlinee, then unwinding out of this call would
      // be UB. Rewriting this call to an invoke which targets the inlined
      // invoke's unwind dest would give the call's parent funclet multiple
      // unwind destinations, which is something that subsequent EH table
      // generation can't handle and that the verifier rejects. So when we
      // see such a call, leave it as a call.
      auto *FuncletPad = cast<Instruction>(FuncletBundle->Inputs[0]);
      Value *UnwindDestToken =
          getUnwindDestToken(FuncletPad, *FuncletUnwindMap);
      if (UnwindDestToken && !isa<ConstantTokenNone>(UnwindDestToken))
        continue;
#ifndef NDEBUG
      Instruction *MemoKey;
      if (auto *CatchPad = dyn_cast<CatchPadInst>(FuncletPad))
        MemoKey = CatchPad->getCatchSwitch();
      else
        MemoKey = FuncletPad;
      assert(FuncletUnwindMap->count(MemoKey) &&
             (*FuncletUnwindMap)[MemoKey] == UnwindDestToken &&
             "must get memoized to avoid confusing later searches");
#endif // NDEBUG
    }

    // Convert this function call into an invoke instruction. First, split the
    // basic block.
    BasicBlock *Split =
        BB->splitBasicBlock(CI->getIterator(), CI->getName() + ".noexc");

    // Delete the unconditional branch inserted by splitBasicBlock.
    BB->getInstList().pop_back();

    // Create the new invoke instruction.
    SmallVector<Value*, 8> InvokeArgs(CI->arg_begin(), CI->arg_end());
    SmallVector<OperandBundleDef, 1> OpBundles;

    CI->getOperandBundlesAsDefs(OpBundles);

    // Note: we're round tripping operand bundles through memory here, and that
    // can potentially be avoided with a cleverer API design that we do not have
    // as of this time.

    InvokeInst *II =
        InvokeInst::Create(CI->getCalledValue(), Split, UnwindEdge, InvokeArgs,
                           OpBundles, CI->getName(), BB);
    II->setDebugLoc(CI->getDebugLoc());
    II->setCallingConv(CI->getCallingConv());
    II->setAttributes(CI->getAttributes());

    // Make sure that anything using the call now uses the invoke! This also
    // updates the CallGraph if present, because it uses a WeakVH.
    CI->replaceAllUsesWith(II);

    // Delete the original call.
    Split->getInstList().pop_front();
    return BB;
  }
  return nullptr;
}

/// If we inlined an invoke site, we need to convert calls
/// in the body of the inlined function into invokes.
///
/// II is the invoke instruction being inlined. FirstNewBlock is the first
/// block of the inlined code (the last block is the end of the function),
/// and InlineCodeInfo is information about the code that got inlined.
static void HandleInlinedLandingPad(InvokeInst *II, BasicBlock *FirstNewBlock,
                                    ClonedCodeInfo &InlinedCodeInfo) {
  BasicBlock *InvokeDest = II->getUnwindDest();

  Function *Caller = FirstNewBlock->getParent();

  // The inlined code is currently at the end of the function; scan from the
  // start of the inlined code to its end, checking for stuff we need to
  // rewrite.
  LandingPadInliningInfo Invoke(II);

  // Get all of the inlined landing pad instructions.
  SmallPtrSet<LandingPadInst*, 16> InlinedLPads;
  for (Function::iterator I = FirstNewBlock->getIterator(), E = Caller->end();
       I != E; ++I)
    if (InvokeInst *II = dyn_cast<InvokeInst>(I->getTerminator()))
      InlinedLPads.insert(II->getLandingPadInst());

  // Append the clauses from the outer landing pad instruction into the inlined
  // landing pad instructions.
  LandingPadInst *OuterLPad = Invoke.getLandingPadInst();
  for (LandingPadInst *InlinedLPad : InlinedLPads) {
    unsigned OuterNum = OuterLPad->getNumClauses();
    InlinedLPad->reserveClauses(OuterNum);
    for (unsigned OuterIdx = 0; OuterIdx != OuterNum; ++OuterIdx)
      InlinedLPad->addClause(OuterLPad->getClause(OuterIdx));
    if (OuterLPad->isCleanup())
      InlinedLPad->setCleanup(true);
  }

  for (Function::iterator BB = FirstNewBlock->getIterator(), E = Caller->end();
       BB != E; ++BB) {
    if (InlinedCodeInfo.ContainsCalls)
      if (BasicBlock *NewBB = HandleCallsInBlockInlinedThroughInvoke(
              &*BB, Invoke.getOuterResumeDest()))
        // Update any PHI nodes in the exceptional block to indicate that there
        // is now a new entry in them.
        Invoke.addIncomingPHIValuesFor(NewBB);

    // Forward any resumes that are remaining here.
    if (ResumeInst *RI = dyn_cast<ResumeInst>(BB->getTerminator()))
      Invoke.forwardResume(RI, InlinedLPads);
  }

  // Now that everything is happy, we have one final detail. The PHI nodes in
  // the exception destination block still have entries due to the original
  // invoke instruction. Eliminate these entries (which might even delete the
  // PHI node) now.
  InvokeDest->removePredecessor(II->getParent());
}

/// If we inlined an invoke site, we need to convert calls
/// in the body of the inlined function into invokes.
///
/// II is the invoke instruction being inlined. FirstNewBlock is the first
/// block of the inlined code (the last block is the end of the function),
/// and InlineCodeInfo is information about the code that got inlined.
static void HandleInlinedEHPad(InvokeInst *II, BasicBlock *FirstNewBlock,
                               ClonedCodeInfo &InlinedCodeInfo) {
  BasicBlock *UnwindDest = II->getUnwindDest();
  Function *Caller = FirstNewBlock->getParent();

  assert(UnwindDest->getFirstNonPHI()->isEHPad() && "unexpected BasicBlock!");

  // If there are PHI nodes in the unwind destination block, we need to keep
  // track of which values came into them from the invoke before removing the
  // edge from this block.
  SmallVector<Value *, 8> UnwindDestPHIValues;
  llvm::BasicBlock *InvokeBB = II->getParent();
  for (Instruction &I : *UnwindDest) {
    // Save the value to use for this edge.
    PHINode *PHI = dyn_cast<PHINode>(&I);
    if (!PHI)
      break;
    UnwindDestPHIValues.push_back(PHI->getIncomingValueForBlock(InvokeBB));
  }

  // Add incoming-PHI values to the unwind destination block for the given basic
  // block, using the values for the original invoke's source block.
  auto UpdatePHINodes = [&](BasicBlock *Src) {
    BasicBlock::iterator I = UnwindDest->begin();
    for (Value *V : UnwindDestPHIValues) {
      PHINode *PHI = cast<PHINode>(I);
      PHI->addIncoming(V, Src);
      ++I;
    }
  };

  // This connects all the instructions which 'unwind to caller' to the invoke
  // destination.
  UnwindDestMemoTy FuncletUnwindMap;
  for (Function::iterator BB = FirstNewBlock->getIterator(), E = Caller->end();
       BB != E; ++BB) {
    if (auto *CRI = dyn_cast<CleanupReturnInst>(BB->getTerminator())) {
      if (CRI->unwindsToCaller()) {
        auto *CleanupPad = CRI->getCleanupPad();
        CleanupReturnInst::Create(CleanupPad, UnwindDest, CRI);
        CRI->eraseFromParent();
        UpdatePHINodes(&*BB);
        // Finding a cleanupret with an unwind destination would confuse
        // subsequent calls to getUnwindDestToken, so map the cleanuppad
        // to short-circuit any such calls and recognize this as an "unwind
        // to caller" cleanup.
        assert(!FuncletUnwindMap.count(CleanupPad) ||
               isa<ConstantTokenNone>(FuncletUnwindMap[CleanupPad]));
        FuncletUnwindMap[CleanupPad] =
            ConstantTokenNone::get(Caller->getContext());
      }
    }

    Instruction *I = BB->getFirstNonPHI();
    if (!I->isEHPad())
      continue;

    Instruction *Replacement = nullptr;
    if (auto *CatchSwitch = dyn_cast<CatchSwitchInst>(I)) {
      if (CatchSwitch->unwindsToCaller()) {
        Value *UnwindDestToken;
        if (auto *ParentPad =
                dyn_cast<Instruction>(CatchSwitch->getParentPad())) {
          // This catchswitch is nested inside another funclet. If that
          // funclet has an unwind destination within the inlinee, then
          // unwinding out of this catchswitch would be UB. Rewriting this
          // catchswitch to unwind to the inlined invoke's unwind dest would
          // give the parent funclet multiple unwind destinations, which is
          // something that subsequent EH table generation can't handle and
          // that the verifier rejects. So when we see such a call, leave it
          // as "unwind to caller".
          UnwindDestToken = getUnwindDestToken(ParentPad, FuncletUnwindMap);
          if (UnwindDestToken && !isa<ConstantTokenNone>(UnwindDestToken))
            continue;
        } else {
          // This catchswitch has no parent to inherit constraints from, and
          // none of its descendants can have an unwind edge that exits it and
          // targets another funclet in the inlinee. It may or may not have a
          // descendant that definitively has an unwind to caller. In either
          // case, we'll have to assume that any unwinds out of it may need to
          // be routed to the caller, so treat it as though it has a definitive
          // unwind to caller.
          UnwindDestToken = ConstantTokenNone::get(Caller->getContext());
        }
        auto *NewCatchSwitch = CatchSwitchInst::Create(
            CatchSwitch->getParentPad(), UnwindDest,
            CatchSwitch->getNumHandlers(), CatchSwitch->getName(),
            CatchSwitch);
        for (BasicBlock *PadBB : CatchSwitch->handlers())
          NewCatchSwitch->addHandler(PadBB);
        // Propagate info for the old catchswitch over to the new one in
        // the unwind map. This also serves to short-circuit any subsequent
        // checks for the unwind dest of this catchswitch, which would get
        // confused if they found the outer handler in the callee.
        FuncletUnwindMap[NewCatchSwitch] = UnwindDestToken;
        Replacement = NewCatchSwitch;
      }
    } else if (!isa<FuncletPadInst>(I)) {
      llvm_unreachable("unexpected EHPad!");
    }

    if (Replacement) {
      Replacement->takeName(I);
      I->replaceAllUsesWith(Replacement);
      I->eraseFromParent();
      UpdatePHINodes(&*BB);
    }
  }

  if (InlinedCodeInfo.ContainsCalls)
    for (Function::iterator BB = FirstNewBlock->getIterator(),
                            E = Caller->end();
         BB != E; ++BB)
      if (BasicBlock *NewBB = HandleCallsInBlockInlinedThroughInvoke(
              &*BB, UnwindDest, &FuncletUnwindMap))
        // Update any PHI nodes in the exceptional block to indicate that there
        // is now a new entry in them.
        UpdatePHINodes(NewBB);

  // Now that everything is happy, we have one final detail. The PHI nodes in
  // the exception destination block still have entries due to the original
  // invoke instruction. Eliminate these entries (which might even delete the
  // PHI node) now.
  UnwindDest->removePredecessor(InvokeBB);
}

/// When inlining a function that contains noalias scope metadata,
/// this metadata needs to be cloned so that the inlined blocks
/// have different "unique scopes" at every call site. Were this not done, then
/// aliasing scopes from a function inlined into a caller multiple times could
/// not be differentiated (and this would lead to miscompiles because the
/// non-aliasing property communicated by the metadata could have
/// call-site-specific control dependencies).
static void CloneAliasScopeMetadata(CallSite CS, ValueToValueMapTy &VMap) {
  const Function *CalledFunc = CS.getCalledFunction();
  SetVector<const MDNode *> MD;

  // Note: We could only clone the metadata if it is already used in the
  // caller. I'm omitting that check here because it might confuse
  // inter-procedural alias analysis passes. We can revisit this if it becomes
  // an efficiency or overhead problem.

  for (Function::const_iterator I = CalledFunc->begin(), IE = CalledFunc->end();
       I != IE; ++I)
    for (BasicBlock::const_iterator J = I->begin(), JE = I->end(); J != JE; ++J) {
      if (const MDNode *M = J->getMetadata(LLVMContext::MD_alias_scope))
        MD.insert(M);
      if (const MDNode *M = J->getMetadata(LLVMContext::MD_noalias))
        MD.insert(M);
    }

  if (MD.empty())
    return;

  // Walk the existing metadata, adding the complete (perhaps cyclic) chain to
  // the set.
  SmallVector<const Metadata *, 16> Queue(MD.begin(), MD.end());
  while (!Queue.empty()) {
    const MDNode *M = cast<MDNode>(Queue.pop_back_val());
    for (unsigned i = 0, ie = M->getNumOperands(); i != ie; ++i)
      if (const MDNode *M1 = dyn_cast<MDNode>(M->getOperand(i)))
        if (MD.insert(M1))
          Queue.push_back(M1);
  }

  // Now we have a complete set of all metadata in the chains used to specify
  // the noalias scopes and the lists of those scopes.
  SmallVector<TempMDTuple, 16> DummyNodes;
  DenseMap<const MDNode *, TrackingMDNodeRef> MDMap;
  for (SetVector<const MDNode *>::iterator I = MD.begin(), IE = MD.end();
       I != IE; ++I) {
    DummyNodes.push_back(MDTuple::getTemporary(CalledFunc->getContext(), None));
    MDMap[*I].reset(DummyNodes.back().get());
  }

  // Create new metadata nodes to replace the dummy nodes, replacing old
  // metadata references with either a dummy node or an already-created new
  // node.
  for (SetVector<const MDNode *>::iterator I = MD.begin(), IE = MD.end();
       I != IE; ++I) {
    SmallVector<Metadata *, 4> NewOps;
    for (unsigned i = 0, ie = (*I)->getNumOperands(); i != ie; ++i) {
      const Metadata *V = (*I)->getOperand(i);
      if (const MDNode *M = dyn_cast<MDNode>(V))
        NewOps.push_back(MDMap[M]);
      else
        NewOps.push_back(const_cast<Metadata *>(V));
    }

    MDNode *NewM = MDNode::get(CalledFunc->getContext(), NewOps);
    MDTuple *TempM = cast<MDTuple>(MDMap[*I]);
    assert(TempM->isTemporary() && "Expected temporary node");

    TempM->replaceAllUsesWith(NewM);
  }

  // Now replace the metadata in the new inlined instructions with the
  // replacements from the map.
  for (ValueToValueMapTy::iterator VMI = VMap.begin(), VMIE = VMap.end();
       VMI != VMIE; ++VMI) {
    if (!VMI->second)
      continue;

    Instruction *NI = dyn_cast<Instruction>(VMI->second);
    if (!NI)
      continue;

    if (MDNode *M = NI->getMetadata(LLVMContext::MD_alias_scope)) {
      MDNode *NewMD = MDMap[M];
      // If the call site also had alias scope metadata (a list of scopes to
      // which instructions inside it might belong), propagate those scopes to
      // the inlined instructions.
      if (MDNode *CSM =
              CS.getInstruction()->getMetadata(LLVMContext::MD_alias_scope))
        NewMD = MDNode::concatenate(NewMD, CSM);
      NI->setMetadata(LLVMContext::MD_alias_scope, NewMD);
    } else if (NI->mayReadOrWriteMemory()) {
      if (MDNode *M =
              CS.getInstruction()->getMetadata(LLVMContext::MD_alias_scope))
        NI->setMetadata(LLVMContext::MD_alias_scope, M);
    }

    if (MDNode *M = NI->getMetadata(LLVMContext::MD_noalias)) {
      MDNode *NewMD = MDMap[M];
      // If the call site also had noalias metadata (a list of scopes with
      // which instructions inside it don't alias), propagate those scopes to
      // the inlined instructions.
      if (MDNode *CSM =
              CS.getInstruction()->getMetadata(LLVMContext::MD_noalias))
        NewMD = MDNode::concatenate(NewMD, CSM);
      NI->setMetadata(LLVMContext::MD_noalias, NewMD);
    } else if (NI->mayReadOrWriteMemory()) {
      if (MDNode *M = CS.getInstruction()->getMetadata(LLVMContext::MD_noalias))
        NI->setMetadata(LLVMContext::MD_noalias, M);
    }
  }
}

/// If the inlined function has noalias arguments,
/// then add new alias scopes for each noalias argument, tag the mapped noalias
/// parameters with noalias metadata specifying the new scope, and tag all
/// non-derived loads, stores and memory intrinsics with the new alias scopes.
static void AddAliasScopeMetadata(CallSite CS, ValueToValueMapTy &VMap,
                                  const DataLayout &DL, AAResults *CalleeAAR) {
  if (!EnableNoAliasConversion)
    return;

  const Function *CalledFunc = CS.getCalledFunction();
  SmallVector<const Argument *, 4> NoAliasArgs;

  for (const Argument &Arg : CalledFunc->args())
    if (Arg.hasNoAliasAttr() && !Arg.use_empty())
      NoAliasArgs.push_back(&Arg);

  if (NoAliasArgs.empty())
    return;

  // To do a good job, if a noalias variable is captured, we need to know if
  // the capture point dominates the particular use we're considering.
  DominatorTree DT;
  DT.recalculate(const_cast<Function&>(*CalledFunc));

  // noalias indicates that pointer values based on the argument do not alias
  // pointer values which are not based on it. So we add a new "scope" for each
  // noalias function argument. Accesses using pointers based on that argument
  // become part of that alias scope, accesses using pointers not based on that
  // argument are tagged as noalias with that scope.

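  // Map each noalias argument to the metadata scope created for it.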
  DenseMap<const Argument *, MDNode *> NewScopes;
  MDBuilder MDB(CalledFunc->getContext());

  // Create a new scope domain for this function.
  MDNode *NewDomain =
    MDB.createAnonymousAliasScopeDomain(CalledFunc->getName());
  for (unsigned i = 0, e = NoAliasArgs.size(); i != e; ++i) {
    const Argument *A = NoAliasArgs[i];

    std::string Name = CalledFunc->getName();
    if (A->hasName()) {
      Name += ": %";
      Name += A->getName();
    } else {
      Name += ": argument ";
      Name += utostr(i);
    }

    // Note: We always create a new anonymous root here. This is true regardless
    // of the linkage of the callee because the aliasing "scope" is not just a
    // property of the callee, but also all control dependencies in the caller.
    MDNode *NewScope = MDB.createAnonymousAliasScope(NewDomain, Name);
    NewScopes.insert(std::make_pair(A, NewScope));
  }

  // Iterate over all new instructions in the map; for all memory-access
  // instructions, add the alias scope metadata.
  for (ValueToValueMapTy::iterator VMI = VMap.begin(), VMIE = VMap.end();
       VMI != VMIE; ++VMI) {
    if (const Instruction *I = dyn_cast<Instruction>(VMI->first)) {
      if (!VMI->second)
        continue;

      Instruction *NI = dyn_cast<Instruction>(VMI->second);
      if (!NI)
        continue;

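      // Track whether this instruction is a function call and, if so, whether
      // it can only access memory through its pointer arguments.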
      bool IsArgMemOnlyCall = false, IsFuncCall = false;
      SmallVector<const Value *, 2> PtrArgs;

      if (const LoadInst *LI = dyn_cast<LoadInst>(I))
        PtrArgs.push_back(LI->getPointerOperand());
      else if (const StoreInst *SI = dyn_cast<StoreInst>(I))
        PtrArgs.push_back(SI->getPointerOperand());
      else if (const VAArgInst *VAAI = dyn_cast<VAArgInst>(I))
        PtrArgs.push_back(VAAI->getPointerOperand());
      else if (const AtomicCmpXchgInst *CXI = dyn_cast<AtomicCmpXchgInst>(I))
        PtrArgs.push_back(CXI->getPointerOperand());
      else if (const AtomicRMWInst *RMWI = dyn_cast<AtomicRMWInst>(I))
        PtrArgs.push_back(RMWI->getPointerOperand());
      else if (ImmutableCallSite ICS = ImmutableCallSite(I)) {
        // If we know that the call does not access memory, then we'll still
        // know that about the inlined clone of this call site, and we don't
        // need to add metadata.
        if (ICS.doesNotAccessMemory())
          continue;

        IsFuncCall = true;
        if (CalleeAAR) {
          FunctionModRefBehavior MRB = CalleeAAR->getModRefBehavior(ICS);
          if (MRB == FMRB_OnlyAccessesArgumentPointees ||
              MRB == FMRB_OnlyReadsArgumentPointees)
            IsArgMemOnlyCall = true;
        }

        for (Value *Arg : ICS.args()) {
          // We need to check the underlying objects of all arguments, not just
          // the pointer arguments, because we might be passing pointers as
          // integers, etc.
          // However, if we know that the call only accesses pointer arguments,
          // then we only need to check the pointer arguments.
          if (IsArgMemOnlyCall && !Arg->getType()->isPointerTy())
            continue;

          PtrArgs.push_back(Arg);
        }
      }

      // If we found no pointers, then this instruction is not suitable for
      // pairing with an instruction to receive aliasing metadata.
      // However, if this is a call, then we might just alias with none of the
      // noalias arguments.
      if (PtrArgs.empty() && !IsFuncCall)
        continue;

      // It is possible that there is only one underlying object, but you
      // need to go through several PHIs to see it, and thus could be
      // repeated in the Objects list.
      SmallPtrSet<const Value *, 4> ObjSet;
      SmallVector<Metadata *, 4> Scopes, NoAliases;

      SmallSetVector<const Argument *, 4> NAPtrArgs;
      for (const Value *V : PtrArgs) {
        SmallVector<Value *, 4> Objects;
        GetUnderlyingObjects(const_cast<Value*>(V),
                             Objects, DL, /* LI = */ nullptr);

        for (Value *O : Objects)
          ObjSet.insert(O);
      }

      // Figure out if we're derived from anything that is not a noalias
      // argument.
      bool CanDeriveViaCapture = false, UsesAliasingPtr = false;
      for (const Value *V : ObjSet) {
        // Is this value a constant that cannot be derived from any pointer
        // value (we need to exclude constant expressions, for example, that
        // are formed from arithmetic on global symbols).
        bool IsNonPtrConst = isa<ConstantInt>(V) || isa<ConstantFP>(V) ||
                             isa<ConstantPointerNull>(V) ||
                             isa<ConstantDataVector>(V) || isa<UndefValue>(V);
        if (IsNonPtrConst)
          continue;

        // If this is anything other than a noalias argument, then we cannot
        // completely describe the aliasing properties using alias.scope
        // metadata (and, thus, won't add any).
        if (const Argument *A = dyn_cast<Argument>(V)) {
          if (!A->hasNoAliasAttr())
            UsesAliasingPtr = true;
        } else {
          UsesAliasingPtr = true;
        }

        // If this is not some identified function-local object (which cannot
        // directly alias a noalias argument), or some other argument (which,
        // by definition, also cannot alias a noalias argument), then we could
        // alias a noalias argument that has been captured.
        if (!isa<Argument>(V) &&
            !isIdentifiedFunctionLocal(const_cast<Value*>(V)))
          CanDeriveViaCapture = true;
      }

      // A function call can always get captured noalias pointers (via other
      // parameters, globals, etc.).
      if (IsFuncCall && !IsArgMemOnlyCall)
        CanDeriveViaCapture = true;

      // First, we want to figure out all of the sets with which we definitely
      // don't alias. Iterate over all noalias sets, and add those for which:
      //   1. The noalias argument is not in the set of objects from which we
      //      definitely derive.
      //   2. The noalias argument has not yet been captured.
      // An arbitrary function that might load pointers could see captured
      // noalias arguments via other noalias arguments or globals, and so we
      // must always check for prior capture.
      for (const Argument *A : NoAliasArgs) {
        if (!ObjSet.count(A) && (!CanDeriveViaCapture ||
                                 // It might be tempting to skip the
                                 // PointerMayBeCapturedBefore check if
                                 // A->hasNoCaptureAttr() is true, but this is
                                 // incorrect because nocapture only guarantees
                                 // that no copies outlive the function, not
                                 // that the value cannot be locally captured.
                                 !PointerMayBeCapturedBefore(A,
                                   /* ReturnCaptures */ false,
                                   /* StoreCaptures */ false, I, &DT)))
          NoAliases.push_back(NewScopes[A]);
      }

      if (!NoAliases.empty())
        NI->setMetadata(LLVMContext::MD_noalias,
                        MDNode::concatenate(
                            NI->getMetadata(LLVMContext::MD_noalias),
                            MDNode::get(CalledFunc->getContext(), NoAliases)));

      // Next, we want to figure out all of the sets to which we might belong.
      // We might belong to a set if the noalias argument is in the set of
      // underlying objects. If there is some non-noalias argument in our list
      // of underlying objects, then we cannot add a scope because the fact
      // that some access does not alias with any set of our noalias arguments
      // cannot itself guarantee that it does not alias with this access
      // (because there is some pointer of unknown origin involved and the
      // other access might also depend on this pointer). We also cannot add
      // scopes to arbitrary functions unless we know they don't access any
      // non-parameter pointer-values.
      bool CanAddScopes = !UsesAliasingPtr;
      if (CanAddScopes && IsFuncCall)
        CanAddScopes = IsArgMemOnlyCall;

      if (CanAddScopes)
        for (const Argument *A : NoAliasArgs) {
          if (ObjSet.count(A))
            Scopes.push_back(NewScopes[A]);
        }

      if (!Scopes.empty())
        NI->setMetadata(
            LLVMContext::MD_alias_scope,
            MDNode::concatenate(NI->getMetadata(LLVMContext::MD_alias_scope),
                                MDNode::get(CalledFunc->getContext(), Scopes)));
    }
  }
}

Hal Finkel68dc3c72014-10-15 23:44:41 +00001017/// If the inlined function has non-byval align arguments, then
1018/// add @llvm.assume-based alignment assumptions to preserve this information.
1019static void AddAlignmentAssumptions(CallSite CS, InlineFunctionInfo &IFI) {
Mehdi Amini46a43552015-03-04 18:43:29 +00001020 if (!PreserveAlignmentAssumptions)
Hal Finkel68dc3c72014-10-15 23:44:41 +00001021 return;
Mehdi Amini46a43552015-03-04 18:43:29 +00001022 auto &DL = CS.getCaller()->getParent()->getDataLayout();
Hal Finkel68dc3c72014-10-15 23:44:41 +00001023
1024 // To avoid inserting redundant assumptions, we should check for assumptions
1025 // already in the caller. To do this, we might need a DT of the caller.
1026 DominatorTree DT;
1027 bool DTCalculated = false;
1028
Chandler Carruth66b31302015-01-04 12:03:27 +00001029 Function *CalledFunc = CS.getCalledFunction();
1030 for (Function::arg_iterator I = CalledFunc->arg_begin(),
1031 E = CalledFunc->arg_end();
1032 I != E; ++I) {
Hal Finkel68dc3c72014-10-15 23:44:41 +00001033 unsigned Align = I->getType()->isPointerTy() ? I->getParamAlignment() : 0;
1034 if (Align && !I->hasByValOrInAllocaAttr() && !I->hasNUses(0)) {
1035 if (!DTCalculated) {
1036 DT.recalculate(const_cast<Function&>(*CS.getInstruction()->getParent()
1037 ->getParent()));
1038 DTCalculated = true;
1039 }
1040
1041 // If we can already prove the asserted alignment in the context of the
1042 // caller, then don't bother inserting the assumption.
1043 Value *Arg = CS.getArgument(I->getArgNo());
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001044 if (getKnownAlignment(Arg, DL, CS.getInstruction(),
Vedant Kumarff08e922015-09-23 15:49:08 +00001045 &IFI.ACT->getAssumptionCache(*CS.getCaller()),
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001046 &DT) >= Align)
Hal Finkel68dc3c72014-10-15 23:44:41 +00001047 continue;
1048
Mehdi Amini46a43552015-03-04 18:43:29 +00001049 IRBuilder<>(CS.getInstruction())
1050 .CreateAlignmentAssumption(DL, Arg, Align);
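// Rough shape of what this emits, for illustration only (exact IR depends on
// the IRBuilder implementation); assume a pointer argument %arg with a
// 16-byte align attribute:
//   %ptrint = ptrtoint i8* %arg to i64
//   %maskedptr = and i64 %ptrint, 15
//   %maskcond = icmp eq i64 %maskedptr, 0
//   call void @llvm.assume(i1 %maskcond)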
Hal Finkel68dc3c72014-10-15 23:44:41 +00001051 }
1052 }
1053}
1054
Sanjay Patel0fdb4372015-03-10 19:42:57 +00001055/// Once we have cloned code over from a callee into the caller,
1056/// update the specified callgraph to reflect the changes we made.
1057/// Note that it's possible that not all code was copied over, so only
Duncan Sands46911f12008-09-08 11:05:51 +00001058/// some edges of the callgraph may remain.
1059static void UpdateCallGraphAfterInlining(CallSite CS,
Chris Lattner5de3b8b2006-07-12 18:29:36 +00001060 Function::iterator FirstNewBlock,
Rafael Espindola229e38f2010-10-13 01:36:30 +00001061 ValueToValueMapTy &VMap,
Chris Lattner2eee5d32010-04-22 23:37:35 +00001062 InlineFunctionInfo &IFI) {
1063 CallGraph &CG = *IFI.CG;
Duncan Sands46911f12008-09-08 11:05:51 +00001064 const Function *Caller = CS.getInstruction()->getParent()->getParent();
1065 const Function *Callee = CS.getCalledFunction();
Chris Lattner0841fb12006-01-14 20:07:50 +00001066 CallGraphNode *CalleeNode = CG[Callee];
1067 CallGraphNode *CallerNode = CG[Caller];
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001068
Chris Lattner5de3b8b2006-07-12 18:29:36 +00001069 // Since we inlined some uninlined call sites in the callee into the caller,
Chris Lattner0841fb12006-01-14 20:07:50 +00001070 // add edges from the caller to all of the callees of the callee.
Gabor Greif5aa19222009-01-15 18:40:09 +00001071 CallGraphNode::iterator I = CalleeNode->begin(), E = CalleeNode->end();
1072
1073 // Consider the case where CalleeNode == CallerNode.
Gabor Greiff1abfdc2009-01-17 00:09:08 +00001074 CallGraphNode::CalledFunctionsVector CallCache;
Gabor Greif5aa19222009-01-15 18:40:09 +00001075 if (CalleeNode == CallerNode) {
1076 CallCache.assign(I, E);
1077 I = CallCache.begin();
1078 E = CallCache.end();
1079 }
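// The snapshot above matters for directly recursive call sites: when
// CalleeNode and CallerNode are the same node, the loop below appends edges to
// the very list being iterated, so copying the range first keeps I and E valid.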
1080
1081 for (; I != E; ++I) {
Chris Lattner063d0652009-09-01 06:31:31 +00001082 const Value *OrigCall = I->first;
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001083
Rafael Espindola229e38f2010-10-13 01:36:30 +00001084 ValueToValueMapTy::iterator VMI = VMap.find(OrigCall);
Chris Lattnerb3c64f72006-07-12 21:37:11 +00001085 // Only copy the edge if the call was inlined!
Craig Topperf40110f2014-04-25 05:29:35 +00001086 if (VMI == VMap.end() || VMI->second == nullptr)
Chris Lattner5eef6ad2009-08-27 03:51:50 +00001087 continue;
1088
1089 // If the call was inlined, but then constant folded, there is no edge to
1090 // add. Check for this case.
Chris Lattner016c00a2010-04-22 21:31:00 +00001091 Instruction *NewCall = dyn_cast<Instruction>(VMI->second);
Sanjay Patelc04b6f22015-03-11 15:12:32 +00001092 if (!NewCall)
1093 continue;
Chris Lattnerc2432b92010-05-01 01:26:13 +00001094
Sanjay Patelc04b6f22015-03-11 15:12:32 +00001095 // We do not treat intrinsic calls like real function calls because we
1096 // expect them to become inline code; do not add an edge for an intrinsic.
1097 CallSite CS = CallSite(NewCall);
1098 if (CS && CS.getCalledFunction() && CS.getCalledFunction()->isIntrinsic())
1099 continue;
1100
Chris Lattnerc2432b92010-05-01 01:26:13 +00001101 // Remember that this call site got inlined for the client of
1102 // InlineFunction.
1103 IFI.InlinedCalls.push_back(NewCall);
1104
Chris Lattner016c00a2010-04-22 21:31:00 +00001105 // It's possible that inlining the callsite will cause it to go from an
1106 // indirect to a direct call by resolving a function pointer. If this
1107 // happens, set the callee of the new call site to a more precise
1108 // destination. This can also happen if the call graph node of the caller
1109 // was just unnecessarily imprecise.
Craig Topperf40110f2014-04-25 05:29:35 +00001110 if (!I->second->getFunction())
Chris Lattner016c00a2010-04-22 21:31:00 +00001111 if (Function *F = CallSite(NewCall).getCalledFunction()) {
1112 // Indirect call site resolved to direct call.
Gabor Greif7b0a5fd2010-07-27 15:02:37 +00001113 CallerNode->addCalledFunction(CallSite(NewCall), CG[F]);
1114
Chris Lattner016c00a2010-04-22 21:31:00 +00001115 continue;
1116 }
Gabor Greif7b0a5fd2010-07-27 15:02:37 +00001117
1118 CallerNode->addCalledFunction(CallSite(NewCall), I->second);
Chris Lattner5de3b8b2006-07-12 18:29:36 +00001119 }
Chris Lattner5eef6ad2009-08-27 03:51:50 +00001120
Dale Johannesen0aeabdf2009-01-13 22:43:37 +00001121 // Update the call graph by deleting the edge from Callee to Caller. We must
1122 // do this after the loop above in case Caller and Callee are the same.
1123 CallerNode->removeCallEdgeFor(CS);
Chris Lattner0841fb12006-01-14 20:07:50 +00001124}
1125
Julien Lerouge957e91c2014-04-15 18:01:54 +00001126static void HandleByValArgumentInit(Value *Dst, Value *Src, Module *M,
1127 BasicBlock *InsertBlock,
1128 InlineFunctionInfo &IFI) {
Julien Lerouge957e91c2014-04-15 18:01:54 +00001129 Type *AggTy = cast<PointerType>(Src->getType())->getElementType();
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001130 IRBuilder<> Builder(InsertBlock, InsertBlock->begin());
Julien Lerouge957e91c2014-04-15 18:01:54 +00001131
Mehdi Amini46a43552015-03-04 18:43:29 +00001132 Value *Size = Builder.getInt64(M->getDataLayout().getTypeStoreSize(AggTy));
Julien Lerouge957e91c2014-04-15 18:01:54 +00001133
1134 // Always generate a memcpy of alignment 1 here because we don't know
1135 // the alignment of the src pointer. Other optimizations can infer
1136 // better alignment.
Pete Cooper67cf9a72015-11-19 05:56:52 +00001137 Builder.CreateMemCpy(Dst, Src, Size, /*Align=*/1);
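// Illustrative result only (the intrinsic's exact signature differs across
// releases); for a 16-byte byval aggregate this is roughly
//   call void @llvm.memcpy.p0i8.p0i8.i64(i8* %dst, i8* %src, i64 16, i32 1, i1 false)
// with the conservative alignment of 1 noted above.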
Julien Lerouge957e91c2014-04-15 18:01:54 +00001138}
1139
Sanjay Patel0fdb4372015-03-10 19:42:57 +00001140/// When inlining a call site that has a byval argument,
Chris Lattner0f114952010-12-20 08:10:40 +00001141/// we have to make the implicit memcpy explicit by adding it.
David Majnemer120f4a02013-11-03 12:22:13 +00001142static Value *HandleByValArgument(Value *Arg, Instruction *TheCall,
Chris Lattner00997442010-12-20 07:57:41 +00001143 const Function *CalledFunc,
1144 InlineFunctionInfo &IFI,
Reid Klecknerdd3f3ed2014-11-04 02:02:14 +00001145 unsigned ByValAlignment) {
Matt Arsenaultbe558882014-04-23 20:58:57 +00001146 PointerType *ArgTy = cast<PointerType>(Arg->getType());
1147 Type *AggTy = ArgTy->getElementType();
Chris Lattner0f114952010-12-20 08:10:40 +00001148
Chandler Carruth66b31302015-01-04 12:03:27 +00001149 Function *Caller = TheCall->getParent()->getParent();
1150
Chris Lattner0f114952010-12-20 08:10:40 +00001151 // If the called function is readonly, then it could not mutate the caller's
1152 // copy of the byval'd memory. In this case, it is safe to elide the copy and
1153 // temporary.
David Majnemer120f4a02013-11-03 12:22:13 +00001154 if (CalledFunc->onlyReadsMemory()) {
Chris Lattner0f114952010-12-20 08:10:40 +00001155    // If the byval argument has a specified alignment that is greater than the
1156    // known alignment of the passed-in pointer, then we either have to round up
1157    // the input pointer or give up on this transformation.
1158 if (ByValAlignment <= 1) // 0 = unspecified, 1 = no particular alignment.
David Majnemer120f4a02013-11-03 12:22:13 +00001159 return Arg;
Chris Lattner0f114952010-12-20 08:10:40 +00001160
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001161 const DataLayout &DL = Caller->getParent()->getDataLayout();
1162
Chris Lattner20fca482010-12-25 20:42:38 +00001163 // If the pointer is already known to be sufficiently aligned, or if we can
1164 // round it up to a larger alignment, then we don't need a temporary.
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001165 if (getOrEnforceKnownAlignment(Arg, ByValAlignment, DL, TheCall,
1166 &IFI.ACT->getAssumptionCache(*Caller)) >=
1167 ByValAlignment)
David Majnemer120f4a02013-11-03 12:22:13 +00001168 return Arg;
Chris Lattner0f114952010-12-20 08:10:40 +00001169
Chris Lattner20fca482010-12-25 20:42:38 +00001170 // Otherwise, we have to make a memcpy to get a safe alignment. This is bad
1171 // for code quality, but rarely happens and is required for correctness.
Chris Lattner0f114952010-12-20 08:10:40 +00001172 }
Chris Lattner00997442010-12-20 07:57:41 +00001173
Micah Villmowcdfe20b2012-10-08 16:38:25 +00001174  // Create the alloca, using the DataLayout's preferred alignment for this type.
Mehdi Amini46a43552015-03-04 18:43:29 +00001175 unsigned Align =
1176 Caller->getParent()->getDataLayout().getPrefTypeAlignment(AggTy);
1177
Chris Lattner00997442010-12-20 07:57:41 +00001178 // If the byval had an alignment specified, we *must* use at least that
1179 // alignment, as it is required by the byval argument (and uses of the
1180 // pointer inside the callee).
1181 Align = std::max(Align, ByValAlignment);
1182
Craig Topperf40110f2014-04-25 05:29:35 +00001183 Value *NewAlloca = new AllocaInst(AggTy, nullptr, Align, Arg->getName(),
Chris Lattner00997442010-12-20 07:57:41 +00001184 &*Caller->begin()->begin());
Julien Lerougebe4fe322014-04-15 18:06:46 +00001185 IFI.StaticAllocas.push_back(cast<AllocaInst>(NewAlloca));
Chris Lattner00997442010-12-20 07:57:41 +00001186
1187 // Uses of the argument in the function should use our new alloca
1188 // instead.
1189 return NewAlloca;
1190}
1191
Sanjay Patel0fdb4372015-03-10 19:42:57 +00001192/// Check whether this Value is used by a lifetime intrinsic.
Nick Lewyckya68ec832011-05-22 05:22:10 +00001193static bool isUsedByLifetimeMarker(Value *V) {
Chandler Carruthcdf47882014-03-09 03:16:01 +00001194 for (User *U : V->users()) {
1195 if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(U)) {
Nick Lewyckya68ec832011-05-22 05:22:10 +00001196 switch (II->getIntrinsicID()) {
1197 default: break;
1198 case Intrinsic::lifetime_start:
1199 case Intrinsic::lifetime_end:
1200 return true;
1201 }
1202 }
1203 }
1204 return false;
1205}
1206
Sanjay Patel0fdb4372015-03-10 19:42:57 +00001207/// Check whether the given alloca already has
Nick Lewyckya68ec832011-05-22 05:22:10 +00001208/// lifetime.start or lifetime.end intrinsics.
1209static bool hasLifetimeMarkers(AllocaInst *AI) {
Matt Arsenaultbe558882014-04-23 20:58:57 +00001210 Type *Ty = AI->getType();
1211 Type *Int8PtrTy = Type::getInt8PtrTy(Ty->getContext(),
1212 Ty->getPointerAddressSpace());
1213 if (Ty == Int8PtrTy)
Nick Lewyckya68ec832011-05-22 05:22:10 +00001214 return isUsedByLifetimeMarker(AI);
1215
Nick Lewycky9711b5c2011-06-14 00:59:24 +00001216 // Do a scan to find all the casts to i8*.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001217 for (User *U : AI->users()) {
1218 if (U->getType() != Int8PtrTy) continue;
1219 if (U->stripPointerCasts() != AI) continue;
1220 if (isUsedByLifetimeMarker(U))
Nick Lewyckya68ec832011-05-22 05:22:10 +00001221 return true;
1222 }
1223 return false;
1224}
1225
David Blaikiedf706282015-01-21 22:57:29 +00001226/// Rebuild the entire inlined-at chain for this instruction so that the top of
1227/// the chain is now inlined-at the new call site.
1228static DebugLoc
Duncan P. N. Exon Smitha9308c42015-04-29 16:38:44 +00001229updateInlinedAtInfo(DebugLoc DL, DILocation *InlinedAtNode, LLVMContext &Ctx,
1230 DenseMap<const DILocation *, DILocation *> &IANodes) {
1231 SmallVector<DILocation *, 3> InlinedAtLocations;
1232 DILocation *Last = InlinedAtNode;
1233 DILocation *CurInlinedAt = DL;
David Blaikiedf706282015-01-21 22:57:29 +00001234
1235 // Gather all the inlined-at nodes
Duncan P. N. Exon Smitha9308c42015-04-29 16:38:44 +00001236 while (DILocation *IA = CurInlinedAt->getInlinedAt()) {
David Blaikiedf706282015-01-21 22:57:29 +00001237 // Skip any we've already built nodes for
Duncan P. N. Exon Smitha9308c42015-04-29 16:38:44 +00001238 if (DILocation *Found = IANodes[IA]) {
David Blaikiedf706282015-01-21 22:57:29 +00001239 Last = Found;
1240 break;
1241 }
1242
1243 InlinedAtLocations.push_back(IA);
Duncan P. N. Exon Smithec819c02015-03-30 19:49:49 +00001244 CurInlinedAt = IA;
Devang Patel35797402011-07-08 18:01:31 +00001245 }
Eric Christopherf16bee82012-03-26 19:09:38 +00001246
David Blaikiedf706282015-01-21 22:57:29 +00001247 // Starting from the top, rebuild the nodes to point to the new inlined-at
1248 // location (then rebuilding the rest of the chain behind it) and update the
1249 // map of already-constructed inlined-at nodes.
Pete Cooper7679afd2015-07-24 21:13:43 +00001250 for (const DILocation *MD : make_range(InlinedAtLocations.rbegin(),
1251 InlinedAtLocations.rend())) {
Duncan P. N. Exon Smitha9308c42015-04-29 16:38:44 +00001252 Last = IANodes[MD] = DILocation::getDistinct(
David Blaikiedf706282015-01-21 22:57:29 +00001253 Ctx, MD->getLine(), MD->getColumn(), MD->getScope(), Last);
1254 }
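// Worked example (locations are hypothetical): if the instruction's location
// was "L1 inlined-at L2" and the new call site is C, the rebuilt chain is
// "L1 inlined-at (L2 inlined-at C)", i.e. the new call site becomes the
// outermost frame of the chain.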
1255
1256 // And finally create the normal location for this instruction, referring to
1257 // the new inlined-at chain.
Duncan P. N. Exon Smithec819c02015-03-30 19:49:49 +00001258 return DebugLoc::get(DL.getLine(), DL.getCol(), DL.getScope(), Last);
Devang Patel35797402011-07-08 18:01:31 +00001259}
1260
Sanjay Patel0fdb4372015-03-10 19:42:57 +00001261/// Update inlined instructions' line numbers to encode the location where
Devang Patel35797402011-07-08 18:01:31 +00001262/// these instructions are inlined.
1263static void fixupLineNumbers(Function *Fn, Function::iterator FI,
Eric Christopher2b40fdf2012-03-26 19:09:40 +00001264 Instruction *TheCall) {
Devang Patel35797402011-07-08 18:01:31 +00001265 DebugLoc TheCallDL = TheCall->getDebugLoc();
Duncan P. N. Exon Smithec819c02015-03-30 19:49:49 +00001266 if (!TheCallDL)
Devang Patel35797402011-07-08 18:01:31 +00001267 return;
1268
David Blaikiedf706282015-01-21 22:57:29 +00001269 auto &Ctx = Fn->getContext();
Duncan P. N. Exon Smitha9308c42015-04-29 16:38:44 +00001270 DILocation *InlinedAtNode = TheCallDL;
David Blaikiedf706282015-01-21 22:57:29 +00001271
1272 // Create a unique call site, not to be confused with any other call from the
1273 // same location.
Duncan P. N. Exon Smitha9308c42015-04-29 16:38:44 +00001274 InlinedAtNode = DILocation::getDistinct(
David Blaikiedf706282015-01-21 22:57:29 +00001275 Ctx, InlinedAtNode->getLine(), InlinedAtNode->getColumn(),
1276 InlinedAtNode->getScope(), InlinedAtNode->getInlinedAt());
1277
1278  // Cache the inlined-at nodes as they're built so they are reused; without
1279  // this, every instruction's inlined-at chain would become distinct from the
1280  // others.
Duncan P. N. Exon Smitha9308c42015-04-29 16:38:44 +00001281 DenseMap<const DILocation *, DILocation *> IANodes;
David Blaikiedf706282015-01-21 22:57:29 +00001282
Devang Patel35797402011-07-08 18:01:31 +00001283 for (; FI != Fn->end(); ++FI) {
1284 for (BasicBlock::iterator BI = FI->begin(), BE = FI->end();
1285 BI != BE; ++BI) {
1286 DebugLoc DL = BI->getDebugLoc();
Duncan P. N. Exon Smithec819c02015-03-30 19:49:49 +00001287 if (!DL) {
Evgeniy Stepanov2be29922014-06-09 09:09:19 +00001288 // If the inlined instruction has no line number, make it look as if it
1289        // originates from the call location. This is important for
1290        // __attribute__((__always_inline__, __nodebug__)) functions, which must use
1291        // the caller's location for all instructions in their function body.
Paul Robinsonf60e0a12014-10-21 01:00:55 +00001292
1293 // Don't update static allocas, as they may get moved later.
1294 if (auto *AI = dyn_cast<AllocaInst>(BI))
1295 if (isa<Constant>(AI->getArraySize()))
1296 continue;
1297
Evgeniy Stepanov2be29922014-06-09 09:09:19 +00001298 BI->setDebugLoc(TheCallDL);
1299 } else {
David Blaikiedf706282015-01-21 22:57:29 +00001300 BI->setDebugLoc(updateInlinedAtInfo(DL, InlinedAtNode, BI->getContext(), IANodes));
Devang Patelbb23a4a2011-08-10 21:50:54 +00001301 }
Devang Patel35797402011-07-08 18:01:31 +00001302 }
1303 }
1304}
1305
Sanjay Patel0fdb4372015-03-10 19:42:57 +00001306/// This function inlines the called function into the basic block of the
1307/// caller. This returns false if it is not possible to inline this call.
1308/// The program is still in a well defined state if this occurs though.
Bill Wendlingce0c2292012-01-31 01:01:16 +00001309///
1310/// Note that this only does one level of inlining. For example, if the
1311/// instruction 'call B' is inlined, and 'B' calls 'C', then the call to 'C' now
1312/// exists in the instruction stream. Similarly this will inline a recursive
1313/// function by one level.
Eric Christopherf16bee82012-03-26 19:09:38 +00001314bool llvm::InlineFunction(CallSite CS, InlineFunctionInfo &IFI,
Chandler Carruth7b560d42015-09-09 17:55:00 +00001315 AAResults *CalleeAAR, bool InsertLifetime) {
Chris Lattner0cc265e2003-08-24 06:59:16 +00001316 Instruction *TheCall = CS.getInstruction();
1317 assert(TheCall->getParent() && TheCall->getParent()->getParent() &&
1318 "Instruction not in function!");
Chris Lattner530d4bf2003-05-29 15:11:31 +00001319
Chris Lattner4ba01ec2010-04-22 23:07:58 +00001320 // If IFI has any state in it, zap it before we fill it in.
1321 IFI.reset();
Easwaran Ramanb1bd3982016-03-08 00:36:35 +00001322
Chris Lattner0cc265e2003-08-24 06:59:16 +00001323 const Function *CalledFunc = CS.getCalledFunction();
Craig Topperf40110f2014-04-25 05:29:35 +00001324 if (!CalledFunc || // Can't inline external function or indirect
Reid Spencer5301e7c2007-01-30 20:08:39 +00001325 CalledFunc->isDeclaration() || // call, or call to a vararg function!
Eric Christopher1d385382010-03-24 23:35:21 +00001326 CalledFunc->getFunctionType()->isVarArg()) return false;
Chris Lattner530d4bf2003-05-29 15:11:31 +00001327
Sanjoy Das2d161452015-11-18 06:23:38 +00001328 // The inliner does not know how to inline through calls with operand bundles
1329 // in general ...
1330 if (CS.hasOperandBundles()) {
David Majnemer3bb88c02015-12-15 21:27:27 +00001331 for (int i = 0, e = CS.getNumOperandBundles(); i != e; ++i) {
1332 uint32_t Tag = CS.getOperandBundleAt(i).getTagID();
1333 // ... but it knows how to inline through "deopt" operand bundles ...
1334 if (Tag == LLVMContext::OB_deopt)
1335 continue;
1336 // ... and "funclet" operand bundles.
1337 if (Tag == LLVMContext::OB_funclet)
1338 continue;
1339
Sanjoy Das2d161452015-11-18 06:23:38 +00001340 return false;
David Majnemer3bb88c02015-12-15 21:27:27 +00001341 }
Sanjoy Das2d161452015-11-18 06:23:38 +00001342 }
Sanjoy Das0a1bee82015-10-23 20:09:55 +00001343
Duncan Sandsaa31b922007-12-19 21:13:37 +00001344 // If the call to the callee cannot throw, set the 'nounwind' flag on any
1345 // calls that we inline.
1346 bool MarkNoUnwind = CS.doesNotThrow();
1347
Chris Lattner0cc265e2003-08-24 06:59:16 +00001348 BasicBlock *OrigBB = TheCall->getParent();
Chris Lattner530d4bf2003-05-29 15:11:31 +00001349 Function *Caller = OrigBB->getParent();
1350
Gordon Henriksenb969c592007-12-25 03:10:07 +00001351 // GC poses two hazards to inlining, which only occur when the callee has GC:
1352 // 1. If the caller has no GC, then the callee's GC must be propagated to the
1353 // caller.
1354 // 2. If the caller has a differing GC, it is invalid to inline.
Gordon Henriksend930f912008-08-17 18:44:35 +00001355 if (CalledFunc->hasGC()) {
1356 if (!Caller->hasGC())
1357 Caller->setGC(CalledFunc->getGC());
1358 else if (CalledFunc->getGC() != Caller->getGC())
Gordon Henriksenb969c592007-12-25 03:10:07 +00001359 return false;
1360 }
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001361
Benjamin Kramer4d2b8712011-12-02 18:37:31 +00001362 // Get the personality function from the callee if it contains a landing pad.
David Majnemer7fddecc2015-06-17 20:52:32 +00001363 Constant *CalledPersonality =
David Majnemereba62792015-10-13 22:08:17 +00001364 CalledFunc->hasPersonalityFn()
1365 ? CalledFunc->getPersonalityFn()->stripPointerCasts()
1366 : nullptr;
Benjamin Kramer4d2b8712011-12-02 18:37:31 +00001367
Bill Wendling55421f02011-08-14 08:01:36 +00001368 // Find the personality function used by the landing pads of the caller. If it
1369 // exists, then check to see that it matches the personality function used in
1370 // the callee.
David Majnemer7fddecc2015-06-17 20:52:32 +00001371 Constant *CallerPersonality =
David Majnemereba62792015-10-13 22:08:17 +00001372 Caller->hasPersonalityFn()
1373 ? Caller->getPersonalityFn()->stripPointerCasts()
1374 : nullptr;
David Majnemer7fddecc2015-06-17 20:52:32 +00001375 if (CalledPersonality) {
1376 if (!CallerPersonality)
1377 Caller->setPersonalityFn(CalledPersonality);
1378 // If the personality functions match, then we can perform the
1379 // inlining. Otherwise, we can't inline.
1380 // TODO: This isn't 100% true. Some personality functions are proper
1381 // supersets of others and can be used in place of the other.
1382 else if (CalledPersonality != CallerPersonality)
1383 return false;
Bill Wendlingce0c2292012-01-31 01:01:16 +00001384 }
Bill Wendling55421f02011-08-14 08:01:36 +00001385
David Majnemer8a1c45d2015-12-12 05:38:55 +00001386 // We need to figure out which funclet the callsite was in so that we may
1387 // properly nest the callee.
1388 Instruction *CallSiteEHPad = nullptr;
David Majnemer3bb88c02015-12-15 21:27:27 +00001389 if (CallerPersonality) {
1390 EHPersonality Personality = classifyEHPersonality(CallerPersonality);
David Majnemer8a1c45d2015-12-12 05:38:55 +00001391 if (isFuncletEHPersonality(Personality)) {
David Majnemer3bb88c02015-12-15 21:27:27 +00001392 Optional<OperandBundleUse> ParentFunclet =
1393 CS.getOperandBundle(LLVMContext::OB_funclet);
1394 if (ParentFunclet)
1395 CallSiteEHPad = cast<FuncletPadInst>(ParentFunclet->Inputs.front());
David Majnemer8a1c45d2015-12-12 05:38:55 +00001396
1397 // OK, the inlining site is legal. What about the target function?
1398
1399 if (CallSiteEHPad) {
1400 if (Personality == EHPersonality::MSVC_CXX) {
1401 // The MSVC personality cannot tolerate catches getting inlined into
1402 // cleanup funclets.
1403 if (isa<CleanupPadInst>(CallSiteEHPad)) {
1404 // Ok, the call site is within a cleanuppad. Let's check the callee
1405 // for catchpads.
1406 for (const BasicBlock &CalledBB : *CalledFunc) {
David Majnemer3bb88c02015-12-15 21:27:27 +00001407 if (isa<CatchSwitchInst>(CalledBB.getFirstNonPHI()))
David Majnemer8a1c45d2015-12-12 05:38:55 +00001408 return false;
1409 }
1410 }
1411 } else if (isAsynchronousEHPersonality(Personality)) {
1412          // SEH is even less tolerant; there may not be any sort of exceptional
1413 // funclet in the callee.
1414 for (const BasicBlock &CalledBB : *CalledFunc) {
1415 if (CalledBB.isEHPad())
1416 return false;
1417 }
1418 }
1419 }
1420 }
1421 }
1422
David Majnemer223538f2016-02-23 17:11:04 +00001423 // Determine if we are dealing with a call in an EHPad which does not unwind
1424 // to caller.
1425 bool EHPadForCallUnwindsLocally = false;
1426 if (CallSiteEHPad && CS.isCall()) {
1427 UnwindDestMemoTy FuncletUnwindMap;
1428 Value *CallSiteUnwindDestToken =
1429 getUnwindDestToken(CallSiteEHPad, FuncletUnwindMap);
1430
1431 EHPadForCallUnwindsLocally =
1432 CallSiteUnwindDestToken &&
1433 !isa<ConstantTokenNone>(CallSiteUnwindDestToken);
1434 }
1435
Chris Lattner9fc977e2004-02-04 01:41:09 +00001436 // Get an iterator to the last basic block in the function, which will have
1437 // the new function inlined after it.
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001438 Function::iterator LastBlock = --Caller->end();
Chris Lattner9fc977e2004-02-04 01:41:09 +00001439
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001440 // Make sure to capture all of the return instructions from the cloned
Chris Lattner530d4bf2003-05-29 15:11:31 +00001441 // function.
Chris Lattnerd84dbb32009-08-27 04:02:30 +00001442 SmallVector<ReturnInst*, 8> Returns;
Chris Lattner908d7952006-01-13 19:05:59 +00001443 ClonedCodeInfo InlinedFunctionInfo;
Dale Johannesen845e5822009-03-04 02:09:48 +00001444 Function::iterator FirstNewBlock;
Duncan Sandsaa31b922007-12-19 21:13:37 +00001445
Devang Patelb8f11de2010-06-23 23:55:51 +00001446 { // Scope to destroy VMap after cloning.
Rafael Espindola229e38f2010-10-13 01:36:30 +00001447 ValueToValueMapTy VMap;
Julien Lerouge957e91c2014-04-15 18:01:54 +00001448 // Keep a list of pair (dst, src) to emit byval initializations.
1449 SmallVector<std::pair<Value*, Value*>, 4> ByValInit;
Chris Lattnerbe853d72006-05-27 01:28:04 +00001450
Mehdi Amini46a43552015-03-04 18:43:29 +00001451 auto &DL = Caller->getParent()->getDataLayout();
1452
Dan Gohman3ada1e12008-06-20 17:11:32 +00001453 assert(CalledFunc->arg_size() == CS.arg_size() &&
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001454 "No varargs calls can be inlined!");
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001455
Chris Lattner908117b2008-01-11 06:09:30 +00001456 // Calculate the vector of arguments to pass into the function cloner, which
1457 // matches up the formal to the actual argument values.
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001458 CallSite::arg_iterator AI = CS.arg_begin();
Chris Lattner908117b2008-01-11 06:09:30 +00001459 unsigned ArgNo = 0;
Chris Lattner531f9e92005-03-15 04:54:21 +00001460 for (Function::const_arg_iterator I = CalledFunc->arg_begin(),
Chris Lattner908117b2008-01-11 06:09:30 +00001461 E = CalledFunc->arg_end(); I != E; ++I, ++AI, ++ArgNo) {
1462 Value *ActualArg = *AI;
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001463
Duncan Sands053c9872008-01-27 18:12:58 +00001464      // When byval arguments are actually inlined, we need to make the copy implied
1465 // by them explicit. However, we don't do this if the callee is readonly
1466 // or readnone, because the copy would be unneeded: the callee doesn't
1467 // modify the struct.
Nick Lewycky612d70b2011-11-20 19:09:04 +00001468 if (CS.isByValArgument(ArgNo)) {
David Majnemer120f4a02013-11-03 12:22:13 +00001469 ActualArg = HandleByValArgument(ActualArg, TheCall, CalledFunc, IFI,
Reid Klecknerdd3f3ed2014-11-04 02:02:14 +00001470 CalledFunc->getParamAlignment(ArgNo+1));
Reid Kleckner9b2cc642014-04-21 20:48:47 +00001471 if (ActualArg != *AI)
Julien Lerouge957e91c2014-04-15 18:01:54 +00001472 ByValInit.push_back(std::make_pair(ActualArg, (Value*) *AI));
Chris Lattner908117b2008-01-11 06:09:30 +00001473 }
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001474
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001475 VMap[&*I] = ActualArg;
Chris Lattner908117b2008-01-11 06:09:30 +00001476 }
Misha Brukmanb1c93172005-04-21 23:48:37 +00001477
Hal Finkel68dc3c72014-10-15 23:44:41 +00001478 // Add alignment assumptions if necessary. We do this before the inlined
1479 // instructions are actually cloned into the caller so that we can easily
1480 // check what will be known at the start of the inlined code.
1481 AddAlignmentAssumptions(CS, IFI);
1482
Chris Lattnerbe853d72006-05-27 01:28:04 +00001483 // We want the inliner to prune the code as it copies. We would LOVE to
1484 // have no dead or constant instructions leftover after inlining occurs
1485 // (which can happen, e.g., because an argument was constant), but we'll be
1486 // happy with whatever the cloner can do.
Mehdi Amini46a43552015-03-04 18:43:29 +00001487 CloneAndPruneFunctionInto(Caller, CalledFunc, VMap,
Dan Gohmanca26f792010-08-26 15:41:53 +00001488 /*ModuleLevelChanges=*/false, Returns, ".i",
Easwaran Ramanb1bd3982016-03-08 00:36:35 +00001489 &InlinedFunctionInfo, TheCall);
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001490
Chris Lattner5de3b8b2006-07-12 18:29:36 +00001491 // Remember the first block that is newly cloned over.
1492 FirstNewBlock = LastBlock; ++FirstNewBlock;
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001493
Julien Lerouge957e91c2014-04-15 18:01:54 +00001494    // Inject the initialization of byval arguments.
1495 for (std::pair<Value*, Value*> &Init : ByValInit)
1496 HandleByValArgumentInit(Init.first, Init.second, Caller->getParent(),
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001497 &*FirstNewBlock, IFI);
Julien Lerouge957e91c2014-04-15 18:01:54 +00001498
David Majnemer3bb88c02015-12-15 21:27:27 +00001499 Optional<OperandBundleUse> ParentDeopt =
1500 CS.getOperandBundle(LLVMContext::OB_deopt);
1501 if (ParentDeopt) {
Sanjoy Das2d161452015-11-18 06:23:38 +00001502 SmallVector<OperandBundleDef, 2> OpDefs;
1503
1504 for (auto &VH : InlinedFunctionInfo.OperandBundleCallSites) {
Sanjoy Dasab0626e2015-12-19 22:40:28 +00001505 Instruction *I = dyn_cast_or_null<Instruction>(VH);
1506 if (!I) continue; // instruction was DCE'd or RAUW'ed to undef
Sanjoy Das2d161452015-11-18 06:23:38 +00001507
1508 OpDefs.clear();
1509
1510 CallSite ICS(I);
1511 OpDefs.reserve(ICS.getNumOperandBundles());
1512
1513 for (unsigned i = 0, e = ICS.getNumOperandBundles(); i < e; ++i) {
1514 auto ChildOB = ICS.getOperandBundleAt(i);
1515 if (ChildOB.getTagID() != LLVMContext::OB_deopt) {
1516 // If the inlined call has other operand bundles, let them be
1517 OpDefs.emplace_back(ChildOB);
1518 continue;
1519 }
1520
1521 // It may be useful to separate this logic (of handling operand
1522 // bundles) out to a separate "policy" component if this gets crowded.
1523 // Prepend the parent's deoptimization continuation to the newly
1524 // inlined call's deoptimization continuation.
1525 std::vector<Value *> MergedDeoptArgs;
David Majnemer3bb88c02015-12-15 21:27:27 +00001526 MergedDeoptArgs.reserve(ParentDeopt->Inputs.size() +
Sanjoy Das2d161452015-11-18 06:23:38 +00001527 ChildOB.Inputs.size());
1528
1529 MergedDeoptArgs.insert(MergedDeoptArgs.end(),
David Majnemer3bb88c02015-12-15 21:27:27 +00001530 ParentDeopt->Inputs.begin(),
1531 ParentDeopt->Inputs.end());
Sanjoy Das2d161452015-11-18 06:23:38 +00001532 MergedDeoptArgs.insert(MergedDeoptArgs.end(), ChildOB.Inputs.begin(),
1533 ChildOB.Inputs.end());
1534
Sanjoy Das8da1f952015-12-08 03:50:32 +00001535 OpDefs.emplace_back("deopt", std::move(MergedDeoptArgs));
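// For example (operand values are invented): a parent call site carrying
// "deopt"(i32 1, i32 2) and an inlined call carrying "deopt"(i32 3) yield a
// cloned call carrying "deopt"(i32 1, i32 2, i32 3), parent state first.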
Sanjoy Das2d161452015-11-18 06:23:38 +00001536 }
1537
1538 Instruction *NewI = nullptr;
1539 if (isa<CallInst>(I))
1540 NewI = CallInst::Create(cast<CallInst>(I), OpDefs, I);
1541 else
1542 NewI = InvokeInst::Create(cast<InvokeInst>(I), OpDefs, I);
1543
1544 // Note: the RAUW does the appropriate fixup in VMap, so we need to do
1545 // this even if the call returns void.
1546 I->replaceAllUsesWith(NewI);
1547
1548 VH = nullptr;
1549 I->eraseFromParent();
1550 }
1551 }
1552
Chris Lattner5de3b8b2006-07-12 18:29:36 +00001553 // Update the callgraph if requested.
Chris Lattner4ba01ec2010-04-22 23:07:58 +00001554 if (IFI.CG)
Devang Patelb8f11de2010-06-23 23:55:51 +00001555 UpdateCallGraphAfterInlining(CS, FirstNewBlock, VMap, IFI);
Devang Patel35797402011-07-08 18:01:31 +00001556
1557 // Update inlined instructions' line number information.
1558 fixupLineNumbers(Caller, FirstNewBlock, TheCall);
Hal Finkel94146652014-07-24 14:25:39 +00001559
1560 // Clone existing noalias metadata if necessary.
1561 CloneAliasScopeMetadata(CS, VMap);
Hal Finkelff0bcb62014-07-25 15:50:08 +00001562
1563 // Add noalias metadata if necessary.
Chandler Carruth7b560d42015-09-09 17:55:00 +00001564 AddAliasScopeMetadata(CS, VMap, DL, CalleeAAR);
Hal Finkel74c2f352014-09-07 12:44:26 +00001565
1566 // FIXME: We could register any cloned assumptions instead of clearing the
1567 // whole function's cache.
Chandler Carruth66b31302015-01-04 12:03:27 +00001568 if (IFI.ACT)
1569 IFI.ACT->getAssumptionCache(*Caller).clear();
Misha Brukmanb1c93172005-04-21 23:48:37 +00001570 }
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001571
Chris Lattner530d4bf2003-05-29 15:11:31 +00001572 // If there are any alloca instructions in the block that used to be the entry
1573 // block for the callee, move them to the entry block of the caller. First
1574 // calculate which instruction they should be inserted before. We insert the
1575 // instructions at the end of the current alloca list.
Chris Lattner257492c2006-01-13 18:16:48 +00001576 {
Chris Lattner0cc265e2003-08-24 06:59:16 +00001577 BasicBlock::iterator InsertPoint = Caller->begin()->begin();
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001578 for (BasicBlock::iterator I = FirstNewBlock->begin(),
Chris Lattner5eef6ad2009-08-27 03:51:50 +00001579 E = FirstNewBlock->end(); I != E; ) {
1580 AllocaInst *AI = dyn_cast<AllocaInst>(I++);
Craig Topperf40110f2014-04-25 05:29:35 +00001581 if (!AI) continue;
Chris Lattner5eef6ad2009-08-27 03:51:50 +00001582
1583 // If the alloca is now dead, remove it. This often occurs due to code
1584 // specialization.
1585 if (AI->use_empty()) {
1586 AI->eraseFromParent();
1587 continue;
Chris Lattner6ef6d062006-09-13 19:23:57 +00001588 }
Chris Lattner5eef6ad2009-08-27 03:51:50 +00001589
1590 if (!isa<Constant>(AI->getArraySize()))
1591 continue;
1592
Chris Lattnercd3af962010-12-06 07:43:04 +00001593 // Keep track of the static allocas that we inline into the caller.
Chris Lattner4ba01ec2010-04-22 23:07:58 +00001594 IFI.StaticAllocas.push_back(AI);
Chris Lattnerb1cba3f2009-08-27 04:20:52 +00001595
Chris Lattner5eef6ad2009-08-27 03:51:50 +00001596 // Scan for the block of allocas that we can move over, and move them
1597 // all at once.
1598 while (isa<AllocaInst>(I) &&
Chris Lattnerb1cba3f2009-08-27 04:20:52 +00001599 isa<Constant>(cast<AllocaInst>(I)->getArraySize())) {
Chris Lattner4ba01ec2010-04-22 23:07:58 +00001600 IFI.StaticAllocas.push_back(cast<AllocaInst>(I));
Chris Lattner5eef6ad2009-08-27 03:51:50 +00001601 ++I;
Chris Lattnerb1cba3f2009-08-27 04:20:52 +00001602 }
Chris Lattner5eef6ad2009-08-27 03:51:50 +00001603
1604 // Transfer all of the allocas over in a block. Using splice means
1605 // that the instructions aren't removed from the symbol table, then
1606 // reinserted.
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001607 Caller->getEntryBlock().getInstList().splice(
1608 InsertPoint, FirstNewBlock->getInstList(), AI->getIterator(), I);
Chris Lattner5eef6ad2009-08-27 03:51:50 +00001609 }
Adrian Prantl4d365252015-01-30 01:55:25 +00001610 // Move any dbg.declares describing the allocas into the entry basic block.
Adrian Prantl3e2659e2015-01-30 19:37:48 +00001611 DIBuilder DIB(*Caller->getParent());
Adrian Prantl133e1022015-01-30 19:42:59 +00001612 for (auto &AI : IFI.StaticAllocas)
1613 replaceDbgDeclareForAlloca(AI, AI, DIB, /*Deref=*/false);
Chris Lattner0cc265e2003-08-24 06:59:16 +00001614 }
Chris Lattner530d4bf2003-05-29 15:11:31 +00001615
Reid Klecknerf0915aa2014-05-15 20:11:28 +00001616 bool InlinedMustTailCalls = false;
1617 if (InlinedFunctionInfo.ContainsCalls) {
Reid Kleckner6af21242014-05-15 20:39:42 +00001618 CallInst::TailCallKind CallSiteTailKind = CallInst::TCK_None;
1619 if (CallInst *CI = dyn_cast<CallInst>(TheCall))
1620 CallSiteTailKind = CI->getTailCallKind();
1621
Reid Klecknerf0915aa2014-05-15 20:11:28 +00001622 for (Function::iterator BB = FirstNewBlock, E = Caller->end(); BB != E;
1623 ++BB) {
1624 for (Instruction &I : *BB) {
1625 CallInst *CI = dyn_cast<CallInst>(&I);
1626 if (!CI)
1627 continue;
1628
1629 // We need to reduce the strength of any inlined tail calls. For
1630        // musttail, we have to avoid introducing potentially unbounded stack
1631 // growth. For example, if functions 'f' and 'g' are mutually recursive
1632 // with musttail, we can inline 'g' into 'f' so long as we preserve
1633 // musttail on the cloned call to 'f'. If either the inlined call site
1634 // or the cloned call site is *not* musttail, the program already has
1635 // one frame of stack growth, so it's safe to remove musttail. Here is
1636 // a table of example transformations:
1637 //
1638 // f -> musttail g -> musttail f ==> f -> musttail f
1639 // f -> musttail g -> tail f ==> f -> tail f
1640 // f -> g -> musttail f ==> f -> f
1641 // f -> g -> tail f ==> f -> f
1642 CallInst::TailCallKind ChildTCK = CI->getTailCallKind();
1643 ChildTCK = std::min(CallSiteTailKind, ChildTCK);
Reid Klecknerdd3f3ed2014-11-04 02:02:14 +00001644 CI->setTailCallKind(ChildTCK);
Reid Klecknerf0915aa2014-05-15 20:11:28 +00001645 InlinedMustTailCalls |= CI->isMustTailCall();
1646
1647 // Calls inlined through a 'nounwind' call site should be marked
1648 // 'nounwind'.
1649 if (MarkNoUnwind)
1650 CI->setDoesNotThrow();
1651 }
1652 }
1653 }
1654
Nick Lewyckya68ec832011-05-22 05:22:10 +00001655  // Leave lifetime markers for the static allocas, scoping them to the
1656 // function we just inlined.
Chad Rosier07d37bc2012-02-25 02:56:01 +00001657 if (InsertLifetime && !IFI.StaticAllocas.empty()) {
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001658 IRBuilder<> builder(&FirstNewBlock->front());
Nick Lewyckya68ec832011-05-22 05:22:10 +00001659 for (unsigned ai = 0, ae = IFI.StaticAllocas.size(); ai != ae; ++ai) {
1660 AllocaInst *AI = IFI.StaticAllocas[ai];
1661
1662 // If the alloca is already scoped to something smaller than the whole
1663 // function then there's no need to add redundant, less accurate markers.
1664 if (hasLifetimeMarkers(AI))
1665 continue;
1666
Alexey Samsonovcfd662f2012-11-13 07:15:32 +00001667 // Try to determine the size of the allocation.
Craig Topperf40110f2014-04-25 05:29:35 +00001668 ConstantInt *AllocaSize = nullptr;
Alexey Samsonovcfd662f2012-11-13 07:15:32 +00001669 if (ConstantInt *AIArraySize =
1670 dyn_cast<ConstantInt>(AI->getArraySize())) {
Mehdi Amini46a43552015-03-04 18:43:29 +00001671 auto &DL = Caller->getParent()->getDataLayout();
1672 Type *AllocaType = AI->getAllocatedType();
1673 uint64_t AllocaTypeSize = DL.getTypeAllocSize(AllocaType);
1674 uint64_t AllocaArraySize = AIArraySize->getLimitedValue();
Akira Hatanaka2cc2b632015-04-20 16:11:05 +00001675
1676 // Don't add markers for zero-sized allocas.
1677 if (AllocaArraySize == 0)
1678 continue;
1679
Mehdi Amini46a43552015-03-04 18:43:29 +00001680 // Check that array size doesn't saturate uint64_t and doesn't
1681 // overflow when it's multiplied by type size.
1682 if (AllocaArraySize != ~0ULL &&
1683 UINT64_MAX / AllocaArraySize >= AllocaTypeSize) {
1684 AllocaSize = ConstantInt::get(Type::getInt64Ty(AI->getContext()),
1685 AllocaArraySize * AllocaTypeSize);
Alexey Samsonovcfd662f2012-11-13 07:15:32 +00001686 }
1687 }
1688
1689 builder.CreateLifetimeStart(AI, AllocaSize);
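// Intended bracketing, sketched with approximate syntax for this LLVM version:
//   %p = bitcast <ty>* %alloca to i8*
//   call void @llvm.lifetime.start(i64 <size>, i8* %p)
//   ...inlined code...
//   call void @llvm.lifetime.end(i64 <size>, i8* %p)   ; added before each return below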
Reid Kleckner900d46f2014-05-15 21:10:46 +00001690 for (ReturnInst *RI : Returns) {
1691 // Don't insert llvm.lifetime.end calls between a musttail call and a
1692 // return. The return kills all local allocas.
Reid Klecknere31acf22014-08-12 00:05:15 +00001693 if (InlinedMustTailCalls &&
1694 RI->getParent()->getTerminatingMustTailCall())
Reid Kleckner900d46f2014-05-15 21:10:46 +00001695 continue;
Reid Klecknerf0915aa2014-05-15 20:11:28 +00001696 IRBuilder<>(RI).CreateLifetimeEnd(AI, AllocaSize);
Reid Kleckner900d46f2014-05-15 21:10:46 +00001697 }
Nick Lewyckya68ec832011-05-22 05:22:10 +00001698 }
1699 }
1700
Chris Lattner2be06072006-01-13 19:34:14 +00001701 // If the inlined code contained dynamic alloca instructions, wrap the inlined
1702 // code with llvm.stacksave/llvm.stackrestore intrinsics.
1703 if (InlinedFunctionInfo.ContainsDynamicAllocas) {
1704 Module *M = Caller->getParent();
Chris Lattner2be06072006-01-13 19:34:14 +00001705 // Get the two intrinsics we care about.
Chris Lattner88b36f12009-10-17 05:39:39 +00001706 Function *StackSave = Intrinsic::getDeclaration(M, Intrinsic::stacksave);
1707 Function *StackRestore=Intrinsic::getDeclaration(M,Intrinsic::stackrestore);
Chris Lattner5de3b8b2006-07-12 18:29:36 +00001708
Chris Lattner2be06072006-01-13 19:34:14 +00001709 // Insert the llvm.stacksave.
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001710 CallInst *SavedPtr = IRBuilder<>(&*FirstNewBlock, FirstNewBlock->begin())
David Blaikieff6409d2015-05-18 22:13:54 +00001711 .CreateCall(StackSave, {}, "savedstack");
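// The net effect is roughly the following (the value name comes from the code
// above; the rest is illustrative):
//   %savedstack = call i8* @llvm.stacksave()
//   ...inlined code containing dynamic allocas...
//   call void @llvm.stackrestore(i8* %savedstack)   ; inserted before each return below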
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001712
Chris Lattner2be06072006-01-13 19:34:14 +00001713 // Insert a call to llvm.stackrestore before any return instructions in the
1714 // inlined function.
Reid Kleckner900d46f2014-05-15 21:10:46 +00001715 for (ReturnInst *RI : Returns) {
1716 // Don't insert llvm.stackrestore calls between a musttail call and a
1717 // return. The return will restore the stack pointer.
Reid Klecknere31acf22014-08-12 00:05:15 +00001718 if (InlinedMustTailCalls && RI->getParent()->getTerminatingMustTailCall())
Reid Kleckner900d46f2014-05-15 21:10:46 +00001719 continue;
Reid Klecknerf0915aa2014-05-15 20:11:28 +00001720 IRBuilder<>(RI).CreateCall(StackRestore, SavedPtr);
Reid Kleckner900d46f2014-05-15 21:10:46 +00001721 }
Chris Lattner9f3dced2005-05-06 06:47:52 +00001722 }
1723
Joseph Tremouletb41632b2016-01-20 02:15:15 +00001724 // If we are inlining for an invoke instruction, we must make sure to rewrite
1725 // any call instructions into invoke instructions. This is sensitive to which
1726 // funclet pads were top-level in the inlinee, so must be done before
1727 // rewriting the "parent pad" links.
1728 if (auto *II = dyn_cast<InvokeInst>(TheCall)) {
1729 BasicBlock *UnwindDest = II->getUnwindDest();
1730 Instruction *FirstNonPHI = UnwindDest->getFirstNonPHI();
1731 if (isa<LandingPadInst>(FirstNonPHI)) {
1732 HandleInlinedLandingPad(II, &*FirstNewBlock, InlinedFunctionInfo);
1733 } else {
1734 HandleInlinedEHPad(II, &*FirstNewBlock, InlinedFunctionInfo);
1735 }
1736 }
1737
David Majnemer3bb88c02015-12-15 21:27:27 +00001738 // Update the lexical scopes of the new funclets and callsites.
1739 // Anything that had 'none' as its parent is now nested inside the callsite's
1740 // EHPad.
1741
David Majnemer8a1c45d2015-12-12 05:38:55 +00001742 if (CallSiteEHPad) {
1743 for (Function::iterator BB = FirstNewBlock->getIterator(),
1744 E = Caller->end();
1745 BB != E; ++BB) {
David Majnemer3bb88c02015-12-15 21:27:27 +00001746 // Add bundle operands to any top-level call sites.
1747 SmallVector<OperandBundleDef, 1> OpBundles;
1748 for (BasicBlock::iterator BBI = BB->begin(), E = BB->end(); BBI != E;) {
1749 Instruction *I = &*BBI++;
1750 CallSite CS(I);
1751 if (!CS)
1752 continue;
1753
1754 // Skip call sites which are nounwind intrinsics.
1755 auto *CalledFn =
1756 dyn_cast<Function>(CS.getCalledValue()->stripPointerCasts());
1757 if (CalledFn && CalledFn->isIntrinsic() && CS.doesNotThrow())
1758 continue;
1759
1760 // Skip call sites which already have a "funclet" bundle.
1761 if (CS.getOperandBundle(LLVMContext::OB_funclet))
1762 continue;
1763
1764 CS.getOperandBundlesAsDefs(OpBundles);
1765 OpBundles.emplace_back("funclet", CallSiteEHPad);
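// For illustration (names are invented): a top-level call cloned into a
// cleanuppad %cp is rewritten from
//   call void @g()
// to
//   call void @g() [ "funclet"(token %cp) ]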
1766
1767 Instruction *NewInst;
1768 if (CS.isCall())
1769 NewInst = CallInst::Create(cast<CallInst>(I), OpBundles, I);
1770 else
1771 NewInst = InvokeInst::Create(cast<InvokeInst>(I), OpBundles, I);
David Majnemer3bb88c02015-12-15 21:27:27 +00001772 NewInst->takeName(I);
1773 I->replaceAllUsesWith(NewInst);
1774 I->eraseFromParent();
1775
1776 OpBundles.clear();
1777 }
1778
David Majnemer223538f2016-02-23 17:11:04 +00001779      // It is problematic if the inlinee has a cleanupret which unwinds to
1780      // caller and we inline it into a call site which doesn't unwind, but which
1781      // lies inside an EH pad that does. Such an edge must be dynamically
1782      // unreachable, so we replace the cleanupret with unreachable.
1783 if (auto *CleanupRet = dyn_cast<CleanupReturnInst>(BB->getTerminator()))
1784 if (CleanupRet->unwindsToCaller() && EHPadForCallUnwindsLocally)
1785 changeToUnreachable(CleanupRet, /*UseLLVMTrap=*/false);
1786
David Majnemer8a1c45d2015-12-12 05:38:55 +00001787 Instruction *I = BB->getFirstNonPHI();
1788 if (!I->isEHPad())
1789 continue;
1790
David Majnemerbbfc7212015-12-14 18:34:23 +00001791 if (auto *CatchSwitch = dyn_cast<CatchSwitchInst>(I)) {
David Majnemer8a1c45d2015-12-12 05:38:55 +00001792 if (isa<ConstantTokenNone>(CatchSwitch->getParentPad()))
1793 CatchSwitch->setParentPad(CallSiteEHPad);
1794 } else {
1795 auto *FPI = cast<FuncletPadInst>(I);
1796 if (isa<ConstantTokenNone>(FPI->getParentPad()))
1797 FPI->setParentPad(CallSiteEHPad);
1798 }
1799 }
1800 }
1801
Reid Klecknerf0915aa2014-05-15 20:11:28 +00001802 // Handle any inlined musttail call sites. In order for a new call site to be
1803 // musttail, the source of the clone and the inlined call site must have been
1804 // musttail. Therefore it's safe to return without merging control into the
1805 // phi below.
1806 if (InlinedMustTailCalls) {
1807 // Check if we need to bitcast the result of any musttail calls.
1808 Type *NewRetTy = Caller->getReturnType();
1809 bool NeedBitCast = !TheCall->use_empty() && TheCall->getType() != NewRetTy;
1810
1811 // Handle the returns preceded by musttail calls separately.
1812 SmallVector<ReturnInst *, 8> NormalReturns;
1813 for (ReturnInst *RI : Returns) {
Reid Klecknere31acf22014-08-12 00:05:15 +00001814 CallInst *ReturnedMustTail =
1815 RI->getParent()->getTerminatingMustTailCall();
Reid Klecknerf0915aa2014-05-15 20:11:28 +00001816 if (!ReturnedMustTail) {
1817 NormalReturns.push_back(RI);
1818 continue;
1819 }
1820 if (!NeedBitCast)
1821 continue;
1822
1823 // Delete the old return and any preceding bitcast.
1824 BasicBlock *CurBB = RI->getParent();
1825 auto *OldCast = dyn_cast_or_null<BitCastInst>(RI->getReturnValue());
1826 RI->eraseFromParent();
1827 if (OldCast)
1828 OldCast->eraseFromParent();
1829
1830 // Insert a new bitcast and return with the right type.
1831 IRBuilder<> Builder(CurBB);
1832 Builder.CreateRet(Builder.CreateBitCast(ReturnedMustTail, NewRetTy));
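// Illustrative rewrite (types are hypothetical): if the caller returns i8* but
// the cloned musttail call produces i32*, the block now ends with
//   %t = musttail call i32* @callee(...)
//   %c = bitcast i32* %t to i8*
//   ret i8* %c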
1833 }
1834
1835 // Leave behind the normal returns so we can merge control flow.
1836 std::swap(Returns, NormalReturns);
1837 }
1838
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001839 // If we cloned in _exactly one_ basic block, and if that block ends in a
1840 // return instruction, we splice the body of the inlined callee directly into
1841 // the calling basic block.
1842 if (Returns.size() == 1 && std::distance(FirstNewBlock, Caller->end()) == 1) {
1843 // Move all of the instructions right before the call.
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001844 OrigBB->getInstList().splice(TheCall->getIterator(),
1845 FirstNewBlock->getInstList(),
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001846 FirstNewBlock->begin(), FirstNewBlock->end());
1847 // Remove the cloned basic block.
1848 Caller->getBasicBlockList().pop_back();
Misha Brukmanb1c93172005-04-21 23:48:37 +00001849
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001850 // If the call site was an invoke instruction, add a branch to the normal
1851 // destination.
Adrian Prantl15db52b2013-04-23 19:56:03 +00001852 if (InvokeInst *II = dyn_cast<InvokeInst>(TheCall)) {
1853 BranchInst *NewBr = BranchInst::Create(II->getNormalDest(), TheCall);
1854 NewBr->setDebugLoc(Returns[0]->getDebugLoc());
1855 }
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001856
1857 // If the return instruction returned a value, replace uses of the call with
1858 // uses of the returned value.
Devang Patel841322b2008-03-04 21:15:15 +00001859 if (!TheCall->use_empty()) {
1860 ReturnInst *R = Returns[0];
Eli Friedman36b90262009-05-08 00:22:04 +00001861 if (TheCall == R->getReturnValue())
Owen Andersonb292b8c2009-07-30 23:03:37 +00001862 TheCall->replaceAllUsesWith(UndefValue::get(TheCall->getType()));
Eli Friedman36b90262009-05-08 00:22:04 +00001863 else
1864 TheCall->replaceAllUsesWith(R->getReturnValue());
Devang Patel841322b2008-03-04 21:15:15 +00001865 }
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001866 // Since we are now done with the Call/Invoke, we can delete it.
Dan Gohman158ff2c2008-06-21 22:08:46 +00001867 TheCall->eraseFromParent();
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001868
1869 // Since we are now done with the return instruction, delete it also.
Dan Gohman158ff2c2008-06-21 22:08:46 +00001870 Returns[0]->eraseFromParent();
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001871
1872 // We are now done with the inlining.
1873 return true;
1874 }
1875
1876 // Otherwise, we have the normal case, of more than one block to inline or
1877 // multiple return sites.
1878
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001879 // We want to clone the entire callee function into the hole between the
1880 // "starter" and "ender" blocks. How we accomplish this depends on whether
1881 // this is an invoke instruction or a call instruction.
1882 BasicBlock *AfterCallBB;
Craig Topperf40110f2014-04-25 05:29:35 +00001883 BranchInst *CreatedBranchToNormalDest = nullptr;
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001884 if (InvokeInst *II = dyn_cast<InvokeInst>(TheCall)) {
Misha Brukmanb1c93172005-04-21 23:48:37 +00001885
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001886 // Add an unconditional branch to make this look like the CallInst case...
Adrian Prantl15db52b2013-04-23 19:56:03 +00001887 CreatedBranchToNormalDest = BranchInst::Create(II->getNormalDest(), TheCall);
Misha Brukmanb1c93172005-04-21 23:48:37 +00001888
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001889 // Split the basic block. This guarantees that no PHI nodes will have to be
1890    // updated due to new incoming edges, and makes the invoke case more
1891 // symmetric to the call case.
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001892 AfterCallBB =
1893 OrigBB->splitBasicBlock(CreatedBranchToNormalDest->getIterator(),
1894 CalledFunc->getName() + ".exit");
Misha Brukmanb1c93172005-04-21 23:48:37 +00001895
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001896 } else { // It's a call
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001897 // If this is a call instruction, we need to split the basic block that
1898 // the call lives in.
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001899 //
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001900 AfterCallBB = OrigBB->splitBasicBlock(TheCall->getIterator(),
1901 CalledFunc->getName() + ".exit");
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001902 }
1903
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001904 // Change the branch that used to go to AfterCallBB to branch to the first
1905 // basic block of the inlined function.
1906 //
1907 TerminatorInst *Br = OrigBB->getTerminator();
Misha Brukmanb1c93172005-04-21 23:48:37 +00001908 assert(Br && Br->getOpcode() == Instruction::Br &&
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001909 "splitBasicBlock broken!");
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001910 Br->setOperand(0, &*FirstNewBlock);
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001911
1912 // Now that the function is correct, make it a little bit nicer. In
1913 // particular, move the basic blocks inserted from the end of the function
1914 // into the space made by splitting the source basic block.
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001915 Caller->getBasicBlockList().splice(AfterCallBB->getIterator(),
1916 Caller->getBasicBlockList(), FirstNewBlock,
1917 Caller->end());
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00001918
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001919 // Handle all of the return instructions that we just cloned in, and eliminate
1920 // any users of the original call/invoke instruction.
Chris Lattner229907c2011-07-18 04:54:35 +00001921 Type *RTy = CalledFunc->getReturnType();
Dan Gohman3b18fd72008-06-20 01:03:44 +00001922
Craig Topperf40110f2014-04-25 05:29:35 +00001923 PHINode *PHI = nullptr;
Dan Gohmanfa1211f2008-07-23 00:34:11 +00001924 if (Returns.size() > 1) {
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001925 // The PHI node should go at the front of the new basic block to merge all
1926 // possible incoming values.
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001927 if (!TheCall->use_empty()) {
Jay Foad52131342011-03-30 11:28:46 +00001928 PHI = PHINode::Create(RTy, Returns.size(), TheCall->getName(),
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00001929 &AfterCallBB->front());
Dan Gohmanfa1211f2008-07-23 00:34:11 +00001930 // Anything that used the result of the function call should now use the
1931 // PHI node as their operand.
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001932 TheCall->replaceAllUsesWith(PHI);
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001933 }
Misha Brukmanb1c93172005-04-21 23:48:37 +00001934
Gabor Greif5aa19222009-01-15 18:40:09 +00001935 // Loop over all of the return instructions adding entries to the PHI node
1936 // as appropriate.
Dan Gohmanfa1211f2008-07-23 00:34:11 +00001937 if (PHI) {
1938 for (unsigned i = 0, e = Returns.size(); i != e; ++i) {
1939 ReturnInst *RI = Returns[i];
1940 assert(RI->getReturnValue()->getType() == PHI->getType() &&
1941 "Ret value not consistent in function!");
1942 PHI->addIncoming(RI->getReturnValue(), RI->getParent());
Devang Patel780b3ca62008-03-07 20:06:16 +00001943 }
1944 }
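// For example (value and block names are made up): returns "ret i32 %x" from
// %bb1 and "ret i32 %y" from %bb2 feed the merge block AfterCallBB, which then
// begins with
//   %r = phi i32 [ %x, %bb1 ], [ %y, %bb2 ]
// and %r replaces every use of the original call.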
1945
Gabor Greif8c573f72009-01-16 23:08:50 +00001946    // Add a branch to the merge point and remove the return instructions.
Richard Trieu624c2eb2013-04-30 22:45:10 +00001947 DebugLoc Loc;
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001948 for (unsigned i = 0, e = Returns.size(); i != e; ++i) {
Richard Trieu624c2eb2013-04-30 22:45:10 +00001949 ReturnInst *RI = Returns[i];
Adrian Prantl09416382013-04-30 17:08:16 +00001950 BranchInst* BI = BranchInst::Create(AfterCallBB, RI);
Richard Trieu624c2eb2013-04-30 22:45:10 +00001951 Loc = RI->getDebugLoc();
1952 BI->setDebugLoc(Loc);
Devang Patel64d0f072008-03-10 18:34:00 +00001953 RI->eraseFromParent();
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001954 }
Adrian Prantl09416382013-04-30 17:08:16 +00001955 // We need to set the debug location to *somewhere* inside the
Adrian Prantl8beccf92013-04-30 17:33:32 +00001956 // inlined function. The line number may be nonsensical, but the
Adrian Prantl09416382013-04-30 17:08:16 +00001957 // instruction will at least be associated with the right
1958 // function.
1959 if (CreatedBranchToNormalDest)
Richard Trieu624c2eb2013-04-30 22:45:10 +00001960 CreatedBranchToNormalDest->setDebugLoc(Loc);
Devang Patel64d0f072008-03-10 18:34:00 +00001961 } else if (!Returns.empty()) {
1962 // Otherwise, if there is exactly one return value, just replace anything
1963 // using the return value of the call with the computed value.
Eli Friedman36b90262009-05-08 00:22:04 +00001964 if (!TheCall->use_empty()) {
1965 if (TheCall == Returns[0]->getReturnValue())
Owen Andersonb292b8c2009-07-30 23:03:37 +00001966 TheCall->replaceAllUsesWith(UndefValue::get(TheCall->getType()));
Eli Friedman36b90262009-05-08 00:22:04 +00001967 else
1968 TheCall->replaceAllUsesWith(Returns[0]->getReturnValue());
1969 }
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001970
Jay Foad61ea0e42011-06-23 09:09:15 +00001971 // Update PHI nodes that use the ReturnBB to use the AfterCallBB.
1972 BasicBlock *ReturnBB = Returns[0]->getParent();
1973 ReturnBB->replaceAllUsesWith(AfterCallBB);
1974
Devang Patel64d0f072008-03-10 18:34:00 +00001975 // Splice the code from the return block into the block that it will return
1976 // to, which contains the code that was after the call.
Devang Patel64d0f072008-03-10 18:34:00 +00001977 AfterCallBB->getInstList().splice(AfterCallBB->begin(),
1978 ReturnBB->getInstList());
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00001979
Adrian Prantl15db52b2013-04-23 19:56:03 +00001980 if (CreatedBranchToNormalDest)
1981 CreatedBranchToNormalDest->setDebugLoc(Returns[0]->getDebugLoc());
1982
Devang Patel64d0f072008-03-10 18:34:00 +00001983 // Delete the return instruction now and empty ReturnBB now.
1984 Returns[0]->eraseFromParent();
1985 ReturnBB->eraseFromParent();
Chris Lattner6e79e552004-10-17 23:21:07 +00001986 } else if (!TheCall->use_empty()) {
1987 // No returns, but something is using the return value of the call. Just
1988 // nuke the result.
Owen Andersonb292b8c2009-07-30 23:03:37 +00001989 TheCall->replaceAllUsesWith(UndefValue::get(TheCall->getType()));
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001990 }
Misha Brukmanb1c93172005-04-21 23:48:37 +00001991
Chris Lattner18ef3fd2004-02-04 02:51:48 +00001992 // Since we are now done with the Call/Invoke, we can delete it.
Chris Lattner6e79e552004-10-17 23:21:07 +00001993 TheCall->eraseFromParent();
Chris Lattner530d4bf2003-05-29 15:11:31 +00001994
Reid Klecknerf0915aa2014-05-15 20:11:28 +00001995 // If we inlined any musttail calls and the original return is now
1996 // unreachable, delete it. It can only contain a bitcast and ret.
Easwaran Ramanb1bd3982016-03-08 00:36:35 +00001997 if (InlinedMustTailCalls && pred_begin(AfterCallBB) == pred_end(AfterCallBB))
Reid Klecknerf0915aa2014-05-15 20:11:28 +00001998 AfterCallBB->eraseFromParent();
1999
Chris Lattnerfc3fe5c2003-08-24 04:06:56 +00002000 // We should always be able to fold the entry block of the function into the
2001 // single predecessor of the block...
Chris Lattner0328d752004-04-16 05:17:59 +00002002 assert(cast<BranchInst>(Br)->isUnconditional() && "splitBasicBlock broken!");
Chris Lattnerfc3fe5c2003-08-24 04:06:56 +00002003 BasicBlock *CalleeEntry = cast<BranchInst>(Br)->getSuccessor(0);
Chris Lattner0fa8c7c2004-02-04 04:17:06 +00002004
Chris Lattner0328d752004-04-16 05:17:59 +00002005  // Splice the code entry block into the calling block, right before the
2006 // unconditional branch.
Eric Christopher96513122011-06-23 06:24:52 +00002007 CalleeEntry->replaceAllUsesWith(OrigBB); // Update PHI nodes
Duncan P. N. Exon Smith5b4c8372015-10-13 02:39:05 +00002008 OrigBB->getInstList().splice(Br->getIterator(), CalleeEntry->getInstList());
Chris Lattner0328d752004-04-16 05:17:59 +00002009
2010 // Remove the unconditional branch.
2011 OrigBB->getInstList().erase(Br);
2012
2013 // Now we can remove the CalleeEntry block, which is now empty.
2014 Caller->getBasicBlockList().erase(CalleeEntry);
Duncan Sands7c8fb1a2008-09-05 12:37:12 +00002015
Duncan Sands9d9a4e22010-11-17 11:16:23 +00002016 // If we inserted a phi node, check to see if it has a single value (e.g. all
2017 // the entries are the same or undef). If so, remove the PHI so it doesn't
2018 // block other optimizations.
Bill Wendlingce0c2292012-01-31 01:01:16 +00002019 if (PHI) {
Mehdi Amini46a43552015-03-04 18:43:29 +00002020 auto &DL = Caller->getParent()->getDataLayout();
Mehdi Aminia28d91d2015-03-10 02:37:25 +00002021 if (Value *V = SimplifyInstruction(PHI, DL, nullptr, nullptr,
Chandler Carruth66b31302015-01-04 12:03:27 +00002022 &IFI.ACT->getAssumptionCache(*Caller))) {
Duncan Sands9d9a4e22010-11-17 11:16:23 +00002023 PHI->replaceAllUsesWith(V);
2024 PHI->eraseFromParent();
2025 }
Bill Wendlingce0c2292012-01-31 01:01:16 +00002026 }
Duncan Sands9d9a4e22010-11-17 11:16:23 +00002027
Chris Lattner530d4bf2003-05-29 15:11:31 +00002028 return true;
2029}