//===- Inliner.cpp - Code common to all inliners --------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the mechanics required to perform inlining without
// missing any calls while keeping the call graph updated.  The decisions about
// which calls are profitable to inline are implemented elsewhere.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/Inliner.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BasicAliasAnalysis.h"
#include "llvm/Analysis/CallGraph.h"
#include "llvm/Analysis/InlineCost.h"
#include "llvm/Analysis/OptimizationDiagnosticInfo.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
using namespace llvm;

#define DEBUG_TYPE "inline"

STATISTIC(NumInlined, "Number of functions inlined");
STATISTIC(NumCallsDeleted, "Number of call sites deleted, not inlined");
STATISTIC(NumDeleted, "Number of functions deleted because all callers found");
STATISTIC(NumMergedAllocas, "Number of allocas merged together");

// This weirdly named statistic tracks the number of times that, when
// attempting to inline a function A into B, we analyze B's callers to see
// whether inlining B into them would be more profitable and would therefore
// block this inline step.
STATISTIC(NumCallerCallersAnalyzed, "Number of caller-callers analyzed");

/// Flag to disable manual alloca merging.
///
/// Merging of allocas was originally done as a stack-size saving technique
/// prior to LLVM's code generator having support for stack coloring based on
/// lifetime markers. It is now in the process of being removed. To experiment
/// with disabling it and relying fully on lifetime marker based stack
/// coloring, you can pass this flag to LLVM.
static cl::opt<bool>
    DisableInlinedAllocaMerging("disable-inlined-alloca-merging",
                                cl::init(false), cl::Hidden);
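// Illustrative invocation (the exact pass pipeline may vary by build):
//   opt -S -inline -disable-inlined-alloca-merging input.ll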

namespace {
enum class InlinerFunctionImportStatsOpts {
  No = 0,
  Basic = 1,
  Verbose = 2,
};

cl::opt<InlinerFunctionImportStatsOpts> InlinerFunctionImportStats(
    "inliner-function-import-stats",
    cl::init(InlinerFunctionImportStatsOpts::No),
    cl::values(clEnumValN(InlinerFunctionImportStatsOpts::Basic, "basic",
                          "basic statistics"),
               clEnumValN(InlinerFunctionImportStatsOpts::Verbose, "verbose",
                          "printing of statistics for each inlined function")),
    cl::Hidden, cl::desc("Enable inliner stats for imported functions"));
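// Illustrative invocation (assuming a module with functions imported, e.g.
// by ThinLTO, so that there are import statistics to report):
//   opt -S -inline -inliner-function-import-stats=verbose imported.ll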
} // namespace

LegacyInlinerBase::LegacyInlinerBase(char &ID)
    : CallGraphSCCPass(ID), InsertLifetime(true) {}

LegacyInlinerBase::LegacyInlinerBase(char &ID, bool InsertLifetime)
    : CallGraphSCCPass(ID), InsertLifetime(InsertLifetime) {}

/// For this class, we declare that we require and preserve the call graph.
/// If the derived class implements this method, it should always explicitly
/// call the implementation here.
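///
/// A derived pass would typically chain to it, roughly as follows (the pass
/// name and the extra dependency are hypothetical, for illustration only):
///   void MyInliner::getAnalysisUsage(AnalysisUsage &AU) const {
///     AU.addRequired<SomeExtraAnalysisWrapperPass>(); // hypothetical
///     LegacyInlinerBase::getAnalysisUsage(AU);        // chain to the base
///   }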
void LegacyInlinerBase::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AssumptionCacheTracker>();
  AU.addRequired<ProfileSummaryInfoWrapperPass>();
  AU.addRequired<TargetLibraryInfoWrapperPass>();
  getAAResultsAnalysisUsage(AU);
  CallGraphSCCPass::getAnalysisUsage(AU);
}

typedef DenseMap<ArrayType *, std::vector<AllocaInst *>> InlinedArrayAllocasTy;

/// Look at all of the allocas that we inlined through this call site.  If we
/// have already inlined other allocas through other calls into this function,
/// then we know that they have disjoint lifetimes and that we can merge them.
///
/// There are many heuristics possible for merging these allocas, and the
/// different options have different tradeoffs.  One thing that we *really*
/// don't want to hurt is SRoA: once inlining happens, often allocas are no
/// longer address taken and so they can be promoted.
///
/// Our "solution" for that is to only merge allocas whose outermost type is an
/// array type.  These are usually not promoted because someone is using a
/// variable index into them.  These are also often the most important ones to
/// merge.
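///
/// For example (illustrative IR): if two different calls inlined into the
/// same caller each introduce "%buf = alloca [64 x i32]", the second alloca
/// can be replaced by the first, since their live ranges are disjoint and a
/// single stack slot suffices for both.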
///
/// A better solution would be to have real memory lifetime markers in the IR
/// and not have the inliner do any merging of allocas at all. This would
/// allow the backend to do proper stack slot coloring of all allocas that
/// *actually make it to the backend*, which is really what we want.
///
/// Because we don't have this information, we do this simple and useful hack.
static void mergeInlinedArrayAllocas(
    Function *Caller, InlineFunctionInfo &IFI,
    InlinedArrayAllocasTy &InlinedArrayAllocas, int InlineHistory) {
  SmallPtrSet<AllocaInst *, 16> UsedAllocas;

  // When processing our SCC, check to see if CS was inlined from some other
  // call site.  For example, if we're processing "A" in this code:
  //   A() { B() }
  //   B() { x = alloca ... C() }
  //   C() { y = alloca ... }
  // Assume that C was not inlined into B initially, and so we're processing A
  // and decide to inline B into A.  Doing this makes an alloca available for
  // reuse and makes a callsite (C) available for inlining.  When we process
  // the C call site we don't want to do any alloca merging between X and Y
  // because their scopes are not disjoint.  We could make this smarter by
  // keeping track of the inline history for each alloca in the
  // InlinedArrayAllocas but this isn't likely to be a significant win.
  if (InlineHistory != -1) // Only do merging for top-level call sites in SCC.
    return;

  // Loop over all the allocas we have so far and see if they can be merged
  // with a previously inlined alloca.  If not, remember that we had it.
  for (unsigned AllocaNo = 0, e = IFI.StaticAllocas.size(); AllocaNo != e;
       ++AllocaNo) {
    AllocaInst *AI = IFI.StaticAllocas[AllocaNo];

    // Don't bother trying to merge array allocations (they will usually be
    // canonicalized to be an allocation *of* an array), or allocations whose
    // type is not itself an array (because we're afraid of pessimizing SRoA).
    ArrayType *ATy = dyn_cast<ArrayType>(AI->getAllocatedType());
    if (!ATy || AI->isArrayAllocation())
      continue;

    // Get the list of all available allocas for this array type.
    std::vector<AllocaInst *> &AllocasForType = InlinedArrayAllocas[ATy];

    // Loop over the allocas in AllocasForType to see if we can reuse one.
    // Note that we have to be careful not to reuse the same "available"
    // alloca for multiple different allocas that we just inlined; we use the
    // 'UsedAllocas' set to keep track of which "available" allocas are being
    // used by this function.  Also, AllocasForType can be empty of course!
    bool MergedAwayAlloca = false;
    for (AllocaInst *AvailableAlloca : AllocasForType) {

      unsigned Align1 = AI->getAlignment(),
               Align2 = AvailableAlloca->getAlignment();

      // The available alloca has to be in the right function, not in some
      // other function in this SCC.
      if (AvailableAlloca->getParent() != AI->getParent())
        continue;

      // If the inlined function already uses this alloca then we can't reuse
      // it.
      if (!UsedAllocas.insert(AvailableAlloca).second)
        continue;

      // Otherwise, we *can* reuse it: RAUW AI into AvailableAlloca and
      // declare success!
      DEBUG(dbgs() << "    ***MERGED ALLOCA: " << *AI
                   << "\n\t\tINTO: " << *AvailableAlloca << '\n');

      // Move affected dbg.declare calls immediately after the new alloca to
      // avoid the situation where a dbg.declare precedes its alloca.
      if (auto *L = LocalAsMetadata::getIfExists(AI))
        if (auto *MDV = MetadataAsValue::getIfExists(AI->getContext(), L))
          for (User *U : MDV->users())
            if (DbgDeclareInst *DDI = dyn_cast<DbgDeclareInst>(U))
              DDI->moveBefore(AvailableAlloca->getNextNode());

      AI->replaceAllUsesWith(AvailableAlloca);

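      // Reconcile the two alignments: treat a missing (zero) alignment as
      // the ABI type alignment, then keep the stricter of the two on the
      // surviving alloca.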
      if (Align1 != Align2) {
        if (!Align1 || !Align2) {
          const DataLayout &DL = Caller->getParent()->getDataLayout();
          unsigned TypeAlign = DL.getABITypeAlignment(AI->getAllocatedType());

          Align1 = Align1 ? Align1 : TypeAlign;
          Align2 = Align2 ? Align2 : TypeAlign;
        }

        if (Align1 > Align2)
          AvailableAlloca->setAlignment(AI->getAlignment());
      }

      AI->eraseFromParent();
      MergedAwayAlloca = true;
      ++NumMergedAllocas;
      IFI.StaticAllocas[AllocaNo] = nullptr;
      break;
    }

    // If we already nuked the alloca, we're done with it.
    if (MergedAwayAlloca)
      continue;

    // If we were unable to merge away the alloca either because there are no
    // allocas of the right type available or because we reused them all
    // already, remember that this alloca came from an inlined function and
    // mark it used so we don't reuse it for other allocas from this inline
    // operation.
    AllocasForType.push_back(AI);
    UsedAllocas.insert(AI);
  }
}

/// If it is possible to inline the specified call site,
/// do so and update the CallGraph for this operation.
///
/// This function also does some basic book-keeping to update the IR.  The
/// InlinedArrayAllocas map keeps track of any allocas that are already
/// available from other functions inlined into the caller.  If we are able to
/// inline this call site we attempt to reuse already available allocas or add
/// any new allocas to the set if not possible.
static bool InlineCallIfPossible(
    CallSite CS, InlineFunctionInfo &IFI,
    InlinedArrayAllocasTy &InlinedArrayAllocas, int InlineHistory,
    bool InsertLifetime, function_ref<AAResults &(Function &)> &AARGetter,
    ImportedFunctionsInliningStatistics &ImportedFunctionsStats) {
  Function *Callee = CS.getCalledFunction();
  Function *Caller = CS.getCaller();

  AAResults &AAR = AARGetter(*Callee);

  // Try to inline the function.  Get the list of static allocas that were
  // inlined.
  if (!InlineFunction(CS, IFI, &AAR, InsertLifetime))
    return false;

  if (InlinerFunctionImportStats != InlinerFunctionImportStatsOpts::No)
    ImportedFunctionsStats.recordInline(*Caller, *Callee);

  AttributeFuncs::mergeAttributesForInlining(*Caller, *Callee);

  if (!DisableInlinedAllocaMerging)
    mergeInlinedArrayAllocas(Caller, IFI, InlinedArrayAllocas, InlineHistory);

  return true;
}

/// Return true if inlining of CS can block the caller from being inlined when
/// inlining the caller is proved to be more beneficial. \p IC is the
/// estimated inline cost associated with callsite \p CS.
/// \p TotalSecondaryCost will be set to the estimated cost of inlining the
/// caller if \p CS is suppressed for inlining.
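///
/// For example, if a small static function B would itself be inlined into all
/// of its callers, inlining a large callee C into B first could push B over
/// its callers' inline threshold; in that case it is better to defer C and
/// keep B inlinable.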
static bool
shouldBeDeferred(Function *Caller, CallSite CS, InlineCost IC,
                 int &TotalSecondaryCost,
                 function_ref<InlineCost(CallSite CS)> GetInlineCost) {

  // For now we only handle local or inline functions.
  if (!Caller->hasLocalLinkage() && !Caller->hasLinkOnceODRLinkage())
    return false;
  // Try to detect the case where the current inlining candidate caller (call
  // it B) is a static or linkonce-ODR function and is an inlining candidate
  // elsewhere, and the current candidate callee (call it C) is large enough
  // that inlining it into B would make B too big to inline later.  In these
  // circumstances it may be best not to inline C into B, but to inline B into
  // its callers.
  //
  // This only applies to static and linkonce-ODR functions because those are
  // expected to be available for inlining in the translation units where they
  // are used.  Thus we will always have the opportunity to make local inlining
  // decisions.  Importantly the linkonce-ODR linkage covers inline functions
  // and templates in C++.
  //
  // FIXME: All of this logic should be sunk into getInlineCost.  It relies on
  // the internal implementation of the inline cost metrics rather than
  // treating them as truly abstract units etc.
  TotalSecondaryCost = 0;
  // The candidate cost to be imposed upon the current function.
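  // (The CallPenalty + 1 subtraction models the call instruction itself,
  // which inlining would delete.)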
  int CandidateCost = IC.getCost() - (InlineConstants::CallPenalty + 1);
  // This bool tracks what happens if we do NOT inline C into B.
  bool callerWillBeRemoved = Caller->hasLocalLinkage();
  // This bool tracks what happens if we DO inline C into B.
  bool inliningPreventsSomeOuterInline = false;
  for (User *U : Caller->users()) {
    CallSite CS2(U);

    // If this isn't a call to Caller (it could be some other sort of
    // reference), skip it.  Such references will prevent the caller from
    // being removed.
    if (!CS2 || CS2.getCalledFunction() != Caller) {
      callerWillBeRemoved = false;
      continue;
    }

    InlineCost IC2 = GetInlineCost(CS2);
    ++NumCallerCallersAnalyzed;
    if (!IC2) {
      callerWillBeRemoved = false;
      continue;
    }
    if (IC2.isAlways())
      continue;

    // See if inlining of the original callsite would erase the cost delta of
    // this callsite.  We subtract off the penalty for the call instruction,
    // which we would be deleting.
    if (IC2.getCostDelta() <= CandidateCost) {
      inliningPreventsSomeOuterInline = true;
      TotalSecondaryCost += IC2.getCost();
    }
  }
  // If all outer calls to Caller would get inlined, the cost for the last
  // one is set very low by getInlineCost, in anticipation that Caller will
  // be removed entirely.  We did not account for this above unless there
  // is only one caller of Caller.
  if (callerWillBeRemoved && !Caller->use_empty())
    TotalSecondaryCost -= InlineConstants::LastCallToStaticBonus;

  if (inliningPreventsSomeOuterInline && TotalSecondaryCost < IC.getCost())
    return true;

  return false;
}

/// Return true if the inliner should attempt to inline at the given CallSite.
static bool shouldInline(CallSite CS,
                         function_ref<InlineCost(CallSite CS)> GetInlineCost,
                         OptimizationRemarkEmitter &ORE) {
  using namespace ore;
  InlineCost IC = GetInlineCost(CS);
  Instruction *Call = CS.getInstruction();
  Function *Callee = CS.getCalledFunction();

  if (IC.isAlways()) {
    DEBUG(dbgs() << "    Inlining: cost=always"
                 << ", Call: " << *CS.getInstruction() << "\n");
    ORE.emit(OptimizationRemarkAnalysis(DEBUG_TYPE, "AlwaysInline", Call)
             << NV("Callee", Callee)
             << " should always be inlined (cost=always)");
    return true;
  }

  if (IC.isNever()) {
    DEBUG(dbgs() << "    NOT Inlining: cost=never"
                 << ", Call: " << *CS.getInstruction() << "\n");
    ORE.emit(OptimizationRemarkAnalysis(DEBUG_TYPE, "NeverInline", Call)
             << NV("Callee", Callee)
             << " should never be inlined (cost=never)");
    return false;
  }

  Function *Caller = CS.getCaller();
  if (!IC) {
    DEBUG(dbgs() << "    NOT Inlining: cost=" << IC.getCost()
                 << ", thres=" << (IC.getCostDelta() + IC.getCost())
                 << ", Call: " << *CS.getInstruction() << "\n");
    ORE.emit(OptimizationRemarkAnalysis(DEBUG_TYPE, "TooCostly", Call)
             << NV("Callee", Callee) << " too costly to inline (cost="
             << NV("Cost", IC.getCost()) << ", threshold="
             << NV("Threshold", IC.getCostDelta() + IC.getCost()) << ")");
    return false;
  }

  int TotalSecondaryCost = 0;
  if (shouldBeDeferred(Caller, CS, IC, TotalSecondaryCost, GetInlineCost)) {
    DEBUG(dbgs() << "    NOT Inlining: " << *CS.getInstruction()
                 << " Cost = " << IC.getCost()
                 << ", outer Cost = " << TotalSecondaryCost << '\n');
    ORE.emit(OptimizationRemarkAnalysis(DEBUG_TYPE,
                                        "IncreaseCostInOtherContexts", Call)
             << "Not inlining. Cost of inlining " << NV("Callee", Callee)
             << " increases the cost of inlining " << NV("Caller", Caller)
             << " in other contexts");
    return false;
  }

  DEBUG(dbgs() << "    Inlining: cost=" << IC.getCost()
               << ", thres=" << (IC.getCostDelta() + IC.getCost())
               << ", Call: " << *CS.getInstruction() << '\n');
  ORE.emit(OptimizationRemarkAnalysis(DEBUG_TYPE, "CanBeInlined", Call)
           << NV("Callee", Callee) << " can be inlined into "
           << NV("Caller", Caller) << " with cost=" << NV("Cost", IC.getCost())
           << " (threshold="
           << NV("Threshold", IC.getCostDelta() + IC.getCost()) << ")");
  return true;
}

/// Return true if the specified inline history ID
/// indicates an inline history that includes the specified function.
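///
/// The history is a chain of (function, parent-index) pairs that ends at -1.
/// For example: inlining B into A at a top-level call site (history -1)
/// appends entry 0 = (B, -1); a call to C copied out of B's body is tagged
/// with ID 0, so if C's body later exposes a call back to B, walking the
/// chain 0 -> -1 finds B and stops the recursive inlining.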
static bool InlineHistoryIncludes(
    Function *F, int InlineHistoryID,
    const SmallVectorImpl<std::pair<Function *, int>> &InlineHistory) {
  while (InlineHistoryID != -1) {
    assert(unsigned(InlineHistoryID) < InlineHistory.size() &&
           "Invalid inline history ID");
    if (InlineHistory[InlineHistoryID].first == F)
      return true;
    InlineHistoryID = InlineHistory[InlineHistoryID].second;
  }
  return false;
}

bool LegacyInlinerBase::doInitialization(CallGraph &CG) {
  if (InlinerFunctionImportStats != InlinerFunctionImportStatsOpts::No)
    ImportedFunctionsStats.setModuleInfo(CG.getModule());
  return false; // No changes to CallGraph.
}

bool LegacyInlinerBase::runOnSCC(CallGraphSCC &SCC) {
  if (skipSCC(SCC))
    return false;
  return inlineCalls(SCC);
}

static bool
inlineCallsImpl(CallGraphSCC &SCC, CallGraph &CG,
                std::function<AssumptionCache &(Function &)> GetAssumptionCache,
                ProfileSummaryInfo *PSI, TargetLibraryInfo &TLI,
                bool InsertLifetime,
                function_ref<InlineCost(CallSite CS)> GetInlineCost,
                function_ref<AAResults &(Function &)> AARGetter,
                ImportedFunctionsInliningStatistics &ImportedFunctionsStats) {
  SmallPtrSet<Function *, 8> SCCFunctions;
  DEBUG(dbgs() << "Inliner visiting SCC:");
  for (CallGraphNode *Node : SCC) {
    Function *F = Node->getFunction();
    if (F)
      SCCFunctions.insert(F);
    DEBUG(dbgs() << " " << (F ? F->getName() : "INDIRECTNODE"));
  }

  // Scan through and identify all call sites ahead of time so that we only
  // inline call sites in the original functions, not call sites that result
  // from inlining other functions.
  SmallVector<std::pair<CallSite, int>, 16> CallSites;

  // When inlining a callee produces new call sites, we want to keep track of
  // the fact that they were inlined from the callee.  This allows us to avoid
  // infinite inlining in some obscure cases.  To represent this, we use an
  // index into the InlineHistory vector.
  SmallVector<std::pair<Function *, int>, 8> InlineHistory;

  for (CallGraphNode *Node : SCC) {
    Function *F = Node->getFunction();
    if (!F || F->isDeclaration())
      continue;

    OptimizationRemarkEmitter ORE(F);
    for (BasicBlock &BB : *F)
      for (Instruction &I : BB) {
        CallSite CS(cast<Value>(&I));
        // If this isn't a call, or it is a call to an intrinsic, it can
        // never be inlined.
        if (!CS || isa<IntrinsicInst>(I))
          continue;

        // If this is a direct call to an external function, we can never
        // inline it.  If it is an indirect call, inlining may resolve it to
        // be a direct call, so we keep it.
        if (Function *Callee = CS.getCalledFunction())
          if (Callee->isDeclaration()) {
            using namespace ore;
            ORE.emit(OptimizationRemarkMissed(DEBUG_TYPE, "NoDefinition", &I)
                     << NV("Callee", Callee) << " will not be inlined into "
                     << NV("Caller", CS.getCaller())
                     << " because its definition is unavailable"
                     << setIsVerbose());
            continue;
          }

        CallSites.push_back(std::make_pair(CS, -1));
      }
  }

  DEBUG(dbgs() << ": " << CallSites.size() << " call sites.\n");

  // If there are no calls in this SCC, exit early.
  if (CallSites.empty())
    return false;

  // Now that we have all of the call sites, move the ones to functions in the
  // current SCC to the end of the list.
  unsigned FirstCallInSCC = CallSites.size();
  for (unsigned i = 0; i < FirstCallInSCC; ++i)
    if (Function *F = CallSites[i].first.getCalledFunction())
      if (SCCFunctions.count(F))
        std::swap(CallSites[i--], CallSites[--FirstCallInSCC]);

  InlinedArrayAllocasTy InlinedArrayAllocas;
  InlineFunctionInfo InlineInfo(&CG, &GetAssumptionCache);

  // Now that we have all of the call sites, loop over them and inline them if
  // it looks profitable to do so.
  bool Changed = false;
  bool LocalChange;
  do {
    LocalChange = false;
    // Iterate over the outer loop because inlining functions can cause
    // indirect calls to become direct calls.
    // CallSites may be modified inside, so a ranged for loop cannot be used.
    for (unsigned CSi = 0; CSi != CallSites.size(); ++CSi) {
      CallSite CS = CallSites[CSi].first;

      Function *Caller = CS.getCaller();
      Function *Callee = CS.getCalledFunction();

      // If this call site is dead and it is to a readonly function, we should
      // just delete the call instead of trying to inline it, regardless of
      // size.  This happens because IPSCCP propagates the result out of the
      // call and then we're left with the dead call.
      if (isInstructionTriviallyDead(CS.getInstruction(), &TLI)) {
        DEBUG(dbgs() << "    -> Deleting dead call: " << *CS.getInstruction()
                     << "\n");
        // Update the call graph by deleting the edge from Callee to Caller.
        CG[Caller]->removeCallEdgeFor(CS);
        CS.getInstruction()->eraseFromParent();
        ++NumCallsDeleted;
      } else {
        // We can only inline direct calls to non-declarations.
        if (!Callee || Callee->isDeclaration())
          continue;

        // If this call site was obtained by inlining another function, verify
        // that the inline path for the function did not include the callee
        // itself.  If so, we'd be recursively inlining the same function,
        // which would provide the same callsites, which would cause us to
        // infinitely inline.
        int InlineHistoryID = CallSites[CSi].second;
        if (InlineHistoryID != -1 &&
            InlineHistoryIncludes(Callee, InlineHistoryID, InlineHistory))
          continue;

        // Get DebugLoc to report.  CS will be invalid after Inliner.
        DebugLoc DLoc = CS.getInstruction()->getDebugLoc();
        BasicBlock *Block = CS.getParent();
        // FIXME for new PM: because of the old PM we currently generate ORE
        // and in turn BFI on demand.  With the new PM, the ORE dependency
        // should just become a regular analysis dependency.
        OptimizationRemarkEmitter ORE(Caller);

        // If the policy determines that we should inline this function,
        // try to do so.
        using namespace ore;
        if (!shouldInline(CS, GetInlineCost, ORE)) {
          ORE.emit(
              OptimizationRemarkMissed(DEBUG_TYPE, "NotInlined", DLoc, Block)
              << NV("Callee", Callee) << " will not be inlined into "
              << NV("Caller", Caller));
          continue;
        }

        // Attempt to inline the function.
        if (!InlineCallIfPossible(CS, InlineInfo, InlinedArrayAllocas,
                                  InlineHistoryID, InsertLifetime, AARGetter,
                                  ImportedFunctionsStats)) {
          ORE.emit(
              OptimizationRemarkMissed(DEBUG_TYPE, "NotInlined", DLoc, Block)
              << NV("Callee", Callee) << " will not be inlined into "
              << NV("Caller", Caller));
          continue;
        }
        ++NumInlined;

        // Report the inline decision.
        ORE.emit(OptimizationRemark(DEBUG_TYPE, "Inlined", DLoc, Block)
                 << NV("Callee", Callee) << " inlined into "
                 << NV("Caller", Caller));

        // If inlining this function gave us any new call sites, throw them
        // onto our worklist to process.  They are useful inline candidates.
        if (!InlineInfo.InlinedCalls.empty()) {
          // Create a new inline history entry for this, so that we remember
          // that these new callsites came about due to inlining Callee.
          int NewHistoryID = InlineHistory.size();
          InlineHistory.push_back(std::make_pair(Callee, InlineHistoryID));

          for (Value *Ptr : InlineInfo.InlinedCalls)
            CallSites.push_back(std::make_pair(CallSite(Ptr), NewHistoryID));
        }
      }

      // If we inlined or deleted the last possible call site to the function,
      // delete the function body now.
      if (Callee && Callee->use_empty() && Callee->hasLocalLinkage() &&
          // TODO: Can remove if in SCC now.
          !SCCFunctions.count(Callee) &&

          // The function may be apparently dead, but if there are indirect
          // callgraph references to the node, we cannot delete it yet, this
          // could invalidate the CGSCC iterator.
          CG[Callee]->getNumReferences() == 0) {
        DEBUG(dbgs() << "    -> Deleting dead function: " << Callee->getName()
                     << "\n");
        CallGraphNode *CalleeNode = CG[Callee];

        // Remove any call graph edges from the callee to its callees.
        CalleeNode->removeAllCalledFunctions();

        // Remove the node for the callee from the call graph and delete it.
        delete CG.removeFunctionFromModule(CalleeNode);
        ++NumDeleted;
      }

      // Remove this call site from the list.  If possible, use
      // swap/pop_back for efficiency, but do not use it if doing so would
      // move a call site to a function in this SCC before the
      // 'FirstCallInSCC' barrier.
      if (SCC.isSingular()) {
        CallSites[CSi] = CallSites.back();
        CallSites.pop_back();
      } else {
        CallSites.erase(CallSites.begin() + CSi);
      }
      --CSi;

      Changed = true;
      LocalChange = true;
    }
  } while (LocalChange);

  return Changed;
}

bool LegacyInlinerBase::inlineCalls(CallGraphSCC &SCC) {
  CallGraph &CG = getAnalysis<CallGraphWrapperPass>().getCallGraph();
  ACT = &getAnalysis<AssumptionCacheTracker>();
  PSI = getAnalysis<ProfileSummaryInfoWrapperPass>().getPSI();
  auto &TLI = getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
  // We compute dedicated AA results for each function in the SCC as needed. We
  // use a lambda referencing external objects so that they live long enough to
  // be queried, but we re-use them each time.
  Optional<BasicAAResult> BAR;
  Optional<AAResults> AAR;
  auto AARGetter = [&](Function &F) -> AAResults & {
    BAR.emplace(createLegacyPMBasicAAResult(*this, F));
    AAR.emplace(createLegacyPMAAResults(*this, F, *BAR));
    return *AAR;
  };
  auto GetAssumptionCache = [&](Function &F) -> AssumptionCache & {
    return ACT->getAssumptionCache(F);
  };
  return inlineCallsImpl(SCC, CG, GetAssumptionCache, PSI, TLI, InsertLifetime,
                         [this](CallSite CS) { return getInlineCost(CS); },
                         AARGetter, ImportedFunctionsStats);
}

/// Remove now-dead linkonce functions at the end of
/// processing to avoid breaking the SCC traversal.
bool LegacyInlinerBase::doFinalization(CallGraph &CG) {
  if (InlinerFunctionImportStats != InlinerFunctionImportStatsOpts::No)
    ImportedFunctionsStats.dump(InlinerFunctionImportStats ==
                                InlinerFunctionImportStatsOpts::Verbose);
  return removeDeadFunctions(CG);
}

/// Remove dead functions that are not included in the DNR (Do Not Remove)
/// list.
bool LegacyInlinerBase::removeDeadFunctions(CallGraph &CG,
                                            bool AlwaysInlineOnly) {
  SmallVector<CallGraphNode *, 16> FunctionsToRemove;
  SmallVector<Function *, 16> DeadFunctionsInComdats;

  auto RemoveCGN = [&](CallGraphNode *CGN) {
    // Remove any call graph edges from the function to its callees.
    CGN->removeAllCalledFunctions();

    // Remove any edges from the external node to the function's call graph
    // node.  These edges might have been made irrelevant due to
    // optimization of the program.
    CG.getExternalCallingNode()->removeAnyCallEdgeTo(CGN);

    // Queue the function's call graph node for removal; it is deleted below.
    FunctionsToRemove.push_back(CGN);
  };

  // Scan for all of the functions, looking for ones that should now be removed
  // from the program.  Insert the dead ones in the FunctionsToRemove set.
  for (const auto &I : CG) {
    CallGraphNode *CGN = I.second.get();
    Function *F = CGN->getFunction();
    if (!F || F->isDeclaration())
      continue;

    // Handle the case when this function is called and we only want to care
    // about always-inline functions.  This is a bit of a hack to share code
    // between here and the InlineAlways pass.
    if (AlwaysInlineOnly && !F->hasFnAttribute(Attribute::AlwaysInline))
      continue;

    // If the only remaining users of the function are dead constants, remove
    // them.
    F->removeDeadConstantUsers();

    if (!F->isDefTriviallyDead())
      continue;

    // It is unsafe to drop a function with discardable linkage from a COMDAT
    // without also dropping the other members of the COMDAT.
    // The inliner doesn't visit non-function entities which are in COMDAT
    // groups so it is unsafe to do so *unless* the linkage is local.
    if (!F->hasLocalLinkage()) {
      if (F->hasComdat()) {
        DeadFunctionsInComdats.push_back(F);
        continue;
      }
    }

    RemoveCGN(CGN);
  }
  if (!DeadFunctionsInComdats.empty()) {
    // Filter out the functions whose comdats remain alive.
    filterDeadComdatFunctions(CG.getModule(), DeadFunctionsInComdats);
    // Remove the rest.
    for (Function *F : DeadFunctionsInComdats)
      RemoveCGN(CG[F]);
  }

  if (FunctionsToRemove.empty())
    return false;

  // Now that we know which functions to delete, do so.  We didn't want to do
  // this inline, because that would invalidate our CallGraph::iterator
  // objects. :(
  //
  // Note that it doesn't matter that we are iterating over a non-stable order
  // here; the order in which the functions are deleted is irrelevant.
  array_pod_sort(FunctionsToRemove.begin(), FunctionsToRemove.end());
  FunctionsToRemove.erase(
      std::unique(FunctionsToRemove.begin(), FunctionsToRemove.end()),
      FunctionsToRemove.end());
  for (CallGraphNode *CGN : FunctionsToRemove) {
    delete CG.removeFunctionFromModule(CGN);
    ++NumDeleted;
  }
  return true;
}

PreservedAnalyses InlinerPass::run(LazyCallGraph::SCC &InitialC,
                                   CGSCCAnalysisManager &AM, LazyCallGraph &CG,
                                   CGSCCUpdateResult &UR) {
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(InitialC, CG)
          .getManager();
  const ModuleAnalysisManager &MAM =
      AM.getResult<ModuleAnalysisManagerCGSCCProxy>(InitialC, CG).getManager();
  bool Changed = false;

  assert(InitialC.size() > 0 && "Cannot handle an empty SCC!");
  Module &M = *InitialC.begin()->getFunction().getParent();
  ProfileSummaryInfo *PSI = MAM.getCachedResult<ProfileSummaryAnalysis>(M);

  std::function<AssumptionCache &(Function &)> GetAssumptionCache =
      [&](Function &F) -> AssumptionCache & {
    return FAM.getResult<AssumptionAnalysis>(F);
  };

  // Setup the data structure used to plumb customization into the
  // `InlineFunction` routine.
  InlineFunctionInfo IFI(/*cg=*/nullptr, &GetAssumptionCache);

  auto GetInlineCost = [&](CallSite CS) {
    Function &Callee = *CS.getCalledFunction();
    auto &CalleeTTI = FAM.getResult<TargetIRAnalysis>(Callee);
    return getInlineCost(CS, Params, CalleeTTI, GetAssumptionCache, PSI);
  };

  // We use a worklist of nodes to process so that we can handle if the SCC
  // structure changes and some nodes are no longer part of the current SCC. We
  // also need to use an updatable pointer for the SCC as a consequence.
  SmallVector<LazyCallGraph::Node *, 16> Nodes;
  for (auto &N : InitialC)
    Nodes.push_back(&N);
  auto *C = &InitialC;
  auto *RC = &C->getOuterRefSCC();

  // We also use a secondary worklist of call sites within a particular node to
  // allow quickly continuing to inline through newly inlined call sites where
  // possible.
  SmallVector<std::pair<CallSite, int>, 16> Calls;

  // When inlining a callee produces new call sites, we want to keep track of
  // the fact that they were inlined from the callee.  This allows us to avoid
  // infinite inlining in some obscure cases.  To represent this, we use an
  // index into the InlineHistory vector.
  SmallVector<std::pair<Function *, int>, 16> InlineHistory;

  // Track a set vector of inlined callees so that we can augment the caller
  // with all of their edges in the call graph before pruning out the ones that
  // got simplified away.
  SmallSetVector<Function *, 4> InlinedCallees;

  // Track the dead functions to delete once finished with inlining calls. We
  // defer deleting these to make it easier to handle the call graph updates.
  SmallVector<Function *, 4> DeadFunctions;

  do {
    auto &N = *Nodes.pop_back_val();
    if (CG.lookupSCC(N) != C)
      continue;
    Function &F = N.getFunction();
    if (F.hasFnAttribute(Attribute::OptimizeNone))
      continue;

    // Get the remarks emission analysis for the caller.
    auto &ORE = FAM.getResult<OptimizationRemarkEmitterAnalysis>(F);

    // We want to generally process call sites top-down in order for
    // simplifications stemming from replacing the call with the returned value
    // after inlining to be visible to subsequent inlining decisions. So we
    // walk the function backwards and then process the back of the vector.
    // FIXME: Using reverse is a really bad way to do this. Instead we should
    // do an actual PO walk of the function body.
    for (Instruction &I : reverse(instructions(F)))
      if (auto CS = CallSite(&I))
        if (Function *Callee = CS.getCalledFunction())
          if (!Callee->isDeclaration())
            Calls.push_back({CS, -1});

    bool DidInline = false;
    while (!Calls.empty()) {
      int InlineHistoryID;
      CallSite CS;
      std::tie(CS, InlineHistoryID) = Calls.pop_back_val();
      Function &Callee = *CS.getCalledFunction();

      if (InlineHistoryID != -1 &&
          InlineHistoryIncludes(&Callee, InlineHistoryID, InlineHistory))
        continue;

      // Check whether we want to inline this callsite.
      if (!shouldInline(CS, GetInlineCost, ORE))
        continue;

      if (!InlineFunction(CS, IFI))
        continue;
      DidInline = true;
      InlinedCallees.insert(&Callee);

      // Add any new callsites to defined functions to the worklist.
      if (!IFI.InlinedCallSites.empty()) {
        int NewHistoryID = InlineHistory.size();
        InlineHistory.push_back({&Callee, InlineHistoryID});
        for (CallSite &CS : reverse(IFI.InlinedCallSites))
          if (Function *NewCallee = CS.getCalledFunction())
            if (!NewCallee->isDeclaration())
              Calls.push_back({CS, NewHistoryID});
      }

      // Merge the attributes based on the inlining.
      AttributeFuncs::mergeAttributesForInlining(F, Callee);

      // For local functions, check whether this makes the callee trivially
      // dead. In that case, we can drop the body of the function eagerly
      // which may reduce the number of callers of other functions to one,
      // changing inline cost thresholds.
      if (Callee.hasLocalLinkage()) {
        // To check this we also need to nuke any dead constant uses (perhaps
        // made dead by this operation on other functions).
        Callee.removeDeadConstantUsers();
        if (Callee.use_empty()) {
          // Clear the body and queue the function itself for deletion when we
          // finish inlining and call graph updates.
          // Note that after this point, it is an error to do anything other
          // than use the callee's address or delete it.
          Callee.dropAllReferences();
          assert(find(DeadFunctions, &Callee) == DeadFunctions.end() &&
                 "Cannot cause a function to become dead twice!");
          DeadFunctions.push_back(&Callee);
        }
      }
    }

    if (!DidInline)
      continue;
    Changed = true;

    // Add all the inlined callees' edges as ref edges to the caller. These are
    // by definition trivial edges as we always have *some* transitive ref edge
    // chain. While in some cases these edges are direct calls inside the
    // callee, they have to be modeled in the inliner as reference edges as
    // there may be a reference edge anywhere along the chain from the current
    // caller to the callee that causes the whole thing to appear like
    // a (transitive) reference edge that will require promotion to a call edge
    // below.
    for (Function *InlinedCallee : InlinedCallees) {
      LazyCallGraph::Node &CalleeN = *CG.lookup(*InlinedCallee);
      for (LazyCallGraph::Edge &E : CalleeN)
        RC->insertTrivialRefEdge(N, *E.getNode());
    }
    InlinedCallees.clear();

    // At this point, since we have made changes we have at least removed
    // a call instruction. However, in the process we do some incremental
    // simplification of the surrounding code. This simplification can
    // essentially do all of the same things as a function pass and we can
    // re-use the exact same logic for updating the call graph to reflect the
    // change.
    C = &updateCGAndAnalysisManagerForFunctionPass(CG, *C, N, AM, UR);
    RC = &C->getOuterRefSCC();
  } while (!Nodes.empty());

  // Now that we've finished inlining all of the calls across this SCC, delete
  // all of the trivially dead functions, updating the call graph and the CGSCC
  // pass manager in the process.
  //
  // Note that this walks a pointer set which has non-deterministic order but
  // that is OK as all we do is delete things and add pointers to unordered
  // sets.
  for (Function *DeadF : DeadFunctions) {
    // Get the necessary information out of the call graph and nuke the
    // function there.
    auto &DeadC = *CG.lookupSCC(*CG.lookup(*DeadF));
    auto &DeadRC = DeadC.getOuterRefSCC();
    CG.removeDeadFunction(*DeadF);

    // Mark the relevant parts of the call graph as invalid so we don't visit
    // them.
    UR.InvalidatedSCCs.insert(&DeadC);
    UR.InvalidatedRefSCCs.insert(&DeadRC);

    // And delete the actual function from the module.
    M.getFunctionList().erase(DeadF);
  }
  return Changed ? PreservedAnalyses::none() : PreservedAnalyses::all();
}