//===- Inliner.cpp - Code common to all inliners --------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the mechanics required to implement inlining without
// missing any calls and updating the call graph. The decisions of which calls
// are profitable to inline are implemented elsewhere.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/Inliner.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BasicAliasAnalysis.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/CallGraph.h"
#include "llvm/Analysis/InlineCost.h"
#include "llvm/Analysis/OptimizationDiagnosticInfo.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
using namespace llvm;

#define DEBUG_TYPE "inline"

STATISTIC(NumInlined, "Number of functions inlined");
STATISTIC(NumCallsDeleted, "Number of call sites deleted, not inlined");
STATISTIC(NumDeleted, "Number of functions deleted because all callers found");
STATISTIC(NumMergedAllocas, "Number of allocas merged together");

// This weirdly named statistic tracks the number of times that, when attempting
// to inline a function A into B, we analyze the callers of B in order to see
// if those would be more profitable and blocked inline steps.
STATISTIC(NumCallerCallersAnalyzed, "Number of caller-callers analyzed");

/// Flag to disable manual alloca merging.
///
/// Merging of allocas was originally done as a stack-size saving technique
/// prior to LLVM's code generator having support for stack coloring based on
/// lifetime markers. It is now in the process of being removed. To experiment
/// with disabling it and relying fully on lifetime marker based stack
/// coloring, you can pass this flag to LLVM.
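///
/// An illustrative (hypothetical) invocation:
///   opt -inline -disable-inlined-alloca-merging -S input.ll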
static cl::opt<bool>
    DisableInlinedAllocaMerging("disable-inlined-alloca-merging",
                                cl::init(false), cl::Hidden);

namespace {
enum class InlinerFunctionImportStatsOpts {
  No = 0,
  Basic = 1,
  Verbose = 2,
};

cl::opt<InlinerFunctionImportStatsOpts> InlinerFunctionImportStats(
    "inliner-function-import-stats",
    cl::init(InlinerFunctionImportStatsOpts::No),
    cl::values(clEnumValN(InlinerFunctionImportStatsOpts::Basic, "basic",
                          "basic statistics"),
               clEnumValN(InlinerFunctionImportStatsOpts::Verbose, "verbose",
                          "printing of statistics for each inlined function")),
    cl::Hidden, cl::desc("Enable inliner stats for imported functions"));
} // namespace

LegacyInlinerBase::LegacyInlinerBase(char &ID)
    : CallGraphSCCPass(ID), InsertLifetime(true) {}

LegacyInlinerBase::LegacyInlinerBase(char &ID, bool InsertLifetime)
    : CallGraphSCCPass(ID), InsertLifetime(InsertLifetime) {}

/// For this class, we declare that we require and preserve the call graph.
/// If the derived class implements this method, it should
/// always explicitly call the implementation here.
void LegacyInlinerBase::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AssumptionCacheTracker>();
  AU.addRequired<ProfileSummaryInfoWrapperPass>();
  AU.addRequired<TargetLibraryInfoWrapperPass>();
  getAAResultsAnalysisUsage(AU);
  CallGraphSCCPass::getAnalysisUsage(AU);
}

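// Map from an array type to the inlined allocas of that type that are
// available for reuse in the caller (see mergeInlinedArrayAllocas below).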
typedef DenseMap<ArrayType *, std::vector<AllocaInst *>> InlinedArrayAllocasTy;

/// Look at all of the allocas that we inlined through this call site. If we
/// have already inlined other allocas through other calls into this function,
/// then we know that they have disjoint lifetimes and that we can merge them.
///
/// There are many heuristics possible for merging these allocas, and the
/// different options have different tradeoffs. One thing that we *really*
/// don't want to hurt is SRoA: once inlining happens, often allocas are no
/// longer address taken and so they can be promoted.
///
/// Our "solution" for that is to only merge allocas whose outermost type is an
/// array type. These are usually not promoted because someone is using a
/// variable index into them. These are also often the most important ones to
/// merge.
///
/// A better solution would be to have real memory lifetime markers in the IR
/// and not have the inliner do any merging of allocas at all. This would
/// allow the backend to do proper stack slot coloring of all allocas that
/// *actually make it to the backend*, which is really what we want.
///
/// Because we don't have this information, we do this simple and useful hack.
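///
/// Illustrative example (hypothetical IR): if two different callees each
/// contain "%buf = alloca [64 x i8]" and both are inlined into the same
/// caller through top-level call sites, the second inlined alloca can be
/// RAUW'd to the first, since the two inlined bodies have disjoint lifetimes
/// within the caller.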
static void mergeInlinedArrayAllocas(
    Function *Caller, InlineFunctionInfo &IFI,
    InlinedArrayAllocasTy &InlinedArrayAllocas, int InlineHistory) {
  SmallPtrSet<AllocaInst *, 16> UsedAllocas;

  // When processing our SCC, check to see if CS was inlined from some other
  // call site. For example, if we're processing "A" in this code:
  //   A() { B() }
  //   B() { x = alloca ... C() }
  //   C() { y = alloca ... }
  // Assume that C was not inlined into B initially, and so we're processing A
  // and decide to inline B into A. Doing this makes an alloca available for
  // reuse and makes a callsite (C) available for inlining. When we process
  // the C call site we don't want to do any alloca merging between X and Y
  // because their scopes are not disjoint. We could make this smarter by
  // keeping track of the inline history for each alloca in the
  // InlinedArrayAllocas but this isn't likely to be a significant win.
  if (InlineHistory != -1) // Only do merging for top-level call sites in SCC.
    return;

  // Loop over all the allocas we have so far and see if they can be merged with
  // a previously inlined alloca. If not, remember that we had it.
  for (unsigned AllocaNo = 0, e = IFI.StaticAllocas.size(); AllocaNo != e;
       ++AllocaNo) {
    AllocaInst *AI = IFI.StaticAllocas[AllocaNo];

    // Don't bother trying to merge array allocations (they will usually be
    // canonicalized to be an allocation *of* an array), or allocations whose
    // type is not itself an array (because we're afraid of pessimizing SRoA).
    ArrayType *ATy = dyn_cast<ArrayType>(AI->getAllocatedType());
    if (!ATy || AI->isArrayAllocation())
      continue;

    // Get the list of all available allocas for this array type.
    std::vector<AllocaInst *> &AllocasForType = InlinedArrayAllocas[ATy];

    // Loop over the allocas in AllocasForType to see if we can reuse one. Note
    // that we have to be careful not to reuse the same "available" alloca for
    // multiple different allocas that we just inlined; we use the 'UsedAllocas'
    // set to keep track of which "available" allocas are being used by this
    // function. Also, AllocasForType can be empty of course!
    bool MergedAwayAlloca = false;
    for (AllocaInst *AvailableAlloca : AllocasForType) {

      unsigned Align1 = AI->getAlignment(),
               Align2 = AvailableAlloca->getAlignment();

      // The available alloca has to be in the right function, not in some other
      // function in this SCC.
      if (AvailableAlloca->getParent() != AI->getParent())
        continue;

      // If the inlined function already uses this alloca then we can't reuse
      // it.
      if (!UsedAllocas.insert(AvailableAlloca).second)
        continue;

      // Otherwise, we *can* reuse it, RAUW AI into AvailableAlloca and declare
      // success!
      DEBUG(dbgs() << "    ***MERGED ALLOCA: " << *AI
                   << "\n\t\tINTO: " << *AvailableAlloca << '\n');

      // Move affected dbg.declare calls immediately after the new alloca to
      // avoid the situation when a dbg.declare precedes its alloca.
      if (auto *L = LocalAsMetadata::getIfExists(AI))
        if (auto *MDV = MetadataAsValue::getIfExists(AI->getContext(), L))
          for (User *U : MDV->users())
            if (DbgDeclareInst *DDI = dyn_cast<DbgDeclareInst>(U))
              DDI->moveBefore(AvailableAlloca->getNextNode());

      AI->replaceAllUsesWith(AvailableAlloca);

      if (Align1 != Align2) {
        if (!Align1 || !Align2) {
          const DataLayout &DL = Caller->getParent()->getDataLayout();
          unsigned TypeAlign = DL.getABITypeAlignment(AI->getAllocatedType());

          Align1 = Align1 ? Align1 : TypeAlign;
          Align2 = Align2 ? Align2 : TypeAlign;
        }

        if (Align1 > Align2)
          AvailableAlloca->setAlignment(AI->getAlignment());
      }

      AI->eraseFromParent();
      MergedAwayAlloca = true;
      ++NumMergedAllocas;
      IFI.StaticAllocas[AllocaNo] = nullptr;
      break;
    }

    // If we already nuked the alloca, we're done with it.
    if (MergedAwayAlloca)
      continue;

    // If we were unable to merge away the alloca either because there are no
    // allocas of the right type available or because we reused them all
    // already, remember that this alloca came from an inlined function and mark
    // it used so we don't reuse it for other allocas from this inline
    // operation.
    AllocasForType.push_back(AI);
    UsedAllocas.insert(AI);
  }
}

/// If it is possible to inline the specified call site,
/// do so and update the CallGraph for this operation.
///
/// This function also does some basic book-keeping to update the IR. The
/// InlinedArrayAllocas map keeps track of any allocas that are already
/// available from other functions inlined into the caller. If we are able to
/// inline this call site we attempt to reuse already available allocas or add
/// any new allocas to the set if not possible.
static bool InlineCallIfPossible(
    CallSite CS, InlineFunctionInfo &IFI,
    InlinedArrayAllocasTy &InlinedArrayAllocas, int InlineHistory,
    bool InsertLifetime, function_ref<AAResults &(Function &)> &AARGetter,
    ImportedFunctionsInliningStatistics &ImportedFunctionsStats) {
  Function *Callee = CS.getCalledFunction();
  Function *Caller = CS.getCaller();

  AAResults &AAR = AARGetter(*Callee);

  // Try to inline the function. Get the list of static allocas that were
  // inlined.
  if (!InlineFunction(CS, IFI, &AAR, InsertLifetime))
    return false;

  if (InlinerFunctionImportStats != InlinerFunctionImportStatsOpts::No)
    ImportedFunctionsStats.recordInline(*Caller, *Callee);

  AttributeFuncs::mergeAttributesForInlining(*Caller, *Callee);

  if (!DisableInlinedAllocaMerging)
    mergeInlinedArrayAllocas(Caller, IFI, InlinedArrayAllocas, InlineHistory);

  return true;
}

/// Return true if inlining of CS can block the caller from being
/// inlined, where inlining the caller is estimated to be more beneficial.
/// \p IC is the estimated inline cost associated with callsite \p CS.
/// \p TotalSecondaryCost will be set to the estimated cost of inlining the
/// caller if \p CS is suppressed for inlining.
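///
/// Informal summary of the decision: defer (return true) only when inlining
/// \p CS would block some caller-of-caller inline and the estimated cost of
/// those lost inlines (\p TotalSecondaryCost) is smaller than the cost of
/// inlining the callee here (\p IC).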
static bool
shouldBeDeferred(Function *Caller, CallSite CS, InlineCost IC,
                 int &TotalSecondaryCost,
                 function_ref<InlineCost(CallSite CS)> GetInlineCost) {

  // For now we only handle local or inline functions.
  if (!Caller->hasLocalLinkage() && !Caller->hasLinkOnceODRLinkage())
    return false;
  // Try to detect the case where the current inlining candidate caller (call
  // it B) is a static or linkonce-ODR function and is an inlining candidate
  // elsewhere, and the current candidate callee (call it C) is large enough
  // that inlining it into B would make B too big to inline later. In these
  // circumstances it may be best not to inline C into B, but to inline B into
  // its callers.
  //
  // This only applies to static and linkonce-ODR functions because those are
  // expected to be available for inlining in the translation units where they
  // are used. Thus we will always have the opportunity to make local inlining
  // decisions. Importantly the linkonce-ODR linkage covers inline functions
  // and templates in C++.
  //
  // FIXME: All of this logic should be sunk into getInlineCost. It relies on
  // the internal implementation of the inline cost metrics rather than
  // treating them as truly abstract units etc.
  TotalSecondaryCost = 0;
  // The candidate cost to be imposed upon the current function.
  int CandidateCost = IC.getCost() - (InlineConstants::CallPenalty + 1);
  // This bool tracks what happens if we do NOT inline C into B.
  bool callerWillBeRemoved = Caller->hasLocalLinkage();
  // This bool tracks what happens if we DO inline C into B.
  bool inliningPreventsSomeOuterInline = false;
  for (User *U : Caller->users()) {
    CallSite CS2(U);

    // If this isn't a call to Caller (it could be some other sort
    // of reference) skip it. Such references will prevent the caller
    // from being removed.
    if (!CS2 || CS2.getCalledFunction() != Caller) {
      callerWillBeRemoved = false;
      continue;
    }

    InlineCost IC2 = GetInlineCost(CS2);
    ++NumCallerCallersAnalyzed;
    if (!IC2) {
      callerWillBeRemoved = false;
      continue;
    }
    if (IC2.isAlways())
      continue;

    // See if inlining of the original callsite would erase the cost delta of
    // this callsite. We subtract off the penalty for the call instruction,
    // which we would be deleting.
    if (IC2.getCostDelta() <= CandidateCost) {
      inliningPreventsSomeOuterInline = true;
      TotalSecondaryCost += IC2.getCost();
    }
  }
  // If all outer calls to Caller would get inlined, the cost for the last
  // one is set very low by getInlineCost, in anticipation that Caller will
  // be removed entirely. We did not account for this above unless there
  // is only one caller of Caller.
  if (callerWillBeRemoved && !Caller->use_empty())
    TotalSecondaryCost -= InlineConstants::LastCallToStaticBonus;

  if (inliningPreventsSomeOuterInline && TotalSecondaryCost < IC.getCost())
    return true;

  return false;
}

/// Return true if the inliner should attempt to inline at the given CallSite.
static bool shouldInline(CallSite CS,
                         function_ref<InlineCost(CallSite CS)> GetInlineCost,
                         OptimizationRemarkEmitter &ORE) {
  using namespace ore;
  InlineCost IC = GetInlineCost(CS);
  Instruction *Call = CS.getInstruction();
  Function *Callee = CS.getCalledFunction();

  if (IC.isAlways()) {
    DEBUG(dbgs() << "    Inlining: cost=always"
                 << ", Call: " << *CS.getInstruction() << "\n");
    ORE.emit(OptimizationRemarkAnalysis(DEBUG_TYPE, "AlwaysInline", Call)
             << NV("Callee", Callee)
             << " should always be inlined (cost=always)");
    return true;
  }

  if (IC.isNever()) {
    DEBUG(dbgs() << "    NOT Inlining: cost=never"
                 << ", Call: " << *CS.getInstruction() << "\n");
    ORE.emit(OptimizationRemarkAnalysis(DEBUG_TYPE, "NeverInline", Call)
             << NV("Callee", Callee)
             << " should never be inlined (cost=never)");
    return false;
  }

  Function *Caller = CS.getCaller();
  if (!IC) {
    DEBUG(dbgs() << "    NOT Inlining: cost=" << IC.getCost()
                 << ", thres=" << (IC.getCostDelta() + IC.getCost())
                 << ", Call: " << *CS.getInstruction() << "\n");
    ORE.emit(OptimizationRemarkAnalysis(DEBUG_TYPE, "TooCostly", Call)
             << NV("Callee", Callee) << " too costly to inline (cost="
             << NV("Cost", IC.getCost()) << ", threshold="
             << NV("Threshold", IC.getCostDelta() + IC.getCost()) << ")");
    return false;
  }

  int TotalSecondaryCost = 0;
  if (shouldBeDeferred(Caller, CS, IC, TotalSecondaryCost, GetInlineCost)) {
    DEBUG(dbgs() << "    NOT Inlining: " << *CS.getInstruction()
                 << " Cost = " << IC.getCost()
                 << ", outer Cost = " << TotalSecondaryCost << '\n');
    ORE.emit(OptimizationRemarkAnalysis(DEBUG_TYPE,
                                        "IncreaseCostInOtherContexts", Call)
             << "Not inlining. Cost of inlining " << NV("Callee", Callee)
             << " increases the cost of inlining " << NV("Caller", Caller)
             << " in other contexts");
    return false;
  }

  DEBUG(dbgs() << "    Inlining: cost=" << IC.getCost()
               << ", thres=" << (IC.getCostDelta() + IC.getCost())
               << ", Call: " << *CS.getInstruction() << '\n');
  ORE.emit(OptimizationRemarkAnalysis(DEBUG_TYPE, "CanBeInlined", Call)
           << NV("Callee", Callee) << " can be inlined into "
           << NV("Caller", Caller) << " with cost=" << NV("Cost", IC.getCost())
           << " (threshold="
           << NV("Threshold", IC.getCostDelta() + IC.getCost()) << ")");
  return true;
}

/// Return true if the specified inline history ID
/// indicates an inline history that includes the specified function.
static bool InlineHistoryIncludes(
    Function *F, int InlineHistoryID,
    const SmallVectorImpl<std::pair<Function *, int>> &InlineHistory) {
  while (InlineHistoryID != -1) {
    assert(unsigned(InlineHistoryID) < InlineHistory.size() &&
           "Invalid inline history ID");
    if (InlineHistory[InlineHistoryID].first == F)
      return true;
    InlineHistoryID = InlineHistory[InlineHistoryID].second;
  }
  return false;
}

bool LegacyInlinerBase::doInitialization(CallGraph &CG) {
  if (InlinerFunctionImportStats != InlinerFunctionImportStatsOpts::No)
    ImportedFunctionsStats.setModuleInfo(CG.getModule());
  return false; // No changes to CallGraph.
}

bool LegacyInlinerBase::runOnSCC(CallGraphSCC &SCC) {
  if (skipSCC(SCC))
    return false;
  return inlineCalls(SCC);
}

static bool
inlineCallsImpl(CallGraphSCC &SCC, CallGraph &CG,
                std::function<AssumptionCache &(Function &)> GetAssumptionCache,
                ProfileSummaryInfo *PSI, TargetLibraryInfo &TLI,
                bool InsertLifetime,
                function_ref<InlineCost(CallSite CS)> GetInlineCost,
                function_ref<AAResults &(Function &)> AARGetter,
                ImportedFunctionsInliningStatistics &ImportedFunctionsStats) {
  SmallPtrSet<Function *, 8> SCCFunctions;
  DEBUG(dbgs() << "Inliner visiting SCC:");
  for (CallGraphNode *Node : SCC) {
    Function *F = Node->getFunction();
    if (F)
      SCCFunctions.insert(F);
    DEBUG(dbgs() << " " << (F ? F->getName() : "INDIRECTNODE"));
  }

  // Scan through and identify all call sites ahead of time so that we only
  // inline call sites in the original functions, not call sites that result
  // from inlining other functions.
  SmallVector<std::pair<CallSite, int>, 16> CallSites;

  // When inlining a callee produces new call sites, we want to keep track of
  // the fact that they were inlined from the callee. This allows us to avoid
  // infinite inlining in some obscure cases. To represent this, we use an
  // index into the InlineHistory vector.
  SmallVector<std::pair<Function *, int>, 8> InlineHistory;
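  // Illustrative example of the encoding: a top-level call site carries the
  // history ID -1. When a callee B is inlined at such a site, a new entry
  // {B, -1} is pushed and any call sites exposed by that inline carry its
  // index; deeper inlines chain through the .second field back to -1 (see
  // InlineHistoryIncludes above).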

  for (CallGraphNode *Node : SCC) {
    Function *F = Node->getFunction();
    if (!F || F->isDeclaration())
      continue;

    OptimizationRemarkEmitter ORE(F);
    for (BasicBlock &BB : *F)
      for (Instruction &I : BB) {
        CallSite CS(cast<Value>(&I));
        // If this isn't a call, or it is a call to an intrinsic, it can
        // never be inlined.
        if (!CS || isa<IntrinsicInst>(I))
          continue;

        // If this is a direct call to an external function, we can never inline
        // it. If it is an indirect call, inlining may resolve it to be a
        // direct call, so we keep it.
        if (Function *Callee = CS.getCalledFunction())
          if (Callee->isDeclaration()) {
            using namespace ore;
            ORE.emit(OptimizationRemarkMissed(DEBUG_TYPE, "NoDefinition", &I)
                     << NV("Callee", Callee) << " will not be inlined into "
                     << NV("Caller", CS.getCaller())
                     << " because its definition is unavailable"
                     << setIsVerbose());
            continue;
          }

        CallSites.push_back(std::make_pair(CS, -1));
      }
  }

  DEBUG(dbgs() << ": " << CallSites.size() << " call sites.\n");

  // If there are no calls in this function, exit early.
  if (CallSites.empty())
    return false;

  // Now that we have all of the call sites, move the ones to functions in the
  // current SCC to the end of the list.
  unsigned FirstCallInSCC = CallSites.size();
  for (unsigned i = 0; i < FirstCallInSCC; ++i)
    if (Function *F = CallSites[i].first.getCalledFunction())
      if (SCCFunctions.count(F))
        std::swap(CallSites[i--], CallSites[--FirstCallInSCC]);

  InlinedArrayAllocasTy InlinedArrayAllocas;
  InlineFunctionInfo InlineInfo(&CG, &GetAssumptionCache);

  // Now that we have all of the call sites, loop over them and inline them if
  // it looks profitable to do so.
  bool Changed = false;
  bool LocalChange;
  do {
    LocalChange = false;
    // Iterate over the outer loop because inlining functions can cause indirect
    // calls to become direct calls.
    // CallSites may be modified inside so ranged for loop can not be used.
    for (unsigned CSi = 0; CSi != CallSites.size(); ++CSi) {
      CallSite CS = CallSites[CSi].first;

      Function *Caller = CS.getCaller();
      Function *Callee = CS.getCalledFunction();

      // If this call site is dead and it is to a readonly function, we should
      // just delete the call instead of trying to inline it, regardless of
      // size. This happens because IPSCCP propagates the result out of the
      // call and then we're left with the dead call.
      if (isInstructionTriviallyDead(CS.getInstruction(), &TLI)) {
        DEBUG(dbgs() << "    -> Deleting dead call: " << *CS.getInstruction()
                     << "\n");
        // Update the call graph by deleting the edge from Callee to Caller.
        CG[Caller]->removeCallEdgeFor(CS);
        CS.getInstruction()->eraseFromParent();
        ++NumCallsDeleted;
      } else {
        // We can only inline direct calls to non-declarations.
        if (!Callee || Callee->isDeclaration())
          continue;

        // If this call site was obtained by inlining another function, verify
        // that the include path for the function did not include the callee
        // itself. If so, we'd be recursively inlining the same function,
        // which would provide the same callsites, which would cause us to
        // infinitely inline.
        int InlineHistoryID = CallSites[CSi].second;
        if (InlineHistoryID != -1 &&
            InlineHistoryIncludes(Callee, InlineHistoryID, InlineHistory))
          continue;

        // Get DebugLoc to report. CS will be invalid after Inliner.
        DebugLoc DLoc = CS.getInstruction()->getDebugLoc();
        BasicBlock *Block = CS.getParent();
        // FIXME for new PM: because of the old PM we currently generate ORE and
        // in turn BFI on demand. With the new PM, the ORE dependency should
        // just become a regular analysis dependency.
        OptimizationRemarkEmitter ORE(Caller);

        // If the policy determines that we should inline this function,
        // try to do so.
        using namespace ore;
        if (!shouldInline(CS, GetInlineCost, ORE)) {
          ORE.emit(
              OptimizationRemarkMissed(DEBUG_TYPE, "NotInlined", DLoc, Block)
              << NV("Callee", Callee) << " will not be inlined into "
              << NV("Caller", Caller));
          continue;
        }

        // Attempt to inline the function.
        if (!InlineCallIfPossible(CS, InlineInfo, InlinedArrayAllocas,
                                  InlineHistoryID, InsertLifetime, AARGetter,
                                  ImportedFunctionsStats)) {
          ORE.emit(
              OptimizationRemarkMissed(DEBUG_TYPE, "NotInlined", DLoc, Block)
              << NV("Callee", Callee) << " will not be inlined into "
              << NV("Caller", Caller));
          continue;
        }
        ++NumInlined;

        // Report the inline decision.
        ORE.emit(OptimizationRemark(DEBUG_TYPE, "Inlined", DLoc, Block)
                 << NV("Callee", Callee) << " inlined into "
                 << NV("Caller", Caller));

        // If inlining this function gave us any new call sites, throw them
        // onto our worklist to process. They are useful inline candidates.
        if (!InlineInfo.InlinedCalls.empty()) {
          // Create a new inline history entry for this, so that we remember
          // that these new callsites came about due to inlining Callee.
          int NewHistoryID = InlineHistory.size();
          InlineHistory.push_back(std::make_pair(Callee, InlineHistoryID));

          for (Value *Ptr : InlineInfo.InlinedCalls)
            CallSites.push_back(std::make_pair(CallSite(Ptr), NewHistoryID));
        }
      }

      // If we inlined or deleted the last possible call site to the function,
      // delete the function body now.
      if (Callee && Callee->use_empty() && Callee->hasLocalLinkage() &&
          // TODO: Can remove if in SCC now.
          !SCCFunctions.count(Callee) &&

          // The function may be apparently dead, but if there are indirect
          // callgraph references to the node, we cannot delete it yet, this
          // could invalidate the CGSCC iterator.
          CG[Callee]->getNumReferences() == 0) {
        DEBUG(dbgs() << "    -> Deleting dead function: " << Callee->getName()
                     << "\n");
        CallGraphNode *CalleeNode = CG[Callee];

        // Remove any call graph edges from the callee to its callees.
        CalleeNode->removeAllCalledFunctions();

        // Remove the node for the callee from the call graph and delete it.
        delete CG.removeFunctionFromModule(CalleeNode);
        ++NumDeleted;
      }

      // Remove this call site from the list. If possible, use
      // swap/pop_back for efficiency, but do not use it if doing so would
      // move a call site to a function in this SCC before the
      // 'FirstCallInSCC' barrier.
      if (SCC.isSingular()) {
        CallSites[CSi] = CallSites.back();
        CallSites.pop_back();
      } else {
        CallSites.erase(CallSites.begin() + CSi);
      }
      --CSi;

      Changed = true;
      LocalChange = true;
    }
  } while (LocalChange);

  return Changed;
}

bool LegacyInlinerBase::inlineCalls(CallGraphSCC &SCC) {
  CallGraph &CG = getAnalysis<CallGraphWrapperPass>().getCallGraph();
  ACT = &getAnalysis<AssumptionCacheTracker>();
  PSI = getAnalysis<ProfileSummaryInfoWrapperPass>().getPSI();
  auto &TLI = getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
  // We compute dedicated AA results for each function in the SCC as needed. We
  // use a lambda referencing external objects so that they live long enough to
  // be queried, but we re-use them each time.
  Optional<BasicAAResult> BAR;
  Optional<AAResults> AAR;
  auto AARGetter = [&](Function &F) -> AAResults & {
    BAR.emplace(createLegacyPMBasicAAResult(*this, F));
    AAR.emplace(createLegacyPMAAResults(*this, F, *BAR));
    return *AAR;
  };
  auto GetAssumptionCache = [&](Function &F) -> AssumptionCache & {
    return ACT->getAssumptionCache(F);
  };
  return inlineCallsImpl(SCC, CG, GetAssumptionCache, PSI, TLI, InsertLifetime,
                         [this](CallSite CS) { return getInlineCost(CS); },
                         AARGetter, ImportedFunctionsStats);
}

/// Remove now-dead linkonce functions at the end of
/// processing to avoid breaking the SCC traversal.
bool LegacyInlinerBase::doFinalization(CallGraph &CG) {
  if (InlinerFunctionImportStats != InlinerFunctionImportStatsOpts::No)
    ImportedFunctionsStats.dump(InlinerFunctionImportStats ==
                                InlinerFunctionImportStatsOpts::Verbose);
  return removeDeadFunctions(CG);
}

/// Remove dead functions that are not included in DNR (Do Not Remove) list.
bool LegacyInlinerBase::removeDeadFunctions(CallGraph &CG,
                                            bool AlwaysInlineOnly) {
  SmallVector<CallGraphNode *, 16> FunctionsToRemove;
  SmallVector<Function *, 16> DeadFunctionsInComdats;

  auto RemoveCGN = [&](CallGraphNode *CGN) {
    // Remove any call graph edges from the function to its callees.
    CGN->removeAllCalledFunctions();

    // Remove any edges from the external node to the function's call graph
    // node. These edges might have been made irrelevant due to
    // optimization of the program.
    CG.getExternalCallingNode()->removeAnyCallEdgeTo(CGN);

    // Queue the node for the function to be removed from the call graph and
    // deleted.
    FunctionsToRemove.push_back(CGN);
  };

  // Scan for all of the functions, looking for ones that should now be removed
  // from the program. Insert the dead ones in the FunctionsToRemove set.
  for (const auto &I : CG) {
    CallGraphNode *CGN = I.second.get();
    Function *F = CGN->getFunction();
    if (!F || F->isDeclaration())
      continue;

    // Handle the case when this function is called and we only want to care
    // about always-inline functions. This is a bit of a hack to share code
    // between here and the InlineAlways pass.
    if (AlwaysInlineOnly && !F->hasFnAttribute(Attribute::AlwaysInline))
      continue;

    // If the only remaining users of the function are dead constants, remove
    // them.
    F->removeDeadConstantUsers();

    if (!F->isDefTriviallyDead())
      continue;

    // It is unsafe to drop a function with discardable linkage from a COMDAT
    // without also dropping the other members of the COMDAT.
    // The inliner doesn't visit non-function entities which are in COMDAT
    // groups so it is unsafe to do so *unless* the linkage is local.
    if (!F->hasLocalLinkage()) {
      if (F->hasComdat()) {
        DeadFunctionsInComdats.push_back(F);
        continue;
      }
    }

    RemoveCGN(CGN);
  }
  if (!DeadFunctionsInComdats.empty()) {
    // Filter out the functions whose comdats remain alive.
    filterDeadComdatFunctions(CG.getModule(), DeadFunctionsInComdats);
    // Remove the rest.
    for (Function *F : DeadFunctionsInComdats)
      RemoveCGN(CG[F]);
  }

  if (FunctionsToRemove.empty())
    return false;

  // Now that we know which functions to delete, do so. We didn't want to do
  // this inline, because that would invalidate our CallGraph::iterator
  // objects. :(
  //
  // Note that it doesn't matter that we are iterating over a non-stable order
  // here to do this, it doesn't matter which order the functions are deleted
  // in.
  array_pod_sort(FunctionsToRemove.begin(), FunctionsToRemove.end());
  FunctionsToRemove.erase(
      std::unique(FunctionsToRemove.begin(), FunctionsToRemove.end()),
      FunctionsToRemove.end());
  for (CallGraphNode *CGN : FunctionsToRemove) {
    delete CG.removeFunctionFromModule(CGN);
    ++NumDeleted;
  }
  return true;
}

PreservedAnalyses InlinerPass::run(LazyCallGraph::SCC &InitialC,
                                   CGSCCAnalysisManager &AM, LazyCallGraph &CG,
                                   CGSCCUpdateResult &UR) {
  const ModuleAnalysisManager &MAM =
      AM.getResult<ModuleAnalysisManagerCGSCCProxy>(InitialC, CG).getManager();
  bool Changed = false;

  assert(InitialC.size() > 0 && "Cannot handle an empty SCC!");
  Module &M = *InitialC.begin()->getFunction().getParent();
  ProfileSummaryInfo *PSI = MAM.getCachedResult<ProfileSummaryAnalysis>(M);

  // We use a worklist of nodes to process so that we can handle if the SCC
  // structure changes and some nodes are no longer part of the current SCC. We
  // also need to use an updatable pointer for the SCC as a consequence.
  SmallVector<LazyCallGraph::Node *, 16> Nodes;
  for (auto &N : InitialC)
    Nodes.push_back(&N);
  auto *C = &InitialC;
  auto *RC = &C->getOuterRefSCC();

  // We also use a secondary worklist of call sites within a particular node to
  // allow quickly continuing to inline through newly inlined call sites where
  // possible.
  SmallVector<std::pair<CallSite, int>, 16> Calls;

  // When inlining a callee produces new call sites, we want to keep track of
  // the fact that they were inlined from the callee. This allows us to avoid
  // infinite inlining in some obscure cases. To represent this, we use an
  // index into the InlineHistory vector.
  SmallVector<std::pair<Function *, int>, 16> InlineHistory;
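  // (Same encoding as in the legacy inliner above: each entry is
  // {inlined callee, index of the parent entry}, with -1 marking a
  // top-level call site.)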

  // Track a set vector of inlined callees so that we can augment the caller
  // with all of their edges in the call graph before pruning out the ones that
  // got simplified away.
  SmallSetVector<Function *, 4> InlinedCallees;

  // Track the dead functions to delete once finished with inlining calls. We
  // defer deleting these to make it easier to handle the call graph updates.
  SmallVector<Function *, 4> DeadFunctions;

  do {
    auto &N = *Nodes.pop_back_val();
    if (CG.lookupSCC(N) != C)
      continue;
    Function &F = N.getFunction();
    if (F.hasFnAttribute(Attribute::OptimizeNone))
      continue;

    DEBUG(dbgs() << "Inlining calls in: " << F.getName() << "\n");

    // Get a FunctionAnalysisManager via a proxy for this particular node. We
    // do this each time we visit a node as the SCC may have changed and as
    // we're going to mutate this particular function we want to make sure the
    // proxy is in place to forward any invalidation events. We can use the
    // manager we get here for looking up results for functions other than this
    // node however because those functions aren't going to be mutated by this
    // pass.
    FunctionAnalysisManager &FAM =
        AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, CG)
            .getManager();
    std::function<AssumptionCache &(Function &)> GetAssumptionCache =
        [&](Function &F) -> AssumptionCache & {
      return FAM.getResult<AssumptionAnalysis>(F);
    };
    auto GetBFI = [&](Function &F) -> BlockFrequencyInfo & {
      return FAM.getResult<BlockFrequencyAnalysis>(F);
    };

    auto GetInlineCost = [&](CallSite CS) {
      Function &Callee = *CS.getCalledFunction();
      auto &CalleeTTI = FAM.getResult<TargetIRAnalysis>(Callee);
      return getInlineCost(CS, Params, CalleeTTI, GetAssumptionCache, {GetBFI},
                           PSI);
    };

    // Get the remarks emission analysis for the caller.
    auto &ORE = FAM.getResult<OptimizationRemarkEmitterAnalysis>(F);

    // We want to generally process call sites top-down in order for
    // simplifications stemming from replacing the call with the returned value
    // after inlining to be visible to subsequent inlining decisions. So we
    // walk the function backwards and then process the back of the vector.
    // FIXME: Using reverse is a really bad way to do this. Instead we should
    // do an actual PO walk of the function body.
    for (Instruction &I : reverse(instructions(F)))
      if (auto CS = CallSite(&I))
        if (Function *Callee = CS.getCalledFunction())
          if (!Callee->isDeclaration())
            Calls.push_back({CS, -1});

    bool DidInline = false;
    while (!Calls.empty()) {
      int InlineHistoryID;
      CallSite CS;
      std::tie(CS, InlineHistoryID) = Calls.pop_back_val();
      Function &Callee = *CS.getCalledFunction();

      if (InlineHistoryID != -1 &&
          InlineHistoryIncludes(&Callee, InlineHistoryID, InlineHistory))
        continue;

      // Check whether we want to inline this callsite.
      if (!shouldInline(CS, GetInlineCost, ORE))
        continue;

      // Setup the data structure used to plumb customization into the
      // `InlineFunction` routine.
      InlineFunctionInfo IFI(
          /*cg=*/nullptr, &GetAssumptionCache,
          &FAM.getResult<BlockFrequencyAnalysis>(*(CS.getCaller())),
          &FAM.getResult<BlockFrequencyAnalysis>(Callee));

      if (!InlineFunction(CS, IFI))
        continue;
      DidInline = true;
      InlinedCallees.insert(&Callee);

      // Add any new callsites to defined functions to the worklist.
      if (!IFI.InlinedCallSites.empty()) {
        int NewHistoryID = InlineHistory.size();
        InlineHistory.push_back({&Callee, InlineHistoryID});
        for (CallSite &CS : reverse(IFI.InlinedCallSites))
          if (Function *NewCallee = CS.getCalledFunction())
            if (!NewCallee->isDeclaration())
              Calls.push_back({CS, NewHistoryID});
      }

      // Merge the attributes based on the inlining.
      AttributeFuncs::mergeAttributesForInlining(F, Callee);

      // For local functions, check whether this makes the callee trivially
      // dead. In that case, we can drop the body of the function eagerly
      // which may reduce the number of callers of other functions to one,
      // changing inline cost thresholds.
      if (Callee.hasLocalLinkage()) {
        // To check this we also need to nuke any dead constant uses (perhaps
        // made dead by this operation on other functions).
        Callee.removeDeadConstantUsers();
        if (Callee.use_empty()) {
          // Clear all analyses and the body and queue the function itself for
          // deletion when we finish inlining and call graph updates.
          // Note that after this point, it is an error to do anything other
          // than use the callee's address or delete it.
          FAM.clear(Callee);
          Callee.dropAllReferences();
          assert(find(DeadFunctions, &Callee) == DeadFunctions.end() &&
                 "Cannot cause a function to become dead twice!");
          DeadFunctions.push_back(&Callee);
        }
      }
    }

    if (!DidInline)
      continue;
    Changed = true;

    // Add all the inlined callees' edges as ref edges to the caller. These are
    // by definition trivial edges as we always have *some* transitive ref edge
    // chain. While in some cases these edges are direct calls inside the
    // callee, they have to be modeled in the inliner as reference edges as
    // there may be a reference edge anywhere along the chain from the current
    // caller to the callee that causes the whole thing to appear like
    // a (transitive) reference edge that will require promotion to a call edge
    // below.
    for (Function *InlinedCallee : InlinedCallees) {
      LazyCallGraph::Node &CalleeN = *CG.lookup(*InlinedCallee);
      for (LazyCallGraph::Edge &E : CalleeN)
        RC->insertTrivialRefEdge(N, *E.getNode());
    }
    InlinedCallees.clear();

    // At this point, since we have made changes we have at least removed
    // a call instruction. However, in the process we do some incremental
    // simplification of the surrounding code. This simplification can
    // essentially do all of the same things as a function pass and we can
    // re-use the exact same logic for updating the call graph to reflect the
    // change.
    C = &updateCGAndAnalysisManagerForFunctionPass(CG, *C, N, AM, UR);
    DEBUG(dbgs() << "Updated inlining SCC: " << *C << "\n");
    RC = &C->getOuterRefSCC();
  } while (!Nodes.empty());

  // Now that we've finished inlining all of the calls across this SCC, delete
  // all of the trivially dead functions, updating the call graph and the CGSCC
  // pass manager in the process.
  //
  // Note that this walks a pointer set which has non-deterministic order but
  // that is OK as all we do is delete things and add pointers to unordered
  // sets.
  for (Function *DeadF : DeadFunctions) {
    // Get the necessary information out of the call graph and nuke the
    // function there.
    auto &DeadC = *CG.lookupSCC(*CG.lookup(*DeadF));
    auto &DeadRC = DeadC.getOuterRefSCC();
    CG.removeDeadFunction(*DeadF);

    // Mark the relevant parts of the call graph as invalid so we don't visit
    // them.
    UR.InvalidatedSCCs.insert(&DeadC);
    UR.InvalidatedRefSCCs.insert(&DeadRC);

    // And delete the actual function from the module.
    M.getFunctionList().erase(DeadF);
  }
  return Changed ? PreservedAnalyses::none() : PreservedAnalyses::all();
}