//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

static cl::opt<int> InlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

static cl::opt<int>
    ColdCallSiteThreshold("inline-cold-callsite-threshold", cl::Hidden,
                          cl::init(45),
                          cl::desc("Threshold for inlining cold callsites"));

// We introduce this threshold to help the performance of instrumentation-based
// PGO before we actually hook up the inliner with analysis passes such as BPI
// and BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(45),
    cl::desc("Threshold for inlining functions with cold attribute"));

static cl::opt<int>
    HotCallSiteThreshold("hot-callsite-threshold", cl::Hidden, cl::init(3000),
                         cl::ZeroOrMore,
                         cl::desc("Threshold for hot callsites"));

static cl::opt<int> LocallyHotCallSiteThreshold(
    "locally-hot-callsite-threshold", cl::Hidden, cl::init(525), cl::ZeroOrMore,
    cl::desc("Threshold for locally hot callsites"));

static cl::opt<int> ColdCallSiteRelFreq(
    "cold-callsite-rel-freq", cl::Hidden, cl::init(2), cl::ZeroOrMore,
    cl::desc("Maximum block frequency, expressed as a percentage of caller's "
             "entry frequency, for a callsite to be cold in the absence of "
             "profile information."));

static cl::opt<int> HotCallSiteRelFreq(
    "hot-callsite-rel-freq", cl::Hidden, cl::init(60), cl::ZeroOrMore,
    cl::desc("Minimum block frequency, expressed as a multiple of caller's "
             "entry frequency, for a callsite to be hot in the absence of "
             "profile information."));

static cl::opt<bool> OptComputeFullInlineCost(
    "inline-cost-full", cl::Hidden, cl::init(false),
    cl::desc("Compute the full inline cost of a call site even when the cost "
             "exceeds the threshold."));

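// All of the thresholds above are ordinary cl::opt flags, so they can be
// overridden on the command line of any tool that runs the inliner. As an
// illustrative example (not a prescription), an invocation such as
//   opt -inline -inline-threshold=500 -S input.ll
// raises the base threshold for a one-off experiment without rebuilding.
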
namespace {

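/// \brief Cost analysis for a single candidate call site.
///
/// In rough outline (a sketch, not the authoritative driver), the
/// getInlineCost entry points defined later in this file construct one
/// CallAnalyzer per candidate call site, run analyzeCall on it, and then
/// combine the result with getCost() and getThreshold() to build the
/// InlineCost handed back to the inliner. The variable names below are
/// illustrative only:
///
/// \code
///   CallAnalyzer CA(CalleeTTI, GetAssumptionCache, GetBFI, PSI, ORE,
///                   *Callee, CS, Params);
///   bool ShouldInline = CA.analyzeCall(CS);
///   // The driver then compares CA.getCost() against CA.getThreshold()
///   // (together with ShouldInline) to decide whether inlining pays off.
/// \endcode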
class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// Getter for the cache of @llvm.assume intrinsics.
  std::function<AssumptionCache &(Function &)> &GetAssumptionCache;

  /// Getter for BlockFrequencyInfo.
  Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  /// The called function.
  Function &F;

  // Cache the DataLayout since we use it a lot.
  const DataLayout &DL;

  /// The OptimizationRemarkEmitter available for this compilation.
  OptimizationRemarkEmitter *ORE;

  /// The candidate callsite being analyzed. Please do not use this to do
  /// analysis in the caller function; we want the inline cost query to be
  /// easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  /// Tunable parameters that control the analysis.
  const InlineParams &Params;

  int Threshold;
  int Cost;
  bool ComputeFullInlineCost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int VectorBonus, TenPercentVectorBonus;
  // Bonus to be applied when the callee has only one reachable basic block.
  int SingleBBBonus;

  /// While we walk the potentially-inlined instructions, we build up and
  /// maintain a mapping of simplified values specific to this callsite. The
  /// idea is to propagate any special information we have about arguments to
  /// this call through the inlinable section of the function, and account for
  /// likely simplifications post-inlining. The most important aspect we track
  /// is CFG altering simplifications -- when we prove a basic block dead, that
  /// can cause dramatic shifts in the cost of inlining a function.
  DenseMap<Value *, Constant *> SimplifiedValues;

  /// Keep track of the values which map back (through function arguments) to
  /// allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  /// The mapping of caller Alloca values to their accumulated cost savings. If
  /// we have to disable SROA for one of the allocas, this tells us how much
  /// cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  /// Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  /// Keep track of dead blocks due to the constant arguments.
  SetVector<BasicBlock *> DeadBlocks;

  /// The mapping of the blocks to their known unique successors due to the
  /// constant arguments.
  DenseMap<BasicBlock *, BasicBlock *> KnownSuccessors;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void findDeadBlocks(BasicBlock *CurrBB, BasicBlock *NextBB);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPFree(GetElementPtrInst &GEP);
  bool canFoldInboundsGEP(GetElementPtrInst &I);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  template <typename Callable>
  bool simplifyInstruction(Instruction &I, Callable Evaluate);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  /// Return true if \p CS is a cold callsite.
  bool isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI);

  /// Return a higher threshold if \p CS is a hot callsite.
  Optional<int> getHotCallSiteThreshold(CallSite CS,
                                        BlockFrequencyInfo *CallerBFI);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *);
  void visit(Module &);
  void visit(Function *);
  void visit(Function &);
  void visit(BasicBlock *);
  void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitAnd(BinaryOperator &I);
  bool visitOr(BinaryOperator &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSelectInst(SelectInst &SI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI,
               std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
               Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI,
               ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE,
               Function &Callee, CallSite CSArg, const InlineParams &Params)
      : TTI(TTI), GetAssumptionCache(GetAssumptionCache), GetBFI(GetBFI),
        PSI(PSI), F(Callee), DL(F.getParent()->getDataLayout()), ORE(ORE),
        CandidateCS(CSArg), Params(Params), Threshold(Params.DefaultThreshold),
        Cost(0), ComputeFullInlineCost(OptComputeFullInlineCost ||
                                       Params.ComputeFullInlineCost || ORE),
        IsCallerRecursive(false), IsRecursiveCall(false),
        ExposesReturnsTwice(false), HasDynamicAlloca(false),
        ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
        HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
        NumVectorInstructions(0), VectorBonus(0), SingleBBBonus(0),
        NumConstantArgs(0), NumConstantOffsetPtrArgs(0), NumAllocaArgs(0),
        NumConstantPtrCmps(0), NumConstantPtrDiffs(0),
        NumInstructionsSimplified(0), SROACostSavings(0),
        SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero())
      continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = GTI.getStructTypeOrNull()) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}
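
// As a concrete illustration of the accumulation above (assuming a 64-bit
// pointer width and the usual 8-byte alignment for i64), a GEP such as
//   %f = getelementptr { i32, i64 }, { i32, i64 }* %p, i32 0, i32 1
// contributes the struct-layout offset of field 1, i.e. 8 bytes (4 for the
// i32 plus 4 bytes of padding), so a caller such as canFoldInboundsGEP can
// record %f in ConstantOffsetPtrs as the pair (%p, 8).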

/// \brief Use TTI to check whether a GEP is free.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPFree(GetElementPtrInst &GEP) {
  SmallVector<Value *, 4> Operands;
  Operands.push_back(GEP.getOperand(0));
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (Constant *SimpleOp = SimplifiedValues.lookup(*I))
      Operands.push_back(SimpleOp);
    else
      Operands.push_back(*I);
  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&GEP, Operands);
}

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.

  APInt ZeroOffset = APInt::getNullValue(DL.getPointerSizeInBits());
  bool CheckSROA = I.getType()->isPointerTy();

  // Track the constant or pointer with constant offset we've seen so far.
  Constant *FirstC = nullptr;
  std::pair<Value *, APInt> FirstBaseAndOffset = {nullptr, ZeroOffset};
  Value *FirstV = nullptr;

  for (unsigned i = 0, e = I.getNumIncomingValues(); i != e; ++i) {
    BasicBlock *Pred = I.getIncomingBlock(i);
    // If the incoming block is dead, skip the incoming block.
    if (DeadBlocks.count(Pred))
      continue;
    // If the parent block of phi is not the known successor of the incoming
    // block, skip the incoming block.
    BasicBlock *KnownSuccessor = KnownSuccessors[Pred];
    if (KnownSuccessor && KnownSuccessor != I.getParent())
      continue;

    Value *V = I.getIncomingValue(i);
    // If the incoming value is this phi itself, skip the incoming value.
    if (&I == V)
      continue;

    Constant *C = dyn_cast<Constant>(V);
    if (!C)
      C = SimplifiedValues.lookup(V);

    std::pair<Value *, APInt> BaseAndOffset = {nullptr, ZeroOffset};
    if (!C && CheckSROA)
      BaseAndOffset = ConstantOffsetPtrs.lookup(V);

    if (!C && !BaseAndOffset.first)
      // The incoming value is neither a constant nor a pointer with constant
      // offset, exit early.
      return true;

    if (FirstC) {
      if (FirstC == C)
        // If we've seen a constant incoming value before and it is the same
        // constant we see this time, continue checking the next incoming
        // value.
        continue;
      // Otherwise early exit because we either see a different constant or saw
      // a constant before but we have a pointer with constant offset this
      // time.
      return true;
    }

    if (FirstV) {
      // The same logic as above, but check pointer with constant offset here.
      if (FirstBaseAndOffset == BaseAndOffset)
        continue;
      return true;
    }

    if (C) {
      // This is the first time we've seen a constant, record it.
      FirstC = C;
      continue;
    }

    // The remaining case is that this is the first time we've seen a pointer
    // with constant offset, record it.
    FirstV = V;
    FirstBaseAndOffset = BaseAndOffset;
  }

  // Check if we can map phi to a constant.
  if (FirstC) {
    SimplifiedValues[&I] = FirstC;
    return true;
  }

  // Check if we can map phi to a pointer with constant offset.
  if (FirstBaseAndOffset.first) {
    ConstantOffsetPtrs[&I] = FirstBaseAndOffset;

    Value *SROAArg;
    DenseMap<Value *, int>::iterator CostIt;
    if (lookupSROAArgAndCost(FirstV, SROAArg, CostIt))
      SROAArgValues[&I] = SROAArg;
  }

  return true;
}
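
// As a small illustration of the phi handling above: if every live
// predecessor of a phi (after skipping blocks proven dead from the call
// site's constant arguments) feeds it the same constant, the phi itself is
// recorded as that constant in SimplifiedValues, and later users of the phi
// can fold against it during this same walk.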

/// \brief Check we can fold GEPs of constant-offset call site argument
/// pointers. This requires target data and inbounds GEPs.
///
/// \return true if the specified GEP can be folded.
bool CallAnalyzer::canFoldInboundsGEP(GetElementPtrInst &I) {
  // Check if we have a base + offset for the pointer.
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getPointerOperand());
  if (!BaseAndOffset.first)
    return false;

  // Check if the offset of this GEP is constant, and if so accumulate it
  // into Offset.
  if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second))
    return false;

  // Add the result as a new mapping to Base + Offset.
  ConstantOffsetPtrs[&I] = BaseAndOffset;

  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Lambda to check whether a GEP's indices are all constant.
  auto IsGEPOffsetConstant = [&](GetElementPtrInst &GEP) {
    for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
      if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
        return false;
    return true;
  };

  if ((I.isInBounds() && canFoldInboundsGEP(I)) || IsGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return isGEPFree(I);
}

/// Simplify \p I if its operands are constants and update SimplifiedValues.
/// \p Evaluate is a callable specific to instruction type that evaluates the
/// instruction when all the operands are constants.
template <typename Callable>
bool CallAnalyzer::simplifyInstruction(Instruction &I, Callable Evaluate) {
  SmallVector<Constant *, 2> COps;
  for (Value *Op : I.operands()) {
    Constant *COp = dyn_cast<Constant>(Op);
    if (!COp)
      COp = SimplifiedValues.lookup(Op);
    if (!COp)
      return false;
    COps.push_back(COp);
  }
  auto *C = Evaluate(COps);
  if (!C)
    return false;
  SimplifiedValues[&I] = C;
  return true;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getBitCast(COps[0], I.getType());
      }))
    return true;

  // Track base/offsets through casts.
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getPtrToInt(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getIntToPtr(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCast(I.getOpcode(), COps[0], I.getType());
      }))
    return true;

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantFoldInstOperands(&I, COps[0], DL);
      }))
    return true;

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  return CandidateCS.paramHasAttr(A->getArgNo(), Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in the scenario below, inlining hot_call_X() may
  // be beneficial:
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In the future, we should elaborate this based on BPI and BFI in more
  // general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

bool CallAnalyzer::isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI) {
  // If global profile summary is available, then callsite's coldness is
  // determined based on that.
  if (PSI && PSI->hasProfileSummary())
    return PSI->isColdCallSite(CS, CallerBFI);

  // Otherwise we need BFI to be available.
  if (!CallerBFI)
    return false;

  // Determine if the callsite is cold relative to caller's entry. We could
  // potentially cache the computation of scaled entry frequency, but the added
  // complexity is not worth it unless this scaling shows up high in the
  // profiles.
  const BranchProbability ColdProb(ColdCallSiteRelFreq, 100);
  auto CallSiteBB = CS.getInstruction()->getParent();
  auto CallSiteFreq = CallerBFI->getBlockFreq(CallSiteBB);
  auto CallerEntryFreq =
      CallerBFI->getBlockFreq(&(CS.getCaller()->getEntryBlock()));
  return CallSiteFreq < CallerEntryFreq * ColdProb;
}

Optional<int>
CallAnalyzer::getHotCallSiteThreshold(CallSite CS,
                                      BlockFrequencyInfo *CallerBFI) {

  // If global profile summary is available, then callsite's hotness is
  // determined based on that.
  if (PSI && PSI->hasProfileSummary() && PSI->isHotCallSite(CS, CallerBFI))
    return Params.HotCallSiteThreshold;

  // Otherwise we need BFI to be available and to have a locally hot callsite
  // threshold.
  if (!CallerBFI || !Params.LocallyHotCallSiteThreshold)
    return None;

  // Determine if the callsite is hot relative to caller's entry. We could
  // potentially cache the computation of scaled entry frequency, but the added
  // complexity is not worth it unless this scaling shows up high in the
  // profiles.
  auto CallSiteBB = CS.getInstruction()->getParent();
  auto CallSiteFreq = CallerBFI->getBlockFreq(CallSiteBB).getFrequency();
  auto CallerEntryFreq = CallerBFI->getEntryFreq();
  if (CallSiteFreq >= CallerEntryFreq * HotCallSiteRelFreq)
    return Params.LocallyHotCallSiteThreshold;

  // Otherwise treat it normally.
  return None;
}

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();

  // return min(A, B) if B is valid.
  auto MinIfValid = [](int A, Optional<int> B) {
    return B ? std::min(A, B.getValue()) : A;
  };

  // return max(A, B) if B is valid.
  auto MaxIfValid = [](int A, Optional<int> B) {
    return B ? std::max(A, B.getValue()) : A;
  };

  // Various bonus percentages. These are multiplied by Threshold to get the
  // bonus values.
  // SingleBBBonus: This bonus is applied if the callee has a single reachable
  // basic block at the given callsite context. This is speculatively applied
  // and withdrawn if more than one basic block is seen.
  //
  // Vector bonuses: We want to more aggressively inline vector-dense kernels
  // and apply this bonus based on the percentage of vector instructions. A
  // bonus is applied if the vector instructions exceed 50% and half that
  // amount is applied if they exceed 10%. Note that these bonuses are somewhat
  // arbitrary and evolved over time by accident as much as because they are
  // principled bonuses.
  // FIXME: It would be nice to base the bonus values on something more
  // scientific.
  //
  // LastCallToStaticBonus: This large bonus is applied to ensure the inlining
  // of the last call to a static function as inlining such functions is
  // guaranteed to reduce code size.
  //
  // These bonus percentages may be set to 0 based on properties of the caller
  // and the callsite.
  int SingleBBBonusPercent = 50;
  int VectorBonusPercent = 150;
  int LastCallToStaticBonus = InlineConstants::LastCallToStaticBonus;

  // Lambda to set all the above bonus and bonus percentages to 0.
  auto DisallowAllBonuses = [&]() {
    SingleBBBonusPercent = 0;
    VectorBonusPercent = 0;
    LastCallToStaticBonus = 0;
  };

  // Use the OptMinSizeThreshold or OptSizeThreshold knob if they are available
  // and reduce the threshold if the caller has the necessary attribute.
  if (Caller->optForMinSize()) {
    Threshold = MinIfValid(Threshold, Params.OptMinSizeThreshold);
    // For minsize, we want to disable the single BB bonus and the vector
    // bonuses, but not the last-call-to-static bonus. Inlining the last call
    // to a static function will, at the minimum, eliminate the parameter setup
    // and call/return instructions.
    SingleBBBonusPercent = 0;
    VectorBonusPercent = 0;
  } else if (Caller->optForSize())
    Threshold = MinIfValid(Threshold, Params.OptSizeThreshold);

  // Adjust the threshold based on the inlinehint attribute and profile-based
  // hotness information if the caller does not have the MinSize attribute.
  if (!Caller->optForMinSize()) {
    if (Callee.hasFnAttribute(Attribute::InlineHint))
      Threshold = MaxIfValid(Threshold, Params.HintThreshold);

    // FIXME: After switching to the new passmanager, simplify the logic below
    // by checking only the callsite hotness/coldness as we will reliably
    // have local profile information.
    //
    // Callsite hotness and coldness can be determined if sample profile is
    // used (which adds hotness metadata to calls) or if the caller's
    // BlockFrequencyInfo is available.
    BlockFrequencyInfo *CallerBFI = GetBFI ? &((*GetBFI)(*Caller)) : nullptr;
    auto HotCallSiteThreshold = getHotCallSiteThreshold(CS, CallerBFI);
    if (!Caller->optForSize() && HotCallSiteThreshold) {
      DEBUG(dbgs() << "Hot callsite.\n");
      // FIXME: This should update the threshold only if it exceeds the
      // current threshold, but AutoFDO + ThinLTO currently relies on this
      // behavior to prevent inlining of hot callsites during ThinLTO
      // compile phase.
      Threshold = HotCallSiteThreshold.getValue();
    } else if (isColdCallSite(CS, CallerBFI)) {
      DEBUG(dbgs() << "Cold callsite.\n");
      // Do not apply bonuses for a cold callsite including the
      // LastCallToStatic bonus. While this bonus might result in code size
      // reduction, it can cause the size of a non-cold caller to increase
      // preventing it from being inlined.
      DisallowAllBonuses();
      Threshold = MinIfValid(Threshold, Params.ColdCallSiteThreshold);
    } else if (PSI) {
      // Use the callee's global profile information only if we have no way of
      // determining this via callsite information.
      if (PSI->isFunctionEntryHot(&Callee)) {
        DEBUG(dbgs() << "Hot callee.\n");
        // If callsite hotness cannot be determined, we may still know
        // that the callee is hot and treat it as a weaker hint for threshold
        // increase.
        Threshold = MaxIfValid(Threshold, Params.HintThreshold);
      } else if (PSI->isFunctionEntryCold(&Callee)) {
        DEBUG(dbgs() << "Cold callee.\n");
        // Do not apply bonuses for a cold callee including the
        // LastCallToStatic bonus. While this bonus might result in code size
        // reduction, it can cause the size of a non-cold caller to increase
        // preventing it from being inlined.
        DisallowAllBonuses();
        Threshold = MinIfValid(Threshold, Params.ColdThreshold);
      }
    }
  }

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();

  SingleBBBonus = Threshold * SingleBBBonusPercent / 100;
  VectorBonus = Threshold * VectorBonusPercent / 100;

  bool OnlyOneCallAndLocalLinkage =
      F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
  // If there is only one call of the function, and it has internal linkage,
  // the cost of inlining it drops dramatically. It may seem odd to update
  // Cost in updateThreshold, but the bonus depends on the logic in this
  // method.
  if (OnlyOneCallAndLocalLinkage)
    Cost -= LastCallToStaticBonus;
}

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCompare(I.getPredicate(), COps[0], COps[1]);
      }))
    return true;

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitOr(BinaryOperator &I) {
  // This is necessary because the generic simplify instruction only works if
  // both operands are constants.
  for (unsigned i = 0; i < 2; ++i) {
    if (ConstantInt *C = dyn_cast_or_null<ConstantInt>(
            SimplifiedValues.lookup(I.getOperand(i))))
      if (C->isAllOnesValue()) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }
  return Base::visitOr(I);
}

bool CallAnalyzer::visitAnd(BinaryOperator &I) {
  // This is necessary because the generic simplify instruction only works if
  // both operands are constants.
  for (unsigned i = 0; i < 2; ++i) {
    if (ConstantInt *C = dyn_cast_or_null<ConstantInt>(
            SimplifiedValues.lookup(I.getOperand(i))))
      if (C->isZero()) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }
  return Base::visitAnd(I);
}
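
// A short note on the two visitors above: they catch `or X, -1` and
// `and X, 0`, which fold to -1 and 0 respectively even when X is unknown;
// the constants-only simplifyInstruction path cannot see these cases.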

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  auto Evaluate = [&](SmallVectorImpl<Constant *> &COps) {
    Value *SimpleV = nullptr;
    if (auto FI = dyn_cast<FPMathOperator>(&I))
      SimpleV = SimplifyFPBinOp(I.getOpcode(), COps[0], COps[1],
                                FI->getFastMathFlags(), DL);
    else
      SimpleV = SimplifyBinOp(I.getOpcode(), COps[0], COps[1], DL);
    return dyn_cast_or_null<Constant>(SimpleV);
  };

  if (simplifyInstruction(I, Evaluate))
    return true;

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getExtractValue(COps[0], I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getInsertValue(/*AggregateOperand*/ COps[0],
                                            /*InsertedValueOperand*/ COps[1],
                                            I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

1121/// \brief Try to simplify a call site.
1122///
1123/// Takes a concrete function and callsite and tries to actually simplify it by
1124 /// analyzing the arguments and the call itself with instsimplify. Returns true if
1125/// it has simplified the callsite to some other entity (a constant), making it
1126/// free.
1127bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
1128 // FIXME: Using the instsimplify logic directly for this is inefficient
1129 // because we have to continually rebuild the argument list even when no
1130 // simplifications can be performed. Until that is fixed with remapping
1131 // inside of instsimplify, directly constant fold calls here.
Andrew Kaylor647025f2017-06-09 23:18:11 +00001132 if (!canConstantFoldCallTo(CS, F))
Chandler Carruth753e21d2012-12-28 14:23:32 +00001133 return false;
1134
1135 // Try to re-map the arguments to constants.
1136 SmallVector<Constant *, 4> ConstantArgs;
1137 ConstantArgs.reserve(CS.arg_size());
Chad Rosier567556a2016-04-28 14:47:23 +00001138 for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
1139 ++I) {
Chandler Carruth753e21d2012-12-28 14:23:32 +00001140 Constant *C = dyn_cast<Constant>(*I);
1141 if (!C)
1142 C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
1143 if (!C)
1144 return false; // This argument doesn't map to a constant.
1145
1146 ConstantArgs.push_back(C);
1147 }
Andrew Kaylor647025f2017-06-09 23:18:11 +00001148 if (Constant *C = ConstantFoldCall(CS, F, ConstantArgs)) {
Chandler Carruth753e21d2012-12-28 14:23:32 +00001149 SimplifiedValues[CS.getInstruction()] = C;
1150 return true;
1151 }
1152
1153 return false;
1154}
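// Illustrative example (hypothetical IR): if every argument is already a
// Constant or has a Constant entry in SimplifiedValues, a foldable call
// collapses to a constant and becomes free at this call site, e.g.
//
//   %r = call double @sqrt(double 4.000000e+00)   ; folds to 2.000000e+00
//
// A call with any unresolved argument returns false here and falls through to
// the generic call-site costing in visitCallSite.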
1155
Chandler Carruth0539c072012-03-31 12:42:41 +00001156bool CallAnalyzer::visitCallSite(CallSite CS) {
Chandler Carruth37d25de2013-12-13 08:00:01 +00001157 if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001158 !F.hasFnAttribute(Attribute::ReturnsTwice)) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001159 // This aborts the entire analysis.
1160 ExposesReturnsTwice = true;
1161 return false;
1162 }
Chad Rosier567556a2016-04-28 14:47:23 +00001163 if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
James Molloy4f6fb952012-12-20 16:04:27 +00001164 ContainsNoDuplicateCall = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001165
Chandler Carruth0539c072012-03-31 12:42:41 +00001166 if (Function *F = CS.getCalledFunction()) {
Chandler Carruth753e21d2012-12-28 14:23:32 +00001167 // When we have a concrete function, first try to simplify it directly.
1168 if (simplifyCallSite(F, CS))
1169 return true;
1170
1171 // Next check if it is an intrinsic we know about.
1172 // FIXME: Lift this into part of the InstVisitor.
1173 if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
1174 switch (II->getIntrinsicID()) {
1175 default:
1176 return Base::visitCallSite(CS);
1177
Peter Collingbourne7dd8dbf2016-04-22 21:18:02 +00001178 case Intrinsic::load_relative:
1179 // This is normally lowered to 4 LLVM instructions.
1180 Cost += 3 * InlineConstants::InstrCost;
1181 return false;
1182
Chandler Carruth753e21d2012-12-28 14:23:32 +00001183 case Intrinsic::memset:
1184 case Intrinsic::memcpy:
1185 case Intrinsic::memmove:
1186 // SROA can usually chew through these intrinsics, but they aren't free.
1187 return false;
Reid Kleckner60381792015-07-07 22:25:32 +00001188 case Intrinsic::localescape:
Reid Kleckner223de262015-04-14 20:38:14 +00001189 HasFrameEscape = true;
1190 return false;
Chandler Carruth753e21d2012-12-28 14:23:32 +00001191 }
1192 }
1193
Davide Italiano9d939c82017-11-30 22:10:35 +00001194 if (F == CS.getInstruction()->getFunction()) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001195 // This flag will fully abort the analysis, so don't bother with anything
1196 // else.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001197 IsRecursiveCall = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001198 return false;
1199 }
1200
Chandler Carruth0ba8db42013-01-22 11:26:02 +00001201 if (TTI.isLoweredToCall(F)) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001202 // We account for the average 1 instruction per call argument setup
1203 // here.
1204 Cost += CS.arg_size() * InlineConstants::InstrCost;
1205
1206 // Everything other than inline ASM will also have a significant cost
1207 // merely from making the call.
1208 if (!isa<InlineAsm>(CS.getCalledValue()))
1209 Cost += InlineConstants::CallPenalty;
1210 }
1211
1212 return Base::visitCallSite(CS);
1213 }
1214
1215 // Otherwise we're in a very special case -- an indirect function call. See
1216 // if we can be particularly clever about this.
1217 Value *Callee = CS.getCalledValue();
1218
1219 // First, pay the price of the argument setup. We account for the average
1220 // 1 instruction per call argument setup here.
1221 Cost += CS.arg_size() * InlineConstants::InstrCost;
1222
1223 // Next, check if this happens to be an indirect function call to a known
1224 // function in this inline context. If not, we've done all we can.
1225 Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
1226 if (!F)
1227 return Base::visitCallSite(CS);
1228
1229 // If we have a constant that we are calling as a function, we can peer
1230 // through it and see the function target. This happens not infrequently
1231 // during devirtualization and so we want to give it a hefty bonus for
1232 // inlining, but cap that bonus in the event that inlining wouldn't pan
1233 // out. Pretend to inline the function, with a custom threshold.
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001234 auto IndirectCallParams = Params;
1235 IndirectCallParams.DefaultThreshold = InlineConstants::IndirectCallThreshold;
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001236 CallAnalyzer CA(TTI, GetAssumptionCache, GetBFI, PSI, ORE, *F, CS,
Easwaran Raman12585b02017-01-20 22:44:04 +00001237 IndirectCallParams);
Chandler Carruth0539c072012-03-31 12:42:41 +00001238 if (CA.analyzeCall(CS)) {
1239 // We were able to inline the indirect call! Subtract the cost from the
Easwaran Raman6d90d9f2015-12-07 21:21:20 +00001240 // threshold to get the bonus we want to apply, but don't go below zero.
1241 Cost -= std::max(0, CA.getThreshold() - CA.getCost());
Chandler Carruth0539c072012-03-31 12:42:41 +00001242 }
1243
1244 return Base::visitCallSite(CS);
1245}
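// Worked example with made-up numbers: suppose the indirect callee simplifies
// to a known function F, and the nested CallAnalyzer run over F (using
// InlineConstants::IndirectCallThreshold as its default threshold) ends with
// getCost() == 40 against getThreshold() == 100. Then std::max(0, 100 - 40)
// == 60 is subtracted from this call site's Cost, rewarding the expected
// devirtualization; if the nested analysis had failed, no credit is given.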
1246
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001247bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
1248 // At least one return instruction will be free after inlining.
1249 bool Free = !HasReturn;
1250 HasReturn = true;
1251 return Free;
1252}
1253
1254bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
1255 // We model unconditional branches as essentially free -- they really
1256 // shouldn't exist at all, but handling them makes the behavior of the
1257 // inliner more regular and predictable. Interestingly, conditional branches
1258 // which will fold away are also free.
1259 return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
1260 dyn_cast_or_null<ConstantInt>(
1261 SimplifiedValues.lookup(BI.getCondition()));
1262}
1263
Haicheng Wu3ec848b2017-09-27 14:44:56 +00001264bool CallAnalyzer::visitSelectInst(SelectInst &SI) {
1265 bool CheckSROA = SI.getType()->isPointerTy();
1266 Value *TrueVal = SI.getTrueValue();
1267 Value *FalseVal = SI.getFalseValue();
1268
1269 Constant *TrueC = dyn_cast<Constant>(TrueVal);
1270 if (!TrueC)
1271 TrueC = SimplifiedValues.lookup(TrueVal);
1272 Constant *FalseC = dyn_cast<Constant>(FalseVal);
1273 if (!FalseC)
1274 FalseC = SimplifiedValues.lookup(FalseVal);
1275 Constant *CondC =
1276 dyn_cast_or_null<Constant>(SimplifiedValues.lookup(SI.getCondition()));
1277
1278 if (!CondC) {
1279 // Select C, X, X => X
1280 if (TrueC == FalseC && TrueC) {
1281 SimplifiedValues[&SI] = TrueC;
1282 return true;
1283 }
1284
1285 if (!CheckSROA)
1286 return Base::visitSelectInst(SI);
1287
1288 std::pair<Value *, APInt> TrueBaseAndOffset =
1289 ConstantOffsetPtrs.lookup(TrueVal);
1290 std::pair<Value *, APInt> FalseBaseAndOffset =
1291 ConstantOffsetPtrs.lookup(FalseVal);
1292 if (TrueBaseAndOffset == FalseBaseAndOffset && TrueBaseAndOffset.first) {
1293 ConstantOffsetPtrs[&SI] = TrueBaseAndOffset;
1294
1295 Value *SROAArg;
1296 DenseMap<Value *, int>::iterator CostIt;
1297 if (lookupSROAArgAndCost(TrueVal, SROAArg, CostIt))
1298 SROAArgValues[&SI] = SROAArg;
1299 return true;
1300 }
1301
1302 return Base::visitSelectInst(SI);
1303 }
1304
1305 // Select condition is a constant.
1306 Value *SelectedV = CondC->isAllOnesValue()
1307 ? TrueVal
1308 : (CondC->isNullValue()) ? FalseVal : nullptr;
1309 if (!SelectedV) {
1310 // Condition is a vector constant that is not all 1s or all 0s. If all
1311     // operands are constants, ConstantExpr::getSelect() can handle such
1312     // cases, e.g. vector selects.
1313 if (TrueC && FalseC) {
1314 if (auto *C = ConstantExpr::getSelect(CondC, TrueC, FalseC)) {
1315 SimplifiedValues[&SI] = C;
1316 return true;
1317 }
1318 }
1319 return Base::visitSelectInst(SI);
1320 }
1321
1322 // Condition is either all 1s or all 0s. SI can be simplified.
1323 if (Constant *SelectedC = dyn_cast<Constant>(SelectedV)) {
1324 SimplifiedValues[&SI] = SelectedC;
1325 return true;
1326 }
1327
1328 if (!CheckSROA)
1329 return true;
1330
1331 std::pair<Value *, APInt> BaseAndOffset =
1332 ConstantOffsetPtrs.lookup(SelectedV);
1333 if (BaseAndOffset.first) {
1334 ConstantOffsetPtrs[&SI] = BaseAndOffset;
1335
1336 Value *SROAArg;
1337 DenseMap<Value *, int>::iterator CostIt;
1338 if (lookupSROAArgAndCost(SelectedV, SROAArg, CostIt))
1339 SROAArgValues[&SI] = SROAArg;
1340 }
1341
1342 return true;
1343}
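// Illustrative examples of the cases handled above (hypothetical IR):
//
//   %s0 = select i1 %c, i32 7, i32 7        ; both arms are the same constant:
//                                           ; folded to 7 despite unknown %c
//   %s1 = select i1 true, i32 1, i32 %x     ; constant scalar condition: the
//                                           ; true value is selected directly
//   %s2 = select <2 x i1> <i1 true, i1 false>,
//                <2 x i32> <i32 1, i32 2>,
//                <2 x i32> <i32 3, i32 4>   ; mixed vector condition with
//                                           ; constant arms: folded to <1, 4>
//                                           ; via ConstantExpr::getSelect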
1344
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001345bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
1346 // We model unconditional switches as free, see the comments on handling
1347 // branches.
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001348 if (isa<ConstantInt>(SI.getCondition()))
1349 return true;
1350 if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
1351 if (isa<ConstantInt>(V))
1352 return true;
1353
Eric Christopher7ad02ee2017-06-28 21:10:31 +00001354 // Assume the most general case where the switch is lowered into
Jun Bum Lim2960d412017-06-02 20:42:54 +00001355 // either a jump table, bit test, or a balanced binary tree consisting of
1356 // case clusters without merging adjacent clusters with the same
1357  // destination. We do not consider switches that are lowered with a mix
1358 // of jump table/bit test/binary search tree. The cost of the switch is
1359 // proportional to the size of the tree or the size of jump table range.
1360 //
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001361 // NB: We convert large switches which are just used to initialize large phi
1362 // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
1363 // inlining those. It will prevent inlining in cases where the optimization
1364 // does not (yet) fire.
Jun Bum Lim2960d412017-06-02 20:42:54 +00001365
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001366  // Upper bound we allow Cost to reach in this function, to avoid overflow.
1367 int CostUpperBound = INT_MAX - InlineConstants::InstrCost - 1;
1368
Jun Bum Lim2960d412017-06-02 20:42:54 +00001369 // Exit early for a large switch, assuming one case needs at least one
1370 // instruction.
1371  // FIXME: This is not true for a bit test, but ignore such cases for now to
1372 // save compile-time.
1373 int64_t CostLowerBound =
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001374 std::min((int64_t)CostUpperBound,
Jun Bum Lim2960d412017-06-02 20:42:54 +00001375 (int64_t)SI.getNumCases() * InlineConstants::InstrCost + Cost);
1376
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001377 if (CostLowerBound > Threshold && !ComputeFullInlineCost) {
Jun Bum Lim2960d412017-06-02 20:42:54 +00001378 Cost = CostLowerBound;
1379 return false;
1380 }
1381
1382 unsigned JumpTableSize = 0;
1383 unsigned NumCaseCluster =
1384 TTI.getEstimatedNumberOfCaseClusters(SI, JumpTableSize);
1385
1386 // If suitable for a jump table, consider the cost for the table size and
1387 // branch to destination.
1388 if (JumpTableSize) {
1389 int64_t JTCost = (int64_t)JumpTableSize * InlineConstants::InstrCost +
1390 4 * InlineConstants::InstrCost;
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001391
1392 Cost = std::min((int64_t)CostUpperBound, JTCost + Cost);
Jun Bum Lim2960d412017-06-02 20:42:54 +00001393 return false;
1394 }
1395
1396  // When considering lowering to a binary search tree, we need the number of
1397  // nodes, which is the same as the number of comparisons when lowered. For a given
1398 // number of clusters, n, we can define a recursive function, f(n), to find
1399 // the number of nodes in the tree. The recursion is :
1400 // f(n) = 1 + f(n/2) + f (n - n/2), when n > 3,
1401 // and f(n) = n, when n <= 3.
1402  // This leads to a binary tree where each leaf is either f(2) or f(3)
1403  // when n > 3. So, the number of comparisons from leaves should be n, while
1404  // the number from non-leaf nodes should be:
1405 // 2^(log2(n) - 1) - 1
1406 // = 2^log2(n) * 2^-1 - 1
1407 // = n / 2 - 1.
1408 // Considering comparisons from leaf and non-leaf nodes, we can estimate the
1409 // number of comparisons in a simple closed form :
1410 // n + n / 2 - 1 = n * 3 / 2 - 1
1411 if (NumCaseCluster <= 3) {
1412 // Suppose a comparison includes one compare and one conditional branch.
1413 Cost += NumCaseCluster * 2 * InlineConstants::InstrCost;
1414 return false;
1415 }
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001416
1417 int64_t ExpectedNumberOfCompare = 3 * (int64_t)NumCaseCluster / 2 - 1;
1418 int64_t SwitchCost =
Jun Bum Lim2960d412017-06-02 20:42:54 +00001419 ExpectedNumberOfCompare * 2 * InlineConstants::InstrCost;
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001420
1421 Cost = std::min((int64_t)CostUpperBound, SwitchCost + Cost);
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001422 return false;
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001423}
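// A quick, self-contained way to sanity-check the closed form above
// (illustrative snippet, not part of this file):
//
//   // f(n) = 1 + f(n/2) + f(n - n/2) for n > 3, and f(n) = n otherwise.
//   int64_t numSwitchTreeCompares(int64_t N) {
//     return N <= 3 ? N
//                   : 1 + numSwitchTreeCompares(N / 2) +
//                         numSwitchTreeCompares(N - N / 2);
//   }
//
// For NumCaseCluster == 8 the recursion yields 11, matching 3 * 8 / 2 - 1; for
// counts that are not powers of two the closed form is only an estimate (e.g.
// it gives 8 where the recursion gives 7 for 6 clusters).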
1424
1425bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
1426 // We never want to inline functions that contain an indirectbr. This is
1427 // incorrect because all the blockaddress's (in static global initializers
1428 // for example) would be referring to the original function, and this
1429 // indirect jump would jump from the inlined copy of the function into the
1430 // original function which is extremely undefined behavior.
1431 // FIXME: This logic isn't really right; we can safely inline functions with
1432 // indirectbr's as long as no other function or global references the
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001433 // blockaddress of a block within the current function.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001434 HasIndirectBr = true;
1435 return false;
1436}
1437
1438bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
1439 // FIXME: It's not clear that a single instruction is an accurate model for
1440 // the inline cost of a resume instruction.
1441 return false;
1442}
1443
David Majnemer654e1302015-07-31 17:58:14 +00001444bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
1445 // FIXME: It's not clear that a single instruction is an accurate model for
1446 // the inline cost of a cleanupret instruction.
1447 return false;
1448}
1449
1450bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
1451 // FIXME: It's not clear that a single instruction is an accurate model for
Joseph Tremoulet8220bcc2015-08-23 00:26:33 +00001452 // the inline cost of a catchret instruction.
David Majnemer654e1302015-07-31 17:58:14 +00001453 return false;
1454}
1455
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001456bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
1457  // FIXME: It might be reasonable to discount the cost of instructions leading
1458 // to unreachable as they have the lowest possible impact on both runtime and
1459 // code size.
1460 return true; // No actual code is needed for unreachable.
1461}
1462
Chandler Carruth0539c072012-03-31 12:42:41 +00001463bool CallAnalyzer::visitInstruction(Instruction &I) {
Chandler Carruthda7513a2012-05-04 00:58:03 +00001464 // Some instructions are free. All of the free intrinsics can also be
1465 // handled by SROA, etc.
Chandler Carruthb8cf5102013-01-21 12:05:16 +00001466 if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
Chandler Carruthda7513a2012-05-04 00:58:03 +00001467 return true;
1468
Chandler Carruth0539c072012-03-31 12:42:41 +00001469 // We found something we don't understand or can't handle. Mark any SROA-able
1470 // values in the operand list as no longer viable.
1471 for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
1472 disableSROA(*OI);
1473
1474 return false;
1475}
1476
Chandler Carruth0539c072012-03-31 12:42:41 +00001477/// \brief Analyze a basic block for its contribution to the inline cost.
1478///
1479/// This method walks the analyzer over every instruction in the given basic
1480/// block and accounts for their cost during inlining at this callsite. It
1481/// aborts early if the threshold has been exceeded or an impossible to inline
1482/// construct has been detected. It returns false if inlining is no longer
1483/// viable, and true if inlining remains viable.
Hal Finkel57f03dd2014-09-07 13:49:57 +00001484bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
1485 SmallPtrSetImpl<const Value *> &EphValues) {
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001486 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
Chandler Carruth6b4cc8b2014-02-01 10:38:17 +00001487    // FIXME: Currently, the number of instructions in a function, regardless of
1488    // our ability to simplify them during inlining to constants or dead code,
1489    // is actually used by the vector bonus heuristic. As long as that's true,
1490 // we have to special case debug intrinsics here to prevent differences in
1491 // inlining due to debug symbols. Eventually, the number of unsimplified
1492 // instructions shouldn't factor into the cost computation, but until then,
1493 // hack around it here.
1494 if (isa<DbgInfoIntrinsic>(I))
1495 continue;
1496
Hal Finkel57f03dd2014-09-07 13:49:57 +00001497 // Skip ephemeral values.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001498 if (EphValues.count(&*I))
Hal Finkel57f03dd2014-09-07 13:49:57 +00001499 continue;
1500
Chandler Carruth0539c072012-03-31 12:42:41 +00001501 ++NumInstructions;
1502 if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
1503 ++NumVectorInstructions;
1504
Sanjay Patele9434e82015-09-15 15:26:25 +00001505 // If the instruction is floating point, and the target says this operation
1506 // is expensive or the function has the "use-soft-float" attribute, this may
1507 // eventually become a library call. Treat the cost as such.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001508 if (I->getType()->isFloatingPointTy()) {
Sanjay Patele9434e82015-09-15 15:26:25 +00001509 // If the function has the "use-soft-float" attribute, mark it as
1510 // expensive.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001511 if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
Eric Christopher908ed7f2017-04-15 06:14:52 +00001512 (F.getFnAttribute("use-soft-float").getValueAsString() == "true"))
Cameron Esfahani17177d12015-02-05 02:09:33 +00001513 Cost += InlineConstants::CallPenalty;
1514 }
1515
Chandler Carruth0539c072012-03-31 12:42:41 +00001516 // If the instruction simplified to a constant, there is no cost to this
1517 // instruction. Visit the instructions using our InstVisitor to account for
1518 // all of the per-instruction logic. The visit tree returns true if we
1519 // consumed the instruction in any way, and false if the instruction's base
1520 // cost should count against inlining.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001521 if (Base::visit(&*I))
Chandler Carruth0539c072012-03-31 12:42:41 +00001522 ++NumInstructionsSimplified;
1523 else
1524 Cost += InlineConstants::InstrCost;
1525
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001526 using namespace ore;
Chandler Carruth0539c072012-03-31 12:42:41 +00001527    // If visiting this instruction detected an uninlinable pattern, abort.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001528 if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001529 HasIndirectBr || HasFrameEscape) {
1530 if (ORE)
Vivek Pandya95906582017-10-11 17:12:59 +00001531 ORE->emit([&]() {
1532 return OptimizationRemarkMissed(DEBUG_TYPE, "NeverInline",
1533 CandidateCS.getInstruction())
1534 << NV("Callee", &F)
1535 << " has uninlinable pattern and cost is not fully computed";
1536 });
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001537 return false;
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001538 }
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001539
1540 // If the caller is a recursive function then we don't want to inline
1541 // functions which allocate a lot of stack space because it would increase
1542 // the caller stack usage dramatically.
1543 if (IsCallerRecursive &&
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001544 AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller) {
1545 if (ORE)
Vivek Pandya95906582017-10-11 17:12:59 +00001546 ORE->emit([&]() {
1547 return OptimizationRemarkMissed(DEBUG_TYPE, "NeverInline",
1548 CandidateCS.getInstruction())
1549 << NV("Callee", &F)
1550 << " is recursive and allocates too much stack space. Cost is "
1551 "not fully computed";
1552 });
Chandler Carruth0539c072012-03-31 12:42:41 +00001553 return false;
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001554 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001555
Chandler Carrutha004f222015-05-27 02:49:05 +00001556    // Check if we've passed the maximum possible threshold so we don't spin in
1557 // huge basic blocks that will never inline.
Haicheng Wu61995362017-08-25 19:00:33 +00001558 if (Cost >= Threshold && !ComputeFullInlineCost)
Chandler Carruth0539c072012-03-31 12:42:41 +00001559 return false;
1560 }
1561
1562 return true;
1563}
1564
1565/// \brief Compute the base pointer and cumulative constant offsets for V.
1566///
1567/// This strips all constant offsets off of V, leaving it the base pointer, and
1568/// accumulates the total constant offset applied in the returned constant. It
1569/// returns 0 if V is not a pointer, and returns the constant '0' if there are
1570/// no constant offsets applied.
1571ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001572 if (!V->getType()->isPointerTy())
Craig Topper353eda42014-04-24 06:44:33 +00001573 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001574
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001575 unsigned IntPtrWidth = DL.getPointerSizeInBits();
Chandler Carruth0539c072012-03-31 12:42:41 +00001576 APInt Offset = APInt::getNullValue(IntPtrWidth);
1577
1578 // Even though we don't look through PHI nodes, we could be called on an
1579 // instruction in an unreachable block, which may be on a cycle.
1580 SmallPtrSet<Value *, 4> Visited;
1581 Visited.insert(V);
1582 do {
1583 if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
1584 if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
Craig Topper353eda42014-04-24 06:44:33 +00001585 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001586 V = GEP->getPointerOperand();
1587 } else if (Operator::getOpcode(V) == Instruction::BitCast) {
1588 V = cast<Operator>(V)->getOperand(0);
1589 } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
Sanjoy Das5ce32722016-04-08 00:48:30 +00001590 if (GA->isInterposable())
Chandler Carruth0539c072012-03-31 12:42:41 +00001591 break;
1592 V = GA->getAliasee();
1593 } else {
1594 break;
1595 }
1596 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
David Blaikie70573dc2014-11-19 07:49:26 +00001597 } while (Visited.insert(V).second);
Chandler Carruth0539c072012-03-31 12:42:41 +00001598
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001599 Type *IntPtrTy = DL.getIntPtrType(V->getContext());
Chandler Carruth0539c072012-03-31 12:42:41 +00001600 return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
1601}
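// Illustrative example (hypothetical IR): for a call argument computed as
//
//   %a = alloca [4 x i32]
//   %p = getelementptr inbounds [4 x i32], [4 x i32]* %a, i32 0, i32 2
//   call void @callee(i32* %p)
//
// this routine rewrites V from %p back to %a and returns the accumulated
// 8-byte offset, which analyzeCall uses to seed ConstantOffsetPtrs (and, since
// the base is an alloca, SROAArgValues) for the corresponding formal argument.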
1602
Haicheng Wu3739e142017-12-14 14:36:18 +00001603/// \brief Find dead blocks due to deleted CFG edges during inlining.
1604///
1605/// If we know the successor of the current block, \p CurrBB, has to be \p
1606/// NextBB, the other successors of \p CurrBB are dead if these successors have
1607/// no live incoming CFG edges. If one block is found to be dead, we can
1608/// continue growing the dead block list by checking the successors of the dead
1609/// blocks to see if all their incoming edges are dead or not.
1610void CallAnalyzer::findDeadBlocks(BasicBlock *CurrBB, BasicBlock *NextBB) {
1611 auto IsEdgeDead = [&](BasicBlock *Pred, BasicBlock *Succ) {
1612     // A CFG edge is dead if the predecessor is dead or the predecessor has a
1613     // known successor which is not the one under examination.
1614 return (DeadBlocks.count(Pred) ||
1615 (KnownSuccessors[Pred] && KnownSuccessors[Pred] != Succ));
1616 };
1617
1618 auto IsNewlyDead = [&](BasicBlock *BB) {
1619 // If all the edges to a block are dead, the block is also dead.
1620 return (!DeadBlocks.count(BB) &&
1621 llvm::all_of(predecessors(BB),
1622 [&](BasicBlock *P) { return IsEdgeDead(P, BB); }));
1623 };
1624
1625 for (BasicBlock *Succ : successors(CurrBB)) {
1626 if (Succ == NextBB || !IsNewlyDead(Succ))
1627 continue;
1628 SmallVector<BasicBlock *, 4> NewDead;
1629 NewDead.push_back(Succ);
1630 while (!NewDead.empty()) {
1631 BasicBlock *Dead = NewDead.pop_back_val();
1632 if (DeadBlocks.insert(Dead))
1633 // Continue growing the dead block lists.
1634 for (BasicBlock *S : successors(Dead))
1635 if (IsNewlyDead(S))
1636 NewDead.push_back(S);
1637 }
1638 }
1639}
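// Worked example (hypothetical CFG): assume the conditional branch terminating
// BB1 was proven to always take the edge to BB2, so KnownSuccessors[BB1] ==
// BB2 and findDeadBlocks(BB1, BB2) is called:
//
//      BB1            BB3's only incoming edge (from BB1) is now dead, so BB3
//     /   \           joins DeadBlocks. Its successor BB4 is then examined,
//   BB2    BB3        but the edge BB2 -> BB4 is still live, so BB4 remains
//     \   /           live and will still be visited and costed.
//      BB4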
1640
Chandler Carruth0539c072012-03-31 12:42:41 +00001641/// \brief Analyze a call site for potential inlining.
1642///
1643/// Returns true if inlining this call is viable, and false if it is not
1644/// viable. It computes the cost and adjusts the threshold based on numerous
1645/// factors and heuristics. If this method returns false but the computed cost
1646/// is below the computed threshold, then inlining was forcibly disabled by
Bob Wilson266802d2012-11-19 07:04:30 +00001647/// some artifact of the routine.
Chandler Carruth0539c072012-03-31 12:42:41 +00001648bool CallAnalyzer::analyzeCall(CallSite CS) {
Chandler Carruth7ae90d42012-04-11 10:15:10 +00001649 ++NumCallsAnalyzed;
1650
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001651 // Perform some tweaks to the cost and threshold based on the direct
1652 // callsite information.
Chandler Carruth0539c072012-03-31 12:42:41 +00001653
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001654 // We want to more aggressively inline vector-dense kernels, so up the
1655 // threshold, and we'll lower it if the % of vector instructions gets too
Chandler Carrutha004f222015-05-27 02:49:05 +00001656  // low. Note that these bonuses are somewhat arbitrary and evolved over time
1657 // by accident as much as because they are principled bonuses.
1658 //
1659 // FIXME: It would be nice to remove all such bonuses. At least it would be
1660 // nice to base the bonus values on something more scientific.
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001661 assert(NumInstructions == 0);
1662 assert(NumVectorInstructions == 0);
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001663
1664 // Update the threshold based on callsite properties
1665 updateThreshold(CS, F);
1666
Chandler Carrutha004f222015-05-27 02:49:05 +00001667 // Speculatively apply all possible bonuses to Threshold. If cost exceeds
1668  // this Threshold at any time, and cost cannot decrease, we can stop processing
1669 // the rest of the function body.
Easwaran Raman51b809b2017-07-28 21:47:36 +00001670 Threshold += (SingleBBBonus + VectorBonus);
Chandler Carrutha004f222015-05-27 02:49:05 +00001671
Xinliang David Li351d9b02017-05-02 05:38:41 +00001672 // Give out bonuses for the callsite, as the instructions setting them up
1673 // will be gone after inlining.
1674 Cost -= getCallsiteCost(CS, DL);
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001675
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001676 // If this function uses the coldcc calling convention, prefer not to inline
1677 // it.
1678 if (F.getCallingConv() == CallingConv::Cold)
1679 Cost += InlineConstants::ColdccPenalty;
1680
1681 // Check if we're done. This can happen due to bonuses and penalties.
Haicheng Wu61995362017-08-25 19:00:33 +00001682 if (Cost >= Threshold && !ComputeFullInlineCost)
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001683 return false;
1684
Chandler Carruth0539c072012-03-31 12:42:41 +00001685 if (F.empty())
1686 return true;
1687
Davide Italiano9d939c82017-11-30 22:10:35 +00001688 Function *Caller = CS.getInstruction()->getFunction();
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001689 // Check if the caller function is recursive itself.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001690 for (User *U : Caller->users()) {
1691 CallSite Site(U);
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001692 if (!Site)
1693 continue;
1694 Instruction *I = Site.getInstruction();
Davide Italiano9d939c82017-11-30 22:10:35 +00001695 if (I->getFunction() == Caller) {
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001696 IsCallerRecursive = true;
1697 break;
1698 }
1699 }
1700
Chandler Carruth0539c072012-03-31 12:42:41 +00001701 // Populate our simplified values by mapping from function arguments to call
1702 // arguments with known important simplifications.
1703 CallSite::arg_iterator CAI = CS.arg_begin();
1704 for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
1705 FAI != FAE; ++FAI, ++CAI) {
1706 assert(CAI != CS.arg_end());
1707 if (Constant *C = dyn_cast<Constant>(CAI))
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001708 SimplifiedValues[&*FAI] = C;
Chandler Carruth0539c072012-03-31 12:42:41 +00001709
1710 Value *PtrArg = *CAI;
1711 if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001712 ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());
Chandler Carruth0539c072012-03-31 12:42:41 +00001713
1714 // We can SROA any pointer arguments derived from alloca instructions.
1715 if (isa<AllocaInst>(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001716 SROAArgValues[&*FAI] = PtrArg;
Chandler Carruth0539c072012-03-31 12:42:41 +00001717 SROAArgCosts[PtrArg] = 0;
1718 }
1719 }
1720 }
1721 NumConstantArgs = SimplifiedValues.size();
1722 NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
1723 NumAllocaArgs = SROAArgValues.size();
1724
Hal Finkel57f03dd2014-09-07 13:49:57 +00001725 // FIXME: If a caller has multiple calls to a callee, we end up recomputing
1726 // the ephemeral values multiple times (and they're completely determined by
1727 // the callee, so this is purely duplicate work).
1728 SmallPtrSet<const Value *, 32> EphValues;
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001729 CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);
Hal Finkel57f03dd2014-09-07 13:49:57 +00001730
Chandler Carruth0539c072012-03-31 12:42:41 +00001731 // The worklist of live basic blocks in the callee *after* inlining. We avoid
1732 // adding basic blocks of the callee which can be proven to be dead for this
1733 // particular call site in order to get more accurate cost estimates. This
1734 // requires a somewhat heavyweight iteration pattern: we need to walk the
1735 // basic blocks in a breadth-first order as we insert live successors. To
1736  // accomplish this, and because we usually exit early after crossing our
1737  // threshold, we use a SetVector optimized for small sizes.
1738 typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
Chad Rosier567556a2016-04-28 14:47:23 +00001739 SmallPtrSet<BasicBlock *, 16>>
1740 BBSetVector;
Chandler Carruth0539c072012-03-31 12:42:41 +00001741 BBSetVector BBWorklist;
1742 BBWorklist.insert(&F.getEntryBlock());
Easwaran Raman51b809b2017-07-28 21:47:36 +00001743 bool SingleBB = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001744 // Note that we *must not* cache the size, this loop grows the worklist.
1745 for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
1746 // Bail out the moment we cross the threshold. This means we'll under-count
1747 // the cost, but only when undercounting doesn't matter.
Haicheng Wu61995362017-08-25 19:00:33 +00001748 if (Cost >= Threshold && !ComputeFullInlineCost)
Chandler Carruth0539c072012-03-31 12:42:41 +00001749 break;
1750
1751 BasicBlock *BB = BBWorklist[Idx];
1752 if (BB->empty())
Chandler Carruth4d1d34f2012-03-14 23:19:53 +00001753 continue;
Dan Gohman4552e3c2009-10-13 18:30:07 +00001754
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001755 // Disallow inlining a blockaddress. A blockaddress only has defined
1756 // behavior for an indirect branch in the same function, and we do not
1757 // currently support inlining indirect branches. But, the inliner may not
1758 // see an indirect branch that ends up being dead code at a particular call
1759 // site. If the blockaddress escapes the function, e.g., via a global
1760 // variable, inlining may lead to an invalid cross-function reference.
1761 if (BB->hasAddressTaken())
1762 return false;
1763
Chandler Carruth0539c072012-03-31 12:42:41 +00001764 // Analyze the cost of this block. If we blow through the threshold, this
1765    // returns false, and we can bail out.
Easwaran Ramand295b002016-04-13 21:20:22 +00001766 if (!analyzeBlock(BB, EphValues))
1767 return false;
Eric Christopher46308e62011-02-01 01:16:32 +00001768
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001769 TerminatorInst *TI = BB->getTerminator();
1770
Chandler Carruth0539c072012-03-31 12:42:41 +00001771 // Add in the live successors by first checking whether we have terminator
1772 // that may be simplified based on the values simplified by this call.
1773 if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
1774 if (BI->isConditional()) {
1775 Value *Cond = BI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001776 if (ConstantInt *SimpleCond =
1777 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Haicheng Wu3739e142017-12-14 14:36:18 +00001778 BasicBlock *NextBB = BI->getSuccessor(SimpleCond->isZero() ? 1 : 0);
1779 BBWorklist.insert(NextBB);
1780 KnownSuccessors[BB] = NextBB;
1781 findDeadBlocks(BB, NextBB);
Chandler Carruth0539c072012-03-31 12:42:41 +00001782 continue;
Eric Christopher46308e62011-02-01 01:16:32 +00001783 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001784 }
1785 } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
1786 Value *Cond = SI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001787 if (ConstantInt *SimpleCond =
1788 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Haicheng Wu3739e142017-12-14 14:36:18 +00001789 BasicBlock *NextBB = SI->findCaseValue(SimpleCond)->getCaseSuccessor();
1790 BBWorklist.insert(NextBB);
1791 KnownSuccessors[BB] = NextBB;
1792 findDeadBlocks(BB, NextBB);
Chandler Carruth0539c072012-03-31 12:42:41 +00001793 continue;
1794 }
1795 }
Eric Christopher46308e62011-02-01 01:16:32 +00001796
Chandler Carruth0539c072012-03-31 12:42:41 +00001797 // If we're unable to select a particular successor, just count all of
1798 // them.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001799 for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
1800 ++TIdx)
Chandler Carruth0539c072012-03-31 12:42:41 +00001801 BBWorklist.insert(TI->getSuccessor(TIdx));
1802
1803    // If we had any successors at this point, then post-inlining is likely to
1804 // have them as well. Note that we assume any basic blocks which existed
1805 // due to branches or switches which folded above will also fold after
1806 // inlining.
1807 if (SingleBB && TI->getNumSuccessors() > 1) {
1808 // Take off the bonus we applied to the threshold.
1809 Threshold -= SingleBBBonus;
1810 SingleBB = false;
Eric Christopher46308e62011-02-01 01:16:32 +00001811 }
1812 }
Andrew Trickcaa500b2011-10-01 01:27:56 +00001813
Easwaran Raman51b809b2017-07-28 21:47:36 +00001814 bool OnlyOneCallAndLocalLinkage =
1815 F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
Chandler Carruthcb5beb32013-12-12 11:59:26 +00001816 // If this is a noduplicate call, we can still inline as long as
James Molloy4f6fb952012-12-20 16:04:27 +00001817  // inlining this would cause the removal of the callee (so the instruction
1818 // is not actually duplicated, just moved).
1819 if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
1820 return false;
1821
Chandler Carrutha004f222015-05-27 02:49:05 +00001822 // We applied the maximum possible vector bonus at the beginning. Now,
1823 // subtract the excess bonus, if any, from the Threshold before
1824 // comparing against Cost.
1825 if (NumVectorInstructions <= NumInstructions / 10)
Easwaran Raman51b809b2017-07-28 21:47:36 +00001826 Threshold -= VectorBonus;
Chandler Carrutha004f222015-05-27 02:49:05 +00001827 else if (NumVectorInstructions <= NumInstructions / 2)
Easwaran Raman51b809b2017-07-28 21:47:36 +00001828    Threshold -= VectorBonus / 2;
Chandler Carruth0539c072012-03-31 12:42:41 +00001829
Hans Wennborg00ab73d2016-02-05 20:32:42 +00001830 return Cost < std::max(1, Threshold);
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001831}
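// Worked example with made-up numbers: say updateThreshold leaves Threshold at
// 200 with SingleBBBonus == 30 and VectorBonus == 100. The speculative bump
// raises the working threshold to 330. If the callee turns out to have more
// than one block, the 30 is taken back when the second successor is queued;
// if at most 10% of its instructions are vector operations the full 100 is
// taken back (half is taken back between 10% and 50%), so the final comparison
// "Cost < std::max(1, Threshold)" only keeps bonuses the callee earned.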
1832
Aaron Ballman615eb472017-10-15 14:32:27 +00001833#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
Chandler Carruth0539c072012-03-31 12:42:41 +00001834/// \brief Dump stats about this call's analysis.
Yaron Kereneb2a2542016-01-29 20:50:44 +00001835LLVM_DUMP_METHOD void CallAnalyzer::dump() {
Eric Christophera13839f2014-02-26 23:27:16 +00001836#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
Chandler Carruth0539c072012-03-31 12:42:41 +00001837 DEBUG_PRINT_STAT(NumConstantArgs);
1838 DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
1839 DEBUG_PRINT_STAT(NumAllocaArgs);
1840 DEBUG_PRINT_STAT(NumConstantPtrCmps);
1841 DEBUG_PRINT_STAT(NumConstantPtrDiffs);
1842 DEBUG_PRINT_STAT(NumInstructionsSimplified);
Chandler Carrutha004f222015-05-27 02:49:05 +00001843 DEBUG_PRINT_STAT(NumInstructions);
Chandler Carruth0539c072012-03-31 12:42:41 +00001844 DEBUG_PRINT_STAT(SROACostSavings);
1845 DEBUG_PRINT_STAT(SROACostSavingsLost);
James Molloy4f6fb952012-12-20 16:04:27 +00001846 DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
Chandler Carruth394e34f2014-01-31 22:32:32 +00001847 DEBUG_PRINT_STAT(Cost);
1848 DEBUG_PRINT_STAT(Threshold);
Chandler Carruth0539c072012-03-31 12:42:41 +00001849#undef DEBUG_PRINT_STAT
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001850}
Manman Renc3366cc2012-09-06 19:55:56 +00001851#endif
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001852
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001853/// \brief Test that there are no attribute conflicts between Caller and Callee
1854/// that prevent inlining.
1855static bool functionsHaveCompatibleAttributes(Function *Caller,
Eric Christopher4371b132015-07-02 01:11:47 +00001856 Function *Callee,
1857 TargetTransformInfo &TTI) {
Eric Christopherd566fb12015-07-29 22:09:48 +00001858 return TTI.areInlineCompatible(Caller, Callee) &&
Akira Hatanaka1cb242e2015-12-22 23:57:37 +00001859 AttributeFuncs::areInlineCompatible(*Caller, *Callee);
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001860}
1861
Xinliang David Li351d9b02017-05-02 05:38:41 +00001862int llvm::getCallsiteCost(CallSite CS, const DataLayout &DL) {
1863 int Cost = 0;
1864 for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
1865 if (CS.isByValArgument(I)) {
1866 // We approximate the number of loads and stores needed by dividing the
1867 // size of the byval type by the target's pointer size.
1868 PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
1869 unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
1870 unsigned PointerSize = DL.getPointerSizeInBits();
1871 // Ceiling division.
1872 unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;
1873
1874 // If it generates more than 8 stores it is likely to be expanded as an
1875 // inline memcpy so we take that as an upper bound. Otherwise we assume
1876 // one load and one store per word copied.
1877 // FIXME: The maxStoresPerMemcpy setting from the target should be used
1878 // here instead of a magic number of 8, but it's not available via
1879 // DataLayout.
1880 NumStores = std::min(NumStores, 8U);
1881
1882 Cost += 2 * NumStores * InlineConstants::InstrCost;
1883 } else {
1884 // For non-byval arguments subtract off one instruction per call
1885 // argument.
1886 Cost += InlineConstants::InstrCost;
1887 }
1888 }
1889 // The call instruction also disappears after inlining.
1890 Cost += InlineConstants::InstrCost + InlineConstants::CallPenalty;
1891 return Cost;
1892}
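// Worked example (hypothetical call site): passing a 64-byte struct byval on a
// target with 64-bit pointers gives TypeSize == 512 and PointerSize == 64, so
// NumStores == (512 + 63) / 64 == 8, exactly the cap, and that argument adds
// 2 * 8 * InlineConstants::InstrCost for its copy. Each remaining argument
// adds a single InstrCost, and the call instruction that disappears after
// inlining adds InstrCost + CallPenalty.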
1893
Sean Silvaab6a6832016-07-23 04:22:50 +00001894InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001895 CallSite CS, const InlineParams &Params, TargetTransformInfo &CalleeTTI,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001896 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman12585b02017-01-20 22:44:04 +00001897 Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001898 ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE) {
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001899 return getInlineCost(CS, CS.getCalledFunction(), Params, CalleeTTI,
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001900 GetAssumptionCache, GetBFI, PSI, ORE);
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001901}
1902
Sean Silvaab6a6832016-07-23 04:22:50 +00001903InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001904 CallSite CS, Function *Callee, const InlineParams &Params,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001905 TargetTransformInfo &CalleeTTI,
1906 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman12585b02017-01-20 22:44:04 +00001907 Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001908 ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE) {
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001909
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001910 // Cannot inline indirect calls.
1911 if (!Callee)
1912 return llvm::InlineCost::getNever();
1913
1914 // Calls to functions with always-inline attributes should be inlined
1915 // whenever possible.
Peter Collingbourne68a88972014-05-19 18:25:54 +00001916 if (CS.hasFnAttr(Attribute::AlwaysInline)) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001917 if (isInlineViable(*Callee))
1918 return llvm::InlineCost::getAlways();
1919 return llvm::InlineCost::getNever();
1920 }
1921
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001922 // Never inline functions with conflicting attributes (unless callee has
1923 // always-inline attribute).
Chad Rosier5ce28f42017-08-02 14:50:27 +00001924 Function *Caller = CS.getCaller();
1925 if (!functionsHaveCompatibleAttributes(Caller, Callee, CalleeTTI))
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001926 return llvm::InlineCost::getNever();
1927
Paul Robinsondcbe35b2013-11-18 21:44:03 +00001928 // Don't inline this call if the caller has the optnone attribute.
Chad Rosier5ce28f42017-08-02 14:50:27 +00001929 if (Caller->hasFnAttribute(Attribute::OptimizeNone))
Paul Robinsondcbe35b2013-11-18 21:44:03 +00001930 return llvm::InlineCost::getNever();
1931
Sanjoy Das5ce32722016-04-08 00:48:30 +00001932 // Don't inline functions which can be interposed at link-time. Don't inline
1933 // functions marked noinline or call sites marked noinline.
Craig Topper107b1872016-12-09 02:18:04 +00001934 // Note: inlining non-exact non-interposable functions is fine, since we know
Sanjoy Das5ce32722016-04-08 00:48:30 +00001935 // we have *a* correct implementation of the source level function.
Chad Rosier567556a2016-04-28 14:47:23 +00001936 if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
1937 CS.isNoInline())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001938 return llvm::InlineCost::getNever();
1939
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001940 DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
Chad Rosier4eb18742017-08-21 19:56:46 +00001941 << "... (caller:" << Caller->getName() << ")\n");
Andrew Trickcaa500b2011-10-01 01:27:56 +00001942
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001943 CallAnalyzer CA(CalleeTTI, GetAssumptionCache, GetBFI, PSI, ORE, *Callee, CS,
Easwaran Raman12585b02017-01-20 22:44:04 +00001944 Params);
Chandler Carruth0539c072012-03-31 12:42:41 +00001945 bool ShouldInline = CA.analyzeCall(CS);
Dan Gohman4552e3c2009-10-13 18:30:07 +00001946
Chandler Carruth0539c072012-03-31 12:42:41 +00001947 DEBUG(CA.dump());
1948
1949 // Check if there was a reason to force inlining or no inlining.
1950 if (!ShouldInline && CA.getCost() < CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001951 return InlineCost::getNever();
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001952 if (ShouldInline && CA.getCost() >= CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001953 return InlineCost::getAlways();
Andrew Trickcaa500b2011-10-01 01:27:56 +00001954
Chandler Carruth0539c072012-03-31 12:42:41 +00001955 return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
Dan Gohman4552e3c2009-10-13 18:30:07 +00001956}
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001957
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001958bool llvm::isInlineViable(Function &F) {
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001959 bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001960 for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001961 // Disallow inlining of functions which contain indirect branches or
1962 // blockaddresses.
1963 if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001964 return false;
1965
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001966 for (auto &II : *BI) {
1967 CallSite CS(&II);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001968 if (!CS)
1969 continue;
1970
1971 // Disallow recursive calls.
1972 if (&F == CS.getCalledFunction())
1973 return false;
1974
1975 // Disallow calls which expose returns-twice to a function not previously
1976 // attributed as such.
1977 if (!ReturnsTwice && CS.isCall() &&
1978 cast<CallInst>(CS.getInstruction())->canReturnTwice())
1979 return false;
Reid Kleckner223de262015-04-14 20:38:14 +00001980
Reid Kleckner60381792015-07-07 22:25:32 +00001981 // Disallow inlining functions that call @llvm.localescape. Doing this
Reid Kleckner223de262015-04-14 20:38:14 +00001982 // correctly would require major changes to the inliner.
1983 if (CS.getCalledFunction() &&
1984 CS.getCalledFunction()->getIntrinsicID() ==
Reid Kleckner60381792015-07-07 22:25:32 +00001985 llvm::Intrinsic::localescape)
Reid Kleckner223de262015-04-14 20:38:14 +00001986 return false;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001987 }
1988 }
1989
1990 return true;
1991}
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001992
1993// APIs to create InlineParams based on command line flags and/or other
1994// parameters.
1995
1996InlineParams llvm::getInlineParams(int Threshold) {
1997 InlineParams Params;
1998
1999 // This field is the threshold to use for a callee by default. This is
2000 // derived from one or more of:
2001 // * optimization or size-optimization levels,
2002 // * a value passed to createFunctionInliningPass function, or
2003 // * the -inline-threshold flag.
2004 // If the -inline-threshold flag is explicitly specified, that is used
2005 // irrespective of anything else.
2006 if (InlineThreshold.getNumOccurrences() > 0)
2007 Params.DefaultThreshold = InlineThreshold;
2008 else
2009 Params.DefaultThreshold = Threshold;
2010
2011 // Set the HintThreshold knob from the -inlinehint-threshold.
2012 Params.HintThreshold = HintThreshold;
2013
2014 // Set the HotCallSiteThreshold knob from the -hot-callsite-threshold.
2015 Params.HotCallSiteThreshold = HotCallSiteThreshold;
2016
Easwaran Raman974d4ee2017-08-03 22:23:33 +00002017 // If the -locally-hot-callsite-threshold is explicitly specified, use it to
2018 // populate LocallyHotCallSiteThreshold. Later, we populate
2019 // Params.LocallyHotCallSiteThreshold from -locally-hot-callsite-threshold if
2020 // we know that optimization level is O3 (in the getInlineParams variant that
2021 // takes the opt and size levels).
2022 // FIXME: Remove this check (and make the assignment unconditional) after
2023 // addressing size regression issues at O2.
2024 if (LocallyHotCallSiteThreshold.getNumOccurrences() > 0)
2025 Params.LocallyHotCallSiteThreshold = LocallyHotCallSiteThreshold;
2026
Easwaran Raman12585b02017-01-20 22:44:04 +00002027 // Set the ColdCallSiteThreshold knob from the -inline-cold-callsite-threshold.
2028 Params.ColdCallSiteThreshold = ColdCallSiteThreshold;
2029
Easwaran Raman1c57cc22016-08-10 00:48:04 +00002030 // Set the OptMinSizeThreshold and OptSizeThreshold params only if the
Easwaran Raman1c57cc22016-08-10 00:48:04 +00002031  // -inline-threshold commandline option is not explicitly given. If that
2032 // option is present, then its value applies even for callees with size and
2033 // minsize attributes.
2034 // If the -inline-threshold is not specified, set the ColdThreshold from the
2035 // -inlinecold-threshold even if it is not explicitly passed. If
2036 // -inline-threshold is specified, then -inlinecold-threshold needs to be
2037 // explicitly specified to set the ColdThreshold knob
2038 if (InlineThreshold.getNumOccurrences() == 0) {
2039 Params.OptMinSizeThreshold = InlineConstants::OptMinSizeThreshold;
2040 Params.OptSizeThreshold = InlineConstants::OptSizeThreshold;
2041 Params.ColdThreshold = ColdThreshold;
2042 } else if (ColdThreshold.getNumOccurrences() > 0) {
2043 Params.ColdThreshold = ColdThreshold;
2044 }
2045 return Params;
2046}
2047
2048InlineParams llvm::getInlineParams() {
2049 return getInlineParams(InlineThreshold);
2050}
2051
2052// Compute the default threshold for inlining based on the opt level and the
2053// size opt level.
2054static int computeThresholdFromOptLevels(unsigned OptLevel,
2055 unsigned SizeOptLevel) {
2056 if (OptLevel > 2)
2057 return InlineConstants::OptAggressiveThreshold;
2058 if (SizeOptLevel == 1) // -Os
2059 return InlineConstants::OptSizeThreshold;
2060 if (SizeOptLevel == 2) // -Oz
2061 return InlineConstants::OptMinSizeThreshold;
2062 return InlineThreshold;
2063}
2064
2065InlineParams llvm::getInlineParams(unsigned OptLevel, unsigned SizeOptLevel) {
Easwaran Raman974d4ee2017-08-03 22:23:33 +00002066 auto Params =
2067 getInlineParams(computeThresholdFromOptLevels(OptLevel, SizeOptLevel));
2068 // At O3, use the value of -locally-hot-callsite-threshold option to populate
2069 // Params.LocallyHotCallSiteThreshold. Below O3, this flag has effect only
2070 // when it is specified explicitly.
2071 if (OptLevel > 2)
2072 Params.LocallyHotCallSiteThreshold = LocallyHotCallSiteThreshold;
2073 return Params;
Easwaran Raman1c57cc22016-08-10 00:48:04 +00002074}
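// Usage sketch (threshold values are whatever InlineConstants defines in this
// build): a pipeline built at -O3 calls getInlineParams(3, 0) and gets the
// aggressive default threshold plus a populated LocallyHotCallSiteThreshold;
// SizeOptLevel 1 (-Os) and 2 (-Oz) select the size-oriented thresholds; any
// other combination falls back to the -inline-threshold default, and the
// remaining knobs are filled in exactly as in getInlineParams(int) above.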