//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

static cl::opt<int> InlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

static cl::opt<int>
    ColdCallSiteThreshold("inline-cold-callsite-threshold", cl::Hidden,
                          cl::init(45),
                          cl::desc("Threshold for inlining cold callsites"));

// We introduce this threshold to help performance of instrumentation based
// PGO before we actually hook up inliner with analysis passes such as BPI and
// BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(45),
    cl::desc("Threshold for inlining functions with cold attribute"));

static cl::opt<int>
    HotCallSiteThreshold("hot-callsite-threshold", cl::Hidden, cl::init(3000),
                         cl::ZeroOrMore,
                         cl::desc("Threshold for hot callsites "));

static cl::opt<int> LocallyHotCallSiteThreshold(
    "locally-hot-callsite-threshold", cl::Hidden, cl::init(525), cl::ZeroOrMore,
    cl::desc("Threshold for locally hot callsites "));

static cl::opt<int> ColdCallSiteRelFreq(
    "cold-callsite-rel-freq", cl::Hidden, cl::init(2), cl::ZeroOrMore,
    cl::desc("Maximum block frequency, expressed as a percentage of caller's "
             "entry frequency, for a callsite to be cold in the absence of "
             "profile information."));

static cl::opt<int> HotCallSiteRelFreq(
    "hot-callsite-rel-freq", cl::Hidden, cl::init(60), cl::ZeroOrMore,
    cl::desc("Minimum block frequency, expressed as a multiple of caller's "
             "entry frequency, for a callsite to be hot in the absence of "
             "profile information."));

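// Note: these thresholds and frequencies are ordinary cl::opt flags, so they
// can be overridden from the command line, e.g. `opt -inline
// -inline-threshold=500 input.ll` or, from clang, `-mllvm
// -inline-threshold=500` (the value 500 and the file name are illustrative).
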
namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// Getter for the cache of @llvm.assume intrinsics.
  std::function<AssumptionCache &(Function &)> &GetAssumptionCache;

  /// Getter for BlockFrequencyInfo
  Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  /// The called function.
  Function &F;

  // Cache the DataLayout since we use it a lot.
  const DataLayout &DL;

  /// The candidate callsite being analyzed. Please do not use this to do
  /// analysis in the caller function; we want the inline cost query to be
  /// easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  /// Tunable parameters that control the analysis.
  const InlineParams &Params;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int VectorBonus, TenPercentVectorBonus;
  // Bonus to be applied when the callee has only one reachable basic block.
  int SingleBBBonus;

  /// While we walk the potentially-inlined instructions, we build up and
  /// maintain a mapping of simplified values specific to this callsite. The
  /// idea is to propagate any special information we have about arguments to
  /// this call through the inlinable section of the function, and account for
  /// likely simplifications post-inlining. The most important aspect we track
  /// is CFG altering simplifications -- when we prove a basic block dead, that
  /// can cause dramatic shifts in the cost of inlining a function.
  DenseMap<Value *, Constant *> SimplifiedValues;

  /// Keep track of the values which map back (through function arguments) to
  /// allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  /// The mapping of caller Alloca values to their accumulated cost savings. If
  /// we have to disable SROA for one of the allocas, this tells us how much
  /// cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  /// Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPFree(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  template <typename Callable>
  bool simplifyInstruction(Instruction &I, Callable Evaluate);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  /// Return true if \p CS is a cold callsite.
  bool isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI);

  /// Return a higher threshold if \p CS is a hot callsite.
  Optional<int> getHotCallSiteThreshold(CallSite CS,
                                        BlockFrequencyInfo *CallerBFI);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *);
  void visit(Module &);
  void visit(Function *);
  void visit(Function &);
  void visit(BasicBlock *);
  void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitAnd(BinaryOperator &I);
  bool visitOr(BinaryOperator &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI,
               std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
               Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI,
               ProfileSummaryInfo *PSI, Function &Callee, CallSite CSArg,
               const InlineParams &Params)
      : TTI(TTI), GetAssumptionCache(GetAssumptionCache), GetBFI(GetBFI),
        PSI(PSI), F(Callee), DL(F.getParent()->getDataLayout()),
        CandidateCS(CSArg), Params(Params), Threshold(Params.DefaultThreshold),
        Cost(0), IsCallerRecursive(false), IsRecursiveCall(false),
        ExposesReturnsTwice(false), HasDynamicAlloca(false),
        ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
        HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
        NumVectorInstructions(0), VectorBonus(0), SingleBBBonus(0),
        NumConstantArgs(0), NumConstantOffsetPtrArgs(0), NumAllocaArgs(0),
        NumConstantPtrCmps(0), NumConstantPtrDiffs(0),
        NumInstructionsSimplified(0), SROACostSavings(0),
        SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero())
      continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = GTI.getStructTypeOrNull()) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}
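// For example, a GEP indexing field 1 of a struct { i32, i64 } adds that
// field's StructLayout offset (8 bytes under a typical 64-bit DataLayout),
// while an index of 3 into an array of i32 adds 3 * 4 bytes; both figures are
// illustrative and depend on the module's DataLayout.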

/// \brief Use TTI to check whether a GEP is free.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPFree(GetElementPtrInst &GEP) {
  SmallVector<Value *, 4> Operands;
  Operands.push_back(GEP.getOperand(0));
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (Constant *SimpleOp = SimplifiedValues.lookup(*I))
      Operands.push_back(SimpleOp);
    else
      Operands.push_back(*I);
  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&GEP, Operands);
}

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded, and disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return isGEPFree(I);
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here, we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  // Lambda to check whether a GEP's indices are all constant.
  auto IsGEPOffsetConstant = [&](GetElementPtrInst &GEP) {
    for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
      if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
        return false;
    return true;
  };

  if (IsGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return isGEPFree(I);
}

/// Simplify \p I if its operands are constants and update SimplifiedValues.
/// \p Evaluate is a callable specific to instruction type that evaluates the
/// instruction when all the operands are constants.
template <typename Callable>
bool CallAnalyzer::simplifyInstruction(Instruction &I, Callable Evaluate) {
  SmallVector<Constant *, 2> COps;
  for (Value *Op : I.operands()) {
    Constant *COp = dyn_cast<Constant>(Op);
    if (!COp)
      COp = SimplifiedValues.lookup(Op);
    if (!COp)
      return false;
    COps.push_back(COp);
  }
  auto *C = Evaluate(COps);
  if (!C)
    return false;
  SimplifiedValues[&I] = C;
  return true;
}
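// Each visitor below supplies the folding step appropriate to its instruction
// kind; for example, visitBitCast passes a lambda that simply calls
// ConstantExpr::getBitCast(COps[0], I.getType()).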

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getBitCast(COps[0], I.getType());
      }))
    return true;

  // Track base/offsets through casts
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getPtrToInt(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getIntToPtr(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCast(I.getOpcode(), COps[0], I.getType());
      }))
    return true;

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantFoldInstOperands(&I, COps[0], DL);
      }))
    return true;

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  return CandidateCS.paramHasAttr(A->getArgNo(), Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in below scenario inlining hot_call_X() may be
  // beneficial :
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In future, we should elaborate this based on BPI and BFI in more
  // general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

bool CallAnalyzer::isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI) {
  // If global profile summary is available, then callsite's coldness is
  // determined based on that.
  if (PSI->hasProfileSummary())
    return PSI->isColdCallSite(CS, CallerBFI);
  if (!CallerBFI)
    return false;

  // In the absence of global profile summary, determine if the callsite is cold
  // relative to caller's entry. We could potentially cache the computation of
  // scaled entry frequency, but the added complexity is not worth it unless
  // this scaling shows up high in the profiles.
  const BranchProbability ColdProb(ColdCallSiteRelFreq, 100);
  auto CallSiteBB = CS.getInstruction()->getParent();
  auto CallSiteFreq = CallerBFI->getBlockFreq(CallSiteBB);
  auto CallerEntryFreq =
      CallerBFI->getBlockFreq(&(CS.getCaller()->getEntryBlock()));
  return CallSiteFreq < CallerEntryFreq * ColdProb;
}
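// With the default cold-callsite-rel-freq of 2, the check above treats a
// callsite as cold when its block frequency is below 2% of the caller's entry
// frequency.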

Optional<int>
CallAnalyzer::getHotCallSiteThreshold(CallSite CS,
                                      BlockFrequencyInfo *CallerBFI) {
  // If global profile summary is available, then callsite's hotness is
  // determined based on that.

  auto HotCallSiteThreshold = Params.HotCallSiteThreshold;
  if (PSI->hasProfileSummary() && PSI->isHotCallSite(CS, CallerBFI))
    return HotCallSiteThreshold;
  if (!CallerBFI)
    return None;

  HotCallSiteThreshold = Params.LocallyHotCallSiteThreshold;
  if (!HotCallSiteThreshold)
    return None;

  // In the absence of global profile summary, determine if the callsite is hot
  // relative to caller's entry. We could potentially cache the computation of
  // scaled entry frequency, but the added complexity is not worth it unless
  // this scaling shows up high in the profiles.
  auto CallSiteBB = CS.getInstruction()->getParent();
  auto CallSiteFreq = CallerBFI->getBlockFreq(CallSiteBB).getFrequency();
  auto CallerEntryFreq = CallerBFI->getEntryFreq();
  if (CallSiteFreq >= CallerEntryFreq * HotCallSiteRelFreq)
    return HotCallSiteThreshold;
  return None;
}
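// Similarly, with the default hot-callsite-rel-freq of 60, a callsite whose
// frequency is at least 60x the caller's entry frequency is treated as locally
// hot and gets the LocallyHotCallSiteThreshold from Params (the corresponding
// flag defaults to 525) rather than the regular hot-callsite threshold.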

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();

  // return min(A, B) if B is valid.
  auto MinIfValid = [](int A, Optional<int> B) {
    return B ? std::min(A, B.getValue()) : A;
  };

  // return max(A, B) if B is valid.
  auto MaxIfValid = [](int A, Optional<int> B) {
    return B ? std::max(A, B.getValue()) : A;
  };

  // Various bonus percentages. These are multiplied by Threshold to get the
  // bonus values.
  // SingleBBBonus: This bonus is applied if the callee has a single reachable
  // basic block at the given callsite context. This is speculatively applied
  // and withdrawn if more than one basic block is seen.
  //
  // Vector bonuses: We want to more aggressively inline vector-dense kernels
  // and apply this bonus based on the percentage of vector instructions. A
  // bonus is applied if the vector instructions exceed 50% and half that amount
  // is applied if it exceeds 10%. Note that these bonuses are somewhat
  // arbitrary and evolved over time by accident as much as because they are
  // principled bonuses.
  // FIXME: It would be nice to base the bonus values on something more
  // scientific.
  //
  // LastCallToStaticBonus: This large bonus is applied to ensure the inlining
  // of the last call to a static function as inlining such functions is
  // guaranteed to reduce code size.
  //
  // These bonus percentages may be set to 0 based on properties of the caller
  // and the callsite.
  int SingleBBBonusPercent = 50;
  int VectorBonusPercent = 150;
  int LastCallToStaticBonus = InlineConstants::LastCallToStaticBonus;

  // Lambda to set all the above bonus and bonus percentages to 0.
  auto DisallowAllBonuses = [&]() {
    SingleBBBonusPercent = 0;
    VectorBonusPercent = 0;
    LastCallToStaticBonus = 0;
  };

  // Use the OptMinSizeThreshold or OptSizeThreshold knob if they are available
  // and reduce the threshold if the caller has the necessary attribute.
  if (Caller->optForMinSize()) {
    Threshold = MinIfValid(Threshold, Params.OptMinSizeThreshold);
    // For minsize, we want to disable the single BB bonus and the vector
    // bonuses, but not the last-call-to-static bonus. Inlining the last call to
    // a static function will, at the minimum, eliminate the parameter setup and
    // call/return instructions.
    SingleBBBonusPercent = 0;
    VectorBonusPercent = 0;
  } else if (Caller->optForSize())
    Threshold = MinIfValid(Threshold, Params.OptSizeThreshold);

  // Adjust the threshold based on inlinehint attribute and profile based
  // hotness information if the caller does not have MinSize attribute.
  if (!Caller->optForMinSize()) {
    if (Callee.hasFnAttribute(Attribute::InlineHint))
      Threshold = MaxIfValid(Threshold, Params.HintThreshold);
    if (PSI) {
      BlockFrequencyInfo *CallerBFI = GetBFI ? &((*GetBFI)(*Caller)) : nullptr;
      // FIXME: After switching to the new passmanager, simplify the logic below
      // by checking only the callsite hotness/coldness. The check for CallerBFI
      // exists only because we do not have BFI available with the old PM.
      //
      // Use callee's hotness information only if we have no way of determining
      // callsite's hotness information. Callsite hotness can be determined if
      // sample profile is used (which adds hotness metadata to calls) or if
      // caller's BlockFrequencyInfo is available.
      if (CallerBFI || PSI->hasSampleProfile()) {
        auto HotCallSiteThreshold = getHotCallSiteThreshold(CS, CallerBFI);
        if (!Caller->optForSize() && HotCallSiteThreshold) {
          DEBUG(dbgs() << "Hot callsite.\n");
          // FIXME: This should update the threshold only if it exceeds the
          // current threshold, but AutoFDO + ThinLTO currently relies on this
          // behavior to prevent inlining of hot callsites during ThinLTO
          // compile phase.
          Threshold = HotCallSiteThreshold.getValue();
        } else if (isColdCallSite(CS, CallerBFI)) {
          DEBUG(dbgs() << "Cold callsite.\n");
          // Do not apply bonuses for a cold callsite including the
          // LastCallToStatic bonus. While this bonus might result in code size
          // reduction, it can cause the size of a non-cold caller to increase
          // preventing it from being inlined.
          DisallowAllBonuses();
          Threshold = MinIfValid(Threshold, Params.ColdCallSiteThreshold);
        }
      } else {
        if (PSI->isFunctionEntryHot(&Callee)) {
          DEBUG(dbgs() << "Hot callee.\n");
          // If callsite hotness can not be determined, we may still know
          // that the callee is hot and treat it as a weaker hint for threshold
          // increase.
          Threshold = MaxIfValid(Threshold, Params.HintThreshold);
        } else if (PSI->isFunctionEntryCold(&Callee)) {
          DEBUG(dbgs() << "Cold callee.\n");
          // Do not apply bonuses for a cold callee including the
          // LastCallToStatic bonus. While this bonus might result in code size
          // reduction, it can cause the size of a non-cold caller to increase
          // preventing it from being inlined.
          DisallowAllBonuses();
          Threshold = MinIfValid(Threshold, Params.ColdThreshold);
        }
      }
    }
  }

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();

  SingleBBBonus = Threshold * SingleBBBonusPercent / 100;
  VectorBonus = Threshold * VectorBonusPercent / 100;

  bool OnlyOneCallAndLocalLinkage =
      F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
  // If there is only one call of the function, and it has internal linkage,
  // the cost of inlining it drops dramatically. It may seem odd to update
  // Cost in updateThreshold, but the bonus depends on the logic in this method.
  if (OnlyOneCallAndLocalLinkage)
    Cost -= LastCallToStaticBonus;
}
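// Worked example: with the default threshold of 225, no size attributes or
// profile data, and an inlining threshold multiplier of 1, the bonuses come
// out to SingleBBBonus = 225 * 50 / 100 = 112 and
// VectorBonus = 225 * 150 / 100 = 337.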

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCompare(I.getPredicate(), COps[0], COps[1]);
      }))
    return true;

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitOr(BinaryOperator &I) {
  // This is necessary because the generic simplify instruction only works if
  // both operands are constants.
  for (unsigned i = 0; i < 2; ++i) {
    if (ConstantInt *C = dyn_cast_or_null<ConstantInt>(
            SimplifiedValues.lookup(I.getOperand(i))))
      if (C->isAllOnesValue()) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }
  return Base::visitOr(I);
}

bool CallAnalyzer::visitAnd(BinaryOperator &I) {
  // This is necessary because the generic simplify instruction only works if
  // both operands are constants.
  for (unsigned i = 0; i < 2; ++i) {
    if (ConstantInt *C = dyn_cast_or_null<ConstantInt>(
            SimplifiedValues.lookup(I.getOperand(i))))
      if (C->isZero()) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }
  return Base::visitAnd(I);
}

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  auto Evaluate = [&](SmallVectorImpl<Constant *> &COps) {
    Value *SimpleV = nullptr;
    if (auto FI = dyn_cast<FPMathOperator>(&I))
      SimpleV = SimplifyFPBinOp(I.getOpcode(), COps[0], COps[1],
                                FI->getFastMathFlags(), DL);
    else
      SimpleV = SimplifyBinOp(I.getOpcode(), COps[0], COps[1], DL);
    return dyn_cast_or_null<Constant>(SimpleV);
  };

  if (simplifyInstruction(I, Evaluate))
    return true;

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getExtractValue(COps[0], I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getInsertValue(/*AggregateOperand*/ COps[0],
                                            /*InsertedValueOperand*/ COps[1],
                                            I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(CS, F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
       ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(CS, F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}
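// For example, a call such as @llvm.fabs.f64 whose argument is (or has been
// simplified to) a constant folds to a constant here, and the visitor below
// then treats that call site as free.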
1060
Chandler Carruth0539c072012-03-31 12:42:41 +00001061bool CallAnalyzer::visitCallSite(CallSite CS) {
Chandler Carruth37d25de2013-12-13 08:00:01 +00001062 if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001063 !F.hasFnAttribute(Attribute::ReturnsTwice)) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001064 // This aborts the entire analysis.
1065 ExposesReturnsTwice = true;
1066 return false;
1067 }
Chad Rosier567556a2016-04-28 14:47:23 +00001068 if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
James Molloy4f6fb952012-12-20 16:04:27 +00001069 ContainsNoDuplicateCall = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001070
Chandler Carruth0539c072012-03-31 12:42:41 +00001071 if (Function *F = CS.getCalledFunction()) {
Chandler Carruth753e21d2012-12-28 14:23:32 +00001072 // When we have a concrete function, first try to simplify it directly.
1073 if (simplifyCallSite(F, CS))
1074 return true;
1075
1076 // Next check if it is an intrinsic we know about.
1077 // FIXME: Lift this into part of the InstVisitor.
1078 if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
1079 switch (II->getIntrinsicID()) {
1080 default:
1081 return Base::visitCallSite(CS);
1082
Peter Collingbourne7dd8dbf2016-04-22 21:18:02 +00001083 case Intrinsic::load_relative:
1084 // This is normally lowered to 4 LLVM instructions.
1085 Cost += 3 * InlineConstants::InstrCost;
1086 return false;
1087
Chandler Carruth753e21d2012-12-28 14:23:32 +00001088 case Intrinsic::memset:
1089 case Intrinsic::memcpy:
1090 case Intrinsic::memmove:
1091 // SROA can usually chew through these intrinsics, but they aren't free.
1092 return false;
Reid Kleckner60381792015-07-07 22:25:32 +00001093 case Intrinsic::localescape:
Reid Kleckner223de262015-04-14 20:38:14 +00001094 HasFrameEscape = true;
1095 return false;
Chandler Carruth753e21d2012-12-28 14:23:32 +00001096 }
1097 }
1098
Chandler Carruth0539c072012-03-31 12:42:41 +00001099 if (F == CS.getInstruction()->getParent()->getParent()) {
1100 // This flag will fully abort the analysis, so don't bother with anything
1101 // else.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001102 IsRecursiveCall = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001103 return false;
1104 }
1105
Chandler Carruth0ba8db42013-01-22 11:26:02 +00001106 if (TTI.isLoweredToCall(F)) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001107 // We account for the average 1 instruction per call argument setup
1108 // here.
1109 Cost += CS.arg_size() * InlineConstants::InstrCost;
1110
1111 // Everything other than inline ASM will also have a significant cost
1112 // merely from making the call.
1113 if (!isa<InlineAsm>(CS.getCalledValue()))
1114 Cost += InlineConstants::CallPenalty;
1115 }
1116
1117 return Base::visitCallSite(CS);
1118 }
1119
1120 // Otherwise we're in a very special case -- an indirect function call. See
1121 // if we can be particularly clever about this.
1122 Value *Callee = CS.getCalledValue();
1123
1124 // First, pay the price of the argument setup. We account for the average
1125 // 1 instruction per call argument setup here.
1126 Cost += CS.arg_size() * InlineConstants::InstrCost;
1127
1128 // Next, check if this happens to be an indirect function call to a known
1129 // function in this inline context. If not, we've done all we can.
1130 Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
1131 if (!F)
1132 return Base::visitCallSite(CS);
1133
1134 // If we have a constant that we are calling as a function, we can peer
1135 // through it and see the function target. This happens not infrequently
1136 // during devirtualization and so we want to give it a hefty bonus for
1137 // inlining, but cap that bonus in the event that inlining wouldn't pan
1138 // out. Pretend to inline the function, with a custom threshold.
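  // The credit applied below is the unused portion of the nested analysis'
  // threshold: illustratively, if the nested CallAnalyzer finishes with a cost
  // of 40 against a threshold of 100, this call site receives a 60-point
  // discount (clamped so it never goes negative).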
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001139 auto IndirectCallParams = Params;
1140 IndirectCallParams.DefaultThreshold = InlineConstants::IndirectCallThreshold;
Easwaran Raman12585b02017-01-20 22:44:04 +00001141 CallAnalyzer CA(TTI, GetAssumptionCache, GetBFI, PSI, *F, CS,
1142 IndirectCallParams);
Chandler Carruth0539c072012-03-31 12:42:41 +00001143 if (CA.analyzeCall(CS)) {
1144 // We were able to inline the indirect call! Subtract the cost from the
Easwaran Raman6d90d9f2015-12-07 21:21:20 +00001145 // threshold to get the bonus we want to apply, but don't go below zero.
1146 Cost -= std::max(0, CA.getThreshold() - CA.getCost());
Chandler Carruth0539c072012-03-31 12:42:41 +00001147 }
1148
1149 return Base::visitCallSite(CS);
1150}
1151
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001152bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
1153 // At least one return instruction will be free after inlining.
1154 bool Free = !HasReturn;
1155 HasReturn = true;
1156 return Free;
1157}
1158
1159bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
1160 // We model unconditional branches as essentially free -- they really
1161 // shouldn't exist at all, but handling them makes the behavior of the
1162 // inliner more regular and predictable. Interestingly, conditional branches
1163 // which will fold away are also free.
1164 return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
1165 dyn_cast_or_null<ConstantInt>(
1166 SimplifiedValues.lookup(BI.getCondition()));
1167}
1168
1169bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
1170 // We model unconditional switches as free, see the comments on handling
1171 // branches.
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001172 if (isa<ConstantInt>(SI.getCondition()))
1173 return true;
1174 if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
1175 if (isa<ConstantInt>(V))
1176 return true;
1177
Eric Christopher7ad02ee2017-06-28 21:10:31 +00001178 // Assume the most general case where the switch is lowered into
Jun Bum Lim2960d412017-06-02 20:42:54 +00001179 // either a jump table, bit test, or a balanced binary tree consisting of
1180 // case clusters without merging adjacent clusters with the same
1182  // destination. We do not consider switches that are lowered with a mix
1183  // of jump table/bit test/binary search tree. The cost of the switch is
1184  // proportional to the size of the tree or the size of the jump table range.
1184 //
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001185 // NB: We convert large switches which are just used to initialize large phi
1186 // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
1187 // inlining those. It will prevent inlining in cases where the optimization
1188 // does not (yet) fire.
Jun Bum Lim2960d412017-06-02 20:42:54 +00001189
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001190  // Upper bound on the cost this function may produce (headroom below INT_MAX).
1191 int CostUpperBound = INT_MAX - InlineConstants::InstrCost - 1;
1192
Jun Bum Lim2960d412017-06-02 20:42:54 +00001193 // Exit early for a large switch, assuming one case needs at least one
1194 // instruction.
1196  // FIXME: This is not true for a bit test, but ignore such cases for now to
1196 // save compile-time.
1197 int64_t CostLowerBound =
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001198 std::min((int64_t)CostUpperBound,
Jun Bum Lim2960d412017-06-02 20:42:54 +00001199 (int64_t)SI.getNumCases() * InlineConstants::InstrCost + Cost);
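  // Illustrative example (assuming InstrCost is 5): a switch with 1000 cases
  // contributes a lower bound of at least 5000, far beyond the typical
  // thresholds of a few hundred, so we give up on this callee immediately.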
1200
1201 if (CostLowerBound > Threshold) {
1202 Cost = CostLowerBound;
1203 return false;
1204 }
1205
1206 unsigned JumpTableSize = 0;
1207 unsigned NumCaseCluster =
1208 TTI.getEstimatedNumberOfCaseClusters(SI, JumpTableSize);
1209
1210 // If suitable for a jump table, consider the cost for the table size and
1211 // branch to destination.
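  // Note: the extra 4 * InstrCost charged below is a rough allowance for the
  // fixed overhead of a typical jump-table lowering (range check, branch to
  // the default, table load, indirect jump); it is an estimate, not something
  // derived from the actual lowering.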
1212 if (JumpTableSize) {
1213 int64_t JTCost = (int64_t)JumpTableSize * InlineConstants::InstrCost +
1214 4 * InlineConstants::InstrCost;
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001215
1216 Cost = std::min((int64_t)CostUpperBound, JTCost + Cost);
Jun Bum Lim2960d412017-06-02 20:42:54 +00001217 return false;
1218 }
1219
1221  // When lowering to a binary search tree, we need the number of nodes, which
1222  // is the same as the number of comparisons once lowered. For a given
1223  // number of clusters, n, we can define a recursive function, f(n), to find
1224  // the number of nodes in the tree. The recursion is:
1225  // f(n) = 1 + f(n/2) + f(n - n/2), when n > 3,
1226  // and f(n) = n, when n <= 3.
1227  // This leads to a binary tree whose leaves are either f(2) or f(3) when
1228  // n > 3. So the number of comparisons from leaves is n, while the number
1229  // from non-leaf nodes is:
1230  //   2^(log2(n) - 1) - 1
1231  //   = 2^log2(n) * 2^-1 - 1
1232  //   = n / 2 - 1.
1233  // Combining comparisons from leaf and non-leaf nodes gives the simple
1234  // closed form:
1235  //   n + n / 2 - 1 = n * 3 / 2 - 1
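  // For example, a switch with 8 case clusters is estimated at
  // 8 * 3 / 2 - 1 = 11 comparisons, each charged below as one compare plus one
  // conditional branch.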
1235 if (NumCaseCluster <= 3) {
1236 // Suppose a comparison includes one compare and one conditional branch.
1237 Cost += NumCaseCluster * 2 * InlineConstants::InstrCost;
1238 return false;
1239 }
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001240
1241 int64_t ExpectedNumberOfCompare = 3 * (int64_t)NumCaseCluster / 2 - 1;
1242 int64_t SwitchCost =
Jun Bum Lim2960d412017-06-02 20:42:54 +00001243 ExpectedNumberOfCompare * 2 * InlineConstants::InstrCost;
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001244
1245 Cost = std::min((int64_t)CostUpperBound, SwitchCost + Cost);
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001246 return false;
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001247}
1248
1249bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
1250 // We never want to inline functions that contain an indirectbr. This is
1252  // incorrect because all the blockaddresses (in static global initializers
1252 // for example) would be referring to the original function, and this
1253 // indirect jump would jump from the inlined copy of the function into the
1254 // original function which is extremely undefined behavior.
1255 // FIXME: This logic isn't really right; we can safely inline functions with
1256 // indirectbr's as long as no other function or global references the
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001257 // blockaddress of a block within the current function.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001258 HasIndirectBr = true;
1259 return false;
1260}
1261
1262bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
1263 // FIXME: It's not clear that a single instruction is an accurate model for
1264 // the inline cost of a resume instruction.
1265 return false;
1266}
1267
David Majnemer654e1302015-07-31 17:58:14 +00001268bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
1269 // FIXME: It's not clear that a single instruction is an accurate model for
1270 // the inline cost of a cleanupret instruction.
1271 return false;
1272}
1273
1274bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
1275 // FIXME: It's not clear that a single instruction is an accurate model for
Joseph Tremoulet8220bcc2015-08-23 00:26:33 +00001276 // the inline cost of a catchret instruction.
David Majnemer654e1302015-07-31 17:58:14 +00001277 return false;
1278}
1279
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001280bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
1282  // FIXME: It might be reasonable to discount the cost of instructions leading
1282 // to unreachable as they have the lowest possible impact on both runtime and
1283 // code size.
1284 return true; // No actual code is needed for unreachable.
1285}
1286
Chandler Carruth0539c072012-03-31 12:42:41 +00001287bool CallAnalyzer::visitInstruction(Instruction &I) {
Chandler Carruthda7513a2012-05-04 00:58:03 +00001288 // Some instructions are free. All of the free intrinsics can also be
1289 // handled by SROA, etc.
Chandler Carruthb8cf5102013-01-21 12:05:16 +00001290 if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
Chandler Carruthda7513a2012-05-04 00:58:03 +00001291 return true;
1292
Chandler Carruth0539c072012-03-31 12:42:41 +00001293 // We found something we don't understand or can't handle. Mark any SROA-able
1294 // values in the operand list as no longer viable.
1295 for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
1296 disableSROA(*OI);
1297
1298 return false;
1299}
1300
Chandler Carruth0539c072012-03-31 12:42:41 +00001301/// \brief Analyze a basic block for its contribution to the inline cost.
1302///
1303/// This method walks the analyzer over every instruction in the given basic
1304/// block and accounts for their cost during inlining at this callsite. It
1305/// aborts early if the threshold has been exceeded or an impossible to inline
1306/// construct has been detected. It returns false if inlining is no longer
1307/// viable, and true if inlining remains viable.
Hal Finkel57f03dd2014-09-07 13:49:57 +00001308bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
1309 SmallPtrSetImpl<const Value *> &EphValues) {
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001310 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
Chandler Carruth6b4cc8b2014-02-01 10:38:17 +00001311    // FIXME: Currently, the number of instructions in a function, regardless of
1312    // our ability to simplify them during inlining to constants or dead code,
1313    // is actually used by the vector bonus heuristic. As long as that's true,
1314 // we have to special case debug intrinsics here to prevent differences in
1315 // inlining due to debug symbols. Eventually, the number of unsimplified
1316 // instructions shouldn't factor into the cost computation, but until then,
1317 // hack around it here.
1318 if (isa<DbgInfoIntrinsic>(I))
1319 continue;
1320
Hal Finkel57f03dd2014-09-07 13:49:57 +00001321 // Skip ephemeral values.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001322 if (EphValues.count(&*I))
Hal Finkel57f03dd2014-09-07 13:49:57 +00001323 continue;
1324
Chandler Carruth0539c072012-03-31 12:42:41 +00001325 ++NumInstructions;
1326 if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
1327 ++NumVectorInstructions;
1328
Sanjay Patele9434e82015-09-15 15:26:25 +00001329 // If the instruction is floating point, and the target says this operation
1330 // is expensive or the function has the "use-soft-float" attribute, this may
1331 // eventually become a library call. Treat the cost as such.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001332 if (I->getType()->isFloatingPointTy()) {
Sanjay Patele9434e82015-09-15 15:26:25 +00001333 // If the function has the "use-soft-float" attribute, mark it as
1334 // expensive.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001335 if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
Eric Christopher908ed7f2017-04-15 06:14:52 +00001336 (F.getFnAttribute("use-soft-float").getValueAsString() == "true"))
Cameron Esfahani17177d12015-02-05 02:09:33 +00001337 Cost += InlineConstants::CallPenalty;
1338 }
1339
Chandler Carruth0539c072012-03-31 12:42:41 +00001340 // If the instruction simplified to a constant, there is no cost to this
1341 // instruction. Visit the instructions using our InstVisitor to account for
1342 // all of the per-instruction logic. The visit tree returns true if we
1343 // consumed the instruction in any way, and false if the instruction's base
1344 // cost should count against inlining.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001345 if (Base::visit(&*I))
Chandler Carruth0539c072012-03-31 12:42:41 +00001346 ++NumInstructionsSimplified;
1347 else
1348 Cost += InlineConstants::InstrCost;
1349
1351    // If visiting this instruction detected an uninlinable pattern, abort.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001351 if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
Reid Kleckner223de262015-04-14 20:38:14 +00001352 HasIndirectBr || HasFrameEscape)
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001353 return false;
1354
1355 // If the caller is a recursive function then we don't want to inline
1356 // functions which allocate a lot of stack space because it would increase
1357 // the caller stack usage dramatically.
1358 if (IsCallerRecursive &&
1359 AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
Chandler Carruth0539c072012-03-31 12:42:41 +00001360 return false;
1361
Chandler Carrutha004f222015-05-27 02:49:05 +00001362    // Check if we've passed the maximum possible threshold so we don't spin in
1363 // huge basic blocks that will never inline.
1364 if (Cost > Threshold)
Chandler Carruth0539c072012-03-31 12:42:41 +00001365 return false;
1366 }
1367
1368 return true;
1369}
1370
1371/// \brief Compute the base pointer and cumulative constant offsets for V.
1372///
1373/// This strips all constant offsets off of V, leaving it the base pointer, and
1374/// accumulates the total constant offset applied in the returned constant. It
1375/// returns 0 if V is not a pointer, and returns the constant '0' if there are
1376/// no constant offsets applied.
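/// For example (illustrative): for a chain of inbounds GEPs with constant
/// indices rooted at an alloca, V is rewritten to the alloca and the returned
/// constant holds the accumulated byte offset.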
1377ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001378 if (!V->getType()->isPointerTy())
Craig Topper353eda42014-04-24 06:44:33 +00001379 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001380
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001381 unsigned IntPtrWidth = DL.getPointerSizeInBits();
Chandler Carruth0539c072012-03-31 12:42:41 +00001382 APInt Offset = APInt::getNullValue(IntPtrWidth);
1383
1384 // Even though we don't look through PHI nodes, we could be called on an
1385 // instruction in an unreachable block, which may be on a cycle.
1386 SmallPtrSet<Value *, 4> Visited;
1387 Visited.insert(V);
1388 do {
1389 if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
1390 if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
Craig Topper353eda42014-04-24 06:44:33 +00001391 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001392 V = GEP->getPointerOperand();
1393 } else if (Operator::getOpcode(V) == Instruction::BitCast) {
1394 V = cast<Operator>(V)->getOperand(0);
1395 } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
Sanjoy Das5ce32722016-04-08 00:48:30 +00001396 if (GA->isInterposable())
Chandler Carruth0539c072012-03-31 12:42:41 +00001397 break;
1398 V = GA->getAliasee();
1399 } else {
1400 break;
1401 }
1402 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
David Blaikie70573dc2014-11-19 07:49:26 +00001403 } while (Visited.insert(V).second);
Chandler Carruth0539c072012-03-31 12:42:41 +00001404
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001405 Type *IntPtrTy = DL.getIntPtrType(V->getContext());
Chandler Carruth0539c072012-03-31 12:42:41 +00001406 return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
1407}
1408
1409/// \brief Analyze a call site for potential inlining.
1410///
1411/// Returns true if inlining this call is viable, and false if it is not
1412/// viable. It computes the cost and adjusts the threshold based on numerous
1413/// factors and heuristics. If this method returns false but the computed cost
1414/// is below the computed threshold, then inlining was forcibly disabled by
Bob Wilson266802d2012-11-19 07:04:30 +00001415/// some artifact of the routine.
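/// (For example, encountering an indirectbr or a call to llvm.localescape in
/// the callee aborts the analysis regardless of the accumulated cost.)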
Chandler Carruth0539c072012-03-31 12:42:41 +00001416bool CallAnalyzer::analyzeCall(CallSite CS) {
Chandler Carruth7ae90d42012-04-11 10:15:10 +00001417 ++NumCallsAnalyzed;
1418
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001419 // Perform some tweaks to the cost and threshold based on the direct
1420 // callsite information.
Chandler Carruth0539c072012-03-31 12:42:41 +00001421
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001422 // We want to more aggressively inline vector-dense kernels, so up the
1423 // threshold, and we'll lower it if the % of vector instructions gets too
Chandler Carrutha004f222015-05-27 02:49:05 +00001424  // low. Note that these bonuses are somewhat arbitrary and evolved over time
1425 // by accident as much as because they are principled bonuses.
1426 //
1427 // FIXME: It would be nice to remove all such bonuses. At least it would be
1428 // nice to base the bonus values on something more scientific.
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001429 assert(NumInstructions == 0);
1430 assert(NumVectorInstructions == 0);
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001431
1432 // Update the threshold based on callsite properties
1433 updateThreshold(CS, F);
1434
Chandler Carrutha004f222015-05-27 02:49:05 +00001435 // Speculatively apply all possible bonuses to Threshold. If cost exceeds
1436 // this Threshold any time, and cost cannot decrease, we can stop processing
1437 // the rest of the function body.
Easwaran Raman51b809b2017-07-28 21:47:36 +00001438 Threshold += (SingleBBBonus + VectorBonus);
Chandler Carrutha004f222015-05-27 02:49:05 +00001439
Xinliang David Li351d9b02017-05-02 05:38:41 +00001440 // Give out bonuses for the callsite, as the instructions setting them up
1441 // will be gone after inlining.
1442 Cost -= getCallsiteCost(CS, DL);
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001443
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001444 // If this function uses the coldcc calling convention, prefer not to inline
1445 // it.
1446 if (F.getCallingConv() == CallingConv::Cold)
1447 Cost += InlineConstants::ColdccPenalty;
1448
1449 // Check if we're done. This can happen due to bonuses and penalties.
1450 if (Cost > Threshold)
1451 return false;
1452
Chandler Carruth0539c072012-03-31 12:42:41 +00001453 if (F.empty())
1454 return true;
1455
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001456 Function *Caller = CS.getInstruction()->getParent()->getParent();
1457 // Check if the caller function is recursive itself.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001458 for (User *U : Caller->users()) {
1459 CallSite Site(U);
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001460 if (!Site)
1461 continue;
1462 Instruction *I = Site.getInstruction();
1463 if (I->getParent()->getParent() == Caller) {
1464 IsCallerRecursive = true;
1465 break;
1466 }
1467 }
1468
Chandler Carruth0539c072012-03-31 12:42:41 +00001469 // Populate our simplified values by mapping from function arguments to call
1470 // arguments with known important simplifications.
1471 CallSite::arg_iterator CAI = CS.arg_begin();
1472 for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
1473 FAI != FAE; ++FAI, ++CAI) {
1474 assert(CAI != CS.arg_end());
1475 if (Constant *C = dyn_cast<Constant>(CAI))
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001476 SimplifiedValues[&*FAI] = C;
Chandler Carruth0539c072012-03-31 12:42:41 +00001477
1478 Value *PtrArg = *CAI;
1479 if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001480 ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());
Chandler Carruth0539c072012-03-31 12:42:41 +00001481
1482 // We can SROA any pointer arguments derived from alloca instructions.
1483 if (isa<AllocaInst>(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001484 SROAArgValues[&*FAI] = PtrArg;
Chandler Carruth0539c072012-03-31 12:42:41 +00001485 SROAArgCosts[PtrArg] = 0;
1486 }
1487 }
1488 }
1489 NumConstantArgs = SimplifiedValues.size();
1490 NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
1491 NumAllocaArgs = SROAArgValues.size();
1492
Hal Finkel57f03dd2014-09-07 13:49:57 +00001493 // FIXME: If a caller has multiple calls to a callee, we end up recomputing
1494 // the ephemeral values multiple times (and they're completely determined by
1495 // the callee, so this is purely duplicate work).
1496 SmallPtrSet<const Value *, 32> EphValues;
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001497 CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);
Hal Finkel57f03dd2014-09-07 13:49:57 +00001498
Chandler Carruth0539c072012-03-31 12:42:41 +00001499 // The worklist of live basic blocks in the callee *after* inlining. We avoid
1500 // adding basic blocks of the callee which can be proven to be dead for this
1501 // particular call site in order to get more accurate cost estimates. This
1502 // requires a somewhat heavyweight iteration pattern: we need to walk the
1503 // basic blocks in a breadth-first order as we insert live successors. To
1504 // accomplish this, prioritizing for small iterations because we exit after
1506  // accomplish this we use a small-size optimized SetVector; because we exit
1507  // once the threshold is crossed, the number of iterations is usually small.
Chad Rosier567556a2016-04-28 14:47:23 +00001507 SmallPtrSet<BasicBlock *, 16>>
1508 BBSetVector;
Chandler Carruth0539c072012-03-31 12:42:41 +00001509 BBSetVector BBWorklist;
1510 BBWorklist.insert(&F.getEntryBlock());
Easwaran Raman51b809b2017-07-28 21:47:36 +00001511 bool SingleBB = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001512 // Note that we *must not* cache the size, this loop grows the worklist.
1513 for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
1514 // Bail out the moment we cross the threshold. This means we'll under-count
1515 // the cost, but only when undercounting doesn't matter.
Chandler Carrutha004f222015-05-27 02:49:05 +00001516 if (Cost > Threshold)
Chandler Carruth0539c072012-03-31 12:42:41 +00001517 break;
1518
1519 BasicBlock *BB = BBWorklist[Idx];
1520 if (BB->empty())
Chandler Carruth4d1d34f2012-03-14 23:19:53 +00001521 continue;
Dan Gohman4552e3c2009-10-13 18:30:07 +00001522
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001523 // Disallow inlining a blockaddress. A blockaddress only has defined
1524 // behavior for an indirect branch in the same function, and we do not
1525 // currently support inlining indirect branches. But, the inliner may not
1526 // see an indirect branch that ends up being dead code at a particular call
1527 // site. If the blockaddress escapes the function, e.g., via a global
1528 // variable, inlining may lead to an invalid cross-function reference.
1529 if (BB->hasAddressTaken())
1530 return false;
1531
Chandler Carruth0539c072012-03-31 12:42:41 +00001532 // Analyze the cost of this block. If we blow through the threshold, this
1534    // returns false, and we can bail out.
Easwaran Ramand295b002016-04-13 21:20:22 +00001534 if (!analyzeBlock(BB, EphValues))
1535 return false;
Eric Christopher46308e62011-02-01 01:16:32 +00001536
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001537 TerminatorInst *TI = BB->getTerminator();
1538
Chandler Carruth0539c072012-03-31 12:42:41 +00001539 // Add in the live successors by first checking whether we have terminator
1540 // that may be simplified based on the values simplified by this call.
1541 if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
1542 if (BI->isConditional()) {
1543 Value *Cond = BI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001544 if (ConstantInt *SimpleCond =
1545 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001546 BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
1547 continue;
Eric Christopher46308e62011-02-01 01:16:32 +00001548 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001549 }
1550 } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
1551 Value *Cond = SI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001552 if (ConstantInt *SimpleCond =
1553 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Chandler Carruth927d8e62017-04-12 07:27:28 +00001554 BBWorklist.insert(SI->findCaseValue(SimpleCond)->getCaseSuccessor());
Chandler Carruth0539c072012-03-31 12:42:41 +00001555 continue;
1556 }
1557 }
Eric Christopher46308e62011-02-01 01:16:32 +00001558
Chandler Carruth0539c072012-03-31 12:42:41 +00001559 // If we're unable to select a particular successor, just count all of
1560 // them.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001561 for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
1562 ++TIdx)
Chandler Carruth0539c072012-03-31 12:42:41 +00001563 BBWorklist.insert(TI->getSuccessor(TIdx));
1564
1566    // If we had any successors at this point, then post-inlining is likely to
1566 // have them as well. Note that we assume any basic blocks which existed
1567 // due to branches or switches which folded above will also fold after
1568 // inlining.
1569 if (SingleBB && TI->getNumSuccessors() > 1) {
1570 // Take off the bonus we applied to the threshold.
1571 Threshold -= SingleBBBonus;
1572 SingleBB = false;
Eric Christopher46308e62011-02-01 01:16:32 +00001573 }
1574 }
Andrew Trickcaa500b2011-10-01 01:27:56 +00001575
Easwaran Raman51b809b2017-07-28 21:47:36 +00001576 bool OnlyOneCallAndLocalLinkage =
1577 F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
Chandler Carruthcb5beb32013-12-12 11:59:26 +00001578 // If this is a noduplicate call, we can still inline as long as
James Molloy4f6fb952012-12-20 16:04:27 +00001579  // inlining this would cause the removal of the callee (so the instruction
1580 // is not actually duplicated, just moved).
1581 if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
1582 return false;
1583
Chandler Carrutha004f222015-05-27 02:49:05 +00001584 // We applied the maximum possible vector bonus at the beginning. Now,
1585 // subtract the excess bonus, if any, from the Threshold before
1586 // comparing against Cost.
1587 if (NumVectorInstructions <= NumInstructions / 10)
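  // Illustratively: a callee where at most a tenth of the instructions are
  // vector operations keeps none of the speculative vector bonus, one where up
  // to half are vector operations keeps half of it, and a denser callee keeps
  // the full bonus.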
Easwaran Raman51b809b2017-07-28 21:47:36 +00001588 Threshold -= VectorBonus;
Chandler Carrutha004f222015-05-27 02:49:05 +00001589 else if (NumVectorInstructions <= NumInstructions / 2)
Easwaran Raman51b809b2017-07-28 21:47:36 +00001590 Threshold -= VectorBonus/2;
Chandler Carruth0539c072012-03-31 12:42:41 +00001591
Hans Wennborg00ab73d2016-02-05 20:32:42 +00001592 return Cost < std::max(1, Threshold);
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001593}
1594
Manman Ren49d684e2012-09-12 05:06:18 +00001595#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
Chandler Carruth0539c072012-03-31 12:42:41 +00001596/// \brief Dump stats about this call's analysis.
Yaron Kereneb2a2542016-01-29 20:50:44 +00001597LLVM_DUMP_METHOD void CallAnalyzer::dump() {
Eric Christophera13839f2014-02-26 23:27:16 +00001598#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
Chandler Carruth0539c072012-03-31 12:42:41 +00001599 DEBUG_PRINT_STAT(NumConstantArgs);
1600 DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
1601 DEBUG_PRINT_STAT(NumAllocaArgs);
1602 DEBUG_PRINT_STAT(NumConstantPtrCmps);
1603 DEBUG_PRINT_STAT(NumConstantPtrDiffs);
1604 DEBUG_PRINT_STAT(NumInstructionsSimplified);
Chandler Carrutha004f222015-05-27 02:49:05 +00001605 DEBUG_PRINT_STAT(NumInstructions);
Chandler Carruth0539c072012-03-31 12:42:41 +00001606 DEBUG_PRINT_STAT(SROACostSavings);
1607 DEBUG_PRINT_STAT(SROACostSavingsLost);
James Molloy4f6fb952012-12-20 16:04:27 +00001608 DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
Chandler Carruth394e34f2014-01-31 22:32:32 +00001609 DEBUG_PRINT_STAT(Cost);
1610 DEBUG_PRINT_STAT(Threshold);
Chandler Carruth0539c072012-03-31 12:42:41 +00001611#undef DEBUG_PRINT_STAT
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001612}
Manman Renc3366cc2012-09-06 19:55:56 +00001613#endif
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001614
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001615/// \brief Test that there are no attribute conflicts between Caller and Callee
1616/// that prevent inlining.
1617static bool functionsHaveCompatibleAttributes(Function *Caller,
Eric Christopher4371b132015-07-02 01:11:47 +00001618 Function *Callee,
1619 TargetTransformInfo &TTI) {
Eric Christopherd566fb12015-07-29 22:09:48 +00001620 return TTI.areInlineCompatible(Caller, Callee) &&
Akira Hatanaka1cb242e2015-12-22 23:57:37 +00001621 AttributeFuncs::areInlineCompatible(*Caller, *Callee);
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001622}
1623
Xinliang David Li351d9b02017-05-02 05:38:41 +00001624int llvm::getCallsiteCost(CallSite CS, const DataLayout &DL) {
1625 int Cost = 0;
1626 for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
1627 if (CS.isByValArgument(I)) {
1628 // We approximate the number of loads and stores needed by dividing the
1629 // size of the byval type by the target's pointer size.
1630 PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
1631 unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
1632 unsigned PointerSize = DL.getPointerSizeInBits();
1633 // Ceiling division.
1634 unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;
1635
1636 // If it generates more than 8 stores it is likely to be expanded as an
1637 // inline memcpy so we take that as an upper bound. Otherwise we assume
1638 // one load and one store per word copied.
1639 // FIXME: The maxStoresPerMemcpy setting from the target should be used
1640 // here instead of a magic number of 8, but it's not available via
1641 // DataLayout.
1642 NumStores = std::min(NumStores, 8U);
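      // Illustrative example (assuming 64-bit pointers and InstrCost == 5): a
      // 64-byte byval struct gives TypeSize == 512, NumStores == 8, and adds
      // 2 * 8 * 5 == 80 to the cost below.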
1643
1644 Cost += 2 * NumStores * InlineConstants::InstrCost;
1645 } else {
1646 // For non-byval arguments subtract off one instruction per call
1647 // argument.
1648 Cost += InlineConstants::InstrCost;
1649 }
1650 }
1651 // The call instruction also disappears after inlining.
1652 Cost += InlineConstants::InstrCost + InlineConstants::CallPenalty;
1653 return Cost;
1654}
1655
Sean Silvaab6a6832016-07-23 04:22:50 +00001656InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001657 CallSite CS, const InlineParams &Params, TargetTransformInfo &CalleeTTI,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001658 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman12585b02017-01-20 22:44:04 +00001659 Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
Sean Silvaab6a6832016-07-23 04:22:50 +00001660 ProfileSummaryInfo *PSI) {
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001661 return getInlineCost(CS, CS.getCalledFunction(), Params, CalleeTTI,
Easwaran Raman12585b02017-01-20 22:44:04 +00001662 GetAssumptionCache, GetBFI, PSI);
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001663}
1664
Sean Silvaab6a6832016-07-23 04:22:50 +00001665InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001666 CallSite CS, Function *Callee, const InlineParams &Params,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001667 TargetTransformInfo &CalleeTTI,
1668 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman12585b02017-01-20 22:44:04 +00001669 Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001670 ProfileSummaryInfo *PSI) {
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001671
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001672 // Cannot inline indirect calls.
1673 if (!Callee)
1674 return llvm::InlineCost::getNever();
1675
1676 // Calls to functions with always-inline attributes should be inlined
1677 // whenever possible.
Peter Collingbourne68a88972014-05-19 18:25:54 +00001678 if (CS.hasFnAttr(Attribute::AlwaysInline)) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001679 if (isInlineViable(*Callee))
1680 return llvm::InlineCost::getAlways();
1681 return llvm::InlineCost::getNever();
1682 }
1683
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001684 // Never inline functions with conflicting attributes (unless callee has
1685 // always-inline attribute).
Chad Rosier5ce28f42017-08-02 14:50:27 +00001686 Function *Caller = CS.getCaller();
1687 if (!functionsHaveCompatibleAttributes(Caller, Callee, CalleeTTI))
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001688 return llvm::InlineCost::getNever();
1689
Paul Robinsondcbe35b2013-11-18 21:44:03 +00001690 // Don't inline this call if the caller has the optnone attribute.
Chad Rosier5ce28f42017-08-02 14:50:27 +00001691 if (Caller->hasFnAttribute(Attribute::OptimizeNone))
Paul Robinsondcbe35b2013-11-18 21:44:03 +00001692 return llvm::InlineCost::getNever();
1693
Sanjoy Das5ce32722016-04-08 00:48:30 +00001694 // Don't inline functions which can be interposed at link-time. Don't inline
1695 // functions marked noinline or call sites marked noinline.
Craig Topper107b1872016-12-09 02:18:04 +00001696 // Note: inlining non-exact non-interposable functions is fine, since we know
Sanjoy Das5ce32722016-04-08 00:48:30 +00001697 // we have *a* correct implementation of the source level function.
Chad Rosier567556a2016-04-28 14:47:23 +00001698 if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
1699 CS.isNoInline())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001700 return llvm::InlineCost::getNever();
1701
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001702 DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
Chad Rosier567556a2016-04-28 14:47:23 +00001703 << "...\n");
Andrew Trickcaa500b2011-10-01 01:27:56 +00001704
Easwaran Raman12585b02017-01-20 22:44:04 +00001705 CallAnalyzer CA(CalleeTTI, GetAssumptionCache, GetBFI, PSI, *Callee, CS,
1706 Params);
Chandler Carruth0539c072012-03-31 12:42:41 +00001707 bool ShouldInline = CA.analyzeCall(CS);
Dan Gohman4552e3c2009-10-13 18:30:07 +00001708
Chandler Carruth0539c072012-03-31 12:42:41 +00001709 DEBUG(CA.dump());
1710
1711 // Check if there was a reason to force inlining or no inlining.
1712 if (!ShouldInline && CA.getCost() < CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001713 return InlineCost::getNever();
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001714 if (ShouldInline && CA.getCost() >= CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001715 return InlineCost::getAlways();
Andrew Trickcaa500b2011-10-01 01:27:56 +00001716
Chandler Carruth0539c072012-03-31 12:42:41 +00001717 return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
Dan Gohman4552e3c2009-10-13 18:30:07 +00001718}
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001719
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001720bool llvm::isInlineViable(Function &F) {
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001721 bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001722 for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001723 // Disallow inlining of functions which contain indirect branches or
1724 // blockaddresses.
1725 if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001726 return false;
1727
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001728 for (auto &II : *BI) {
1729 CallSite CS(&II);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001730 if (!CS)
1731 continue;
1732
1733 // Disallow recursive calls.
1734 if (&F == CS.getCalledFunction())
1735 return false;
1736
1737 // Disallow calls which expose returns-twice to a function not previously
1738 // attributed as such.
1739 if (!ReturnsTwice && CS.isCall() &&
1740 cast<CallInst>(CS.getInstruction())->canReturnTwice())
1741 return false;
Reid Kleckner223de262015-04-14 20:38:14 +00001742
Reid Kleckner60381792015-07-07 22:25:32 +00001743 // Disallow inlining functions that call @llvm.localescape. Doing this
Reid Kleckner223de262015-04-14 20:38:14 +00001744 // correctly would require major changes to the inliner.
1745 if (CS.getCalledFunction() &&
1746 CS.getCalledFunction()->getIntrinsicID() ==
Reid Kleckner60381792015-07-07 22:25:32 +00001747 llvm::Intrinsic::localescape)
Reid Kleckner223de262015-04-14 20:38:14 +00001748 return false;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001749 }
1750 }
1751
1752 return true;
1753}
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001754
1755// APIs to create InlineParams based on command line flags and/or other
1756// parameters.
1757
1758InlineParams llvm::getInlineParams(int Threshold) {
1759 InlineParams Params;
1760
1761 // This field is the threshold to use for a callee by default. This is
1762 // derived from one or more of:
1763 // * optimization or size-optimization levels,
1764 // * a value passed to createFunctionInliningPass function, or
1765 // * the -inline-threshold flag.
1766 // If the -inline-threshold flag is explicitly specified, that is used
1767 // irrespective of anything else.
1768 if (InlineThreshold.getNumOccurrences() > 0)
1769 Params.DefaultThreshold = InlineThreshold;
1770 else
1771 Params.DefaultThreshold = Threshold;
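  // For instance (hypothetical values): a pass constructed with a threshold of
  // 75 still ends up with a DefaultThreshold of 500 if the user passes
  // -inline-threshold=500 explicitly on the command line.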
1772
1773 // Set the HintThreshold knob from the -inlinehint-threshold.
1774 Params.HintThreshold = HintThreshold;
1775
1776 // Set the HotCallSiteThreshold knob from the -hot-callsite-threshold.
1777 Params.HotCallSiteThreshold = HotCallSiteThreshold;
1778
Easwaran Raman974d4ee2017-08-03 22:23:33 +00001779 // If the -locally-hot-callsite-threshold is explicitly specified, use it to
1780 // populate LocallyHotCallSiteThreshold. Later, we populate
1781 // Params.LocallyHotCallSiteThreshold from -locally-hot-callsite-threshold if
1782 // we know that optimization level is O3 (in the getInlineParams variant that
1783 // takes the opt and size levels).
1784 // FIXME: Remove this check (and make the assignment unconditional) after
1785 // addressing size regression issues at O2.
1786 if (LocallyHotCallSiteThreshold.getNumOccurrences() > 0)
1787 Params.LocallyHotCallSiteThreshold = LocallyHotCallSiteThreshold;
1788
Easwaran Raman12585b02017-01-20 22:44:04 +00001789 // Set the ColdCallSiteThreshold knob from the -inline-cold-callsite-threshold.
1790 Params.ColdCallSiteThreshold = ColdCallSiteThreshold;
1791
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001792 // Set the OptMinSizeThreshold and OptSizeThreshold params only if the
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001793  // -inline-threshold command-line option is not explicitly given. If that
1794 // option is present, then its value applies even for callees with size and
1795 // minsize attributes.
1796 // If the -inline-threshold is not specified, set the ColdThreshold from the
1797 // -inlinecold-threshold even if it is not explicitly passed. If
1798 // -inline-threshold is specified, then -inlinecold-threshold needs to be
1799 // explicitly specified to set the ColdThreshold knob
1800 if (InlineThreshold.getNumOccurrences() == 0) {
1801 Params.OptMinSizeThreshold = InlineConstants::OptMinSizeThreshold;
1802 Params.OptSizeThreshold = InlineConstants::OptSizeThreshold;
1803 Params.ColdThreshold = ColdThreshold;
1804 } else if (ColdThreshold.getNumOccurrences() > 0) {
1805 Params.ColdThreshold = ColdThreshold;
1806 }
1807 return Params;
1808}
1809
1810InlineParams llvm::getInlineParams() {
1811 return getInlineParams(InlineThreshold);
1812}
1813
1814// Compute the default threshold for inlining based on the opt level and the
1815// size opt level.
1816static int computeThresholdFromOptLevels(unsigned OptLevel,
1817 unsigned SizeOptLevel) {
1818 if (OptLevel > 2)
1819 return InlineConstants::OptAggressiveThreshold;
1820 if (SizeOptLevel == 1) // -Os
1821 return InlineConstants::OptSizeThreshold;
1822 if (SizeOptLevel == 2) // -Oz
1823 return InlineConstants::OptMinSizeThreshold;
1824 return InlineThreshold;
1825}
1826
1827InlineParams llvm::getInlineParams(unsigned OptLevel, unsigned SizeOptLevel) {
Easwaran Raman974d4ee2017-08-03 22:23:33 +00001828 auto Params =
1829 getInlineParams(computeThresholdFromOptLevels(OptLevel, SizeOptLevel));
1830 // At O3, use the value of -locally-hot-callsite-threshold option to populate
1831 // Params.LocallyHotCallSiteThreshold. Below O3, this flag has effect only
1832 // when it is specified explicitly.
1833 if (OptLevel > 2)
1834 Params.LocallyHotCallSiteThreshold = LocallyHotCallSiteThreshold;
1835 return Params;
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001836}