//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

static cl::opt<int> InlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

// We introduce this threshold to help performance of instrumentation-based
// PGO before we actually hook up the inliner with analysis passes such as
// BPI and BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(225),
    cl::desc("Threshold for inlining functions with cold attribute"));

static cl::opt<int>
    HotCallSiteThreshold("hot-callsite-threshold", cl::Hidden, cl::init(3000),
                         cl::ZeroOrMore,
                         cl::desc("Threshold for hot callsites"));
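
// Illustrative usage (hypothetical invocation): these knobs are read from the
// command line, e.g.
//   opt -inline -inline-threshold=500 -inlinehint-threshold=650 input.ll
// Larger values permit more code-size growth from inlining.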

namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// Getter for the cache of @llvm.assume intrinsics.
  std::function<AssumptionCache &(Function &)> &GetAssumptionCache;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  /// The called function.
  Function &F;

  /// The candidate callsite being analyzed. Please do not use this to do
  /// analysis in the caller function; we want the inline cost query to be
  /// easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  /// Tunable parameters that control the analysis.
  const InlineParams &Params;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int FiftyPercentVectorBonus, TenPercentVectorBonus;
  int VectorBonus;

  /// While we walk the potentially-inlined instructions, we build up and
  /// maintain a mapping of simplified values specific to this callsite. The
  /// idea is to propagate any special information we have about arguments to
  /// this call through the inlinable section of the function, and account for
  /// likely simplifications post-inlining. The most important aspect we track
  /// is CFG altering simplifications -- when we prove a basic block dead, that
  /// can cause dramatic shifts in the cost of inlining a function.
  DenseMap<Value *, Constant *> SimplifiedValues;

  /// Keep track of the values which map back (through function arguments) to
  /// allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  /// The mapping of caller Alloca values to their accumulated cost savings. If
  /// we have to disable SROA for one of the allocas, this tells us how much
  /// cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  /// Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPOffsetConstant(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *);
  void visit(Module &);
  void visit(Function *);
  void visit(Function &);
  void visit(BasicBlock *);
  void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI,
               std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
               ProfileSummaryInfo *PSI, Function &Callee, CallSite CSArg,
               const InlineParams &Params)
      : TTI(TTI), GetAssumptionCache(GetAssumptionCache), PSI(PSI), F(Callee),
        CandidateCS(CSArg), Params(Params), Threshold(Params.DefaultThreshold),
        Cost(0), IsCallerRecursive(false), IsRecursiveCall(false),
        ExposesReturnsTwice(false), HasDynamicAlloca(false),
        ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
        HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
        NumVectorInstructions(0), FiftyPercentVectorBonus(0),
        TenPercentVectorBonus(0), VectorBonus(0), NumConstantArgs(0),
        NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), NumConstantPtrCmps(0),
        NumConstantPtrDiffs(0), NumInstructionsSimplified(0),
        SROACostSavings(0), SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

/// \brief Check whether a GEP's indices are all constant.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPOffsetConstant(GetElementPtrInst &GEP) {
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
      return false;

  return true;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero())
      continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = GTI.getStructTypeOrNull()) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}
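
// Worked example (hypothetical IR): if %p is tracked as "base + 0", then for
//   %f = getelementptr inbounds {i32, i32}, {i32, i32}* %p, i32 0, i32 1
// the struct index adds getElementOffset(1) == 4 bytes, so Offset becomes 4
// and %f is recorded as "base + 4" by the GEP visitor below.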

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      const DataLayout &DL = F.getParent()->getDataLayout();
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}
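
// For example (hypothetical IR): "%buf = alloca [16 x i8]" in the entry block
// is a static alloca and inlines cheaply, while "%buf = alloca i8, i64 %n"
// with a non-constant %n stays dynamic and sets HasDynamicAlloca above.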

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded, and disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return false;
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here, we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  if (isGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return false;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getBitCast(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offsets through casts
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getPtrToInt(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getIntToPtr(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getCast(I.getOpcode(), COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  Constant *COp = dyn_cast<Constant>(Operand);
  if (!COp)
    COp = SimplifiedValues.lookup(Operand);
  if (COp) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    if (Constant *C = ConstantFoldInstOperands(&I, COp, DL)) {
      SimplifiedValues[&I] = C;
      return true;
    }
  }

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  unsigned ArgNo = A->getArgNo();
  return CandidateCS.paramHasAttr(ArgNo + 1, Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}
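
// E.g. (hypothetical IR): for a callsite "call void @f(i8* nonnull %p)",
// paramHasAttr reports NonNull on the matching formal, so an equality compare
// of that argument against null folds to a constant in visitCmpInst below.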

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in the scenario below, inlining hot_call_X() may
  // be beneficial:
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In future, we should elaborate this based on BPI and BFI in more
  // general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();

  // return min(A, B) if B is valid.
  auto MinIfValid = [](int A, Optional<int> B) {
    return B ? std::min(A, B.getValue()) : A;
  };

  // return max(A, B) if B is valid.
  auto MaxIfValid = [](int A, Optional<int> B) {
    return B ? std::max(A, B.getValue()) : A;
  };

  // Use the OptMinSizeThreshold or OptSizeThreshold knob if they are available
  // and reduce the threshold if the caller has the necessary attribute.
  if (Caller->optForMinSize())
    Threshold = MinIfValid(Threshold, Params.OptMinSizeThreshold);
  else if (Caller->optForSize())
    Threshold = MinIfValid(Threshold, Params.OptSizeThreshold);

  bool HotCallsite = false;
  uint64_t TotalWeight;
  if (CS.getInstruction()->extractProfTotalWeight(TotalWeight) &&
      PSI->isHotCount(TotalWeight)) {
    HotCallsite = true;
  }

  // Listen to the inlinehint attribute or profile based hotness information
  // when it would increase the threshold and the caller does not need to
  // minimize its size.
  bool InlineHint = Callee.hasFnAttribute(Attribute::InlineHint) ||
                    PSI->isFunctionEntryHot(&Callee);
  if (InlineHint && !Caller->optForMinSize())
    Threshold = MaxIfValid(Threshold, Params.HintThreshold);

  if (HotCallsite && !Caller->optForMinSize())
    Threshold = MaxIfValid(Threshold, Params.HotCallSiteThreshold);

  bool ColdCallee = PSI->isFunctionEntryCold(&Callee);
  // For cold callees, use the ColdThreshold knob if it is available and reduces
  // the threshold.
  if (ColdCallee)
    Threshold = MinIfValid(Threshold, Params.ColdThreshold);

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();
}
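
// Net effect illustration (hypothetical numbers, using the defaults above): a
// callee carrying inlinehint analyzed from an ordinary caller starts at the
// default threshold of 225 and is raised to 325; if the caller is minsize,
// the hint is ignored and the threshold is clamped down instead. The TTI
// multiplier is always applied last.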

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  if (Constant *CLHS = dyn_cast<Constant>(LHS)) {
    if (Constant *CRHS = dyn_cast<Constant>(RHS))
      if (Constant *C =
              ConstantExpr::getCompare(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}
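
// Example of the common-base fold (hypothetical IR): if %a is tracked as
// "base + 4" and %b as "base + 8", then "icmp ult i8* %a, %b" folds to true
// here, since only the constant offsets need to be compared.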

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}
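
// E.g. (hypothetical IR): "%d = sub i64 %pa, %pb", where %pa and %pb are
// ptrtoint values tracked as "base + 12" and "base + 4", folds to the
// constant 8 and costs nothing after inlining.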

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  Value *SimpleV = nullptr;
  if (auto FI = dyn_cast<FPMathOperator>(&I))
    SimpleV =
        SimplifyFPBinOp(I.getOpcode(), LHS, RHS, FI->getFastMathFlags(), DL);
  else
    SimpleV = SimplifyBinOp(I.getOpcode(), LHS, RHS, DL);

  if (Constant *C = dyn_cast_or_null<Constant>(SimpleV)) {
    SimplifiedValues[&I] = C;
    return true;
  }

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  Constant *C = dyn_cast<Constant>(I.getAggregateOperand());
  if (!C)
    C = SimplifiedValues.lookup(I.getAggregateOperand());
  if (C) {
    SimplifiedValues[&I] = ConstantExpr::getExtractValue(C, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  Constant *AggC = dyn_cast<Constant>(I.getAggregateOperand());
  if (!AggC)
    AggC = SimplifiedValues.lookup(I.getAggregateOperand());
  Constant *InsertedC = dyn_cast<Constant>(I.getInsertedValueOperand());
  if (!InsertedC)
    InsertedC = SimplifiedValues.lookup(I.getInsertedValueOperand());
  if (AggC && InsertedC) {
    SimplifiedValues[&I] =
        ConstantExpr::getInsertValue(AggC, InsertedC, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
       ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}
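
// For instance (hypothetical): if the callee body contains
// "call double @llvm.fabs.f64(double %x)" and %x simplified to -1.0, the
// call constant-folds to 1.0 here and is treated as free.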

bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        return Base::visitCallSite(CS);

      case Intrinsic::load_relative:
        // This is normally lowered to 4 LLVM instructions.
        Cost += 3 * InlineConstants::InstrCost;
        return false;

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        // SROA can usually chew through these intrinsics, but they aren't free.
        return false;
      case Intrinsic::localescape:
        HasFrameEscape = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getParent()->getParent()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F)
    return Base::visitCallSite(CS);

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
  auto IndirectCallParams = Params;
  IndirectCallParams.DefaultThreshold = InlineConstants::IndirectCallThreshold;
  CallAnalyzer CA(TTI, GetAssumptionCache, PSI, *F, CS, IndirectCallParams);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // threshold to get the bonus we want to apply, but don't go below zero.
    Cost -= std::max(0, CA.getThreshold() - CA.getCost());
  }

  return Base::visitCallSite(CS);
}
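
// Devirtualization sketch (hypothetical): when an indirect callee such as a
// function pointer argument simplifies to a known function @impl, the nested
// CallAnalyzer above "trial inlines" @impl and credits this call site with
// max(0, nested Threshold - nested Cost) as a bonus.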

bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Otherwise, we need to accumulate a cost proportional to the number of
  // distinct successor blocks. This fan-out in the CFG cannot be represented
  // for free even if we can represent the core switch as a jumptable that
  // takes a single instruction.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.
  SmallPtrSet<BasicBlock *, 8> SuccessorBlocks;
  SuccessorBlocks.insert(SI.getDefaultDest());
  for (auto I = SI.case_begin(), E = SI.case_end(); I != E; ++I)
    SuccessorBlocks.insert(I.getCaseSuccessor());
  // Add cost corresponding to the number of distinct destinations. The first
  // we model as free because of fallthrough.
  Cost += (SuccessorBlocks.size() - 1) * InlineConstants::InstrCost;
  return false;
}
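
// Cost illustration (hypothetical): a switch with a default plus four cases,
// two of which branch to the same block, has four distinct successors, so it
// adds (4 - 1) * InlineConstants::InstrCost here.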

bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
  // We never want to inline functions that contain an indirectbr. Inlining
  // such a function would be incorrect because all the blockaddress's (in
  // static global initializers for example) would be referring to the
  // original function, and this indirect jump would jump from the inlined
  // copy of the function into the original function which is extremely
  // undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions with
  // indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function.
  HasIndirectBr = true;
  return false;
}

bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a resume instruction.
  return false;
}

bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a cleanupret instruction.
  return false;
}

bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a catchret instruction.
  return false;
}

bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
  // FIXME: It might be reasonable to discount the cost of instructions leading
  // to unreachable as they have the lowest possible impact on both runtime and
  // code size.
  return true; // No actual code is needed for unreachable.
}

bool CallAnalyzer::visitInstruction(Instruction &I) {
  // Some instructions are free. All of the free intrinsics can also be
  // handled by SROA, etc.
  if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
    return true;

  // We found something we don't understand or can't handle. Mark any SROA-able
  // values in the operand list as no longer viable.
  for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
    disableSROA(*OI);

  return false;
}
1071
Chandler Carruth0539c072012-03-31 12:42:41 +00001072/// \brief Analyze a basic block for its contribution to the inline cost.
1073///
1074/// This method walks the analyzer over every instruction in the given basic
1075/// block and accounts for their cost during inlining at this callsite. It
1076/// aborts early if the threshold has been exceeded or an impossible to inline
1077/// construct has been detected. It returns false if inlining is no longer
1078/// viable, and true if inlining remains viable.
Hal Finkel57f03dd2014-09-07 13:49:57 +00001079bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
1080 SmallPtrSetImpl<const Value *> &EphValues) {
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001081 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
Chandler Carruth6b4cc8b2014-02-01 10:38:17 +00001082 // FIXME: Currently, the number of instructions in a function regardless of
1083 // our ability to simplify them during inline to constants or dead code,
1084 // are actually used by the vector bonus heuristic. As long as that's true,
1085 // we have to special case debug intrinsics here to prevent differences in
1086 // inlining due to debug symbols. Eventually, the number of unsimplified
1087 // instructions shouldn't factor into the cost computation, but until then,
1088 // hack around it here.
1089 if (isa<DbgInfoIntrinsic>(I))
1090 continue;
1091
Hal Finkel57f03dd2014-09-07 13:49:57 +00001092 // Skip ephemeral values.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001093 if (EphValues.count(&*I))
Hal Finkel57f03dd2014-09-07 13:49:57 +00001094 continue;
1095
Chandler Carruth0539c072012-03-31 12:42:41 +00001096 ++NumInstructions;
1097 if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
1098 ++NumVectorInstructions;
1099
Sanjay Patele9434e82015-09-15 15:26:25 +00001100 // If the instruction is floating point, and the target says this operation
1101 // is expensive or the function has the "use-soft-float" attribute, this may
1102 // eventually become a library call. Treat the cost as such.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001103 if (I->getType()->isFloatingPointTy()) {
1104 bool hasSoftFloatAttr = false;
1105
Sanjay Patele9434e82015-09-15 15:26:25 +00001106 // If the function has the "use-soft-float" attribute, mark it as
1107 // expensive.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001108 if (F.hasFnAttribute("use-soft-float")) {
1109 Attribute Attr = F.getFnAttribute("use-soft-float");
1110 StringRef Val = Attr.getValueAsString();
1111 if (Val == "true")
1112 hasSoftFloatAttr = true;
1113 }
1114
1115 if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
1116 hasSoftFloatAttr)
1117 Cost += InlineConstants::CallPenalty;
1118 }
1119
Chandler Carruth0539c072012-03-31 12:42:41 +00001120 // If the instruction simplified to a constant, there is no cost to this
1121 // instruction. Visit the instructions using our InstVisitor to account for
1122 // all of the per-instruction logic. The visit tree returns true if we
1123 // consumed the instruction in any way, and false if the instruction's base
1124 // cost should count against inlining.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001125 if (Base::visit(&*I))
Chandler Carruth0539c072012-03-31 12:42:41 +00001126 ++NumInstructionsSimplified;
1127 else
1128 Cost += InlineConstants::InstrCost;
1129
1130 // If visiting this instruction detected an uninlinable pattern, abort.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001131 if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
Reid Kleckner223de262015-04-14 20:38:14 +00001132 HasIndirectBr || HasFrameEscape)
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001133 return false;
1134
1135 // If the caller is a recursive function then we don't want to inline
1136 // functions which allocate a lot of stack space because it would increase
1137 // the caller stack usage dramatically.
1138 if (IsCallerRecursive &&
1139 AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
Chandler Carruth0539c072012-03-31 12:42:41 +00001140 return false;
1141
Chandler Carrutha004f222015-05-27 02:49:05 +00001142 // Check if we've passed the maximum possible threshold so we don't spin in
1143 // huge basic blocks that will never inline.
1144 if (Cost > Threshold)
Chandler Carruth0539c072012-03-31 12:42:41 +00001145 return false;
1146 }
1147
1148 return true;
1149}
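// To make the accounting above concrete with a worked example (assuming
// the usual InlineConstants::InstrCost of 5): a 12-instruction block in
// which 4 instructions simplify to constants adds 8 * 5 = 40 to Cost,
// yet NumInstructions still advances by all 12 non-debug, non-ephemeral
// instructions, which is exactly what the FIXME at the top of
// analyzeBlock complains feeds the vector bonus heuristic.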
1150
1151/// \brief Compute the base pointer and cumulative constant offsets for V.
1152///
1153/// This strips all constant offsets off of V, leaving it the base pointer, and
1154/// accumulates the total constant offset applied in the returned constant. It
1155/// returns 0 if V is not a pointer, and returns the constant '0' if there are
1156/// no constant offsets applied.
1157ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001158 if (!V->getType()->isPointerTy())
Craig Topper353eda42014-04-24 06:44:33 +00001159 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001160
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001161 const DataLayout &DL = F.getParent()->getDataLayout();
1162 unsigned IntPtrWidth = DL.getPointerSizeInBits();
Chandler Carruth0539c072012-03-31 12:42:41 +00001163 APInt Offset = APInt::getNullValue(IntPtrWidth);
1164
1165 // Even though we don't look through PHI nodes, we could be called on an
1166 // instruction in an unreachable block, which may be on a cycle.
1167 SmallPtrSet<Value *, 4> Visited;
1168 Visited.insert(V);
1169 do {
1170 if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
1171 if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
Craig Topper353eda42014-04-24 06:44:33 +00001172 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001173 V = GEP->getPointerOperand();
1174 } else if (Operator::getOpcode(V) == Instruction::BitCast) {
1175 V = cast<Operator>(V)->getOperand(0);
1176 } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
Sanjoy Das5ce32722016-04-08 00:48:30 +00001177 if (GA->isInterposable())
Chandler Carruth0539c072012-03-31 12:42:41 +00001178 break;
1179 V = GA->getAliasee();
1180 } else {
1181 break;
1182 }
1183 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
David Blaikie70573dc2014-11-19 07:49:26 +00001184 } while (Visited.insert(V).second);
Chandler Carruth0539c072012-03-31 12:42:41 +00001185
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001186 Type *IntPtrTy = DL.getIntPtrType(V->getContext());
Chandler Carruth0539c072012-03-31 12:42:41 +00001187 return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
1188}
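// A worked example of the stripping loop (hypothetical IR, not from this
// file): given
//
//   %a = alloca [4 x i32]
//   %p = getelementptr inbounds [4 x i32], [4 x i32]* %a, i32 0, i32 2
//
// passing %p in V rewrites V to %a and returns a ConstantInt of 8, i.e.
// two 4-byte elements of constant offset, assuming a typical DataLayout.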
1189
1190/// \brief Analyze a call site for potential inlining.
1191///
1192/// Returns true if inlining this call is viable, and false if it is not
1193/// viable. It computes the cost and adjusts the threshold based on numerous
1194/// factors and heuristics. If this method returns false but the computed cost
1195/// is below the computed threshold, then inlining was forcibly disabled by
Bob Wilson266802d2012-11-19 07:04:30 +00001196/// some artifact of the routine.
Chandler Carruth0539c072012-03-31 12:42:41 +00001197bool CallAnalyzer::analyzeCall(CallSite CS) {
Chandler Carruth7ae90d42012-04-11 10:15:10 +00001198 ++NumCallsAnalyzed;
1199
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001200 // Perform some tweaks to the cost and threshold based on the direct
1201 // callsite information.
Chandler Carruth0539c072012-03-31 12:42:41 +00001202
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001203 // We want to more aggressively inline vector-dense kernels, so up the
1204 // threshold, and we'll lower it if the % of vector instructions gets too
Chandler Carrutha004f222015-05-27 02:49:05 +00001205 // low. Note that these bonuses are somewhat arbitrary and evolved over time
1206 // by accident as much as because they are principled bonuses.
1207 //
1208 // FIXME: It would be nice to remove all such bonuses. At least it would be
1209 // nice to base the bonus values on something more scientific.
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001210 assert(NumInstructions == 0);
1211 assert(NumVectorInstructions == 0);
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001212
1213 // Update the threshold based on callsite properties
1214 updateThreshold(CS, F);
1215
Chandler Carrutha004f222015-05-27 02:49:05 +00001216 FiftyPercentVectorBonus = 3 * Threshold / 2;
1217 TenPercentVectorBonus = 3 * Threshold / 4;
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001218 const DataLayout &DL = F.getParent()->getDataLayout();
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001219
Chandler Carrutha004f222015-05-27 02:49:05 +00001220 // Track whether the post-inlining function would have more than one basic
1221 // block. A function with a single basic block is often intended to be
1222 // inlined. Balloon the threshold by 50% until we pass the single-BB phase.
1223 bool SingleBB = true;
1224 int SingleBBBonus = Threshold / 2;
1225
1226 // Speculatively apply all possible bonuses to Threshold. If cost exceeds
1227 // this Threshold any time, and cost cannot decrease, we can stop processing
1228 // the rest of the function body.
1229 Threshold += (SingleBBBonus + FiftyPercentVectorBonus);
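// Concretely (assuming the stock -inline-threshold default of 225 and no
// adjustment from updateThreshold): SingleBBBonus = 225 / 2 = 112,
// FiftyPercentVectorBonus = 3 * 225 / 2 = 337, so the speculative
// Threshold scanned against below is 225 + 112 + 337 = 674 (integer
// division throughout).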
1230
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001231 // Give out bonuses per argument, as the instructions setting them up will
1232 // be gone after inlining.
1233 for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001234 if (CS.isByValArgument(I)) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001235 // We approximate the number of loads and stores needed by dividing the
1236 // size of the byval type by the target's pointer size.
1237 PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001238 unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
1239 unsigned PointerSize = DL.getPointerSizeInBits();
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001240 // Ceiling division.
1241 unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001242
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001243 // If it generates more than 8 stores it is likely to be expanded as an
1244 // inline memcpy so we take that as an upper bound. Otherwise we assume
1245 // one load and one store per word copied.
1246 // FIXME: The maxStoresPerMemcpy setting from the target should be used
1247 // here instead of a magic number of 8, but it's not available via
1248 // DataLayout.
1249 NumStores = std::min(NumStores, 8U);
1250
1251 Cost -= 2 * NumStores * InlineConstants::InstrCost;
1252 } else {
1253 // For non-byval arguments subtract off one instruction per call
1254 // argument.
1255 Cost -= InlineConstants::InstrCost;
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001256 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001257 }
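// A worked example of the byval estimate above: for a byval argument of
// type [32 x i64] on a target with 64-bit pointers, TypeSize = 2048 and
// PointerSize = 64, so NumStores = 2048 / 64 = 32, clamped to 8; the
// argument is then credited 2 * 8 * InlineConstants::InstrCost = 80
// (with the usual InstrCost of 5) against Cost.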
James Molloy6df8f272016-11-14 11:14:41 +00001258 // The call instruction also disappears after inlining.
1259 Cost -= InlineConstants::InstrCost + InlineConstants::CallPenalty;
1260
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001261 // If there is only one call to the function, and it has local linkage,
1262 // the cost of inlining it drops dramatically.
Chad Rosier567556a2016-04-28 14:47:23 +00001263 bool OnlyOneCallAndLocalLinkage =
1264 F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
James Molloy4f6fb952012-12-20 16:04:27 +00001265 if (OnlyOneCallAndLocalLinkage)
Piotr Padlewskid89875c2016-08-10 21:15:22 +00001266 Cost -= InlineConstants::LastCallToStaticBonus;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001267
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001268 // If this function uses the coldcc calling convention, prefer not to inline
1269 // it.
1270 if (F.getCallingConv() == CallingConv::Cold)
1271 Cost += InlineConstants::ColdccPenalty;
1272
1273 // Check if we're done. This can happen due to bonuses and penalties.
1274 if (Cost > Threshold)
1275 return false;
1276
Chandler Carruth0539c072012-03-31 12:42:41 +00001277 if (F.empty())
1278 return true;
1279
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001280 Function *Caller = CS.getInstruction()->getParent()->getParent();
1281 // Check if the caller function is recursive itself.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001282 for (User *U : Caller->users()) {
1283 CallSite Site(U);
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001284 if (!Site)
1285 continue;
1286 Instruction *I = Site.getInstruction();
1287 if (I->getParent()->getParent() == Caller) {
1288 IsCallerRecursive = true;
1289 break;
1290 }
1291 }
1292
Chandler Carruth0539c072012-03-31 12:42:41 +00001293 // Populate our simplified values by mapping from function arguments to call
1294 // arguments with known important simplifications.
1295 CallSite::arg_iterator CAI = CS.arg_begin();
1296 for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
1297 FAI != FAE; ++FAI, ++CAI) {
1298 assert(CAI != CS.arg_end());
1299 if (Constant *C = dyn_cast<Constant>(CAI))
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001300 SimplifiedValues[&*FAI] = C;
Chandler Carruth0539c072012-03-31 12:42:41 +00001301
1302 Value *PtrArg = *CAI;
1303 if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001304 ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());
Chandler Carruth0539c072012-03-31 12:42:41 +00001305
1306 // We can SROA any pointer arguments derived from alloca instructions.
1307 if (isa<AllocaInst>(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001308 SROAArgValues[&*FAI] = PtrArg;
Chandler Carruth0539c072012-03-31 12:42:41 +00001309 SROAArgCosts[PtrArg] = 0;
1310 }
1311 }
1312 }
1313 NumConstantArgs = SimplifiedValues.size();
1314 NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
1315 NumAllocaArgs = SROAArgValues.size();
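// A sketch of what this mapping buys us (hypothetical call site, not
// from this file): for a call  @f(i32 42, i32* %buf)  where %buf is an
// alloca in the caller, the callee's first formal maps to the constant
// 42 in SimplifiedValues and the second lands in SROAArgValues; the
// former lets the terminator folding below prune dead blocks, the
// latter feeds the SROA cost-savings accounting.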
1316
Hal Finkel57f03dd2014-09-07 13:49:57 +00001317 // FIXME: If a caller has multiple calls to a callee, we end up recomputing
1318 // the ephemeral values multiple times (and they're completely determined by
1319 // the callee, so this is purely duplicate work).
1320 SmallPtrSet<const Value *, 32> EphValues;
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001321 CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);
Hal Finkel57f03dd2014-09-07 13:49:57 +00001322
Chandler Carruth0539c072012-03-31 12:42:41 +00001323 // The worklist of live basic blocks in the callee *after* inlining. We avoid
1324 // adding basic blocks of the callee which can be proven to be dead for this
1325 // particular call site in order to get more accurate cost estimates. This
1326 // requires a somewhat heavyweight iteration pattern: we need to walk the
1327 // basic blocks in a breadth-first order as we insert live successors. To
1328 // accomplish this, prioritizing for small iterations because we exit after
1329 // crossing our threshold, we use a small-size optimized SetVector.
1330 typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
Chad Rosier567556a2016-04-28 14:47:23 +00001331 SmallPtrSet<BasicBlock *, 16>>
1332 BBSetVector;
Chandler Carruth0539c072012-03-31 12:42:41 +00001333 BBSetVector BBWorklist;
1334 BBWorklist.insert(&F.getEntryBlock());
1335 // Note that we *must not* cache the size: this loop grows the worklist.
1336 for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
1337 // Bail out the moment we cross the threshold. This means we'll under-count
1338 // the cost, but only when undercounting doesn't matter.
Chandler Carrutha004f222015-05-27 02:49:05 +00001339 if (Cost > Threshold)
Chandler Carruth0539c072012-03-31 12:42:41 +00001340 break;
1341
1342 BasicBlock *BB = BBWorklist[Idx];
1343 if (BB->empty())
Chandler Carruth4d1d34f2012-03-14 23:19:53 +00001344 continue;
Dan Gohman4552e3c2009-10-13 18:30:07 +00001345
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001346 // Disallow inlining a blockaddress. A blockaddress only has defined
1347 // behavior for an indirect branch in the same function, and we do not
1348 // currently support inlining indirect branches. But, the inliner may not
1349 // see an indirect branch that ends up being dead code at a particular call
1350 // site. If the blockaddress escapes the function, e.g., via a global
1351 // variable, inlining may lead to an invalid cross-function reference.
1352 if (BB->hasAddressTaken())
1353 return false;
1354
Chandler Carruth0539c072012-03-31 12:42:41 +00001355 // Analyze the cost of this block. If we blow through the threshold, this
1356 // returns false, and we can bail out.
Easwaran Ramand295b002016-04-13 21:20:22 +00001357 if (!analyzeBlock(BB, EphValues))
1358 return false;
Eric Christopher46308e62011-02-01 01:16:32 +00001359
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001360 TerminatorInst *TI = BB->getTerminator();
1361
Chandler Carruth0539c072012-03-31 12:42:41 +00001362 // Add in the live successors by first checking whether we have a terminator
1363 // that may be simplified based on the values simplified by this call.
1364 if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
1365 if (BI->isConditional()) {
1366 Value *Cond = BI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001367 if (ConstantInt *SimpleCond =
1368 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001369 BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
1370 continue;
Eric Christopher46308e62011-02-01 01:16:32 +00001371 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001372 }
1373 } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
1374 Value *Cond = SI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001375 if (ConstantInt *SimpleCond =
1376 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001377 BBWorklist.insert(SI->findCaseValue(SimpleCond).getCaseSuccessor());
1378 continue;
1379 }
1380 }
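// Continuing the hypothetical example above: if the callee contains
//
//   %c = icmp eq i32 %n, 42
//   br i1 %c, label %then, label %else
//
// and %n was mapped to 42, the icmp simplifies to true in
// SimplifiedValues, so only %then is added to BBWorklist here and the
// %else subtree contributes no cost unless it is reachable another way.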
Eric Christopher46308e62011-02-01 01:16:32 +00001381
Chandler Carruth0539c072012-03-31 12:42:41 +00001382 // If we're unable to select a particular successor, just count all of
1383 // them.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001384 for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
1385 ++TIdx)
Chandler Carruth0539c072012-03-31 12:42:41 +00001386 BBWorklist.insert(TI->getSuccessor(TIdx));
1387
1388 // If we had any successors at this point, then post-inlining is likely to
1389 // have them as well. Note that we assume any basic blocks which existed
1390 // due to branches or switches which folded above will also fold after
1391 // inlining.
1392 if (SingleBB && TI->getNumSuccessors() > 1) {
1393 // Take off the bonus we applied to the threshold.
1394 Threshold -= SingleBBBonus;
1395 SingleBB = false;
Eric Christopher46308e62011-02-01 01:16:32 +00001396 }
1397 }
Andrew Trickcaa500b2011-10-01 01:27:56 +00001398
Chandler Carruthcb5beb32013-12-12 11:59:26 +00001399 // If the callee contains a noduplicate call, we can still inline as long as
James Molloy4f6fb952012-12-20 16:04:27 +00001400 // inlining would cause the removal of the callee (so the noduplicate call
1401 // is not actually duplicated, just moved).
1402 if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
1403 return false;
1404
Chandler Carrutha004f222015-05-27 02:49:05 +00001405 // We applied the maximum possible vector bonus at the beginning. Now,
1406 // subtract the excess bonus, if any, from the Threshold before
1407 // comparing against Cost.
1408 if (NumVectorInstructions <= NumInstructions / 10)
1409 Threshold -= FiftyPercentVectorBonus;
1410 else if (NumVectorInstructions <= NumInstructions / 2)
1411 Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus);
Chandler Carruth0539c072012-03-31 12:42:41 +00001412
Hans Wennborg00ab73d2016-02-05 20:32:42 +00001413 return Cost < std::max(1, Threshold);
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001414}
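// Tying off the vector-bonus arithmetic in analyzeCall above (still
// assuming a base threshold of 225): at most 10% vector instructions
// takes back the full 337; between 10% and 50% removes only
// 337 - 168 = 169, leaving the TenPercentVectorBonus of 168 in place;
// above 50% the whole speculative bonus is kept.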
1415
Manman Ren49d684e2012-09-12 05:06:18 +00001416#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
Chandler Carruth0539c072012-03-31 12:42:41 +00001417/// \brief Dump stats about this call's analysis.
Yaron Kereneb2a2542016-01-29 20:50:44 +00001418LLVM_DUMP_METHOD void CallAnalyzer::dump() {
Eric Christophera13839f2014-02-26 23:27:16 +00001419#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
Chandler Carruth0539c072012-03-31 12:42:41 +00001420 DEBUG_PRINT_STAT(NumConstantArgs);
1421 DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
1422 DEBUG_PRINT_STAT(NumAllocaArgs);
1423 DEBUG_PRINT_STAT(NumConstantPtrCmps);
1424 DEBUG_PRINT_STAT(NumConstantPtrDiffs);
1425 DEBUG_PRINT_STAT(NumInstructionsSimplified);
Chandler Carrutha004f222015-05-27 02:49:05 +00001426 DEBUG_PRINT_STAT(NumInstructions);
Chandler Carruth0539c072012-03-31 12:42:41 +00001427 DEBUG_PRINT_STAT(SROACostSavings);
1428 DEBUG_PRINT_STAT(SROACostSavingsLost);
James Molloy4f6fb952012-12-20 16:04:27 +00001429 DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
Chandler Carruth394e34f2014-01-31 22:32:32 +00001430 DEBUG_PRINT_STAT(Cost);
1431 DEBUG_PRINT_STAT(Threshold);
Chandler Carruth0539c072012-03-31 12:42:41 +00001432#undef DEBUG_PRINT_STAT
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001433}
Manman Renc3366cc2012-09-06 19:55:56 +00001434#endif
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001435
Akira Hatanaka5af7ace2015-11-13 01:44:32 +00001436 /// \brief Test that two functions either both have or both lack the given
1437 /// attribute.
Chad Rosier567556a2016-04-28 14:47:23 +00001438template <typename AttrKind>
Akira Hatanaka5af7ace2015-11-13 01:44:32 +00001439static bool attributeMatches(Function *F1, Function *F2, AttrKind Attr) {
1440 return F1->getFnAttribute(Attr) == F2->getFnAttribute(Attr);
1441}
1442
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001443/// \brief Test that there are no attribute conflicts between Caller and Callee
1444/// that prevent inlining.
1445static bool functionsHaveCompatibleAttributes(Function *Caller,
Eric Christopher4371b132015-07-02 01:11:47 +00001446 Function *Callee,
1447 TargetTransformInfo &TTI) {
Eric Christopherd566fb12015-07-29 22:09:48 +00001448 return TTI.areInlineCompatible(Caller, Callee) &&
Akira Hatanaka1cb242e2015-12-22 23:57:37 +00001449 AttributeFuncs::areInlineCompatible(*Caller, *Callee);
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001450}
1451
Sean Silvaab6a6832016-07-23 04:22:50 +00001452InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001453 CallSite CS, const InlineParams &Params, TargetTransformInfo &CalleeTTI,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001454 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Sean Silvaab6a6832016-07-23 04:22:50 +00001455 ProfileSummaryInfo *PSI) {
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001456 return getInlineCost(CS, CS.getCalledFunction(), Params, CalleeTTI,
1457 GetAssumptionCache, PSI);
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001458}
1459
Sean Silvaab6a6832016-07-23 04:22:50 +00001460InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001461 CallSite CS, Function *Callee, const InlineParams &Params,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001462 TargetTransformInfo &CalleeTTI,
1463 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
1464 ProfileSummaryInfo *PSI) {
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001465
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001466 // Cannot inline indirect calls.
1467 if (!Callee)
1468 return llvm::InlineCost::getNever();
1469
1470 // Calls to functions with always-inline attributes should be inlined
1471 // whenever possible.
Peter Collingbourne68a88972014-05-19 18:25:54 +00001472 if (CS.hasFnAttr(Attribute::AlwaysInline)) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001473 if (isInlineViable(*Callee))
1474 return llvm::InlineCost::getAlways();
1475 return llvm::InlineCost::getNever();
1476 }
1477
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001478 // Never inline functions with conflicting attributes (unless callee has
1479 // always-inline attribute).
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001480 if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee, CalleeTTI))
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001481 return llvm::InlineCost::getNever();
1482
Paul Robinsondcbe35b2013-11-18 21:44:03 +00001483 // Don't inline this call if the caller has the optnone attribute.
1484 if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone))
1485 return llvm::InlineCost::getNever();
1486
Sanjoy Das5ce32722016-04-08 00:48:30 +00001487 // Don't inline functions which can be interposed at link-time. Don't inline
1488 // functions marked noinline or call sites marked noinline.
Craig Topper107b1872016-12-09 02:18:04 +00001489 // Note: inlining non-exact non-interposable functions is fine, since we know
Sanjoy Das5ce32722016-04-08 00:48:30 +00001490 // we have *a* correct implementation of the source level function.
Chad Rosier567556a2016-04-28 14:47:23 +00001491 if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
1492 CS.isNoInline())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001493 return llvm::InlineCost::getNever();
1494
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001495 DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
Chad Rosier567556a2016-04-28 14:47:23 +00001496 << "...\n");
Andrew Trickcaa500b2011-10-01 01:27:56 +00001497
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001498 CallAnalyzer CA(CalleeTTI, GetAssumptionCache, PSI, *Callee, CS, Params);
Chandler Carruth0539c072012-03-31 12:42:41 +00001499 bool ShouldInline = CA.analyzeCall(CS);
Dan Gohman4552e3c2009-10-13 18:30:07 +00001500
Chandler Carruth0539c072012-03-31 12:42:41 +00001501 DEBUG(CA.dump());
1502
1503 // Check if there was a reason to force inlining or to disable it.
1504 if (!ShouldInline && CA.getCost() < CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001505 return InlineCost::getNever();
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001506 if (ShouldInline && CA.getCost() >= CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001507 return InlineCost::getAlways();
Andrew Trickcaa500b2011-10-01 01:27:56 +00001508
Chandler Carruth0539c072012-03-31 12:42:41 +00001509 return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
Dan Gohman4552e3c2009-10-13 18:30:07 +00001510}
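// A minimal caller-side sketch (setup assumed, not shown in this file):
//
//   InlineParams Params = getInlineParams();
//   InlineCost IC =
//       getInlineCost(CS, Params, CalleeTTI, GetAssumptionCache, PSI);
//   InlineFunctionInfo IFI;
//   if (IC)  // operator bool: the cost beat the threshold
//     InlineFunction(CS, IFI);
//
// where CalleeTTI, GetAssumptionCache, and PSI come from the usual
// analysis passes and InlineFunction lives in Transforms/Utils/Cloning.h.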
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001511
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001512bool llvm::isInlineViable(Function &F) {
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001513 bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001514 for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001515 // Disallow inlining of functions which contain indirect branches or
1516 // blockaddresses.
1517 if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001518 return false;
1519
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001520 for (auto &II : *BI) {
1521 CallSite CS(&II);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001522 if (!CS)
1523 continue;
1524
1525 // Disallow recursive calls.
1526 if (&F == CS.getCalledFunction())
1527 return false;
1528
1529 // Disallow calls which expose returns-twice to a function not previously
1530 // attributed as such.
1531 if (!ReturnsTwice && CS.isCall() &&
1532 cast<CallInst>(CS.getInstruction())->canReturnTwice())
1533 return false;
Reid Kleckner223de262015-04-14 20:38:14 +00001534
Reid Kleckner60381792015-07-07 22:25:32 +00001535 // Disallow inlining functions that call @llvm.localescape. Doing this
Reid Kleckner223de262015-04-14 20:38:14 +00001536 // correctly would require major changes to the inliner.
1537 if (CS.getCalledFunction() &&
1538 CS.getCalledFunction()->getIntrinsicID() ==
Reid Kleckner60381792015-07-07 22:25:32 +00001539 llvm::Intrinsic::localescape)
Reid Kleckner223de262015-04-14 20:38:14 +00001540 return false;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001541 }
1542 }
1543
1544 return true;
1545}
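// For example, a function such as (hypothetical IR)
//
//   define void @f(i8* %addr) {
//   entry:
//     indirectbr i8* %addr, [label %one]
//   one:
//     ret void
//   }
//
// trips the IndirectBrInst check above, so isInlineViable returns false
// and even an always-inline attribute will not get @f inlined.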
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001546
1547// APIs to create InlineParams based on command line flags and/or other
1548// parameters.
1549
1550InlineParams llvm::getInlineParams(int Threshold) {
1551 InlineParams Params;
1552
1553 // This field is the threshold to use for a callee by default. This is
1554 // derived from one or more of:
1555 // * optimization or size-optimization levels,
1556 // * a value passed to the createFunctionInliningPass function, or
1557 // * the -inline-threshold flag.
1558 // If the -inline-threshold flag is explicitly specified, that is used
1559 // irrespective of anything else.
1560 if (InlineThreshold.getNumOccurrences() > 0)
1561 Params.DefaultThreshold = InlineThreshold;
1562 else
1563 Params.DefaultThreshold = Threshold;
1564
1565 // Set the HintThreshold knob from the -inlinehint-threshold.
1566 Params.HintThreshold = HintThreshold;
1567
1568 // Set the HotCallSiteThreshold knob from the -hot-callsite-threshold.
1569 Params.HotCallSiteThreshold = HotCallSiteThreshold;
1570
1571 // Set the OptMinSizeThreshold and OptSizeThreshold params only if the
1573 // -inline-threshold commandline option is not explicitly given. If that
1574 // option is present, then its value applies even for callees with size and
1575 // minsize attributes.
1576 // If the -inline-threshold is not specified, set the ColdThreshold from the
1577 // -inlinecold-threshold even if it is not explicitly passed. If
1578 // -inline-threshold is specified, then -inlinecold-threshold needs to be
1579 // explicitly specified to set the ColdThreshold knob
1580 if (InlineThreshold.getNumOccurrences() == 0) {
1581 Params.OptMinSizeThreshold = InlineConstants::OptMinSizeThreshold;
1582 Params.OptSizeThreshold = InlineConstants::OptSizeThreshold;
1583 Params.ColdThreshold = ColdThreshold;
1584 } else if (ColdThreshold.getNumOccurrences() > 0) {
1585 Params.ColdThreshold = ColdThreshold;
1586 }
1587 return Params;
1588}
1589
1590InlineParams llvm::getInlineParams() {
1591 return getInlineParams(InlineThreshold);
1592}
1593
1594// Compute the default threshold for inlining based on the opt level and the
1595// size opt level.
1596static int computeThresholdFromOptLevels(unsigned OptLevel,
1597 unsigned SizeOptLevel) {
1598 if (OptLevel > 2)
1599 return InlineConstants::OptAggressiveThreshold;
1600 if (SizeOptLevel == 1) // -Os
1601 return InlineConstants::OptSizeThreshold;
1602 if (SizeOptLevel == 2) // -Oz
1603 return InlineConstants::OptMinSizeThreshold;
1604 return InlineThreshold;
1605}
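// With the usual values of these constants (OptAggressiveThreshold = 275,
// OptSizeThreshold = 75, OptMinSizeThreshold = 25), -O3 maps to 275,
// -Os to 75, -Oz to 25, and anything else falls back to the
// -inline-threshold default of 225.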
1606
1607InlineParams llvm::getInlineParams(unsigned OptLevel, unsigned SizeOptLevel) {
1608 return getInlineParams(computeThresholdFromOptLevels(OptLevel, SizeOptLevel));
1609}