//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

14#include "llvm/Analysis/InlineCost.h"
Chandler Carruthed0881b2012-12-03 16:50:05 +000015#include "llvm/ADT/STLExtras.h"
16#include "llvm/ADT/SetVector.h"
17#include "llvm/ADT/SmallPtrSet.h"
18#include "llvm/ADT/SmallVector.h"
19#include "llvm/ADT/Statistic.h"
Daniel Jasperaec2fa32016-12-19 08:22:17 +000020#include "llvm/Analysis/AssumptionCache.h"
Easwaran Raman12585b02017-01-20 22:44:04 +000021#include "llvm/Analysis/BlockFrequencyInfo.h"
Hal Finkel57f03dd2014-09-07 13:49:57 +000022#include "llvm/Analysis/CodeMetrics.h"
Chandler Carruthd9903882015-01-14 11:23:27 +000023#include "llvm/Analysis/ConstantFolding.h"
Chandler Carruth0539c072012-03-31 12:42:41 +000024#include "llvm/Analysis/InstructionSimplify.h"
Easwaran Raman71069cf2016-06-09 22:23:21 +000025#include "llvm/Analysis/ProfileSummaryInfo.h"
Chandler Carruth42f3dce2013-01-21 11:55:09 +000026#include "llvm/Analysis/TargetTransformInfo.h"
Chandler Carruth219b89b2014-03-04 11:01:28 +000027#include "llvm/IR/CallSite.h"
Chandler Carruth9fb823b2013-01-02 11:36:10 +000028#include "llvm/IR/CallingConv.h"
29#include "llvm/IR/DataLayout.h"
Chandler Carruth03eb0de2014-03-04 10:40:04 +000030#include "llvm/IR/GetElementPtrTypeIterator.h"
Chandler Carruth9fb823b2013-01-02 11:36:10 +000031#include "llvm/IR/GlobalAlias.h"
Chandler Carruth7da14f12014-03-06 03:23:41 +000032#include "llvm/IR/InstVisitor.h"
Chandler Carruth9fb823b2013-01-02 11:36:10 +000033#include "llvm/IR/IntrinsicInst.h"
34#include "llvm/IR/Operator.h"
Chandler Carruth0539c072012-03-31 12:42:41 +000035#include "llvm/Support/Debug.h"
Chandler Carruth0539c072012-03-31 12:42:41 +000036#include "llvm/Support/raw_ostream.h"
Eric Christopher2dfbd7e2011-02-05 00:49:15 +000037
using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

static cl::opt<int> InlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

static cl::opt<int>
    ColdCallSiteThreshold("inline-cold-callsite-threshold", cl::Hidden,
                          cl::init(45),
                          cl::desc("Threshold for inlining cold callsites"));

// This threshold helps the performance of instrumentation-based PGO before
// we actually hook up the inliner with analysis passes such as BPI and BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(45),
    cl::desc("Threshold for inlining functions with cold attribute"));

static cl::opt<int>
    HotCallSiteThreshold("hot-callsite-threshold", cl::Hidden, cl::init(3000),
                         cl::ZeroOrMore,
                         cl::desc("Threshold for hot callsites"));

namespace {

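/// \brief Per-call-site inline cost analysis, implemented as an InstVisitor.
///
/// This walks the candidate callee's instructions, simplifying them against
/// the concrete arguments at the call site under consideration, and
/// accumulates an estimate of the cost of inlining the callee there.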
class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// Getter for the cache of @llvm.assume intrinsics.
  std::function<AssumptionCache &(Function &)> &GetAssumptionCache;

  /// Getter for BlockFrequencyInfo.
  Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  /// The called function.
  Function &F;

  // Cache the DataLayout since we use it a lot.
  const DataLayout &DL;

  /// The candidate callsite being analyzed. Please do not use this to do
  /// analysis in the caller function; we want the inline cost query to be
  /// easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  /// Tunable parameters that control the analysis.
  const InlineParams &Params;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int FiftyPercentVectorBonus, TenPercentVectorBonus;
  int VectorBonus;

  /// While we walk the potentially-inlined instructions, we build up and
  /// maintain a mapping of simplified values specific to this callsite. The
  /// idea is to propagate any special information we have about arguments to
  /// this call through the inlinable section of the function, and account for
  /// likely simplifications post-inlining. The most important aspect we track
  /// is CFG altering simplifications -- when we prove a basic block dead, that
  /// can cause dramatic shifts in the cost of inlining a function.
  DenseMap<Value *, Constant *> SimplifiedValues;

  /// Keep track of the values which map back (through function arguments) to
  /// allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  /// The mapping of caller Alloca values to their accumulated cost savings. If
  /// we have to disable SROA for one of the allocas, this tells us how much
  /// cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  /// Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPFree(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  template <typename Callable>
  bool simplifyInstruction(Instruction &I, Callable Evaluate);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non-null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *);
  void visit(Module &);
  void visit(Function *);
  void visit(Function &);
  void visit(BasicBlock *);
  void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI,
               std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
               Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI,
               ProfileSummaryInfo *PSI, Function &Callee, CallSite CSArg,
               const InlineParams &Params)
      : TTI(TTI), GetAssumptionCache(GetAssumptionCache), GetBFI(GetBFI),
        PSI(PSI), F(Callee), DL(F.getParent()->getDataLayout()),
        CandidateCS(CSArg), Params(Params), Threshold(Params.DefaultThreshold),
        Cost(0), IsCallerRecursive(false), IsRecursiveCall(false),
        ExposesReturnsTwice(false), HasDynamicAlloca(false),
        ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
        HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
        NumVectorInstructions(0), FiftyPercentVectorBonus(0),
        TenPercentVectorBonus(0), VectorBonus(0), NumConstantArgs(0),
        NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), NumConstantPtrCmps(0),
        NumConstantPtrDiffs(0), NumInstructionsSimplified(0),
        SROACostSavings(0), SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
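///
/// For example, given a GEP such as
///   getelementptr inbounds { i32, i64 }, { i32, i64 }* %p, i32 0, i32 1
/// on a target with 64-bit pointers, the struct index selects field 1, whose
/// StructLayout offset (8 bytes under a typical data layout) is added to
/// Offset, folding the GEP to a known base plus constant offset.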
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero())
      continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = GTI.getStructTypeOrNull()) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}

/// \brief Use TTI to check whether a GEP is free.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPFree(GetElementPtrInst &GEP) {
  SmallVector<Value *, 4> Indices;
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (Constant *SimpleOp = SimplifiedValues.lookup(*I))
      Indices.push_back(SimpleOp);
    else
      Indices.push_back(*I);
  return TargetTransformInfo::TCC_Free ==
         TTI.getGEPCost(GEP.getSourceElementType(), GEP.getPointerOperand(),
                        Indices);
}

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
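  // (For example, an "alloca i8, i32 %n" whose %n simplifies to a constant at
  // this call site is accounted for like a fixed-size static alloca.)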
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded, and disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return isGEPFree(I);
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here, we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  // Lambda to check whether a GEP's indices are all constant.
  auto IsGEPOffsetConstant = [&](GetElementPtrInst &GEP) {
    for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
      if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
        return false;
    return true;
  };

  if (IsGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return isGEPFree(I);
}

/// Simplify \p I if its operands are constants and update SimplifiedValues.
/// \p Evaluate is a callable specific to the instruction type that evaluates
/// the instruction when all of its operands are constants.
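///
/// For example, visitBitCast below passes an Evaluate callable that folds the
/// single constant operand with ConstantExpr::getBitCast.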
template <typename Callable>
bool CallAnalyzer::simplifyInstruction(Instruction &I, Callable Evaluate) {
  SmallVector<Constant *, 2> COps;
  for (Value *Op : I.operands()) {
    Constant *COp = dyn_cast<Constant>(Op);
    if (!COp)
      COp = SimplifiedValues.lookup(Op);
    if (!COp)
      return false;
    COps.push_back(COp);
  }
  auto *C = Evaluate(COps);
  if (!C)
    return false;
  SimplifiedValues[&I] = C;
  return true;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getBitCast(COps[0], I.getType());
      }))
    return true;

  // Track base/offsets through casts.
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getPtrToInt(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getIntToPtr(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCast(I.getOpcode(), COps[0], I.getType());
      }))
    return true;

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantFoldInstOperands(&I, COps[0], DL);
      }))
    return true;

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  return CandidateCS.paramHasAttr(A->getArgNo(), Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in the below scenario inlining hot_call_X() may be
  // beneficial:
  //   main() {
  //     hot_call_1();
  //     ...
  //     hot_call_N()
  //     exit(0);
  //   }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In the future, we should elaborate this based on BPI and BFI in more
  // general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();

  // Return min(A, B) if B is valid.
  auto MinIfValid = [](int A, Optional<int> B) {
    return B ? std::min(A, B.getValue()) : A;
  };

  // Return max(A, B) if B is valid.
  auto MaxIfValid = [](int A, Optional<int> B) {
    return B ? std::max(A, B.getValue()) : A;
  };

  // Use the OptMinSizeThreshold or OptSizeThreshold knob if they are available
  // and reduce the threshold if the caller has the necessary attribute.
  if (Caller->optForMinSize())
    Threshold = MinIfValid(Threshold, Params.OptMinSizeThreshold);
  else if (Caller->optForSize())
    Threshold = MinIfValid(Threshold, Params.OptSizeThreshold);

  // Adjust the threshold based on the inlinehint attribute and profile-based
  // hotness information if the caller does not have the MinSize attribute.
  if (!Caller->optForMinSize()) {
    if (Callee.hasFnAttribute(Attribute::InlineHint))
      Threshold = MaxIfValid(Threshold, Params.HintThreshold);
    if (PSI) {
      BlockFrequencyInfo *CallerBFI = GetBFI ? &((*GetBFI)(*Caller)) : nullptr;
      // FIXME: After switching to the new pass manager, simplify the logic
      // below by checking only the callsite hotness/coldness. The check for
      // CallerBFI exists only because we do not have BFI available with the
      // old PM.
      //
      // Use the callee's hotness information only if we have no way of
      // determining the callsite's hotness information. Callsite hotness can
      // be determined if a sample profile is used (which adds hotness metadata
      // to calls) or if the caller's BlockFrequencyInfo is available.
      if (CallerBFI || PSI->hasSampleProfile()) {
        if (PSI->isHotCallSite(CS, CallerBFI)) {
          DEBUG(dbgs() << "Hot callsite.\n");
          Threshold = Params.HotCallSiteThreshold.getValue();
        } else if (PSI->isColdCallSite(CS, CallerBFI)) {
          DEBUG(dbgs() << "Cold callsite.\n");
          Threshold = MinIfValid(Threshold, Params.ColdCallSiteThreshold);
        }
      } else {
        if (PSI->isFunctionEntryHot(&Callee)) {
          DEBUG(dbgs() << "Hot callee.\n");
          // If callsite hotness cannot be determined, we may still know
          // that the callee is hot and treat it as a weaker hint for threshold
          // increase.
          Threshold = MaxIfValid(Threshold, Params.HintThreshold);
        } else if (PSI->isFunctionEntryCold(&Callee)) {
          DEBUG(dbgs() << "Cold callee.\n");
          Threshold = MinIfValid(Threshold, Params.ColdThreshold);
        }
      }
    }
  }

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();
}

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCompare(I.getPredicate(), COps[0], COps[1]);
      }))
    return true;

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
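  // For example, two inbounds GEPs with constant indices off the same
  // argument pointer have known offsets here, so the icmp folds to a
  // constant.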
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  auto Evaluate = [&](SmallVectorImpl<Constant *> &COps) {
    Value *SimpleV = nullptr;
    if (auto FI = dyn_cast<FPMathOperator>(&I))
      SimpleV = SimplifyFPBinOp(I.getOpcode(), COps[0], COps[1],
                                FI->getFastMathFlags(), DL);
    else
      SimpleV = SimplifyBinOp(I.getOpcode(), COps[0], COps[1], DL);
    return dyn_cast_or_null<Constant>(SimpleV);
  };

  if (simplifyInstruction(I, Evaluate))
    return true;

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getExtractValue(COps[0], I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getInsertValue(/*AggregateOperand*/ COps[0],
                                            /*InsertedValueOperand*/ COps[1],
                                            I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(CS, F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
       ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(CS, F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}

bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        return Base::visitCallSite(CS);

      case Intrinsic::load_relative:
        // This is normally lowered to 4 LLVM instructions.
        Cost += 3 * InlineConstants::InstrCost;
        return false;

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        // SROA can usually chew through these intrinsics, but they aren't free.
        return false;
      case Intrinsic::localescape:
        HasFrameEscape = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getParent()->getParent()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F)
    return Base::visitCallSite(CS);

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
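  // (A typical case: a call through a function-pointer argument that is bound
  // to a known function constant at this particular call site.)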
  auto IndirectCallParams = Params;
  IndirectCallParams.DefaultThreshold = InlineConstants::IndirectCallThreshold;
  CallAnalyzer CA(TTI, GetAssumptionCache, GetBFI, PSI, *F, CS,
                  IndirectCallParams);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // threshold to get the bonus we want to apply, but don't go below zero.
    Cost -= std::max(0, CA.getThreshold() - CA.getCost());
  }

  return Base::visitCallSite(CS);
}

bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Assume the most general case where the switch is lowered into
  // either a jump table, bit test, or a balanced binary tree consisting of
  // case clusters without merging adjacent clusters with the same
  // destination. We do not consider the switches that are lowered with a mix
  // of jump table/bit test/binary search tree. The cost of the switch is
  // proportional to the size of the tree or the size of jump table range.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.

  // The maximum cost to which Cost may validly be increased in this function.
  int CostUpperBound = INT_MAX - InlineConstants::InstrCost - 1;

  // Exit early for a large switch, assuming one case needs at least one
  // instruction.
  // FIXME: This is not true for a bit test, but ignore such case for now to
  // save compile-time.
  int64_t CostLowerBound =
      std::min((int64_t)CostUpperBound,
               (int64_t)SI.getNumCases() * InlineConstants::InstrCost + Cost);

  if (CostLowerBound > Threshold) {
    Cost = CostLowerBound;
    return false;
  }

  unsigned JumpTableSize = 0;
  unsigned NumCaseCluster =
      TTI.getEstimatedNumberOfCaseClusters(SI, JumpTableSize);

  // If suitable for a jump table, consider the cost for the table size and
  // branch to destination.
  if (JumpTableSize) {
    int64_t JTCost = (int64_t)JumpTableSize * InlineConstants::InstrCost +
                     4 * InlineConstants::InstrCost;

    Cost = std::min((int64_t)CostUpperBound, JTCost + Cost);
    return false;
  }

  // Considering forming a binary search tree, the number of nodes is the same
  // as the number of comparisons when lowered. For a given number of clusters,
  // n, we can define a recursive function, f(n), to find the number of nodes
  // in the tree. The recursion is:
  //   f(n) = 1 + f(n/2) + f(n - n/2), when n > 3,
  //   f(n) = n, when n <= 3.
  // This leads to a binary tree where each leaf is either f(2) or f(3) when
  // n > 3. So the number of comparisons from leaves is n, while the number of
  // non-leaf nodes is:
  //   2^(log2(n) - 1) - 1
  //     = 2^log2(n) * 2^-1 - 1
  //     = n / 2 - 1.
  // Counting comparisons from both leaf and non-leaf nodes, we can estimate
  // the number of comparisons in a simple closed form:
  //   n + n / 2 - 1 = n * 3 / 2 - 1
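  // For example, with n = 8 case clusters this estimates 8 * 3 / 2 - 1 = 11
  // comparisons, each modeled below as one compare plus one conditional
  // branch.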
  if (NumCaseCluster <= 3) {
    // Suppose a comparison includes one compare and one conditional branch.
    Cost += NumCaseCluster * 2 * InlineConstants::InstrCost;
    return false;
  }

  int64_t ExpectedNumberOfCompare = 3 * (int64_t)NumCaseCluster / 2 - 1;
  int64_t SwitchCost =
      ExpectedNumberOfCompare * 2 * InlineConstants::InstrCost;

  Cost = std::min((int64_t)CostUpperBound, SwitchCost + Cost);
  return false;
}

bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
  // We never want to inline functions that contain an indirectbr. This is
  // incorrect because all the blockaddresses (in static global initializers
  // for example) would be referring to the original function, and this
  // indirect jump would jump from the inlined copy of the function into the
  // original function, which is extremely undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions with
  // indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function.
  HasIndirectBr = true;
  return false;
}

bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a resume instruction.
  return false;
}

bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a cleanupret instruction.
  return false;
}

bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a catchret instruction.
  return false;
}

bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
  // FIXME: It might be reasonable to discount the cost of instructions leading
  // to unreachable as they have the lowest possible impact on both runtime and
  // code size.
  return true; // No actual code is needed for unreachable.
}

Chandler Carruth0539c072012-03-31 12:42:41 +00001122bool CallAnalyzer::visitInstruction(Instruction &I) {
Chandler Carruthda7513a2012-05-04 00:58:03 +00001123 // Some instructions are free. All of the free intrinsics can also be
1124 // handled by SROA, etc.
Chandler Carruthb8cf5102013-01-21 12:05:16 +00001125 if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
Chandler Carruthda7513a2012-05-04 00:58:03 +00001126 return true;
1127
Chandler Carruth0539c072012-03-31 12:42:41 +00001128 // We found something we don't understand or can't handle. Mark any SROA-able
1129 // values in the operand list as no longer viable.
1130 for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
1131 disableSROA(*OI);
1132
1133 return false;
1134}
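
// For illustration: an instruction with no dedicated visitor (an atomicrmw,
// for instance, appears to have none in this version of the analysis) lands
// in the fallback above. If TTI reports it as TCC_Free it is skipped;
// otherwise analyzeBlock charges it InlineConstants::InstrCost, and any
// alloca-derived pointer operands lose their pending SROA savings through
// disableSROA.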

/// \brief Analyze a basic block for its contribution to the inline cost.
///
/// This method walks the analyzer over every instruction in the given basic
/// block and accounts for their cost during inlining at this callsite. It
/// aborts early if the threshold has been exceeded or an impossible to inline
/// construct has been detected. It returns false if inlining is no longer
/// viable, and true if inlining remains viable.
bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
                                SmallPtrSetImpl<const Value *> &EphValues) {
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
    // FIXME: Currently the number of instructions in a function, regardless
    // of our ability to simplify them during inlining to constants or dead
    // code, is used by the vector bonus heuristic. As long as that's true, we
    // have to special case debug intrinsics here to prevent differences in
    // inlining due to debug symbols. Eventually, the number of unsimplified
    // instructions shouldn't factor into the cost computation, but until
    // then, hack around it here.
    if (isa<DbgInfoIntrinsic>(I))
      continue;

    // Skip ephemeral values.
    if (EphValues.count(&*I))
      continue;

    ++NumInstructions;
    if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
      ++NumVectorInstructions;

    // If the instruction is floating point, and the target says this
    // operation is expensive or the function has the "use-soft-float"
    // attribute, this may eventually become a library call. Treat the cost
    // as such.
    if (I->getType()->isFloatingPointTy()) {
      // If the function has the "use-soft-float" attribute, mark it as
      // expensive.
      if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
          (F.getFnAttribute("use-soft-float").getValueAsString() == "true"))
        Cost += InlineConstants::CallPenalty;
    }

    // If the instruction simplified to a constant, there is no cost to this
    // instruction. Visit the instructions using our InstVisitor to account
    // for all of the per-instruction logic. The visit tree returns true if we
    // consumed the instruction in any way, and false if the instruction's
    // base cost should count against inlining.
    if (Base::visit(&*I))
      ++NumInstructionsSimplified;
    else
      Cost += InlineConstants::InstrCost;

    // If visiting this instruction detected an uninlinable pattern, abort.
    if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
        HasIndirectBr || HasFrameEscape)
      return false;

    // If the caller is a recursive function then we don't want to inline
    // functions which allocate a lot of stack space because it would increase
    // the caller stack usage dramatically.
    if (IsCallerRecursive &&
        AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
      return false;

    // Check if we've passed the maximum possible threshold so we don't spin
    // in huge basic blocks that will never inline.
    if (Cost > Threshold)
      return false;
  }

  return true;
}
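
// A small worked example of the accounting above (illustrative): a block of
// ten instructions where four are folded away by the visitor contributes
// 6 * InlineConstants::InstrCost; if one survivor is an expensive
// floating-point op (or the function carries "use-soft-float"="true"), it
// adds InlineConstants::CallPenalty on top.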

/// \brief Compute the base pointer and cumulative constant offsets for V.
///
/// This strips all constant offsets off of V, leaving it the base pointer,
/// and accumulates the total constant offset applied in the returned
/// constant. It returns null if V is not a pointer, and returns the constant
/// '0' if there are no constant offsets applied.
ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
  if (!V->getType()->isPointerTy())
    return nullptr;

  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  APInt Offset = APInt::getNullValue(IntPtrWidth);

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<Value *, 4> Visited;
  Visited.insert(V);
  do {
    if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
      if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
        return nullptr;
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
      if (GA->isInterposable())
        break;
      V = GA->getAliasee();
    } else {
      break;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  Type *IntPtrTy = DL.getIntPtrType(V->getContext());
  return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
}
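
// Illustrative example (assuming a target with 64-bit pointers): given
//   %p = getelementptr inbounds i32, i32* %base, i64 4
// this routine rewrites V from %p to %base and returns the constant 16,
// since four i32 elements span 16 bytes; a chain of inbounds GEPs and
// bitcasts accumulates into a single offset the same way.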

/// \brief Analyze a call site for potential inlining.
///
/// Returns true if inlining this call is viable, and false if it is not
/// viable. It computes the cost and adjusts the threshold based on numerous
/// factors and heuristics. If this method returns false but the computed cost
/// is below the computed threshold, then inlining was forcibly disabled by
/// some artifact of the routine.
bool CallAnalyzer::analyzeCall(CallSite CS) {
  ++NumCallsAnalyzed;

  // Perform some tweaks to the cost and threshold based on the direct
  // callsite information.

  // We want to more aggressively inline vector-dense kernels, so up the
  // threshold, and we'll lower it if the % of vector instructions gets too
  // low. Note that these bonuses are somewhat arbitrary and evolved over time
  // by accident as much as because they are principled bonuses.
  //
  // FIXME: It would be nice to remove all such bonuses. At least it would be
  // nice to base the bonus values on something more scientific.
  assert(NumInstructions == 0);
  assert(NumVectorInstructions == 0);

  // Update the threshold based on callsite properties.
  updateThreshold(CS, F);

  FiftyPercentVectorBonus = 3 * Threshold / 2;
  TenPercentVectorBonus = 3 * Threshold / 4;

  // Track whether the post-inlining function would have more than one basic
  // block. A single basic block is often intended for inlining. Balloon the
  // threshold by 50% until we pass the single-BB phase.
  bool SingleBB = true;
  int SingleBBBonus = Threshold / 2;

  // Speculatively apply all possible bonuses to Threshold. If cost exceeds
  // this Threshold any time, and cost cannot decrease, we can stop processing
  // the rest of the function body.
  Threshold += (SingleBBBonus + FiftyPercentVectorBonus);

  // Give out bonuses for the callsite, as the instructions setting them up
  // will be gone after inlining.
  Cost -= getCallsiteCost(CS, DL);

  // If there is only one call of the function, and it has internal linkage,
  // the cost of inlining it drops dramatically.
  bool OnlyOneCallAndLocalLinkage =
      F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
  if (OnlyOneCallAndLocalLinkage)
    Cost -= InlineConstants::LastCallToStaticBonus;

  // If this function uses the coldcc calling convention, prefer not to inline
  // it.
  if (F.getCallingConv() == CallingConv::Cold)
    Cost += InlineConstants::ColdccPenalty;

  // Check if we're done. This can happen due to bonuses and penalties.
  if (Cost > Threshold)
    return false;

  if (F.empty())
    return true;

  Function *Caller = CS.getInstruction()->getParent()->getParent();
  // Check if the caller function is recursive itself.
  for (User *U : Caller->users()) {
    CallSite Site(U);
    if (!Site)
      continue;
    Instruction *I = Site.getInstruction();
    if (I->getParent()->getParent() == Caller) {
      IsCallerRecursive = true;
      break;
    }
  }

  // Populate our simplified values by mapping from function arguments to call
  // arguments with known important simplifications.
  CallSite::arg_iterator CAI = CS.arg_begin();
  for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
       FAI != FAE; ++FAI, ++CAI) {
    assert(CAI != CS.arg_end());
    if (Constant *C = dyn_cast<Constant>(CAI))
      SimplifiedValues[&*FAI] = C;

    Value *PtrArg = *CAI;
    if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
      ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());

      // We can SROA any pointer arguments derived from alloca instructions.
      if (isa<AllocaInst>(PtrArg)) {
        SROAArgValues[&*FAI] = PtrArg;
        SROAArgCosts[PtrArg] = 0;
      }
    }
  }
  NumConstantArgs = SimplifiedValues.size();
  NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
  NumAllocaArgs = SROAArgValues.size();

  // FIXME: If a caller has multiple calls to a callee, we end up recomputing
  // the ephemeral values multiple times (and they're completely determined by
  // the callee, so this is purely duplicate work).
  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);

  // The worklist of live basic blocks in the callee *after* inlining. We
  // avoid adding basic blocks of the callee which can be proven to be dead
  // for this particular call site in order to get more accurate cost
  // estimates. This requires a somewhat heavyweight iteration pattern: we
  // need to walk the basic blocks in a breadth-first order as we insert live
  // successors. To accomplish this, prioritizing for small iterations because
  // we exit after crossing our threshold, we use a small-size optimized
  // SetVector.
  typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
                    SmallPtrSet<BasicBlock *, 16>>
      BBSetVector;
  BBSetVector BBWorklist;
  BBWorklist.insert(&F.getEntryBlock());
  // Note that we *must not* cache the size, this loop grows the worklist.
  for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
    // Bail out the moment we cross the threshold. This means we'll under-count
    // the cost, but only when undercounting doesn't matter.
    if (Cost > Threshold)
      break;

    BasicBlock *BB = BBWorklist[Idx];
    if (BB->empty())
      continue;

    // Disallow inlining a blockaddress. A blockaddress only has defined
    // behavior for an indirect branch in the same function, and we do not
    // currently support inlining indirect branches. But, the inliner may not
    // see an indirect branch that ends up being dead code at a particular
    // call site. If the blockaddress escapes the function, e.g., via a global
    // variable, inlining may lead to an invalid cross-function reference.
    if (BB->hasAddressTaken())
      return false;

    // Analyze the cost of this block. If we blow through the threshold, this
    // returns false, and we can bail out.
    if (!analyzeBlock(BB, EphValues))
      return false;

    TerminatorInst *TI = BB->getTerminator();

    // Add in the live successors by first checking whether we have a
    // terminator that may be simplified based on the values simplified by
    // this call.
    if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
      if (BI->isConditional()) {
        Value *Cond = BI->getCondition();
        if (ConstantInt *SimpleCond =
                dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
          BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
          continue;
        }
      }
    } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
      Value *Cond = SI->getCondition();
      if (ConstantInt *SimpleCond =
              dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
        BBWorklist.insert(SI->findCaseValue(SimpleCond)->getCaseSuccessor());
        continue;
      }
    }

    // If we're unable to select a particular successor, just count all of
    // them.
    for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
         ++TIdx)
      BBWorklist.insert(TI->getSuccessor(TIdx));

    // If we had any successors at this point, then post-inlining is likely to
    // have them as well. Note that we assume any basic blocks which existed
    // due to branches or switches which folded above will also fold after
    // inlining.
    if (SingleBB && TI->getNumSuccessors() > 1) {
      // Take off the bonus we applied to the threshold.
      Threshold -= SingleBBBonus;
      SingleBB = false;
    }
  }

  // If this is a noduplicate call, we can still inline as long as
  // inlining this would cause the removal of the caller (so the instruction
  // is not actually duplicated, just moved).
  if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
    return false;

  // We applied the maximum possible vector bonus at the beginning. Now,
  // subtract the excess bonus, if any, from the Threshold before
  // comparing against Cost.
  if (NumVectorInstructions <= NumInstructions / 10)
    Threshold -= FiftyPercentVectorBonus;
  else if (NumVectorInstructions <= NumInstructions / 2)
    Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus);

  return Cost < std::max(1, Threshold);
}
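
// A worked sketch of the bonus bookkeeping (illustrative, assuming
// updateThreshold leaves the default threshold of 225): the speculative
// threshold becomes 225 + SingleBBBonus (112) + FiftyPercentVectorBonus (337)
// = 674. A callee with multiple blocks gives back the 112 during the walk,
// and one whose vector instructions are no more than a tenth of the total
// gives back the 337 at the end, so a scalar multi-block callee is finally
// judged against the plain 225.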

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// \brief Dump stats about this call's analysis.
LLVM_DUMP_METHOD void CallAnalyzer::dump() {
#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
  DEBUG_PRINT_STAT(NumConstantArgs);
  DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
  DEBUG_PRINT_STAT(NumAllocaArgs);
  DEBUG_PRINT_STAT(NumConstantPtrCmps);
  DEBUG_PRINT_STAT(NumConstantPtrDiffs);
  DEBUG_PRINT_STAT(NumInstructionsSimplified);
  DEBUG_PRINT_STAT(NumInstructions);
  DEBUG_PRINT_STAT(SROACostSavings);
  DEBUG_PRINT_STAT(SROACostSavingsLost);
  DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
  DEBUG_PRINT_STAT(Cost);
  DEBUG_PRINT_STAT(Threshold);
#undef DEBUG_PRINT_STAT
}
#endif

/// \brief Test that there are no attribute conflicts between Caller and
/// Callee that prevent inlining.
static bool functionsHaveCompatibleAttributes(Function *Caller,
                                              Function *Callee,
                                              TargetTransformInfo &TTI) {
  return TTI.areInlineCompatible(Caller, Callee) &&
         AttributeFuncs::areInlineCompatible(*Caller, *Callee);
}

int llvm::getCallsiteCost(CallSite CS, const DataLayout &DL) {
  int Cost = 0;
  for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
    if (CS.isByValArgument(I)) {
      // We approximate the number of loads and stores needed by dividing the
      // size of the byval type by the target's pointer size.
      PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
      unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
      unsigned PointerSize = DL.getPointerSizeInBits();
      // Ceiling division.
      unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;

      // If it generates more than 8 stores it is likely to be expanded as an
      // inline memcpy so we take that as an upper bound. Otherwise we assume
      // one load and one store per word copied.
      // FIXME: The maxStoresPerMemcpy setting from the target should be used
      // here instead of a magic number of 8, but it's not available via
      // DataLayout.
      NumStores = std::min(NumStores, 8U);

      Cost += 2 * NumStores * InlineConstants::InstrCost;
    } else {
      // For non-byval arguments, count one instruction per call argument;
      // callers subtract this accumulated cost from the inline cost.
      Cost += InlineConstants::InstrCost;
    }
  }
  // The call instruction also disappears after inlining.
  Cost += InlineConstants::InstrCost + InlineConstants::CallPenalty;
  return Cost;
}
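
// Worked example (illustrative, 64-bit pointers): a byval argument of type
// [100 x i64] has TypeSize = 6400 bits, so the ceiling division yields 100
// word copies, clamped to 8, contributing 2 * 8 * InstrCost. Each ordinary
// argument contributes InstrCost, and the call itself contributes
// InstrCost + CallPenalty; analyzeCall then credits this whole sum back,
// since those instructions vanish when the call is inlined.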

InlineCost llvm::getInlineCost(
    CallSite CS, const InlineParams &Params, TargetTransformInfo &CalleeTTI,
    std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
    Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
    ProfileSummaryInfo *PSI) {
  return getInlineCost(CS, CS.getCalledFunction(), Params, CalleeTTI,
                       GetAssumptionCache, GetBFI, PSI);
}

InlineCost llvm::getInlineCost(
    CallSite CS, Function *Callee, const InlineParams &Params,
    TargetTransformInfo &CalleeTTI,
    std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
    Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
    ProfileSummaryInfo *PSI) {

  // Cannot inline indirect calls.
  if (!Callee)
    return llvm::InlineCost::getNever();

  // Calls to functions with always-inline attributes should be inlined
  // whenever possible.
  if (CS.hasFnAttr(Attribute::AlwaysInline)) {
    if (isInlineViable(*Callee))
      return llvm::InlineCost::getAlways();
    return llvm::InlineCost::getNever();
  }

  // Never inline functions with conflicting attributes (unless callee has
  // always-inline attribute).
  if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee, CalleeTTI))
    return llvm::InlineCost::getNever();

  // Don't inline this call if the caller has the optnone attribute.
  if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone))
    return llvm::InlineCost::getNever();

  // Don't inline functions which can be interposed at link-time. Don't
  // inline functions marked noinline or call sites marked noinline.
  // Note: inlining non-exact non-interposable functions is fine, since we
  // know we have *a* correct implementation of the source level function.
  if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
      CS.isNoInline())
    return llvm::InlineCost::getNever();

  DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
                     << "...\n");

  CallAnalyzer CA(CalleeTTI, GetAssumptionCache, GetBFI, PSI, *Callee, CS,
                  Params);
  bool ShouldInline = CA.analyzeCall(CS);

  DEBUG(CA.dump());

  // Check if there was a reason to force inlining or no inlining.
  if (!ShouldInline && CA.getCost() < CA.getThreshold())
    return InlineCost::getNever();
  if (ShouldInline && CA.getCost() >= CA.getThreshold())
    return InlineCost::getAlways();

  return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
}

bool llvm::isInlineViable(Function &F) {
  bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
  for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
    // Disallow inlining of functions which contain indirect branches or
    // blockaddresses.
    if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
      return false;

    for (auto &II : *BI) {
      CallSite CS(&II);
      if (!CS)
        continue;

      // Disallow recursive calls.
      if (&F == CS.getCalledFunction())
        return false;

      // Disallow calls which expose returns-twice to a function not
      // previously attributed as such.
      if (!ReturnsTwice && CS.isCall() &&
          cast<CallInst>(CS.getInstruction())->canReturnTwice())
        return false;

      // Disallow inlining functions that call @llvm.localescape. Doing this
      // correctly would require major changes to the inliner.
      if (CS.getCalledFunction() &&
          CS.getCalledFunction()->getIntrinsicID() ==
              llvm::Intrinsic::localescape)
        return false;
    }
  }

  return true;
}

// APIs to create InlineParams based on command line flags and/or other
// parameters.

InlineParams llvm::getInlineParams(int Threshold) {
  InlineParams Params;

  // This field is the threshold to use for a callee by default. This is
  // derived from one or more of:
  //  * optimization or size-optimization levels,
  //  * a value passed to createFunctionInliningPass function, or
  //  * the -inline-threshold flag.
  // If the -inline-threshold flag is explicitly specified, that is used
  // irrespective of anything else.
  if (InlineThreshold.getNumOccurrences() > 0)
    Params.DefaultThreshold = InlineThreshold;
  else
    Params.DefaultThreshold = Threshold;

  // Set the HintThreshold knob from the -inlinehint-threshold.
  Params.HintThreshold = HintThreshold;

  // Set the HotCallSiteThreshold knob from the -hot-callsite-threshold.
  Params.HotCallSiteThreshold = HotCallSiteThreshold;

  // Set the ColdCallSiteThreshold knob from the
  // -inline-cold-callsite-threshold.
  Params.ColdCallSiteThreshold = ColdCallSiteThreshold;

  // Set the OptMinSizeThreshold and OptSizeThreshold params only if the
  // -inline-threshold commandline option is not explicitly given. If that
  // option is present, then its value applies even for callees with size and
  // minsize attributes.
  // If the -inline-threshold is not specified, set the ColdThreshold from the
  // -inlinecold-threshold even if it is not explicitly passed. If
  // -inline-threshold is specified, then -inlinecold-threshold needs to be
  // explicitly specified to set the ColdThreshold knob.
  if (InlineThreshold.getNumOccurrences() == 0) {
    Params.OptMinSizeThreshold = InlineConstants::OptMinSizeThreshold;
    Params.OptSizeThreshold = InlineConstants::OptSizeThreshold;
    Params.ColdThreshold = ColdThreshold;
  } else if (ColdThreshold.getNumOccurrences() > 0) {
    Params.ColdThreshold = ColdThreshold;
  }
  return Params;
}
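
// Usage sketch (hypothetical call site): a pass might construct
//   InlineParams P = getInlineParams(300);
// If the process was started with -inline-threshold=500, P.DefaultThreshold
// is 500 and the Opt*SizeThreshold fields are left unset; otherwise
// P.DefaultThreshold is 300 and the size and cold knobs are filled in as
// shown above.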

InlineParams llvm::getInlineParams() {
  return getInlineParams(InlineThreshold);
}

// Compute the default threshold for inlining based on the opt level and the
// size opt level.
static int computeThresholdFromOptLevels(unsigned OptLevel,
                                         unsigned SizeOptLevel) {
  if (OptLevel > 2)
    return InlineConstants::OptAggressiveThreshold;
  if (SizeOptLevel == 1) // -Os
    return InlineConstants::OptSizeThreshold;
  if (SizeOptLevel == 2) // -Oz
    return InlineConstants::OptMinSizeThreshold;
  return InlineThreshold;
}
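
// For instance, getInlineParams(3, 0) (-O3) seeds the default threshold with
// InlineConstants::OptAggressiveThreshold, while getInlineParams(2, 2) (-Oz)
// uses InlineConstants::OptMinSizeThreshold; an explicit -inline-threshold
// flag still overrides either inside getInlineParams(int).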

InlineParams llvm::getInlineParams(unsigned OptLevel, unsigned SizeOptLevel) {
  return getInlineParams(computeThresholdFromOptLevels(OptLevel, SizeOptLevel));
}