//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

static cl::opt<int> InlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

static cl::opt<int>
    ColdCallSiteThreshold("inline-cold-callsite-threshold", cl::Hidden,
                          cl::init(45),
                          cl::desc("Threshold for inlining cold callsites"));

// We introduce this threshold to help performance of instrumentation based
// PGO before we actually hook up inliner with analysis passes such as BPI and
// BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(45),
    cl::desc("Threshold for inlining functions with cold attribute"));

static cl::opt<int>
    HotCallSiteThreshold("hot-callsite-threshold", cl::Hidden, cl::init(3000),
                         cl::ZeroOrMore,
                         cl::desc("Threshold for hot callsites"));

static cl::opt<int> ColdCallSiteRelFreq(
    "cold-callsite-rel-freq", cl::Hidden, cl::init(2), cl::ZeroOrMore,
    cl::desc("Maximum block frequency, expressed as a percentage of caller's "
             "entry frequency, for a callsite to be cold in the absence of "
             "profile information."));

namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// Getter for the cache of @llvm.assume intrinsics.
  std::function<AssumptionCache &(Function &)> &GetAssumptionCache;

  /// Getter for BlockFrequencyInfo.
  Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  /// The called function.
  Function &F;

  // Cache the DataLayout since we use it a lot.
  const DataLayout &DL;

  /// The candidate callsite being analyzed. Please do not use this to do
  /// analysis in the caller function; we want the inline cost query to be
  /// easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  /// Tunable parameters that control the analysis.
  const InlineParams &Params;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int FiftyPercentVectorBonus, TenPercentVectorBonus;
  int VectorBonus;

  /// While we walk the potentially-inlined instructions, we build up and
  /// maintain a mapping of simplified values specific to this callsite. The
  /// idea is to propagate any special information we have about arguments to
  /// this call through the inlinable section of the function, and account for
  /// likely simplifications post-inlining. The most important aspect we track
  /// is CFG altering simplifications -- when we prove a basic block dead, that
  /// can cause dramatic shifts in the cost of inlining a function.
  DenseMap<Value *, Constant *> SimplifiedValues;

  /// Keep track of the values which map back (through function arguments) to
  /// allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  /// The mapping of caller Alloca values to their accumulated cost savings. If
  /// we have to disable SROA for one of the allocas, this tells us how much
  /// cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  /// Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPFree(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  template <typename Callable>
  bool simplifyInstruction(Instruction &I, Callable Evaluate);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  /// Return true if \p CS is a cold callsite.
  bool isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *);
  void visit(Module &);
  void visit(Function *);
  void visit(Function &);
  void visit(BasicBlock *);
  void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI,
               std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
               Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI,
               ProfileSummaryInfo *PSI, Function &Callee, CallSite CSArg,
               const InlineParams &Params)
      : TTI(TTI), GetAssumptionCache(GetAssumptionCache), GetBFI(GetBFI),
        PSI(PSI), F(Callee), DL(F.getParent()->getDataLayout()),
        CandidateCS(CSArg), Params(Params), Threshold(Params.DefaultThreshold),
        Cost(0), IsCallerRecursive(false), IsRecursiveCall(false),
        ExposesReturnsTwice(false), HasDynamicAlloca(false),
        ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
        HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
        NumVectorInstructions(0), FiftyPercentVectorBonus(0),
        TenPercentVectorBonus(0), VectorBonus(0), NumConstantArgs(0),
        NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), NumConstantPtrCmps(0),
        NumConstantPtrDiffs(0), NumInstructionsSimplified(0),
        SROACostSavings(0), SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
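///
/// Illustrative (hypothetical) example: for a GEP such as
/// `getelementptr inbounds {i32, i32}, {i32, i32}* %p, i64 1, i32 1` with
/// 4-byte, naturally aligned i32, the loop below would add 8 bytes for the
/// array step and 4 bytes of field offset, leaving Offset == 12.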
318bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +0000319 unsigned IntPtrWidth = DL.getPointerSizeInBits();
Chandler Carruth0539c072012-03-31 12:42:41 +0000320 assert(IntPtrWidth == Offset.getBitWidth());
321
322 for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
323 GTI != GTE; ++GTI) {
324 ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
325 if (!OpC)
326 if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
327 OpC = dyn_cast<ConstantInt>(SimpleOp);
328 if (!OpC)
Chandler Carruth783b7192012-03-09 02:49:36 +0000329 return false;
Chad Rosier567556a2016-04-28 14:47:23 +0000330 if (OpC->isZero())
331 continue;
Chandler Carruth783b7192012-03-09 02:49:36 +0000332
Chandler Carruth0539c072012-03-31 12:42:41 +0000333 // Handle a struct index, which adds its field offset to the pointer.
Peter Collingbourneab85225b2016-12-02 02:24:42 +0000334 if (StructType *STy = GTI.getStructTypeOrNull()) {
Chandler Carruth0539c072012-03-31 12:42:41 +0000335 unsigned ElementIdx = OpC->getZExtValue();
Mehdi Aminia28d91d2015-03-10 02:37:25 +0000336 const StructLayout *SL = DL.getStructLayout(STy);
Chandler Carruth0539c072012-03-31 12:42:41 +0000337 Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
338 continue;
Chandler Carruth783b7192012-03-09 02:49:36 +0000339 }
Chandler Carruth783b7192012-03-09 02:49:36 +0000340
Mehdi Aminia28d91d2015-03-10 02:37:25 +0000341 APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
Chandler Carruth0539c072012-03-31 12:42:41 +0000342 Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
343 }
344 return true;
345}

/// \brief Use TTI to check whether a GEP is free.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPFree(GetElementPtrInst &GEP) {
  SmallVector<Value *, 4> Operands;
  Operands.push_back(GEP.getOperand(0));
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (Constant *SimpleOp = SimplifiedValues.lookup(*I))
      Operands.push_back(SimpleOp);
    else
      Operands.push_back(*I);
  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&GEP, Operands);
}

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded, and disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return isGEPFree(I);
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here, we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  // Lambda to check whether a GEP's indices are all constant.
  auto IsGEPOffsetConstant = [&](GetElementPtrInst &GEP) {
    for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
      if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
        return false;
    return true;
  };

  if (IsGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return isGEPFree(I);
}

/// Simplify \p I if its operands are constants and update SimplifiedValues.
/// \p Evaluate is a callable specific to instruction type that evaluates the
/// instruction when all the operands are constants.
template <typename Callable>
bool CallAnalyzer::simplifyInstruction(Instruction &I, Callable Evaluate) {
  SmallVector<Constant *, 2> COps;
  for (Value *Op : I.operands()) {
    Constant *COp = dyn_cast<Constant>(Op);
    if (!COp)
      COp = SimplifiedValues.lookup(Op);
    if (!COp)
      return false;
    COps.push_back(COp);
  }
  auto *C = Evaluate(COps);
  if (!C)
    return false;
  SimplifiedValues[&I] = C;
  return true;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getBitCast(COps[0], I.getType());
      }))
    return true;

  // Track base/offsets through casts
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getPtrToInt(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}
bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getIntToPtr(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCast(I.getOpcode(), COps[0], I.getType());
      }))
    return true;

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantFoldInstOperands(&I, COps[0], DL);
      }))
    return true;

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  return CandidateCS.paramHasAttr(A->getArgNo(), Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in the scenario below, inlining hot_call_X() may
  // be beneficial:
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In the future, we should elaborate this based on BPI and BFI in more
  // general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

bool CallAnalyzer::isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI) {
  // If global profile summary is available, then callsite's coldness is
  // determined based on that.
  if (PSI->hasProfileSummary())
    return PSI->isColdCallSite(CS, CallerBFI);
  if (!CallerBFI)
    return false;

  // In the absence of global profile summary, determine if the callsite is
  // cold relative to caller's entry. We could potentially cache the
  // computation of scaled entry frequency, but the added complexity is not
  // worth it unless this scaling shows up high in the profiles.
  const BranchProbability ColdProb(ColdCallSiteRelFreq, 100);
  auto CallSiteBB = CS.getInstruction()->getParent();
  auto CallSiteFreq = CallerBFI->getBlockFreq(CallSiteBB);
  auto CallerEntryFreq =
      CallerBFI->getBlockFreq(&(CS.getCaller()->getEntryBlock()));
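  // E.g., with the default cold-callsite-rel-freq of 2 above, a callsite whose
  // block executes at less than 2% of the caller's entry frequency is cold.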
  return CallSiteFreq < CallerEntryFreq * ColdProb;
}

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();

  // return min(A, B) if B is valid.
  auto MinIfValid = [](int A, Optional<int> B) {
    return B ? std::min(A, B.getValue()) : A;
  };

  // return max(A, B) if B is valid.
  auto MaxIfValid = [](int A, Optional<int> B) {
    return B ? std::max(A, B.getValue()) : A;
  };

  // Use the OptMinSizeThreshold or OptSizeThreshold knob if they are available
  // and reduce the threshold if the caller has the necessary attribute.
  if (Caller->optForMinSize())
    Threshold = MinIfValid(Threshold, Params.OptMinSizeThreshold);
  else if (Caller->optForSize())
    Threshold = MinIfValid(Threshold, Params.OptSizeThreshold);

  // Adjust the threshold based on inlinehint attribute and profile based
  // hotness information if the caller does not have MinSize attribute.
  if (!Caller->optForMinSize()) {
    if (Callee.hasFnAttribute(Attribute::InlineHint))
      Threshold = MaxIfValid(Threshold, Params.HintThreshold);
    if (PSI) {
      BlockFrequencyInfo *CallerBFI = GetBFI ? &((*GetBFI)(*Caller)) : nullptr;
      // FIXME: After switching to the new passmanager, simplify the logic
      // below by checking only the callsite hotness/coldness. The check for
      // CallerBFI exists only because we do not have BFI available with the
      // old PM.
      //
      // Use callee's hotness information only if we have no way of determining
      // callsite's hotness information. Callsite hotness can be determined if
      // sample profile is used (which adds hotness metadata to calls) or if
      // caller's BlockFrequencyInfo is available.
      if (CallerBFI || PSI->hasSampleProfile()) {
        if (PSI->isHotCallSite(CS, CallerBFI)) {
          DEBUG(dbgs() << "Hot callsite.\n");
          Threshold = Params.HotCallSiteThreshold.getValue();
        } else if (isColdCallSite(CS, CallerBFI)) {
          DEBUG(dbgs() << "Cold callsite.\n");
          Threshold = MinIfValid(Threshold, Params.ColdCallSiteThreshold);
        }
      } else {
        if (PSI->isFunctionEntryHot(&Callee)) {
          DEBUG(dbgs() << "Hot callee.\n");
          // If callsite hotness can not be determined, we may still know
          // that the callee is hot and treat it as a weaker hint for threshold
          // increase.
          Threshold = MaxIfValid(Threshold, Params.HintThreshold);
        } else if (PSI->isFunctionEntryCold(&Callee)) {
          DEBUG(dbgs() << "Cold callee.\n");
          Threshold = MinIfValid(Threshold, Params.ColdThreshold);
        }
      }
    }
  }

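  // (Illustrative net effect with the default knobs above: inline hints and
  // hot callees raise the threshold to at least 325, hot callsites get 3000,
  // and cold callsites/callees cap it at 45, all before the multiplier below.)
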
  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();
}

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCompare(I.getPredicate(), COps[0], COps[1]);
      }))
    return true;

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
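  // (For example, `icmp eq i32* %p, null` folds to false here when the caller
  // passes an alloca-derived pointer for %p.)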
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  auto Evaluate = [&](SmallVectorImpl<Constant *> &COps) {
    Value *SimpleV = nullptr;
    if (auto FI = dyn_cast<FPMathOperator>(&I))
      SimpleV = SimplifyFPBinOp(I.getOpcode(), COps[0], COps[1],
                                FI->getFastMathFlags(), DL);
    else
      SimpleV = SimplifyBinOp(I.getOpcode(), COps[0], COps[1], DL);
    return dyn_cast_or_null<Constant>(SimpleV);
  };

  if (simplifyInstruction(I, Evaluate))
    return true;

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getExtractValue(COps[0], I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getInsertValue(/*AggregateOperand*/ COps[0],
                                            /*InsertedValueOperand*/ COps[1],
                                            I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(CS, F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
       ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(CS, F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}

bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        return Base::visitCallSite(CS);

      case Intrinsic::load_relative:
        // This is normally lowered to 4 LLVM instructions.
        Cost += 3 * InlineConstants::InstrCost;
        return false;

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        // SROA can usually chew through these intrinsics, but they aren't
        // free.
        return false;
      case Intrinsic::localescape:
        HasFrameEscape = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getParent()->getParent()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F)
    return Base::visitCallSite(CS);

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
  auto IndirectCallParams = Params;
  IndirectCallParams.DefaultThreshold = InlineConstants::IndirectCallThreshold;
  CallAnalyzer CA(TTI, GetAssumptionCache, GetBFI, PSI, *F, CS,
                  IndirectCallParams);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // threshold to get the bonus we want to apply, but don't go below zero.
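    // (E.g., if the nested analysis finishes 100 below its threshold, this
    // call site becomes 100 cheaper, rewarding devirtualizable targets.)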
    Cost -= std::max(0, CA.getThreshold() - CA.getCost());
  }

  return Base::visitCallSite(CS);
}

bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Assume the most general case where the switch is lowered into
  // either a jump table, bit test, or a balanced binary tree consisting of
  // case clusters without merging adjacent clusters with the same
  // destination. We do not consider the switches that are lowered with a mix
  // of jump table/bit test/binary search tree. The cost of the switch is
  // proportional to the size of the tree or the size of jump table range.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.

  // Maximum valid cost that may accumulate in this function; Cost is clamped
  // to this value below.
  int CostUpperBound = INT_MAX - InlineConstants::InstrCost - 1;

  // Exit early for a large switch, assuming one case needs at least one
  // instruction.
  // FIXME: This is not true for a bit test, but ignore such cases for now to
  // save compile-time.
  int64_t CostLowerBound =
      std::min((int64_t)CostUpperBound,
               (int64_t)SI.getNumCases() * InlineConstants::InstrCost + Cost);

  if (CostLowerBound > Threshold) {
    Cost = CostLowerBound;
    return false;
  }

  unsigned JumpTableSize = 0;
  unsigned NumCaseCluster =
      TTI.getEstimatedNumberOfCaseClusters(SI, JumpTableSize);

  // If suitable for a jump table, consider the cost for the table size and
  // branch to destination.
  if (JumpTableSize) {
    int64_t JTCost = (int64_t)JumpTableSize * InlineConstants::InstrCost +
                     4 * InlineConstants::InstrCost;

    Cost = std::min((int64_t)CostUpperBound, JTCost + Cost);
    return false;
  }

  // Considering forming a binary search tree, we should find the number of
  // nodes, which is the same as the number of comparisons when lowered. For a
  // given number of clusters, n, we can define a recursive function, f(n), to
  // find the number of nodes in the tree. The recursion is:
  // f(n) = 1 + f(n/2) + f(n - n/2), when n > 3,
  // and f(n) = n, when n <= 3.
  // This will lead to a binary tree where the leaf should be either f(2) or
  // f(3) when n > 3. So, the number of comparisons from leaves should be n,
  // while the number of non-leaf nodes should be:
  // 2^(log2(n) - 1) - 1
  // = 2^log2(n) * 2^-1 - 1
  // = n / 2 - 1.
  // Considering comparisons from leaf and non-leaf nodes, we can estimate the
  // number of comparisons in a simple closed form:
  // n + n / 2 - 1 = n * 3 / 2 - 1
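  // (Sanity check: for n == 8 clusters the recursion gives
  // f(8) = 1 + 2 * f(4) = 1 + 2 * (1 + 2 * f(2)) = 11, which matches
  // 8 * 3 / 2 - 1 = 11.)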
  if (NumCaseCluster <= 3) {
    // Suppose a comparison includes one compare and one conditional branch.
    Cost += NumCaseCluster * 2 * InlineConstants::InstrCost;
    return false;
  }

  int64_t ExpectedNumberOfCompare = 3 * (int64_t)NumCaseCluster / 2 - 1;
  int64_t SwitchCost =
      ExpectedNumberOfCompare * 2 * InlineConstants::InstrCost;

  Cost = std::min((int64_t)CostUpperBound, SwitchCost + Cost);
  return false;
}

bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
  // We never want to inline functions that contain an indirectbr. This is
  // incorrect because all the blockaddress's (in static global initializers
  // for example) would be referring to the original function, and this
  // indirect jump would jump from the inlined copy of the function into the
  // original function which is extremely undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions with
  // indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function.
  HasIndirectBr = true;
  return false;
}

bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a resume instruction.
  return false;
}

bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a cleanupret instruction.
  return false;
}

bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a catchret instruction.
  return false;
}

Chandler Carruth0814d2a2013-12-13 07:59:56 +00001143bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
1144 // FIXME: It might be reasonable to discount the cost of instructions leading
1145 // to unreachable as they have the lowest possible impact on both runtime and
1146 // code size.
1147 return true; // No actual code is needed for unreachable.
1148}
1149
Chandler Carruth0539c072012-03-31 12:42:41 +00001150bool CallAnalyzer::visitInstruction(Instruction &I) {
Chandler Carruthda7513a2012-05-04 00:58:03 +00001151 // Some instructions are free. All of the free intrinsics can also be
1152 // handled by SROA, etc.
Chandler Carruthb8cf5102013-01-21 12:05:16 +00001153 if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
Chandler Carruthda7513a2012-05-04 00:58:03 +00001154 return true;
1155
Chandler Carruth0539c072012-03-31 12:42:41 +00001156 // We found something we don't understand or can't handle. Mark any SROA-able
1157 // values in the operand list as no longer viable.
1158 for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
1159 disableSROA(*OI);
1160
1161 return false;
1162}
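// A hedged example of this fallback: an instruction the visitor has no
// special handling for (say, an atomicrmw whose pointer operand derives from
// an alloca argument) is charged full cost, and disableSROA forfeits any
// SROA savings previously accumulated for that alloca.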
1163
Chandler Carruth0539c072012-03-31 12:42:41 +00001164/// \brief Analyze a basic block for its contribution to the inline cost.
1165///
1166/// This method walks the analyzer over every instruction in the given basic
1167/// block and accounts for their cost during inlining at this callsite. It
1168/// aborts early if the threshold has been exceeded or an impossible to inline
1169/// construct has been detected. It returns false if inlining is no longer
1170/// viable, and true if inlining remains viable.
Hal Finkel57f03dd2014-09-07 13:49:57 +00001171bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
1172 SmallPtrSetImpl<const Value *> &EphValues) {
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001173 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
Chandler Carruth6b4cc8b2014-02-01 10:38:17 +00001174 // FIXME: Currently, the number of instructions in a function, regardless of
1175 // our ability to simplify them during inlining to constants or dead code, is
1176 // actually used by the vector bonus heuristic. As long as that's true, we
1177 // have to special case debug intrinsics here to prevent differences in
1178 // inlining due to debug symbols. Eventually, the number of unsimplified
1179 // instructions shouldn't factor into the cost computation, but until then,
1180 // hack around it here.
1181 if (isa<DbgInfoIntrinsic>(I))
1182 continue;
1183
Hal Finkel57f03dd2014-09-07 13:49:57 +00001184 // Skip ephemeral values.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001185 if (EphValues.count(&*I))
Hal Finkel57f03dd2014-09-07 13:49:57 +00001186 continue;
1187
Chandler Carruth0539c072012-03-31 12:42:41 +00001188 ++NumInstructions;
1189 if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
1190 ++NumVectorInstructions;
1191
Sanjay Patele9434e82015-09-15 15:26:25 +00001192 // If the instruction is floating point, and the target says this operation
1193 // is expensive or the function has the "use-soft-float" attribute, this may
1194 // eventually become a library call. Treat the cost as such.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001195 if (I->getType()->isFloatingPointTy()) {
Sanjay Patele9434e82015-09-15 15:26:25 +00001196 // If the function has the "use-soft-float" attribute, mark it as
1197 // expensive.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001198 if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
Eric Christopher908ed7f2017-04-15 06:14:52 +00001199 (F.getFnAttribute("use-soft-float").getValueAsString() == "true"))
Cameron Esfahani17177d12015-02-05 02:09:33 +00001200 Cost += InlineConstants::CallPenalty;
1201 }
1202
Chandler Carruth0539c072012-03-31 12:42:41 +00001203 // If the instruction simplified to a constant, there is no cost to this
1204 // instruction. Visit the instructions using our InstVisitor to account for
1205 // all of the per-instruction logic. The visit tree returns true if we
1206 // consumed the instruction in any way, and false if the instruction's base
1207 // cost should count against inlining.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001208 if (Base::visit(&*I))
Chandler Carruth0539c072012-03-31 12:42:41 +00001209 ++NumInstructionsSimplified;
1210 else
1211 Cost += InlineConstants::InstrCost;
1212
1213 // If visiting this instruction detected an uninlinable pattern, abort.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001214 if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
Reid Kleckner223de262015-04-14 20:38:14 +00001215 HasIndirectBr || HasFrameEscape)
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001216 return false;
1217
1218 // If the caller is a recursive function then we don't want to inline
1219 // functions which allocate a lot of stack space because it would increase
1220 // the caller stack usage dramatically.
1221 if (IsCallerRecursive &&
1222 AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
Chandler Carruth0539c072012-03-31 12:42:41 +00001223 return false;
1224
Chandler Carrutha004f222015-05-27 02:49:05 +00001225 // Check if we've passed the maximum possible threshold so we don't spin in
1226 // huge basic blocks that will never inline.
1227 if (Cost > Threshold)
Chandler Carruth0539c072012-03-31 12:42:41 +00001228 return false;
1229 }
1230
1231 return true;
1232}
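// A small worked example of the per-block accounting above, assuming
// InlineConstants::InstrCost == 5 and CallPenalty == 25 (their values at the
// time of writing): a block of 10 non-debug instructions where 4 simplify to
// constants adds 6 * 5 = 30 to Cost, and a double-precision fdiv in that
// block would add a further 25 if the target reports TCC_Expensive for FP
// operations or the function carries "use-soft-float".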
1233
1234/// \brief Compute the base pointer and cumulative constant offsets for V.
1235///
1236/// This strips all constant offsets off of V, leaving it the base pointer, and
1237/// accumulates the total constant offset applied in the returned constant. It
1238 /// returns null if V is not a pointer, and returns the constant '0' if there are
1239/// no constant offsets applied.
1240ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001241 if (!V->getType()->isPointerTy())
Craig Topper353eda42014-04-24 06:44:33 +00001242 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001243
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001244 unsigned IntPtrWidth = DL.getPointerSizeInBits();
Chandler Carruth0539c072012-03-31 12:42:41 +00001245 APInt Offset = APInt::getNullValue(IntPtrWidth);
1246
1247 // Even though we don't look through PHI nodes, we could be called on an
1248 // instruction in an unreachable block, which may be on a cycle.
1249 SmallPtrSet<Value *, 4> Visited;
1250 Visited.insert(V);
1251 do {
1252 if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
1253 if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
Craig Topper353eda42014-04-24 06:44:33 +00001254 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001255 V = GEP->getPointerOperand();
1256 } else if (Operator::getOpcode(V) == Instruction::BitCast) {
1257 V = cast<Operator>(V)->getOperand(0);
1258 } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
Sanjoy Das5ce32722016-04-08 00:48:30 +00001259 if (GA->isInterposable())
Chandler Carruth0539c072012-03-31 12:42:41 +00001260 break;
1261 V = GA->getAliasee();
1262 } else {
1263 break;
1264 }
1265 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
David Blaikie70573dc2014-11-19 07:49:26 +00001266 } while (Visited.insert(V).second);
Chandler Carruth0539c072012-03-31 12:42:41 +00001267
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001268 Type *IntPtrTy = DL.getIntPtrType(V->getContext());
Chandler Carruth0539c072012-03-31 12:42:41 +00001269 return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
1270}
1271
1272/// \brief Analyze a call site for potential inlining.
1273///
1274/// Returns true if inlining this call is viable, and false if it is not
1275/// viable. It computes the cost and adjusts the threshold based on numerous
1276/// factors and heuristics. If this method returns false but the computed cost
1277/// is below the computed threshold, then inlining was forcibly disabled by
Bob Wilson266802d2012-11-19 07:04:30 +00001278/// some artifact of the routine.
Chandler Carruth0539c072012-03-31 12:42:41 +00001279bool CallAnalyzer::analyzeCall(CallSite CS) {
Chandler Carruth7ae90d42012-04-11 10:15:10 +00001280 ++NumCallsAnalyzed;
1281
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001282 // Perform some tweaks to the cost and threshold based on the direct
1283 // callsite information.
Chandler Carruth0539c072012-03-31 12:42:41 +00001284
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001285 // We want to more aggressively inline vector-dense kernels, so up the
1286 // threshold, and we'll lower it if the % of vector instructions gets too
Chandler Carrutha004f222015-05-27 02:49:05 +00001287 // low. Note that these bonuses are somewhat arbitrary and evolved over time
1288 // by accident as much as because they are principled bonuses.
1289 //
1290 // FIXME: It would be nice to remove all such bonuses. At least it would be
1291 // nice to base the bonus values on something more scientific.
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001292 assert(NumInstructions == 0);
1293 assert(NumVectorInstructions == 0);
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001294
1295 // Update the threshold based on callsite properties
1296 updateThreshold(CS, F);
1297
Chandler Carrutha004f222015-05-27 02:49:05 +00001298 FiftyPercentVectorBonus = 3 * Threshold / 2;
1299 TenPercentVectorBonus = 3 * Threshold / 4;
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001300
Chandler Carrutha004f222015-05-27 02:49:05 +00001301 // Track whether the post-inlining function would have more than one basic
1302 // block. A single basic block is often intended for inlining. Balloon the
1303 // threshold by 50% until we pass the single-BB phase.
1304 bool SingleBB = true;
1305 int SingleBBBonus = Threshold / 2;
1306
1307 // Speculatively apply all possible bonuses to Threshold. If Cost ever
1308 // exceeds this Threshold, then since Cost cannot decrease, we can stop
1309 // processing the rest of the function body.
1310 Threshold += (SingleBBBonus + FiftyPercentVectorBonus);
1311
Xinliang David Li351d9b02017-05-02 05:38:41 +00001312 // Give out bonuses for the callsite, as the instructions setting it up
1313 // will be gone after inlining.
1314 Cost -= getCallsiteCost(CS, DL);
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001315
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001316 // If there is only one call to the function, and it has internal linkage,
1317 // the cost of inlining it drops dramatically.
Chad Rosier567556a2016-04-28 14:47:23 +00001318 bool OnlyOneCallAndLocalLinkage =
1319 F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
James Molloy4f6fb952012-12-20 16:04:27 +00001320 if (OnlyOneCallAndLocalLinkage)
Piotr Padlewskid89875c2016-08-10 21:15:22 +00001321 Cost -= InlineConstants::LastCallToStaticBonus;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001322
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001323 // If this function uses the coldcc calling convention, prefer not to inline
1324 // it.
1325 if (F.getCallingConv() == CallingConv::Cold)
1326 Cost += InlineConstants::ColdccPenalty;
1327
1328 // Check if we're done. This can happen due to bonuses and penalties.
1329 if (Cost > Threshold)
1330 return false;
1331
Chandler Carruth0539c072012-03-31 12:42:41 +00001332 if (F.empty())
1333 return true;
1334
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001335 Function *Caller = CS.getInstruction()->getParent()->getParent();
1336 // Check if the caller function is recursive itself.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001337 for (User *U : Caller->users()) {
1338 CallSite Site(U);
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001339 if (!Site)
1340 continue;
1341 Instruction *I = Site.getInstruction();
1342 if (I->getParent()->getParent() == Caller) {
1343 IsCallerRecursive = true;
1344 break;
1345 }
1346 }
1347
Chandler Carruth0539c072012-03-31 12:42:41 +00001348 // Populate our simplified values by mapping from function arguments to call
1349 // arguments with known important simplifications.
1350 CallSite::arg_iterator CAI = CS.arg_begin();
1351 for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
1352 FAI != FAE; ++FAI, ++CAI) {
1353 assert(CAI != CS.arg_end());
1354 if (Constant *C = dyn_cast<Constant>(CAI))
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001355 SimplifiedValues[&*FAI] = C;
Chandler Carruth0539c072012-03-31 12:42:41 +00001356
1357 Value *PtrArg = *CAI;
1358 if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001359 ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());
Chandler Carruth0539c072012-03-31 12:42:41 +00001360
1361 // We can SROA any pointer arguments derived from alloca instructions.
1362 if (isa<AllocaInst>(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001363 SROAArgValues[&*FAI] = PtrArg;
Chandler Carruth0539c072012-03-31 12:42:41 +00001364 SROAArgCosts[PtrArg] = 0;
1365 }
1366 }
1367 }
1368 NumConstantArgs = SimplifiedValues.size();
1369 NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
1370 NumAllocaArgs = SROAArgValues.size();
1371
Hal Finkel57f03dd2014-09-07 13:49:57 +00001372 // FIXME: If a caller has multiple calls to a callee, we end up recomputing
1373 // the ephemeral values multiple times (and they're completely determined by
1374 // the callee, so this is purely duplicate work).
1375 SmallPtrSet<const Value *, 32> EphValues;
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001376 CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);
Hal Finkel57f03dd2014-09-07 13:49:57 +00001377
Chandler Carruth0539c072012-03-31 12:42:41 +00001378 // The worklist of live basic blocks in the callee *after* inlining. We avoid
1379 // adding basic blocks of the callee which can be proven to be dead for this
1380 // particular call site in order to get more accurate cost estimates. This
1381 // requires a somewhat heavyweight iteration pattern: we need to walk the
1382 // basic blocks in a breadth-first order as we insert live successors. To
1383 // accomplish this, and since we expect small iteration counts because we
1384 // exit after crossing our threshold, we use a small-size optimized SetVector.
1385 typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
Chad Rosier567556a2016-04-28 14:47:23 +00001386 SmallPtrSet<BasicBlock *, 16>>
1387 BBSetVector;
Chandler Carruth0539c072012-03-31 12:42:41 +00001388 BBSetVector BBWorklist;
1389 BBWorklist.insert(&F.getEntryBlock());
1390 // Note that we *must not* cache the size; this loop grows the worklist.
1391 for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
1392 // Bail out the moment we cross the threshold. This means we'll under-count
1393 // the cost, but only when undercounting doesn't matter.
Chandler Carrutha004f222015-05-27 02:49:05 +00001394 if (Cost > Threshold)
Chandler Carruth0539c072012-03-31 12:42:41 +00001395 break;
1396
1397 BasicBlock *BB = BBWorklist[Idx];
1398 if (BB->empty())
Chandler Carruth4d1d34f2012-03-14 23:19:53 +00001399 continue;
Dan Gohman4552e3c2009-10-13 18:30:07 +00001400
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001401 // Disallow inlining a blockaddress. A blockaddress only has defined
1402 // behavior for an indirect branch in the same function, and we do not
1403 // currently support inlining indirect branches. But, the inliner may not
1404 // see an indirect branch that ends up being dead code at a particular call
1405 // site. If the blockaddress escapes the function, e.g., via a global
1406 // variable, inlining may lead to an invalid cross-function reference.
1407 if (BB->hasAddressTaken())
1408 return false;
1409
Chandler Carruth0539c072012-03-31 12:42:41 +00001410 // Analyze the cost of this block. If we blow through the threshold, this
1411 // returns false, and we can bail out.
Easwaran Ramand295b002016-04-13 21:20:22 +00001412 if (!analyzeBlock(BB, EphValues))
1413 return false;
Eric Christopher46308e62011-02-01 01:16:32 +00001414
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001415 TerminatorInst *TI = BB->getTerminator();
1416
Chandler Carruth0539c072012-03-31 12:42:41 +00001417 // Add in the live successors by first checking whether we have a terminator
1418 // that may be simplified based on the values simplified by this call.
1419 if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
1420 if (BI->isConditional()) {
1421 Value *Cond = BI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001422 if (ConstantInt *SimpleCond =
1423 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001424 BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
1425 continue;
Eric Christopher46308e62011-02-01 01:16:32 +00001426 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001427 }
1428 } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
1429 Value *Cond = SI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001430 if (ConstantInt *SimpleCond =
1431 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Chandler Carruth927d8e62017-04-12 07:27:28 +00001432 BBWorklist.insert(SI->findCaseValue(SimpleCond)->getCaseSuccessor());
Chandler Carruth0539c072012-03-31 12:42:41 +00001433 continue;
1434 }
1435 }
Eric Christopher46308e62011-02-01 01:16:32 +00001436
Chandler Carruth0539c072012-03-31 12:42:41 +00001437 // If we're unable to select a particular successor, just count all of
1438 // them.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001439 for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
1440 ++TIdx)
Chandler Carruth0539c072012-03-31 12:42:41 +00001441 BBWorklist.insert(TI->getSuccessor(TIdx));
1442
1443 // If we had any successors at this point, then post-inlining is likely to
1444 // have them as well. Note that we assume any basic blocks which existed
1445 // due to branches or switches which folded above will also fold after
1446 // inlining.
1447 if (SingleBB && TI->getNumSuccessors() > 1) {
1448 // Take off the bonus we applied to the threshold.
1449 Threshold -= SingleBBBonus;
1450 SingleBB = false;
Eric Christopher46308e62011-02-01 01:16:32 +00001451 }
1452 }
Andrew Trickcaa500b2011-10-01 01:27:56 +00001453
Chandler Carruthcb5beb32013-12-12 11:59:26 +00001454 // If this is a noduplicate call, we can still inline as long as
James Molloy4f6fb952012-12-20 16:04:27 +00001455 // inlining this would cause the removal of the callee (so the instruction
1456 // is not actually duplicated, just moved).
1457 if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
1458 return false;
1459
Chandler Carrutha004f222015-05-27 02:49:05 +00001460 // We applied the maximum possible vector bonus at the beginning. Now,
1461 // subtract the excess bonus, if any, from the Threshold before
1462 // comparing against Cost.
1463 if (NumVectorInstructions <= NumInstructions / 10)
1464 Threshold -= FiftyPercentVectorBonus;
1465 else if (NumVectorInstructions <= NumInstructions / 2)
1466 Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus);
Chandler Carruth0539c072012-03-31 12:42:41 +00001467
Hans Wennborg00ab73d2016-02-05 20:32:42 +00001468 return Cost < std::max(1, Threshold);
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001469}
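// A worked example of the threshold bookkeeping in analyzeCall, assuming
// updateThreshold leaves the default Threshold of 225: SingleBBBonus = 112
// and FiftyPercentVectorBonus = 337, so the speculative Threshold becomes
// 225 + 112 + 337 = 674. If the callee turns out to have multiple blocks and
// under 10% vector instructions, 112 and then 337 are subtracted back, and
// the call inlines only if the accumulated Cost ends up below 225.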
1470
Manman Ren49d684e2012-09-12 05:06:18 +00001471#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
Chandler Carruth0539c072012-03-31 12:42:41 +00001472/// \brief Dump stats about this call's analysis.
Yaron Kereneb2a2542016-01-29 20:50:44 +00001473LLVM_DUMP_METHOD void CallAnalyzer::dump() {
Eric Christophera13839f2014-02-26 23:27:16 +00001474#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
Chandler Carruth0539c072012-03-31 12:42:41 +00001475 DEBUG_PRINT_STAT(NumConstantArgs);
1476 DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
1477 DEBUG_PRINT_STAT(NumAllocaArgs);
1478 DEBUG_PRINT_STAT(NumConstantPtrCmps);
1479 DEBUG_PRINT_STAT(NumConstantPtrDiffs);
1480 DEBUG_PRINT_STAT(NumInstructionsSimplified);
Chandler Carrutha004f222015-05-27 02:49:05 +00001481 DEBUG_PRINT_STAT(NumInstructions);
Chandler Carruth0539c072012-03-31 12:42:41 +00001482 DEBUG_PRINT_STAT(SROACostSavings);
1483 DEBUG_PRINT_STAT(SROACostSavingsLost);
James Molloy4f6fb952012-12-20 16:04:27 +00001484 DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
Chandler Carruth394e34f2014-01-31 22:32:32 +00001485 DEBUG_PRINT_STAT(Cost);
1486 DEBUG_PRINT_STAT(Threshold);
Chandler Carruth0539c072012-03-31 12:42:41 +00001487#undef DEBUG_PRINT_STAT
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001488}
Manman Renc3366cc2012-09-06 19:55:56 +00001489#endif
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001490
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001491/// \brief Test that there are no attribute conflicts between Caller and Callee
1492/// that prevent inlining.
1493static bool functionsHaveCompatibleAttributes(Function *Caller,
Eric Christopher4371b132015-07-02 01:11:47 +00001494 Function *Callee,
1495 TargetTransformInfo &TTI) {
Eric Christopherd566fb12015-07-29 22:09:48 +00001496 return TTI.areInlineCompatible(Caller, Callee) &&
Akira Hatanaka1cb242e2015-12-22 23:57:37 +00001497 AttributeFuncs::areInlineCompatible(*Caller, *Callee);
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001498}
1499
Xinliang David Li351d9b02017-05-02 05:38:41 +00001500int llvm::getCallsiteCost(CallSite CS, const DataLayout &DL) {
1501 int Cost = 0;
1502 for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
1503 if (CS.isByValArgument(I)) {
1504 // We approximate the number of loads and stores needed by dividing the
1505 // size of the byval type by the target's pointer size.
1506 PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
1507 unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
1508 unsigned PointerSize = DL.getPointerSizeInBits();
1509 // Ceiling division.
1510 unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;
1511
1512 // If it generates more than 8 stores it is likely to be expanded as an
1513 // inline memcpy so we take that as an upper bound. Otherwise we assume
1514 // one load and one store per word copied.
1515 // FIXME: The maxStoresPerMemcpy setting from the target should be used
1516 // here instead of a magic number of 8, but it's not available via
1517 // DataLayout.
1518 NumStores = std::min(NumStores, 8U);
1519
1520 Cost += 2 * NumStores * InlineConstants::InstrCost;
1521 } else {
1522 // For non-byval arguments subtract off one instruction per call
1523 // argument.
1524 Cost += InlineConstants::InstrCost;
1525 }
1526 }
1527 // The call instruction also disappears after inlining.
1528 Cost += InlineConstants::InstrCost + InlineConstants::CallPenalty;
1529 return Cost;
1530}
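// A hedged example: for a call passing a 32-byte struct byval on a target
// with 64-bit pointers, NumStores = ceil(256 / 64) = 4, so that argument
// contributes 2 * 4 * InlineConstants::InstrCost (40, if InstrCost is 5 as
// at the time of writing); the call instruction itself then adds
// InstrCost + CallPenalty on top.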
1531
Sean Silvaab6a6832016-07-23 04:22:50 +00001532InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001533 CallSite CS, const InlineParams &Params, TargetTransformInfo &CalleeTTI,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001534 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman12585b02017-01-20 22:44:04 +00001535 Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
Sean Silvaab6a6832016-07-23 04:22:50 +00001536 ProfileSummaryInfo *PSI) {
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001537 return getInlineCost(CS, CS.getCalledFunction(), Params, CalleeTTI,
Easwaran Raman12585b02017-01-20 22:44:04 +00001538 GetAssumptionCache, GetBFI, PSI);
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001539}
1540
Sean Silvaab6a6832016-07-23 04:22:50 +00001541InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001542 CallSite CS, Function *Callee, const InlineParams &Params,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001543 TargetTransformInfo &CalleeTTI,
1544 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman12585b02017-01-20 22:44:04 +00001545 Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001546 ProfileSummaryInfo *PSI) {
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001547
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001548 // Cannot inline indirect calls.
1549 if (!Callee)
1550 return llvm::InlineCost::getNever();
1551
1552 // Calls to functions with always-inline attributes should be inlined
1553 // whenever possible.
Peter Collingbourne68a88972014-05-19 18:25:54 +00001554 if (CS.hasFnAttr(Attribute::AlwaysInline)) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001555 if (isInlineViable(*Callee))
1556 return llvm::InlineCost::getAlways();
1557 return llvm::InlineCost::getNever();
1558 }
1559
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001560 // Never inline functions with conflicting attributes (unless callee has
1561 // always-inline attribute).
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001562 if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee, CalleeTTI))
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001563 return llvm::InlineCost::getNever();
1564
Paul Robinsondcbe35b2013-11-18 21:44:03 +00001565 // Don't inline this call if the caller has the optnone attribute.
1566 if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone))
1567 return llvm::InlineCost::getNever();
1568
Sanjoy Das5ce32722016-04-08 00:48:30 +00001569 // Don't inline functions which can be interposed at link-time. Don't inline
1570 // functions marked noinline or call sites marked noinline.
Craig Topper107b1872016-12-09 02:18:04 +00001571 // Note: inlining non-exact non-interposable functions is fine, since we know
Sanjoy Das5ce32722016-04-08 00:48:30 +00001572 // we have *a* correct implementation of the source level function.
Chad Rosier567556a2016-04-28 14:47:23 +00001573 if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
1574 CS.isNoInline())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001575 return llvm::InlineCost::getNever();
1576
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001577 DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
Chad Rosier567556a2016-04-28 14:47:23 +00001578 << "...\n");
Andrew Trickcaa500b2011-10-01 01:27:56 +00001579
Easwaran Raman12585b02017-01-20 22:44:04 +00001580 CallAnalyzer CA(CalleeTTI, GetAssumptionCache, GetBFI, PSI, *Callee, CS,
1581 Params);
Chandler Carruth0539c072012-03-31 12:42:41 +00001582 bool ShouldInline = CA.analyzeCall(CS);
Dan Gohman4552e3c2009-10-13 18:30:07 +00001583
Chandler Carruth0539c072012-03-31 12:42:41 +00001584 DEBUG(CA.dump());
1585
1586 // Check if there was a reason to force inlining or no inlining.
1587 if (!ShouldInline && CA.getCost() < CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001588 return InlineCost::getNever();
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001589 if (ShouldInline && CA.getCost() >= CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001590 return InlineCost::getAlways();
Andrew Trickcaa500b2011-10-01 01:27:56 +00001591
Chandler Carruth0539c072012-03-31 12:42:41 +00001592 return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
Dan Gohman4552e3c2009-10-13 18:30:07 +00001593}
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001594
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001595bool llvm::isInlineViable(Function &F) {
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001596 bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001597 for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001598 // Disallow inlining of functions which contain indirect branches or
1599 // blockaddresses.
1600 if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001601 return false;
1602
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001603 for (auto &II : *BI) {
1604 CallSite CS(&II);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001605 if (!CS)
1606 continue;
1607
1608 // Disallow recursive calls.
1609 if (&F == CS.getCalledFunction())
1610 return false;
1611
1612 // Disallow calls which expose returns-twice to a function not previously
1613 // attributed as such.
1614 if (!ReturnsTwice && CS.isCall() &&
1615 cast<CallInst>(CS.getInstruction())->canReturnTwice())
1616 return false;
Reid Kleckner223de262015-04-14 20:38:14 +00001617
Reid Kleckner60381792015-07-07 22:25:32 +00001618 // Disallow inlining functions that call @llvm.localescape. Doing this
Reid Kleckner223de262015-04-14 20:38:14 +00001619 // correctly would require major changes to the inliner.
1620 if (CS.getCalledFunction() &&
1621 CS.getCalledFunction()->getIntrinsicID() ==
Reid Kleckner60381792015-07-07 22:25:32 +00001622 llvm::Intrinsic::localescape)
Reid Kleckner223de262015-04-14 20:38:14 +00001623 return false;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001624 }
1625 }
1626
1627 return true;
1628}
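// A hedged usage sketch: the always-inline path in getInlineCost above leans
// on this check, roughly
//   if (CS.hasFnAttr(Attribute::AlwaysInline) && isInlineViable(*Callee))
//     return llvm::InlineCost::getAlways();
// so a callee containing an indirectbr or a call to @llvm.localescape is
// rejected even when marked always_inline.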
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001629
1630// APIs to create InlineParams based on command line flags and/or other
1631// parameters.
1632
1633InlineParams llvm::getInlineParams(int Threshold) {
1634 InlineParams Params;
1635
1636 // This field is the threshold to use for a callee by default. This is
1637 // derived from one or more of:
1638 // * optimization or size-optimization levels,
1639 // * a value passed to createFunctionInliningPass function, or
1640 // * the -inline-threshold flag.
1641 // If the -inline-threshold flag is explicitly specified, that is used
1642 // irrespective of anything else.
1643 if (InlineThreshold.getNumOccurrences() > 0)
1644 Params.DefaultThreshold = InlineThreshold;
1645 else
1646 Params.DefaultThreshold = Threshold;
1647
1648 // Set the HintThreshold knob from the -inlinehint-threshold.
1649 Params.HintThreshold = HintThreshold;
1650
1651 // Set the HotCallSiteThreshold knob from the -hot-callsite-threshold.
1652 Params.HotCallSiteThreshold = HotCallSiteThreshold;
1653
Easwaran Raman12585b02017-01-20 22:44:04 +00001654 // Set the ColdCallSiteThreshold knob from the -inline-cold-callsite-threshold.
1655 Params.ColdCallSiteThreshold = ColdCallSiteThreshold;
1656
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001657 // Set the OptMinSizeThreshold and OptSizeThreshold params only if the
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001658 // -inline-threshold commandline option is not explicitly given. If that
1659 // option is present, then its value applies even for callees with size and
1660 // minsize attributes.
1661 // If the -inline-threshold is not specified, set the ColdThreshold from the
1662 // -inlinecold-threshold even if it is not explicitly passed. If
1663 // -inline-threshold is specified, then -inlinecold-threshold needs to be
1664 // explicitly specified to set the ColdThreshold knob.
1665 if (InlineThreshold.getNumOccurrences() == 0) {
1666 Params.OptMinSizeThreshold = InlineConstants::OptMinSizeThreshold;
1667 Params.OptSizeThreshold = InlineConstants::OptSizeThreshold;
1668 Params.ColdThreshold = ColdThreshold;
1669 } else if (ColdThreshold.getNumOccurrences() > 0) {
1670 Params.ColdThreshold = ColdThreshold;
1671 }
1672 return Params;
1673}
1674
1675InlineParams llvm::getInlineParams() {
1676 return getInlineParams(InlineThreshold);
1677}
1678
1679// Compute the default threshold for inlining based on the opt level and the
1680// size opt level.
1681static int computeThresholdFromOptLevels(unsigned OptLevel,
1682 unsigned SizeOptLevel) {
1683 if (OptLevel > 2)
1684 return InlineConstants::OptAggressiveThreshold;
1685 if (SizeOptLevel == 1) // -Os
1686 return InlineConstants::OptSizeThreshold;
1687 if (SizeOptLevel == 2) // -Oz
1688 return InlineConstants::OptMinSizeThreshold;
1689 return InlineThreshold;
1690}
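// For illustration, the mapping this implies: -O3 and above select
// OptAggressiveThreshold, -Os selects OptSizeThreshold, -Oz selects
// OptMinSizeThreshold, and everything else falls back to the -inline-threshold
// default; e.g., getInlineParams(3, 0).DefaultThreshold would be
// InlineConstants::OptAggressiveThreshold (absent an explicit
// -inline-threshold on the command line).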
1691
1692InlineParams llvm::getInlineParams(unsigned OptLevel, unsigned SizeOptLevel) {
1693 return getInlineParams(computeThresholdFromOptLevels(OptLevel, SizeOptLevel));
1694}