//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

static cl::opt<int> InlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

static cl::opt<int>
    ColdCallSiteThreshold("inline-cold-callsite-threshold", cl::Hidden,
                          cl::init(45),
                          cl::desc("Threshold for inlining cold callsites"));

// We introduce this threshold to help the performance of instrumentation-based
// PGO before we actually hook up the inliner with analysis passes such as BPI
// and BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(225),
    cl::desc("Threshold for inlining functions with cold attribute"));

static cl::opt<int>
    HotCallSiteThreshold("hot-callsite-threshold", cl::Hidden, cl::init(3000),
                         cl::ZeroOrMore,
                         cl::desc("Threshold for hot callsites"));

namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// Getter for the cache of @llvm.assume intrinsics.
  std::function<AssumptionCache &(Function &)> &GetAssumptionCache;

  /// Getter for BlockFrequencyInfo.
  Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  /// The called function.
  Function &F;

  /// The candidate callsite being analyzed. Please do not use this to do
  /// analysis in the caller function; we want the inline cost query to be
  /// easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  /// Tunable parameters that control the analysis.
  const InlineParams &Params;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int FiftyPercentVectorBonus, TenPercentVectorBonus;
  int VectorBonus;

  /// While we walk the potentially-inlined instructions, we build up and
  /// maintain a mapping of simplified values specific to this callsite. The
  /// idea is to propagate any special information we have about arguments to
  /// this call through the inlinable section of the function, and account for
  /// likely simplifications post-inlining. The most important aspect we track
  /// is CFG altering simplifications -- when we prove a basic block dead, that
  /// can cause dramatic shifts in the cost of inlining a function.
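  ///
  /// Illustrative (hypothetical) example: if the caller passes the constant
  /// 42 for an argument and the callee branches on 'arg == 7', the compare
  /// simplifies to false here and the guarded block can be costed as dead.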
  DenseMap<Value *, Constant *> SimplifiedValues;

  /// Keep track of the values which map back (through function arguments) to
  /// allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  /// The mapping of caller Alloca values to their accumulated cost savings. If
  /// we have to disable SROA for one of the allocas, this tells us how much
  /// cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  /// Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPOffsetConstant(GetElementPtrInst &GEP);
  bool isGEPFree(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  template <typename Callable>
  bool simplifyInstruction(Instruction &I, Callable Evaluate);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *);
  void visit(Module &);
  void visit(Function *);
  void visit(Function &);
  void visit(BasicBlock *);
  void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI,
               std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
               Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI,
               ProfileSummaryInfo *PSI, Function &Callee, CallSite CSArg,
               const InlineParams &Params)
      : TTI(TTI), GetAssumptionCache(GetAssumptionCache), GetBFI(GetBFI),
        PSI(PSI), F(Callee), CandidateCS(CSArg), Params(Params),
        Threshold(Params.DefaultThreshold), Cost(0), IsCallerRecursive(false),
        IsRecursiveCall(false), ExposesReturnsTwice(false),
        HasDynamicAlloca(false), ContainsNoDuplicateCall(false),
        HasReturn(false), HasIndirectBr(false), HasFrameEscape(false),
        AllocatedSize(0), NumInstructions(0), NumVectorInstructions(0),
        FiftyPercentVectorBonus(0), TenPercentVectorBonus(0), VectorBonus(0),
        NumConstantArgs(0), NumConstantOffsetPtrArgs(0), NumAllocaArgs(0),
        NumConstantPtrCmps(0), NumConstantPtrDiffs(0),
        NumInstructionsSimplified(0), SROACostSavings(0),
        SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Look up the SROA-candidate argument and cost iterator which V maps
/// to. Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}
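
// Illustrative note (not in the original source): a simple load or store
// through an SROA-able pointer credits InlineConstants::InstrCost to its
// candidate via accumulateSROACost(); if SROA is later disabled for that
// alloca, disableSROA() moves the accumulated savings back into Cost.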

/// \brief Check whether a GEP's indices are all constant.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPOffsetConstant(GetElementPtrInst &GEP) {
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
      return false;

  return true;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero())
      continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = GTI.getStructTypeOrNull()) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}
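
// Worked example (illustrative, assuming 64-bit pointers and no padding):
// for 'getelementptr inbounds {i32, i32}, {i32, i32}* %p, i64 1, i32 1' the
// loop above adds 1 * sizeof({i32, i32}) == 8 for the array index and then
// the field offset 4 from the struct layout, for a constant Offset of 12.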

/// \brief Use TTI to check whether a GEP is free.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPFree(GetElementPtrInst &GEP) {
  SmallVector<Value *, 4> Indices;
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (Constant *SimpleOp = SimplifiedValues.lookup(*I))
      Indices.push_back(SimpleOp);
    else
      Indices.push_back(*I);
  return TargetTransformInfo::TCC_Free ==
         TTI.getGEPCost(GEP.getSourceElementType(), GEP.getPointerOperand(),
                        Indices);
}

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      const DataLayout &DL = F.getParent()->getDataLayout();
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}
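
// Illustrative note: a constant-sized 'alloca [4096 x i8]' in the callee's
// entry block only bumps AllocatedSize by 4096 and stays inlinable, whereas
// 'alloca i8, i32 %n' with a non-constant %n sets HasDynamicAlloca and aborts
// the analysis.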

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded, and disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return isGEPFree(I);
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here, we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  if (isGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return isGEPFree(I);
}

/// Simplify \p I if its operands are constants and update SimplifiedValues.
/// \p Evaluate is a callable specific to instruction type that evaluates the
/// instruction when all the operands are constants.
template <typename Callable>
bool CallAnalyzer::simplifyInstruction(Instruction &I, Callable Evaluate) {
  SmallVector<Constant *, 2> COps;
  for (Value *Op : I.operands()) {
    Constant *COp = dyn_cast<Constant>(Op);
    if (!COp)
      COp = SimplifiedValues.lookup(Op);
    if (!COp)
      return false;
    COps.push_back(COp);
  }
  auto *C = Evaluate(COps);
  if (!C)
    return false;
  SimplifiedValues[&I] = C;
  return true;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getBitCast(COps[0], I.getType());
      }))
    return true;

  // Track base/offsets through casts.
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getPtrToInt(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getIntToPtr(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCast(I.getOpcode(), COps[0], I.getType());
      }))
    return true;

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        const DataLayout &DL = F.getParent()->getDataLayout();
        return ConstantFoldInstOperands(&I, COps[0], DL);
      }))
    return true;

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  unsigned ArgNo = A->getArgNo();
  return CandidateCS.paramHasAttr(ArgNo + 1, Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in the scenario below, inlining hot_call_X() may
  // be beneficial:
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In the future, we should elaborate this based on BPI and BFI in more
  // general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();

  // Return min(A, B) if B is valid.
  auto MinIfValid = [](int A, Optional<int> B) {
    return B ? std::min(A, B.getValue()) : A;
  };

  // Return max(A, B) if B is valid.
  auto MaxIfValid = [](int A, Optional<int> B) {
    return B ? std::max(A, B.getValue()) : A;
  };

  // Use the OptMinSizeThreshold or OptSizeThreshold knob if they are available
  // and reduce the threshold if the caller has the necessary attribute.
  if (Caller->optForMinSize())
    Threshold = MinIfValid(Threshold, Params.OptMinSizeThreshold);
  else if (Caller->optForSize())
    Threshold = MinIfValid(Threshold, Params.OptSizeThreshold);

  // Adjust the threshold based on the inlinehint attribute and profile-based
  // hotness information if the caller does not have the MinSize attribute.
  if (!Caller->optForMinSize()) {
    if (Callee.hasFnAttribute(Attribute::InlineHint))
      Threshold = MaxIfValid(Threshold, Params.HintThreshold);
    if (PSI) {
      BlockFrequencyInfo *CallerBFI = GetBFI ? &((*GetBFI)(*Caller)) : nullptr;
      if (PSI->isHotCallSite(CS, CallerBFI)) {
        DEBUG(dbgs() << "Hot callsite.\n");
        Threshold = MaxIfValid(Threshold, Params.HotCallSiteThreshold);
      } else if (PSI->isFunctionEntryHot(&Callee)) {
        DEBUG(dbgs() << "Hot callee.\n");
        // If callsite hotness can not be determined, we may still know
        // that the callee is hot and treat it as a weaker hint for threshold
        // increase.
        Threshold = MaxIfValid(Threshold, Params.HintThreshold);
      } else if (PSI->isColdCallSite(CS, CallerBFI)) {
        DEBUG(dbgs() << "Cold callsite.\n");
        Threshold = MinIfValid(Threshold, Params.ColdCallSiteThreshold);
      } else if (PSI->isFunctionEntryCold(&Callee)) {
        DEBUG(dbgs() << "Cold callee.\n");
        Threshold = MinIfValid(Threshold, Params.ColdThreshold);
      }
    }
  }

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();
}
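
// Worked example (illustrative, assuming the default parameters above): a
// callee marked inlinehint at a callsite PSI reports as hot first raises
// Threshold to max(225, 325) = 325 via HintThreshold, then to
// max(325, 3000) = 3000 via HotCallSiteThreshold; an -Oz caller would instead
// have been capped by OptMinSizeThreshold before any of these bumps.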

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCompare(I.getPredicate(), COps[0], COps[1]);
      }))
    return true;

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}
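
// Illustrative example: if %p and %q are both tracked in ConstantOffsetPtrs
// as %base+4 and %base+8, then 'icmp ult %p, %q' folds above to 'true' by
// comparing only the offsets, without knowing %base's actual value.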

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  auto Evaluate = [&](SmallVectorImpl<Constant *> &COps) {
    Value *SimpleV = nullptr;
    const DataLayout &DL = F.getParent()->getDataLayout();
    if (auto FI = dyn_cast<FPMathOperator>(&I))
      SimpleV = SimplifyFPBinOp(I.getOpcode(), COps[0], COps[1],
                                FI->getFastMathFlags(), DL);
    else
      SimpleV = SimplifyBinOp(I.getOpcode(), COps[0], COps[1], DL);
    return dyn_cast_or_null<Constant>(SimpleV);
  };

  if (simplifyInstruction(I, Evaluate))
    return true;

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getExtractValue(COps[0], I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getInsertValue(/*AggregateOperand*/ COps[0],
                                            /*InsertedValueOperand*/ COps[1],
                                            I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
       ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}
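
// Illustrative example: a callsite such as 'call double @llvm.fabs.f64(double
// %x)' where %x is (or simplifies to) a constant is constant-folded here,
// recorded in SimplifiedValues, and contributes no cost.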

bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        return Base::visitCallSite(CS);

      case Intrinsic::load_relative:
        // This is normally lowered to 4 LLVM instructions.
        Cost += 3 * InlineConstants::InstrCost;
        return false;

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        // SROA can usually chew through these intrinsics, but they aren't
        // free.
        return false;
      case Intrinsic::localescape:
        HasFrameEscape = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getParent()->getParent()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F)
    return Base::visitCallSite(CS);

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
  auto IndirectCallParams = Params;
  IndirectCallParams.DefaultThreshold = InlineConstants::IndirectCallThreshold;
  CallAnalyzer CA(TTI, GetAssumptionCache, GetBFI, PSI, *F, CS,
                  IndirectCallParams);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // threshold to get the bonus we want to apply, but don't go below zero.
    Cost -= std::max(0, CA.getThreshold() - CA.getCost());
  }

  return Base::visitCallSite(CS);
}
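
// Illustrative note: for an indirect call whose callee simplifies to a known
// function (e.g. through a devirtualized constant), the nested CallAnalyzer
// above credits this call with a bonus of max(0, nested Threshold - nested
// Cost), subtracted from Cost.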

bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Otherwise, we need to accumulate a cost proportional to the number of
  // distinct successor blocks. This fan-out in the CFG cannot be represented
  // for free even if we can represent the core switch as a jumptable that
  // takes a single instruction.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.
  SmallPtrSet<BasicBlock *, 8> SuccessorBlocks;
  SuccessorBlocks.insert(SI.getDefaultDest());
  for (auto I = SI.case_begin(), E = SI.case_end(); I != E; ++I)
    SuccessorBlocks.insert(I.getCaseSuccessor());
  // Add cost corresponding to the number of distinct destinations. The first
  // we model as free because of fallthrough.
  Cost += (SuccessorBlocks.size() - 1) * InlineConstants::InstrCost;
  return false;
}
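
// Illustrative example: a switch whose default and four cases target five
// distinct blocks accumulates (5 - 1) * InlineConstants::InstrCost above,
// since the first destination is modeled as fallthrough.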
1028
1029bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
1030 // We never want to inline functions that contain an indirectbr. This is
1031 // incorrect because all the blockaddress's (in static global initializers
1032 // for example) would be referring to the original function, and this
1033 // indirect jump would jump from the inlined copy of the function into the
1034 // original function which is extremely undefined behavior.
1035 // FIXME: This logic isn't really right; we can safely inline functions with
1036 // indirectbr's as long as no other function or global references the
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001037 // blockaddress of a block within the current function.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001038 HasIndirectBr = true;
1039 return false;
1040}
1041
1042bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
1043 // FIXME: It's not clear that a single instruction is an accurate model for
1044 // the inline cost of a resume instruction.
1045 return false;
1046}
1047
David Majnemer654e1302015-07-31 17:58:14 +00001048bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
1049 // FIXME: It's not clear that a single instruction is an accurate model for
1050 // the inline cost of a cleanupret instruction.
1051 return false;
1052}
1053
1054bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
1055 // FIXME: It's not clear that a single instruction is an accurate model for
Joseph Tremoulet8220bcc2015-08-23 00:26:33 +00001056 // the inline cost of a catchret instruction.
David Majnemer654e1302015-07-31 17:58:14 +00001057 return false;
1058}
1059
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001060bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
1061 // FIXME: It might be reasonably to discount the cost of instructions leading
1062 // to unreachable as they have the lowest possible impact on both runtime and
1063 // code size.
1064 return true; // No actual code is needed for unreachable.
1065}
1066
Chandler Carruth0539c072012-03-31 12:42:41 +00001067bool CallAnalyzer::visitInstruction(Instruction &I) {
Chandler Carruthda7513a2012-05-04 00:58:03 +00001068 // Some instructions are free. All of the free intrinsics can also be
1069 // handled by SROA, etc.
Chandler Carruthb8cf5102013-01-21 12:05:16 +00001070 if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
Chandler Carruthda7513a2012-05-04 00:58:03 +00001071 return true;
1072
Chandler Carruth0539c072012-03-31 12:42:41 +00001073 // We found something we don't understand or can't handle. Mark any SROA-able
1074 // values in the operand list as no longer viable.
1075 for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
1076 disableSROA(*OI);
1077
1078 return false;
1079}
1080
Chandler Carruth0539c072012-03-31 12:42:41 +00001081/// \brief Analyze a basic block for its contribution to the inline cost.
1082///
1083/// This method walks the analyzer over every instruction in the given basic
1084/// block and accounts for their cost during inlining at this callsite. It
1085/// aborts early if the threshold has been exceeded or an impossible to inline
1086/// construct has been detected. It returns false if inlining is no longer
1087/// viable, and true if inlining remains viable.
Hal Finkel57f03dd2014-09-07 13:49:57 +00001088bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
1089 SmallPtrSetImpl<const Value *> &EphValues) {
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001090 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
Chandler Carruth6b4cc8b2014-02-01 10:38:17 +00001091 // FIXME: Currently, the number of instructions in a function regardless of
1092 // our ability to simplify them during inline to constants or dead code,
1093 // are actually used by the vector bonus heuristic. As long as that's true,
1094 // we have to special case debug intrinsics here to prevent differences in
1095 // inlining due to debug symbols. Eventually, the number of unsimplified
1096 // instructions shouldn't factor into the cost computation, but until then,
1097 // hack around it here.
1098 if (isa<DbgInfoIntrinsic>(I))
1099 continue;
1100
Hal Finkel57f03dd2014-09-07 13:49:57 +00001101 // Skip ephemeral values.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001102 if (EphValues.count(&*I))
Hal Finkel57f03dd2014-09-07 13:49:57 +00001103 continue;
1104
Chandler Carruth0539c072012-03-31 12:42:41 +00001105 ++NumInstructions;
1106 if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
1107 ++NumVectorInstructions;
1108
Sanjay Patele9434e82015-09-15 15:26:25 +00001109 // If the instruction is floating point, and the target says this operation
1110 // is expensive or the function has the "use-soft-float" attribute, this may
1111 // eventually become a library call. Treat the cost as such.
    if (I->getType()->isFloatingPointTy()) {
      bool hasSoftFloatAttr = false;

      // If the function has the "use-soft-float" attribute, mark it as
      // expensive.
      if (F.hasFnAttribute("use-soft-float")) {
        Attribute Attr = F.getFnAttribute("use-soft-float");
        StringRef Val = Attr.getValueAsString();
        if (Val == "true")
          hasSoftFloatAttr = true;
      }

      if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
          hasSoftFloatAttr)
        Cost += InlineConstants::CallPenalty;
    }

    // If the instruction simplified to a constant, there is no cost to this
    // instruction. Visit the instructions using our InstVisitor to account for
    // all of the per-instruction logic. The visit tree returns true if we
    // consumed the instruction in any way, and false if the instruction's base
    // cost should count against inlining.
    if (Base::visit(&*I))
      ++NumInstructionsSimplified;
    else
      Cost += InlineConstants::InstrCost;

    // If visiting this instruction detected an uninlinable pattern, abort.
    if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
        HasIndirectBr || HasFrameEscape)
      return false;

    // If the caller is a recursive function, then we don't want to inline
    // functions which allocate a lot of stack space because it would increase
    // the caller's stack usage dramatically.
    if (IsCallerRecursive &&
        AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
      return false;

    // Check if we've passed the maximum possible threshold so we don't spin in
    // huge basic blocks that will never inline.
    if (Cost > Threshold)
      return false;
  }

  return true;
}

/// \brief Compute the base pointer and cumulative constant offsets for V.
///
/// This strips all constant offsets off of V, leaving it as the base pointer,
/// and accumulates the total constant offset applied in the returned constant.
/// It returns null if V is not a pointer, and returns the constant '0' if
/// there are no constant offsets applied.
ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
  if (!V->getType()->isPointerTy())
    return nullptr;

  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  APInt Offset = APInt::getNullValue(IntPtrWidth);

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<Value *, 4> Visited;
  Visited.insert(V);
  do {
    if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
      if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
        return nullptr;
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
      if (GA->isInterposable())
        break;
      V = GA->getAliasee();
    } else {
      break;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  Type *IntPtrTy = DL.getIntPtrType(V->getContext());
  return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
}
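// For example (illustrative IR): given
//   %p = getelementptr inbounds i32, i32* %a, i64 3
// passing %p strips V down to %a and returns the accumulated byte offset 12
// (3 elements * 4-byte i32).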

/// \brief Analyze a call site for potential inlining.
///
/// Returns true if inlining this call is viable, and false if it is not
/// viable. It computes the cost and adjusts the threshold based on numerous
/// factors and heuristics. If this method returns false but the computed cost
/// is below the computed threshold, then inlining was forcibly disabled by
/// some artifact of the routine.
bool CallAnalyzer::analyzeCall(CallSite CS) {
  ++NumCallsAnalyzed;

  // Perform some tweaks to the cost and threshold based on the direct
  // callsite information.

  // We want to more aggressively inline vector-dense kernels, so up the
  // threshold, and we'll lower it if the % of vector instructions gets too
  // low. Note that these bonuses are somewhat arbitrary and evolved over time
  // by accident as much as because they are principled bonuses.
  //
  // FIXME: It would be nice to remove all such bonuses. At least it would be
  // nice to base the bonus values on something more scientific.
  assert(NumInstructions == 0);
  assert(NumVectorInstructions == 0);

  // Update the threshold based on callsite properties.
  updateThreshold(CS, F);

  FiftyPercentVectorBonus = 3 * Threshold / 2;
  TenPercentVectorBonus = 3 * Threshold / 4;
  const DataLayout &DL = F.getParent()->getDataLayout();

  // Track whether the post-inlining function would have more than one basic
  // block. A single basic block is often intended for inlining. Balloon the
  // threshold by 50% until we pass the single-BB phase.
  bool SingleBB = true;
  int SingleBBBonus = Threshold / 2;

  // Speculatively apply all possible bonuses to Threshold. If cost exceeds
  // this Threshold any time, and cost cannot decrease, we can stop processing
  // the rest of the function body.
  Threshold += (SingleBBBonus + FiftyPercentVectorBonus);
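  // (Illustrative numbers: with the default threshold of 225 this yields
  // 225 + 112 + 337 = 674 until the bonuses are clawed back below.)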

  // Give out bonuses per argument, as the instructions setting them up will
  // be gone after inlining.
  for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
    if (CS.isByValArgument(I)) {
      // We approximate the number of loads and stores needed by dividing the
      // size of the byval type by the target's pointer size.
      PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
      unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
      unsigned PointerSize = DL.getPointerSizeInBits();
      // Ceiling division.
      unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;

      // If it generates more than 8 stores it is likely to be expanded as an
      // inline memcpy so we take that as an upper bound. Otherwise we assume
      // one load and one store per word copied.
      // FIXME: The maxStoresPerMemcpy setting from the target should be used
      // here instead of a magic number of 8, but it's not available via
      // DataLayout.
      NumStores = std::min(NumStores, 8U);

      Cost -= 2 * NumStores * InlineConstants::InstrCost;
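      // (Illustrative: a 32-byte byval struct with 64-bit pointers gives
      // NumStores = ceil(256 / 64) = 4, crediting 2 * 4 * InstrCost for the
      // four loads and four stores inlining makes unnecessary.)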
    } else {
      // For non-byval arguments subtract off one instruction per call
      // argument.
      Cost -= InlineConstants::InstrCost;
    }
  }
  // The call instruction also disappears after inlining.
  Cost -= InlineConstants::InstrCost + InlineConstants::CallPenalty;

  // If there is only one call of the function, and it has internal linkage,
  // the cost of inlining it drops dramatically.
  bool OnlyOneCallAndLocalLinkage =
      F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
  if (OnlyOneCallAndLocalLinkage)
    Cost -= InlineConstants::LastCallToStaticBonus;

  // If this function uses the coldcc calling convention, prefer not to inline
  // it.
  if (F.getCallingConv() == CallingConv::Cold)
    Cost += InlineConstants::ColdccPenalty;

  // Check if we're done. This can happen due to bonuses and penalties.
  if (Cost > Threshold)
    return false;

  if (F.empty())
    return true;

  Function *Caller = CS.getInstruction()->getParent()->getParent();
  // Check if the caller function is recursive itself.
  for (User *U : Caller->users()) {
    CallSite Site(U);
    if (!Site)
      continue;
    Instruction *I = Site.getInstruction();
    if (I->getParent()->getParent() == Caller) {
      IsCallerRecursive = true;
      break;
    }
  }

  // Populate our simplified values by mapping from function arguments to call
  // arguments with known important simplifications.
  CallSite::arg_iterator CAI = CS.arg_begin();
  for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
       FAI != FAE; ++FAI, ++CAI) {
    assert(CAI != CS.arg_end());
    if (Constant *C = dyn_cast<Constant>(CAI))
      SimplifiedValues[&*FAI] = C;

    Value *PtrArg = *CAI;
    if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
      ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());

      // We can SROA any pointer arguments derived from alloca instructions.
      if (isa<AllocaInst>(PtrArg)) {
        SROAArgValues[&*FAI] = PtrArg;
        SROAArgCosts[PtrArg] = 0;
      }
    }
  }
  NumConstantArgs = SimplifiedValues.size();
  NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
  NumAllocaArgs = SROAArgValues.size();

  // FIXME: If a caller has multiple calls to a callee, we end up recomputing
  // the ephemeral values multiple times (and they're completely determined by
  // the callee, so this is purely duplicate work).
  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);

  // The worklist of live basic blocks in the callee *after* inlining. We avoid
  // adding basic blocks of the callee which can be proven to be dead for this
  // particular call site in order to get more accurate cost estimates. This
  // requires a somewhat heavyweight iteration pattern: we need to walk the
  // basic blocks in a breadth-first order as we insert live successors. To
  // accomplish this, and because we exit early once we cross the threshold
  // (so iterations tend to be small), we use a small-size optimized SetVector.
  typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
                    SmallPtrSet<BasicBlock *, 16>>
      BBSetVector;
  BBSetVector BBWorklist;
  BBWorklist.insert(&F.getEntryBlock());
  // Note that we *must not* cache the size; this loop grows the worklist.
  for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
    // Bail out the moment we cross the threshold. This means we'll under-count
    // the cost, but only when undercounting doesn't matter.
    if (Cost > Threshold)
      break;

    BasicBlock *BB = BBWorklist[Idx];
    if (BB->empty())
      continue;

    // Disallow inlining a blockaddress. A blockaddress only has defined
    // behavior for an indirect branch in the same function, and we do not
    // currently support inlining indirect branches. But, the inliner may not
    // see an indirect branch that ends up being dead code at a particular call
    // site. If the blockaddress escapes the function, e.g., via a global
    // variable, inlining may lead to an invalid cross-function reference.
    if (BB->hasAddressTaken())
      return false;

    // Analyze the cost of this block. If we blow through the threshold, this
    // returns false, and we can bail out.
    if (!analyzeBlock(BB, EphValues))
      return false;

    TerminatorInst *TI = BB->getTerminator();

    // Add in the live successors by first checking whether we have a
    // terminator that may be simplified based on the values simplified by
    // this call.
    if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
      if (BI->isConditional()) {
        Value *Cond = BI->getCondition();
        if (ConstantInt *SimpleCond =
                dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
          BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
          continue;
        }
      }
    } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
      Value *Cond = SI->getCondition();
      if (ConstantInt *SimpleCond =
              dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
        BBWorklist.insert(SI->findCaseValue(SimpleCond).getCaseSuccessor());
        continue;
      }
    }

    // If we're unable to select a particular successor, just count all of
    // them.
    for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
         ++TIdx)
      BBWorklist.insert(TI->getSuccessor(TIdx));

    // If we had any successors at this point, then post-inlining is likely to
    // have them as well. Note that we assume any basic blocks which existed
    // due to branches or switches which folded above will also fold after
    // inlining.
    if (SingleBB && TI->getNumSuccessors() > 1) {
      // Take off the bonus we applied to the threshold.
      Threshold -= SingleBBBonus;
      SingleBB = false;
    }
  }

  // If this is a noduplicate call, we can still inline as long as
  // inlining this would cause the removal of the callee (so the instruction
  // is not actually duplicated, just moved).
  if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
    return false;

  // We applied the maximum possible vector bonus at the beginning. Now,
  // subtract the excess bonus, if any, from the Threshold before
  // comparing against Cost.
  if (NumVectorInstructions <= NumInstructions / 10)
    Threshold -= FiftyPercentVectorBonus;
  else if (NumVectorInstructions <= NumInstructions / 2)
    Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus);
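  // (Net effect: at most 10% vector instructions keeps no vector bonus,
  // 10-50% keeps TenPercentVectorBonus, and above 50% keeps the full
  // FiftyPercentVectorBonus applied speculatively above. Also note that the
  // comparison below clamps the threshold at 1, so a non-positive Threshold
  // only admits calls whose net Cost ended up negative or zero.)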

  return Cost < std::max(1, Threshold);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// \brief Dump stats about this call's analysis.
LLVM_DUMP_METHOD void CallAnalyzer::dump() {
#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
  DEBUG_PRINT_STAT(NumConstantArgs);
  DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
  DEBUG_PRINT_STAT(NumAllocaArgs);
  DEBUG_PRINT_STAT(NumConstantPtrCmps);
  DEBUG_PRINT_STAT(NumConstantPtrDiffs);
  DEBUG_PRINT_STAT(NumInstructionsSimplified);
  DEBUG_PRINT_STAT(NumInstructions);
  DEBUG_PRINT_STAT(SROACostSavings);
  DEBUG_PRINT_STAT(SROACostSavingsLost);
  DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
  DEBUG_PRINT_STAT(Cost);
  DEBUG_PRINT_STAT(Threshold);
#undef DEBUG_PRINT_STAT
}
#endif

/// \brief Test that two functions either both have or both lack the given
/// attribute.
template <typename AttrKind>
static bool attributeMatches(Function *F1, Function *F2, AttrKind Attr) {
  return F1->getFnAttribute(Attr) == F2->getFnAttribute(Attr);
}

/// \brief Test that there are no attribute conflicts between Caller and Callee
/// that prevent inlining.
static bool functionsHaveCompatibleAttributes(Function *Caller,
                                              Function *Callee,
                                              TargetTransformInfo &TTI) {
  return TTI.areInlineCompatible(Caller, Callee) &&
         AttributeFuncs::areInlineCompatible(*Caller, *Callee);
}
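// (Illustrative: on x86 the TTI check typically refuses to inline a callee
// whose "target-features" are incompatible with the caller's, e.g. an AVX
// callee into a non-AVX caller; the exact policy is target-specific.)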

InlineCost llvm::getInlineCost(
    CallSite CS, const InlineParams &Params, TargetTransformInfo &CalleeTTI,
    std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
    Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
    ProfileSummaryInfo *PSI) {
  return getInlineCost(CS, CS.getCalledFunction(), Params, CalleeTTI,
                       GetAssumptionCache, GetBFI, PSI);
}

InlineCost llvm::getInlineCost(
    CallSite CS, Function *Callee, const InlineParams &Params,
    TargetTransformInfo &CalleeTTI,
    std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
    Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
    ProfileSummaryInfo *PSI) {

  // Cannot inline indirect calls.
  if (!Callee)
    return llvm::InlineCost::getNever();

  // Calls to functions with always-inline attributes should be inlined
  // whenever possible.
  if (CS.hasFnAttr(Attribute::AlwaysInline)) {
    if (isInlineViable(*Callee))
      return llvm::InlineCost::getAlways();
    return llvm::InlineCost::getNever();
  }

  // Never inline functions with conflicting attributes (unless callee has
  // always-inline attribute).
  if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee, CalleeTTI))
    return llvm::InlineCost::getNever();

  // Don't inline this call if the caller has the optnone attribute.
  if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone))
    return llvm::InlineCost::getNever();

  // Don't inline functions which can be interposed at link-time. Don't inline
  // functions marked noinline or call sites marked noinline.
  // Note: inlining non-exact non-interposable functions is fine, since we know
  // we have *a* correct implementation of the source level function.
  if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
      CS.isNoInline())
    return llvm::InlineCost::getNever();

  DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
                     << "...\n");

  CallAnalyzer CA(CalleeTTI, GetAssumptionCache, GetBFI, PSI, *Callee, CS,
                  Params);
  bool ShouldInline = CA.analyzeCall(CS);

  DEBUG(CA.dump());

  // Check if there was a reason to force inlining or no inlining.
  if (!ShouldInline && CA.getCost() < CA.getThreshold())
    return InlineCost::getNever();
  if (ShouldInline && CA.getCost() >= CA.getThreshold())
    return InlineCost::getAlways();

  return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
}

bool llvm::isInlineViable(Function &F) {
  bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
  for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
    // Disallow inlining of functions which contain indirect branches or
    // blockaddresses.
    if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
      return false;

    for (auto &II : *BI) {
      CallSite CS(&II);
      if (!CS)
        continue;

      // Disallow recursive calls.
      if (&F == CS.getCalledFunction())
        return false;

      // Disallow calls which expose returns-twice to a function not previously
      // attributed as such.
      if (!ReturnsTwice && CS.isCall() &&
          cast<CallInst>(CS.getInstruction())->canReturnTwice())
        return false;

      // Disallow inlining functions that call @llvm.localescape. Doing this
      // correctly would require major changes to the inliner.
      if (CS.getCalledFunction() &&
          CS.getCalledFunction()->getIntrinsicID() ==
              llvm::Intrinsic::localescape)
        return false;
    }
  }

  return true;
}
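// Note that isInlineViable is the correctness-only check used for
// always-inline callees above: it rejects constructs that can never be
// inlined, but performs no cost analysis at all.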

// APIs to create InlineParams based on command line flags and/or other
// parameters.

InlineParams llvm::getInlineParams(int Threshold) {
  InlineParams Params;

  // This field is the threshold to use for a callee by default. This is
  // derived from one or more of:
  // * optimization or size-optimization levels,
  // * a value passed to the createFunctionInliningPass function, or
  // * the -inline-threshold flag.
  // If the -inline-threshold flag is explicitly specified, that is used
  // irrespective of anything else.
  if (InlineThreshold.getNumOccurrences() > 0)
    Params.DefaultThreshold = InlineThreshold;
  else
    Params.DefaultThreshold = Threshold;

  // Set the HintThreshold knob from the -inlinehint-threshold.
  Params.HintThreshold = HintThreshold;

  // Set the HotCallSiteThreshold knob from the -hot-callsite-threshold.
  Params.HotCallSiteThreshold = HotCallSiteThreshold;

  // Set the ColdCallSiteThreshold knob from the
  // -inline-cold-callsite-threshold.
  Params.ColdCallSiteThreshold = ColdCallSiteThreshold;

  // Set the OptMinSizeThreshold and OptSizeThreshold params only if the
  // -inline-threshold commandline option is not explicitly given. If that
  // option is present, then its value applies even for callees with size and
  // minsize attributes.
  // If the -inline-threshold is not specified, set the ColdThreshold from the
  // -inlinecold-threshold even if it is not explicitly passed. If
  // -inline-threshold is specified, then -inlinecold-threshold needs to be
  // explicitly specified to set the ColdThreshold knob.
  if (InlineThreshold.getNumOccurrences() == 0) {
    Params.OptMinSizeThreshold = InlineConstants::OptMinSizeThreshold;
    Params.OptSizeThreshold = InlineConstants::OptSizeThreshold;
    Params.ColdThreshold = ColdThreshold;
  } else if (ColdThreshold.getNumOccurrences() > 0) {
    Params.ColdThreshold = ColdThreshold;
  }
  return Params;
}
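// Usage sketch: a client that wants a custom base threshold can call, e.g.,
// getInlineParams(325); an explicit -inline-threshold=N on the command line
// still takes precedence over the value passed in.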

InlineParams llvm::getInlineParams() {
  return getInlineParams(InlineThreshold);
}

// Compute the default threshold for inlining based on the opt level and the
// size opt level.
static int computeThresholdFromOptLevels(unsigned OptLevel,
                                         unsigned SizeOptLevel) {
  if (OptLevel > 2)
    return InlineConstants::OptAggressiveThreshold;
  if (SizeOptLevel == 1) // -Os
    return InlineConstants::OptSizeThreshold;
  if (SizeOptLevel == 2) // -Oz
    return InlineConstants::OptMinSizeThreshold;
  return InlineThreshold;
}
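// For example, computeThresholdFromOptLevels(3, 0) yields
// InlineConstants::OptAggressiveThreshold, while (2, 2), i.e. -Oz, yields
// InlineConstants::OptMinSizeThreshold.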

InlineParams llvm::getInlineParams(unsigned OptLevel, unsigned SizeOptLevel) {
  return getInlineParams(computeThresholdFromOptLevels(OptLevel, SizeOptLevel));
}