//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

static cl::opt<int> InlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

static cl::opt<int>
    ColdCallSiteThreshold("inline-cold-callsite-threshold", cl::Hidden,
                          cl::init(45),
                          cl::desc("Threshold for inlining cold callsites"));

// We introduce this threshold to help performance of instrumentation based
// PGO before we actually hook up inliner with analysis passes such as BPI and
// BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(225),
    cl::desc("Threshold for inlining functions with cold attribute"));

static cl::opt<int>
    HotCallSiteThreshold("hot-callsite-threshold", cl::Hidden, cl::init(3000),
                         cl::ZeroOrMore,
                         cl::desc("Threshold for hot callsites"));

namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// Getter for the cache of @llvm.assume intrinsics.
  std::function<AssumptionCache &(Function &)> &GetAssumptionCache;

  /// Getter for BlockFrequencyInfo.
  Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  /// The called function.
  Function &F;

  /// The candidate callsite being analyzed. Please do not use this to do
  /// analysis in the caller function; we want the inline cost query to be
  /// easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  /// Tunable parameters that control the analysis.
  const InlineParams &Params;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int FiftyPercentVectorBonus, TenPercentVectorBonus;
  int VectorBonus;

  /// While we walk the potentially-inlined instructions, we build up and
  /// maintain a mapping of simplified values specific to this callsite. The
  /// idea is to propagate any special information we have about arguments to
  /// this call through the inlinable section of the function, and account for
  /// likely simplifications post-inlining. The most important aspect we track
  /// is CFG altering simplifications -- when we prove a basic block dead, that
  /// can cause dramatic shifts in the cost of inlining a function.
  DenseMap<Value *, Constant *> SimplifiedValues;

  /// Keep track of the values which map back (through function arguments) to
  /// allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  /// The mapping of caller Alloca values to their accumulated cost savings. If
  /// we have to disable SROA for one of the allocas, this tells us how much
  /// cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  /// Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPOffsetConstant(GetElementPtrInst &GEP);
  bool isGEPFree(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non-null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *);
  void visit(Module &);
  void visit(Function *);
  void visit(Function &);
  void visit(BasicBlock *);
  void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI,
               std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
               Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI,
               ProfileSummaryInfo *PSI, Function &Callee, CallSite CSArg,
               const InlineParams &Params)
      : TTI(TTI), GetAssumptionCache(GetAssumptionCache), GetBFI(GetBFI),
        PSI(PSI), F(Callee), CandidateCS(CSArg), Params(Params),
        Threshold(Params.DefaultThreshold), Cost(0), IsCallerRecursive(false),
        IsRecursiveCall(false), ExposesReturnsTwice(false),
        HasDynamicAlloca(false), ContainsNoDuplicateCall(false),
        HasReturn(false), HasIndirectBr(false), HasFrameEscape(false),
        AllocatedSize(0), NumInstructions(0), NumVectorInstructions(0),
        FiftyPercentVectorBonus(0), TenPercentVectorBonus(0), VectorBonus(0),
        NumConstantArgs(0), NumConstantOffsetPtrArgs(0), NumAllocaArgs(0),
        NumConstantPtrCmps(0), NumConstantPtrDiffs(0),
        NumInstructionsSimplified(0), SROACostSavings(0),
        SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

/// \brief Check whether a GEP's indices are all constant.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPOffsetConstant(GetElementPtrInst &GEP) {
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
      return false;

  return true;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero())
      continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = GTI.getStructTypeOrNull()) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}
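
// A worked example of the offset accumulation above (hypothetical IR, not
// taken from this file): given %struct.S = type { i32, [16 x i32] } and
//   %p = getelementptr inbounds %struct.S, %struct.S* %s, i64 0, i32 1, i64 8
// the accumulated offset is 4 bytes for field 1 plus 8 * 4 bytes for the
// array index, i.e. 36 bytes in total.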

/// \brief Use TTI to check whether a GEP is free.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPFree(GetElementPtrInst &GEP) {
  SmallVector<Value *, 4> Indices;
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (Constant *SimpleOp = SimplifiedValues.lookup(*I))
      Indices.push_back(SimpleOp);
    else
      Indices.push_back(*I);
  return TargetTransformInfo::TCC_Free ==
         TTI.getGEPCost(GEP.getSourceElementType(), GEP.getPointerOperand(),
                        Indices);
}

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      const DataLayout &DL = F.getParent()->getDataLayout();
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}
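
// For illustration (hypothetical IR): if the caller passes a constant 16 for
// %n, SimplifiedValues lets the array allocation below be costed as a static
// 64-byte alloca rather than as an inlining-blocking dynamic one:
//   %buf = alloca i32, i32 %n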

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded, and disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return isGEPFree(I);
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here, we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  if (isGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return isGEPFree(I);
}
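
// For illustration (hypothetical IR, 64-bit target): if the callee argument
// %arg is already mapped to a caller base, say (@g, 0), in
// ConstantOffsetPtrs, then
//   %p = getelementptr inbounds i32, i32* %arg, i64 2
// is recorded as (@g, 8) and modeled as free, since it folds away once the
// callee is inlined with @g bound to %arg.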

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getBitCast(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offsets through casts
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getPtrToInt(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getIntToPtr(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getCast(I.getOpcode(), COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  Constant *COp = dyn_cast<Constant>(Operand);
  if (!COp)
    COp = SimplifiedValues.lookup(Operand);
  if (COp) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    if (Constant *C = ConstantFoldInstOperands(&I, COp, DL)) {
      SimplifiedValues[&I] = C;
      return true;
    }
  }

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  unsigned ArgNo = A->getArgNo();
  return CandidateCS.paramHasAttr(ArgNo + 1, Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in the scenario below, inlining hot_call_X() may
  // be beneficial:
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In future, we should elaborate this based on BPI and BFI in more
  // general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();

  // return min(A, B) if B is valid.
  auto MinIfValid = [](int A, Optional<int> B) {
    return B ? std::min(A, B.getValue()) : A;
  };

  // return max(A, B) if B is valid.
  auto MaxIfValid = [](int A, Optional<int> B) {
    return B ? std::max(A, B.getValue()) : A;
  };

  // Use the OptMinSizeThreshold or OptSizeThreshold knob if they are available
  // and reduce the threshold if the caller has the necessary attribute.
  if (Caller->optForMinSize())
    Threshold = MinIfValid(Threshold, Params.OptMinSizeThreshold);
  else if (Caller->optForSize())
    Threshold = MinIfValid(Threshold, Params.OptSizeThreshold);

  // Adjust the threshold based on inlinehint attribute and profile based
  // hotness information if the caller does not have MinSize attribute.
  if (!Caller->optForMinSize()) {
    if (Callee.hasFnAttribute(Attribute::InlineHint))
      Threshold = MaxIfValid(Threshold, Params.HintThreshold);
    if (PSI) {
      BlockFrequencyInfo *CallerBFI = GetBFI ? &((*GetBFI)(*Caller)) : nullptr;
      if (PSI->isHotCallSite(CS, CallerBFI)) {
        DEBUG(dbgs() << "Hot callsite.\n");
        Threshold = MaxIfValid(Threshold, Params.HotCallSiteThreshold);
      } else if (PSI->isFunctionEntryHot(&Callee)) {
        DEBUG(dbgs() << "Hot callee.\n");
        // If callsite hotness can not be determined, we may still know
        // that the callee is hot and treat it as a weaker hint for threshold
        // increase.
        Threshold = MaxIfValid(Threshold, Params.HintThreshold);
      } else if (PSI->isColdCallSite(CS, CallerBFI)) {
        DEBUG(dbgs() << "Cold callsite.\n");
        Threshold = MinIfValid(Threshold, Params.ColdCallSiteThreshold);
      } else if (PSI->isFunctionEntryCold(&Callee)) {
        DEBUG(dbgs() << "Cold callee.\n");
        Threshold = MinIfValid(Threshold, Params.ColdThreshold);
      }
    }
  }

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();
}
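
// A rough worked example, assuming the InlineParams defaults are taken from
// the flags at the top of this file: for a callsite whose caller has neither
// minsize nor optsize, an inlinehint on the callee raises Threshold from 225
// to 325; a PGO-proven hot callsite can raise it to 3000, while a cold
// callsite drops it to 45. A cold callee takes min(Threshold, 225), which is
// no reduction at the default setting.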

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  if (Constant *CLHS = dyn_cast<Constant>(LHS)) {
    if (Constant *CRHS = dyn_cast<Constant>(RHS))
      if (Constant *C =
              ConstantExpr::getCompare(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}
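
// For illustration (hypothetical IR): if %a and %b are tracked in
// ConstantOffsetPtrs with a common caller base, say @g, at offsets 4 and 8,
// the compare below folds to false during this analysis:
//   %a = getelementptr inbounds i32, i32* %arg, i64 1
//   %b = getelementptr inbounds i32, i32* %arg, i64 2
//   %c = icmp eq i32* %a, %b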

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}
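
// For illustration (hypothetical IR, 64-bit target): with %a and %b tracked
// as a common base plus offsets 4 and 12, the pointer difference below folds
// to the constant 8, because visitPtrToInt also carries base/offset pairs
// into the integer domain:
//   %ai = ptrtoint i32* %a to i64
//   %bi = ptrtoint i32* %b to i64
//   %d  = sub i64 %bi, %ai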

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  Value *SimpleV = nullptr;
  if (auto FI = dyn_cast<FPMathOperator>(&I))
    SimpleV =
        SimplifyFPBinOp(I.getOpcode(), LHS, RHS, FI->getFastMathFlags(), DL);
  else
    SimpleV = SimplifyBinOp(I.getOpcode(), LHS, RHS, DL);

  if (Constant *C = dyn_cast_or_null<Constant>(SimpleV)) {
    SimplifiedValues[&I] = C;
    return true;
  }

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  Constant *C = dyn_cast<Constant>(I.getAggregateOperand());
  if (!C)
    C = SimplifiedValues.lookup(I.getAggregateOperand());
  if (C) {
    SimplifiedValues[&I] = ConstantExpr::getExtractValue(C, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  Constant *AggC = dyn_cast<Constant>(I.getAggregateOperand());
  if (!AggC)
    AggC = SimplifiedValues.lookup(I.getAggregateOperand());
  Constant *InsertedC = dyn_cast<Constant>(I.getInsertedValueOperand());
  if (!InsertedC)
    InsertedC = SimplifiedValues.lookup(I.getInsertedValueOperand());
  if (AggC && InsertedC) {
    SimplifiedValues[&I] =
        ConstantExpr::getInsertValue(AggC, InsertedC, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
       ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}
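
// For illustration (hypothetical IR): if %x has already simplified to the
// constant 4.0, the call below constant-folds to 2.0 here and is treated as
// free rather than costed as a call:
//   %r = call double @llvm.sqrt.f64(double %x)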

bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        return Base::visitCallSite(CS);

      case Intrinsic::load_relative:
        // This is normally lowered to 4 LLVM instructions.
        Cost += 3 * InlineConstants::InstrCost;
        return false;

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        // SROA can usually chew through these intrinsics, but they aren't free.
        return false;
      case Intrinsic::localescape:
        HasFrameEscape = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getParent()->getParent()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F)
    return Base::visitCallSite(CS);

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
  auto IndirectCallParams = Params;
  IndirectCallParams.DefaultThreshold = InlineConstants::IndirectCallThreshold;
  CallAnalyzer CA(TTI, GetAssumptionCache, GetBFI, PSI, *F, CS,
                  IndirectCallParams);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // threshold to get the bonus we want to apply, but don't go below zero.
    Cost -= std::max(0, CA.getThreshold() - CA.getCost());
  }

  return Base::visitCallSite(CS);
}
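
// For illustration (hypothetical IR): if SimplifiedValues has resolved %fp to
// a concrete function @callee, the indirect call below is re-analyzed as a
// direct call to @callee and may earn a devirtualization bonus:
//   %r = call i32 %fp(i32 %x)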

bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}
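
// For illustration (hypothetical IR): if %cond has simplified to a constant,
// the conditional branch below is modeled as free because it will fold away
// once the callee is inlined at this callsite:
//   br i1 %cond, label %then, label %else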

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Otherwise, we need to accumulate a cost proportional to the number of
  // distinct successor blocks. This fan-out in the CFG cannot be represented
  // for free even if we can represent the core switch as a jumptable that
  // takes a single instruction.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.
  SmallPtrSet<BasicBlock *, 8> SuccessorBlocks;
  SuccessorBlocks.insert(SI.getDefaultDest());
  for (auto I = SI.case_begin(), E = SI.case_end(); I != E; ++I)
    SuccessorBlocks.insert(I.getCaseSuccessor());
  // Add cost corresponding to the number of distinct destinations. The first
  // we model as free because of fallthrough.
  Cost += (SuccessorBlocks.size() - 1) * InlineConstants::InstrCost;
  return false;
}

bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
  // We never want to inline functions that contain an indirectbr. This is
  // incorrect because all the blockaddress's (in static global initializers
  // for example) would be referring to the original function, and this
  // indirect jump would jump from the inlined copy of the function into the
  // original function which is extremely undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions with
  // indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function.
  HasIndirectBr = true;
  return false;
}

bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a resume instruction.
  return false;
}

bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a cleanupret instruction.
  return false;
}

bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a catchret instruction.
  return false;
}

bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
  // FIXME: It might be reasonable to discount the cost of instructions leading
  // to unreachable as they have the lowest possible impact on both runtime and
  // code size.
  return true; // No actual code is needed for unreachable.
}
1087
Chandler Carruth0539c072012-03-31 12:42:41 +00001088bool CallAnalyzer::visitInstruction(Instruction &I) {
Chandler Carruthda7513a2012-05-04 00:58:03 +00001089 // Some instructions are free. All of the free intrinsics can also be
1090 // handled by SROA, etc.
  if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
    return true;

  // We found something we don't understand or can't handle. Mark any SROA-able
  // values in the operand list as no longer viable.
  for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
    disableSROA(*OI);

  return false;
}

/// \brief Analyze a basic block for its contribution to the inline cost.
///
/// This method walks the analyzer over every instruction in the given basic
/// block and accounts for their cost during inlining at this callsite. It
/// aborts early if the threshold has been exceeded or an impossible-to-inline
/// construct has been detected. It returns false if inlining is no longer
/// viable, and true if inlining remains viable.
bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
                                SmallPtrSetImpl<const Value *> &EphValues) {
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
    // FIXME: Currently the vector bonus heuristic counts every instruction in
    // the function, regardless of our ability to simplify it to a constant or
    // dead code during inlining. As long as that's true, we have to special
    // case debug intrinsics here to prevent differences in inlining due to
    // debug symbols. Eventually, the number of unsimplified instructions
    // shouldn't factor into the cost computation, but until then, hack around
    // it here.
    if (isa<DbgInfoIntrinsic>(I))
      continue;

    // Skip ephemeral values.
    if (EphValues.count(&*I))
      continue;

    ++NumInstructions;
    if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
      ++NumVectorInstructions;

    // If the instruction is floating point, and the target says this operation
    // is expensive or the function has the "use-soft-float" attribute, this
    // may eventually become a library call. Treat the cost as such.
    if (I->getType()->isFloatingPointTy()) {
      bool hasSoftFloatAttr = false;

      // If the function has the "use-soft-float" attribute, mark it as
      // expensive.
      if (F.hasFnAttribute("use-soft-float")) {
        Attribute Attr = F.getFnAttribute("use-soft-float");
        StringRef Val = Attr.getValueAsString();
        if (Val == "true")
          hasSoftFloatAttr = true;
      }

      if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
          hasSoftFloatAttr)
        Cost += InlineConstants::CallPenalty;
    }
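    // For example (illustrative): on a soft-float target an 'fadd double' is
    // usually lowered to a library routine such as __adddf3, which is why it
    // is costed like a call rather than a single cheap instruction.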

    // If the instruction simplified to a constant, there is no cost to this
    // instruction. Visit the instructions using our InstVisitor to account for
    // all of the per-instruction logic. The visit tree returns true if we
    // consumed the instruction in any way, and false if the instruction's base
    // cost should count against inlining.
    if (Base::visit(&*I))
      ++NumInstructionsSimplified;
    else
      Cost += InlineConstants::InstrCost;

    // If visiting this instruction detected an uninlinable pattern, abort.
    if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
        HasIndirectBr || HasFrameEscape)
      return false;

    // If the caller is a recursive function then we don't want to inline
    // functions which allocate a lot of stack space because it would increase
    // the caller stack usage dramatically.
    if (IsCallerRecursive &&
        AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
      return false;

    // Check if we've passed the maximum possible threshold so we don't spin in
    // huge basic blocks that will never inline.
    if (Cost > Threshold)
      return false;
  }

  return true;
}

/// \brief Compute the base pointer and cumulative constant offsets for V.
///
/// This strips all constant offsets off of V, leaving it the base pointer, and
/// accumulates the total constant offset applied in the returned constant. It
/// returns null if V is not a pointer, and returns the constant '0' if there
/// are no constant offsets applied.
ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
  if (!V->getType()->isPointerTy())
    return nullptr;

  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  APInt Offset = APInt::getNullValue(IntPtrWidth);

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<Value *, 4> Visited;
  Visited.insert(V);
  do {
    if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
      if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
        return nullptr;
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
      if (GA->isInterposable())
        break;
      V = GA->getAliasee();
    } else {
      break;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  Type *IntPtrTy = DL.getIntPtrType(V->getContext());
  return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
}

/// \brief Analyze a call site for potential inlining.
///
/// Returns true if inlining this call is viable, and false if it is not
/// viable. It computes the cost and adjusts the threshold based on numerous
/// factors and heuristics. If this method returns false but the computed cost
/// is below the computed threshold, then inlining was forcibly disabled by
/// some artifact of the routine.
bool CallAnalyzer::analyzeCall(CallSite CS) {
  ++NumCallsAnalyzed;

  // Perform some tweaks to the cost and threshold based on the direct
  // callsite information.

  // We want to more aggressively inline vector-dense kernels, so up the
  // threshold, and we'll lower it if the % of vector instructions gets too
  // low. Note that these bonuses are somewhat arbitrary and evolved over time
  // by accident as much as because they are principled bonuses.
  //
  // FIXME: It would be nice to remove all such bonuses. At least it would be
  // nice to base the bonus values on something more scientific.
  assert(NumInstructions == 0);
  assert(NumVectorInstructions == 0);

  // Update the threshold based on callsite properties.
  updateThreshold(CS, F);

  FiftyPercentVectorBonus = 3 * Threshold / 2;
  TenPercentVectorBonus = 3 * Threshold / 4;
  const DataLayout &DL = F.getParent()->getDataLayout();

  // Track whether the post-inlining function would have more than one basic
  // block. A single basic block is often intended for inlining. Balloon the
  // threshold by 50% until we pass the single-BB phase.
  bool SingleBB = true;
  int SingleBBBonus = Threshold / 2;

  // Speculatively apply all possible bonuses to Threshold. If cost exceeds
  // this Threshold any time, and cost cannot decrease, we can stop processing
  // the rest of the function body.
  Threshold += (SingleBBBonus + FiftyPercentVectorBonus);
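  // For example (illustrative, assuming updateThreshold above left the
  // default -inline-threshold of 225 in place): the bonuses are 112 and 337,
  // so the speculative Threshold is 225 + 112 + 337 = 674 until the unearned
  // portions are subtracted back out below.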

  // Give out bonuses per argument, as the instructions setting them up will
  // be gone after inlining.
  for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
    if (CS.isByValArgument(I)) {
      // We approximate the number of loads and stores needed by dividing the
      // size of the byval type by the target's pointer size.
      PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
      unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
      unsigned PointerSize = DL.getPointerSizeInBits();
      // Ceiling division.
      unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;

      // If it generates more than 8 stores it is likely to be expanded as an
      // inline memcpy so we take that as an upper bound. Otherwise we assume
      // one load and one store per word copied.
      // FIXME: The maxStoresPerMemcpy setting from the target should be used
      // here instead of a magic number of 8, but it's not available via
      // DataLayout.
      NumStores = std::min(NumStores, 8U);

      Cost -= 2 * NumStores * InlineConstants::InstrCost;
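      // For example (illustrative): a byval argument of type [32 x i8] on a
      // target with 64-bit pointers is 256 bits, giving (256 + 63) / 64 = 4
      // word copies and a credit of 2 * 4 * InlineConstants::InstrCost.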
    } else {
      // For non-byval arguments subtract off one instruction per call
      // argument.
      Cost -= InlineConstants::InstrCost;
    }
  }
  // The call instruction also disappears after inlining.
  Cost -= InlineConstants::InstrCost + InlineConstants::CallPenalty;

  // If there is only one call of the function, and it has internal linkage,
  // the cost of inlining it drops dramatically.
  bool OnlyOneCallAndLocalLinkage =
      F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
  if (OnlyOneCallAndLocalLinkage)
    Cost -= InlineConstants::LastCallToStaticBonus;
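  // For example (illustrative): an internal function whose only use is this
  // call site gets a large credit here, since its body can be deleted
  // entirely once the call is inlined.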

  // If this function uses the coldcc calling convention, prefer not to inline
  // it.
  if (F.getCallingConv() == CallingConv::Cold)
    Cost += InlineConstants::ColdccPenalty;

  // Check if we're done. This can happen due to bonuses and penalties.
  if (Cost > Threshold)
    return false;

  if (F.empty())
    return true;

  Function *Caller = CS.getInstruction()->getParent()->getParent();
  // Check if the caller function is recursive itself.
  for (User *U : Caller->users()) {
    CallSite Site(U);
    if (!Site)
      continue;
    Instruction *I = Site.getInstruction();
    if (I->getParent()->getParent() == Caller) {
      IsCallerRecursive = true;
      break;
    }
  }

  // Populate our simplified values by mapping from function arguments to call
  // arguments with known important simplifications.
  CallSite::arg_iterator CAI = CS.arg_begin();
  for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
       FAI != FAE; ++FAI, ++CAI) {
    assert(CAI != CS.arg_end());
    if (Constant *C = dyn_cast<Constant>(CAI))
      SimplifiedValues[&*FAI] = C;

    Value *PtrArg = *CAI;
    if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
      ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());

      // We can SROA any pointer arguments derived from alloca instructions.
      if (isa<AllocaInst>(PtrArg)) {
        SROAArgValues[&*FAI] = PtrArg;
        SROAArgCosts[PtrArg] = 0;
      }
    }
  }
  NumConstantArgs = SimplifiedValues.size();
  NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
  NumAllocaArgs = SROAArgValues.size();

  // FIXME: If a caller has multiple calls to a callee, we end up recomputing
  // the ephemeral values multiple times (and they're completely determined by
  // the callee, so this is purely duplicate work).
  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);
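  // For example (illustrative IR): in
  //   %cmp = icmp ult i32 %n, 64
  //   call void @llvm.assume(i1 %cmp)
  // %cmp only feeds the assume, so it is ephemeral and never costed.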

  // The worklist of live basic blocks in the callee *after* inlining. We avoid
  // adding basic blocks of the callee which can be proven to be dead for this
  // particular call site in order to get more accurate cost estimates. This
  // requires a somewhat heavyweight iteration pattern: we need to walk the
  // basic blocks in a breadth-first order as we insert live successors. To
  // accomplish this, and because we prioritize small iteration counts (we
  // exit after crossing our threshold), we use a small-size optimized
  // SetVector.
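  // For example (illustrative IR): if this call site passes i1 true for an
  // argument %flag, then for
  //   br i1 %flag, label %then, label %else
  // only %then is enqueued below; %else is dead here and never costed.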
  typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
                    SmallPtrSet<BasicBlock *, 16>>
      BBSetVector;
  BBSetVector BBWorklist;
  BBWorklist.insert(&F.getEntryBlock());
  // Note that we *must not* cache the size, this loop grows the worklist.
  for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
    // Bail out the moment we cross the threshold. This means we'll under-count
    // the cost, but only when undercounting doesn't matter.
    if (Cost > Threshold)
      break;

    BasicBlock *BB = BBWorklist[Idx];
    if (BB->empty())
      continue;

    // Disallow inlining a blockaddress. A blockaddress only has defined
    // behavior for an indirect branch in the same function, and we do not
    // currently support inlining indirect branches. But, the inliner may not
    // see an indirect branch that ends up being dead code at a particular call
    // site. If the blockaddress escapes the function, e.g., via a global
    // variable, inlining may lead to an invalid cross-function reference.
    if (BB->hasAddressTaken())
      return false;

    // Analyze the cost of this block. If we blow through the threshold, this
    // returns false, and we can bail out.
    if (!analyzeBlock(BB, EphValues))
      return false;

    TerminatorInst *TI = BB->getTerminator();

    // Add in the live successors by first checking whether we have a
    // terminator that may be simplified based on the values simplified by
    // this call.
    if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
      if (BI->isConditional()) {
        Value *Cond = BI->getCondition();
        if (ConstantInt *SimpleCond =
                dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
          BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
          continue;
        }
      }
    } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
      Value *Cond = SI->getCondition();
      if (ConstantInt *SimpleCond =
              dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
        BBWorklist.insert(SI->findCaseValue(SimpleCond).getCaseSuccessor());
        continue;
      }
    }

    // If we're unable to select a particular successor, just count all of
    // them.
    for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
         ++TIdx)
      BBWorklist.insert(TI->getSuccessor(TIdx));

    // If we had any successors at this point, then post-inlining is likely to
    // have them as well. Note that we assume any basic blocks which existed
    // due to branches or switches which folded above will also fold after
    // inlining.
    if (SingleBB && TI->getNumSuccessors() > 1) {
      // Take off the bonus we applied to the threshold.
      Threshold -= SingleBBBonus;
      SingleBB = false;
    }
  }

  // If this is a noduplicate call, we can still inline as long as
  // inlining this would cause the removal of the caller (so the instruction
  // is not actually duplicated, just moved).
  if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
    return false;

  // We applied the maximum possible vector bonus at the beginning. Now,
  // subtract the excess bonus, if any, from the Threshold before
  // comparing against Cost.
  if (NumVectorInstructions <= NumInstructions / 10)
    Threshold -= FiftyPercentVectorBonus;
  else if (NumVectorInstructions <= NumInstructions / 2)
    Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus);
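  // For example (illustrative, assuming the default 225 threshold survived
  // updateThreshold): a callee with under 10% vector instructions gives back
  // the full 337 bonus; one between 10% and 50% gives back 337 - 168 = 169;
  // one that is more than half vector instructions keeps the entire bonus.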

  return Cost < std::max(1, Threshold);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// \brief Dump stats about this call's analysis.
LLVM_DUMP_METHOD void CallAnalyzer::dump() {
#define DEBUG_PRINT_STAT(x) dbgs() << "  " #x ": " << x << "\n"
  DEBUG_PRINT_STAT(NumConstantArgs);
  DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
  DEBUG_PRINT_STAT(NumAllocaArgs);
  DEBUG_PRINT_STAT(NumConstantPtrCmps);
  DEBUG_PRINT_STAT(NumConstantPtrDiffs);
  DEBUG_PRINT_STAT(NumInstructionsSimplified);
  DEBUG_PRINT_STAT(NumInstructions);
  DEBUG_PRINT_STAT(SROACostSavings);
  DEBUG_PRINT_STAT(SROACostSavingsLost);
  DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
  DEBUG_PRINT_STAT(Cost);
  DEBUG_PRINT_STAT(Threshold);
#undef DEBUG_PRINT_STAT
}
#endif

/// \brief Test whether two functions either both have or both lack the given
/// attribute.
template <typename AttrKind>
static bool attributeMatches(Function *F1, Function *F2, AttrKind Attr) {
  return F1->getFnAttribute(Attr) == F2->getFnAttribute(Attr);
}

/// \brief Test that there are no attribute conflicts between Caller and Callee
/// that prevent inlining.
static bool functionsHaveCompatibleAttributes(Function *Caller,
                                              Function *Callee,
                                              TargetTransformInfo &TTI) {
  return TTI.areInlineCompatible(Caller, Callee) &&
         AttributeFuncs::areInlineCompatible(*Caller, *Callee);
}
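// For example (illustrative): on x86, a callee whose "target-features"
// include "+avx2" is rejected by TTI.areInlineCompatible when the caller
// lacks that feature, since inlining could introduce instructions the caller
// is not guaranteed to be able to execute.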

InlineCost llvm::getInlineCost(
    CallSite CS, const InlineParams &Params, TargetTransformInfo &CalleeTTI,
    std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
    Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
    ProfileSummaryInfo *PSI) {
  return getInlineCost(CS, CS.getCalledFunction(), Params, CalleeTTI,
                       GetAssumptionCache, GetBFI, PSI);
}

InlineCost llvm::getInlineCost(
    CallSite CS, Function *Callee, const InlineParams &Params,
    TargetTransformInfo &CalleeTTI,
    std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
    Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
    ProfileSummaryInfo *PSI) {

  // Cannot inline indirect calls.
  if (!Callee)
    return llvm::InlineCost::getNever();

  // Calls to functions with always-inline attributes should be inlined
  // whenever possible.
  if (CS.hasFnAttr(Attribute::AlwaysInline)) {
    if (isInlineViable(*Callee))
      return llvm::InlineCost::getAlways();
    return llvm::InlineCost::getNever();
  }

  // Never inline functions with conflicting attributes (unless callee has
  // always-inline attribute).
  if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee, CalleeTTI))
    return llvm::InlineCost::getNever();

  // Don't inline this call if the caller has the optnone attribute.
  if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone))
    return llvm::InlineCost::getNever();

  // Don't inline functions which can be interposed at link-time. Don't inline
  // functions marked noinline or call sites marked noinline.
  // Note: inlining non-exact non-interposable functions is fine, since we know
  // we have *a* correct implementation of the source level function.
  if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
      CS.isNoInline())
    return llvm::InlineCost::getNever();

  DEBUG(llvm::dbgs() << "      Analyzing call of " << Callee->getName()
                     << "...\n");

  CallAnalyzer CA(CalleeTTI, GetAssumptionCache, GetBFI, PSI, *Callee, CS,
                  Params);
  bool ShouldInline = CA.analyzeCall(CS);

  DEBUG(CA.dump());

  // Check if there was a reason to force inlining or no inlining.
  if (!ShouldInline && CA.getCost() < CA.getThreshold())
    return InlineCost::getNever();
  if (ShouldInline && CA.getCost() >= CA.getThreshold())
    return InlineCost::getAlways();

  return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
}

bool llvm::isInlineViable(Function &F) {
  bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
  for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
    // Disallow inlining of functions which contain indirect branches or
    // blockaddresses.
    if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
      return false;

    for (auto &II : *BI) {
      CallSite CS(&II);
      if (!CS)
        continue;

      // Disallow recursive calls.
      if (&F == CS.getCalledFunction())
        return false;

      // Disallow calls which expose returns-twice to a function not previously
      // attributed as such.
      if (!ReturnsTwice && CS.isCall() &&
          cast<CallInst>(CS.getInstruction())->canReturnTwice())
        return false;

      // Disallow inlining functions that call @llvm.localescape. Doing this
      // correctly would require major changes to the inliner.
      if (CS.getCalledFunction() &&
          CS.getCalledFunction()->getIntrinsicID() ==
              llvm::Intrinsic::localescape)
        return false;
    }
  }

  return true;
}

// APIs to create InlineParams based on command line flags and/or other
// parameters.

InlineParams llvm::getInlineParams(int Threshold) {
  InlineParams Params;

  // This field is the threshold to use for a callee by default. This is
  // derived from one or more of:
  //  * optimization or size-optimization levels,
  //  * a value passed to the createFunctionInliningPass function, or
  //  * the -inline-threshold flag.
  // If the -inline-threshold flag is explicitly specified, that is used
  // irrespective of anything else.
  if (InlineThreshold.getNumOccurrences() > 0)
    Params.DefaultThreshold = InlineThreshold;
  else
    Params.DefaultThreshold = Threshold;

  // Set the HintThreshold knob from the -inlinehint-threshold.
  Params.HintThreshold = HintThreshold;

  // Set the HotCallSiteThreshold knob from the -hot-callsite-threshold.
  Params.HotCallSiteThreshold = HotCallSiteThreshold;

  // Set the ColdCallSiteThreshold knob from the
  // -inline-cold-callsite-threshold.
  Params.ColdCallSiteThreshold = ColdCallSiteThreshold;

  // Set the OptMinSizeThreshold and OptSizeThreshold params only if the
  // -inline-threshold commandline option is not explicitly given. If that
  // option is present, then its value applies even for callees with size and
  // minsize attributes.
  // If the -inline-threshold is not specified, set the ColdThreshold from the
  // -inlinecold-threshold even if it is not explicitly passed. If
  // -inline-threshold is specified, then -inlinecold-threshold needs to be
  // explicitly specified to set the ColdThreshold knob.
  if (InlineThreshold.getNumOccurrences() == 0) {
    Params.OptMinSizeThreshold = InlineConstants::OptMinSizeThreshold;
    Params.OptSizeThreshold = InlineConstants::OptSizeThreshold;
    Params.ColdThreshold = ColdThreshold;
  } else if (ColdThreshold.getNumOccurrences() > 0) {
    Params.ColdThreshold = ColdThreshold;
  }
  return Params;
}

InlineParams llvm::getInlineParams() {
  return getInlineParams(InlineThreshold);
}

// Compute the default threshold for inlining based on the opt level and the
// size opt level.
static int computeThresholdFromOptLevels(unsigned OptLevel,
                                         unsigned SizeOptLevel) {
  if (OptLevel > 2)
    return InlineConstants::OptAggressiveThreshold;
  if (SizeOptLevel == 1) // -Os
    return InlineConstants::OptSizeThreshold;
  if (SizeOptLevel == 2) // -Oz
    return InlineConstants::OptMinSizeThreshold;
  return InlineThreshold;
}

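// For example (illustrative): getInlineParams(3, 0) selects the aggressive
// -O3 threshold, getInlineParams(2, 1) the -Os threshold, and
// getInlineParams(2, 2) the -Oz threshold; other combinations fall back to
// the -inline-threshold default of 225.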
InlineParams llvm::getInlineParams(unsigned OptLevel, unsigned SizeOptLevel) {
  return getInlineParams(computeThresholdFromOptLevels(OptLevel, SizeOptLevel));
}