//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

// Threshold to use when optsize is specified (and there is no
// -inline-threshold).
const int OptSizeThreshold = 75;

// Threshold to use when -Oz is specified (and there is no -inline-threshold).
const int OptMinSizeThreshold = 25;

// Threshold to use when -O[34] is specified (and there is no
// -inline-threshold).
const int OptAggressiveThreshold = 275;

static cl::opt<int> DefaultInlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

// We introduce this threshold to help the performance of instrumentation-based
// PGO before we actually hook up the inliner with analysis passes such as BPI
// and BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(225),
    cl::desc("Threshold for inlining functions with cold attribute"));

static cl::opt<int>
    HotCallSiteThreshold("hot-callsite-threshold", cl::Hidden, cl::init(3000),
                         cl::ZeroOrMore,
                         cl::desc("Threshold for hot callsites"));

namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// Getter for the cache of @llvm.assume intrinsics.
  std::function<AssumptionCache &(Function &)> &GetAssumptionCache;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  // The called function.
  Function &F;

  // The candidate callsite being analyzed. Please do not use this to do
  // analysis in the caller function; we want the inline cost query to be
  // easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int FiftyPercentVectorBonus, TenPercentVectorBonus;
  int VectorBonus;

  // While we walk the potentially-inlined instructions, we build up and
  // maintain a mapping of simplified values specific to this callsite. The
  // idea is to propagate any special information we have about arguments to
  // this call through the inlinable section of the function, and account for
  // likely simplifications post-inlining. The most important aspect we track
  // is CFG altering simplifications -- when we prove a basic block dead, that
  // can cause dramatic shifts in the cost of inlining a function.
  DenseMap<Value *, Constant *> SimplifiedValues;

  // Keep track of the values which map back (through function arguments) to
  // allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  // The mapping of caller Alloca values to their accumulated cost savings. If
  // we have to disable SROA for one of the allocas, this tells us how much
  // cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  // Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPOffsetConstant(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *);
  void visit(Module &);
  void visit(Function *);
  void visit(Function &);
  void visit(BasicBlock *);
  void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI,
               std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
               ProfileSummaryInfo *PSI, Function &Callee, int Threshold,
               CallSite CSArg)
      : TTI(TTI), GetAssumptionCache(GetAssumptionCache), PSI(PSI), F(Callee),
        CandidateCS(CSArg), Threshold(Threshold), Cost(0),
        IsCallerRecursive(false), IsRecursiveCall(false),
        ExposesReturnsTwice(false), HasDynamicAlloca(false),
        ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
        HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
        NumVectorInstructions(0), FiftyPercentVectorBonus(0),
        TenPercentVectorBonus(0), VectorBonus(0), NumConstantArgs(0),
        NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), NumConstantPtrCmps(0),
        NumConstantPtrDiffs(0), NumInstructionsSimplified(0),
        SROACostSavings(0), SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace
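
// A minimal sketch of how a client drives this analysis (hypothetical
// variable names; the real entry point is llvm::getInlineCost further down
// in this file):
//
//   CallAnalyzer CA(CalleeTTI, GetAssumptionCache, PSI, *Callee,
//                   Threshold, CS);
//   bool ShouldInline = CA.analyzeCall(CS);
//   // Inlining is viable only if analyzeCall() succeeded and the
//   // accumulated Cost stayed under the adjusted Threshold.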

/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}
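
// Taken together, these helpers implement a simple ledger: each candidate
// alloca accrues savings in SROAArgCosts while its uses look SROA-friendly
// (e.g. a simple load adds InlineConstants::InstrCost via
// accumulateSROACost), and the moment any use escapes, disableSROA() adds
// the accrued savings back into Cost and drops the candidate.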

/// \brief Check whether a GEP's indices are all constant.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPOffsetConstant(GetElementPtrInst &GEP) {
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
      return false;

  return true;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero())
      continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = dyn_cast<StructType>(*GTI)) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}
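
// An illustrative example of the accumulation above: given the IR
//
//   %f = getelementptr inbounds { i32, [4 x i8] }, { i32, [4 x i8] }* %p,
//        i32 0, i32 1, i32 2
//
// the struct index contributes the field offset of member 1 (4 bytes with
// the usual layout) and the array index contributes 2 * sizeof(i8), so
// Offset accumulates to 6 and the GEP folds to a (base, constant-offset)
// pair.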

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      const DataLayout &DL = F.getParent()->getDataLayout();
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}
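
// An illustrative example of the constant-size case above: if the callee
// contains
//
//   %buf = alloca i8, i32 %n
//
// and %n simplifies to the constant 64 at this callsite, the alloca is
// accounted as 64 statically allocated bytes instead of tripping the
// HasDynamicAlloca bail-out.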

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded, and disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return false;
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here; we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  if (isGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return false;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getBitCast(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offsets through casts.
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getPtrToInt(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getIntToPtr(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}
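
// An illustrative round trip showing why the symmetric handling above
// matters: in
//
//   %i = ptrtoint i32* %p to i64
//   %q = inttoptr i64 %i to i32*
//
// the (base, offset) pair tracked for %p survives to %q when i64 matches
// the pointer width, so later GEPs and comparisons on %q can still fold.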

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getCast(I.getOpcode(), COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  Constant *COp = dyn_cast<Constant>(Operand);
  if (!COp)
    COp = SimplifiedValues.lookup(Operand);
  if (COp) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    if (Constant *C = ConstantFoldInstOperands(&I, COp, DL)) {
      SimplifiedValues[&I] = C;
      return true;
    }
  }

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  unsigned ArgNo = A->getArgNo();
  return CandidateCS.paramHasAttr(ArgNo + 1, Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in the scenario below, inlining hot_call_X() may
  // be beneficial:
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In future, we should elaborate this based on BPI and BFI in more
  // general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();
  if (DefaultInlineThreshold.getNumOccurrences() > 0) {
    // An explicitly specified -inline-threshold overrides the threshold passed
    // to CallAnalyzer's constructor.
    Threshold = DefaultInlineThreshold;
  } else {
    // If -inline-threshold is not given, listen to the optsize and minsize
    // attributes when they would decrease the threshold.
    if (Caller->optForMinSize() && OptMinSizeThreshold < Threshold)
      Threshold = OptMinSizeThreshold;
    else if (Caller->optForSize() && OptSizeThreshold < Threshold)
      Threshold = OptSizeThreshold;
  }

  bool HotCallsite = false;
  bool ColdCallsite = false;
  uint64_t TotalWeight;
  if (CS.getInstruction()->extractProfTotalWeight(TotalWeight)) {
    if (PSI->isHotCount(TotalWeight))
      HotCallsite = true;
    else if (PSI->isColdCount(TotalWeight))
      ColdCallsite = true;
  }

  // Listen to the inlinehint attribute or profile based hotness information
  // when it would increase the threshold and the caller does not need to
  // minimize its size.
  bool InlineHint = Callee.hasFnAttribute(Attribute::InlineHint) ||
                    PSI->isHotFunction(&Callee);
  if (InlineHint && HintThreshold > Threshold && !Caller->optForMinSize())
    Threshold = HintThreshold;

  if (HotCallsite && HotCallSiteThreshold > Threshold &&
      !Caller->optForMinSize())
    Threshold = HotCallSiteThreshold;

  bool ColdCallee = PSI->isColdFunction(&Callee);
  // A command line argument for DefaultInlineThreshold will override the
  // default ColdThreshold. If we have -inline-threshold but no
  // -inlinecold-threshold, do not use the default cold threshold even if it is
  // smaller.
  if ((DefaultInlineThreshold.getNumOccurrences() == 0 ||
       ColdThreshold.getNumOccurrences() > 0) &&
      (ColdCallee || ColdCallsite) && ColdThreshold < Threshold)
    Threshold = ColdThreshold;

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();
}
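
// A worked example of the interaction above, assuming default flag values:
// for a callee marked inlinehint called from a caller without minsize,
// Threshold rises from 225 to HintThreshold (325); if the callsite is also
// profile-hot, it rises further to HotCallSiteThreshold (3000); the result
// is then scaled by TTI.getInliningThresholdMultiplier().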

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  if (Constant *CLHS = dyn_cast<Constant>(LHS)) {
    if (Constant *CRHS = dyn_cast<Constant>(RHS))
      if (Constant *C =
              ConstantExpr::getCompare(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }

  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}
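
// An illustrative fold for the common-base case above, with %a an
// alloca-derived argument:
//
//   %p = getelementptr inbounds i32, i32* %a, i32 1
//   %q = getelementptr inbounds i32, i32* %a, i32 2
//   %c = icmp ult i32* %p, %q
//
// Both operands carry the same base with offsets 4 and 8, so %c folds to
// true and is modeled as free after inlining.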

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}
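
// An illustrative fold for the subtract case above, matching the pattern
// front ends emit for C pointer differences:
//
//   %pi = ptrtoint i32* %p to i64
//   %qi = ptrtoint i32* %q to i64
//   %d  = sub i64 %qi, %pi
//
// Whenever %p and %q share a tracked base, %d folds to the constant
// difference of their offsets.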

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  Value *SimpleV = nullptr;
  if (auto FI = dyn_cast<FPMathOperator>(&I))
    SimpleV =
        SimplifyFPBinOp(I.getOpcode(), LHS, RHS, FI->getFastMathFlags(), DL);
  else
    SimpleV = SimplifyBinOp(I.getOpcode(), LHS, RHS, DL);

  if (Constant *C = dyn_cast_or_null<Constant>(SimpleV)) {
    SimplifiedValues[&I] = C;
    return true;
  }

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  Constant *C = dyn_cast<Constant>(I.getAggregateOperand());
  if (!C)
    C = SimplifiedValues.lookup(I.getAggregateOperand());
  if (C) {
    SimplifiedValues[&I] = ConstantExpr::getExtractValue(C, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  Constant *AggC = dyn_cast<Constant>(I.getAggregateOperand());
  if (!AggC)
    AggC = SimplifiedValues.lookup(I.getAggregateOperand());
  Constant *InsertedC = dyn_cast<Constant>(I.getInsertedValueOperand());
  if (!InsertedC)
    InsertedC = SimplifiedValues.lookup(I.getInsertedValueOperand());
  if (AggC && InsertedC) {
    SimplifiedValues[&I] =
        ConstantExpr::getInsertValue(AggC, InsertedC, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
       ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}
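
// An illustrative fold for the logic above: if %x simplifies to the constant
// -1.0 at this callsite, a call such as
//
//   %r = call float @llvm.fabs.f32(float %x)
//
// constant-folds to 1.0 via ConstantFoldCall and the call is modeled as
// free.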

bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        return Base::visitCallSite(CS);

      case Intrinsic::load_relative:
        // This is normally lowered to 4 LLVM instructions.
        Cost += 3 * InlineConstants::InstrCost;
        return false;

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        // SROA can usually chew through these intrinsics, but they aren't free.
        return false;
      case Intrinsic::localescape:
        HasFrameEscape = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getParent()->getParent()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F)
    return Base::visitCallSite(CS);

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
  CallAnalyzer CA(TTI, GetAssumptionCache, PSI, *F,
                  InlineConstants::IndirectCallThreshold, CS);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // threshold to get the bonus we want to apply, but don't go below zero.
    Cost -= std::max(0, CA.getThreshold() - CA.getCost());
  }

  return Base::visitCallSite(CS);
}

bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Otherwise, we need to accumulate a cost proportional to the number of
  // distinct successor blocks. This fan-out in the CFG cannot be represented
  // for free even if we can represent the core switch as a jumptable that
  // takes a single instruction.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.
  SmallPtrSet<BasicBlock *, 8> SuccessorBlocks;
  SuccessorBlocks.insert(SI.getDefaultDest());
  for (auto I = SI.case_begin(), E = SI.case_end(); I != E; ++I)
    SuccessorBlocks.insert(I.getCaseSuccessor());
  // Add cost corresponding to the number of distinct destinations. The first
  // we model as free because of fallthrough.
  Cost += (SuccessorBlocks.size() - 1) * InlineConstants::InstrCost;
  return false;
}
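
// For example, a switch with a default destination and cases targeting three
// other distinct blocks has four successor blocks in total, so the code
// above adds (4 - 1) * InlineConstants::InstrCost; a switch whose condition
// simplifies to a constant at this callsite is modeled as free instead.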
1027
1028bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
1029 // We never want to inline functions that contain an indirectbr. This is
1030 // incorrect because all the blockaddress's (in static global initializers
1031 // for example) would be referring to the original function, and this
1032 // indirect jump would jump from the inlined copy of the function into the
1033 // original function which is extremely undefined behavior.
1034 // FIXME: This logic isn't really right; we can safely inline functions with
1035 // indirectbr's as long as no other function or global references the
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001036 // blockaddress of a block within the current function.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001037 HasIndirectBr = true;
1038 return false;
1039}
1040
1041bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
1042 // FIXME: It's not clear that a single instruction is an accurate model for
1043 // the inline cost of a resume instruction.
1044 return false;
1045}
1046
David Majnemer654e1302015-07-31 17:58:14 +00001047bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
1048 // FIXME: It's not clear that a single instruction is an accurate model for
1049 // the inline cost of a cleanupret instruction.
1050 return false;
1051}
1052
1053bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
1054 // FIXME: It's not clear that a single instruction is an accurate model for
Joseph Tremoulet8220bcc2015-08-23 00:26:33 +00001055 // the inline cost of a catchret instruction.
David Majnemer654e1302015-07-31 17:58:14 +00001056 return false;
1057}
1058
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001059bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
1060 // FIXME: It might be reasonably to discount the cost of instructions leading
1061 // to unreachable as they have the lowest possible impact on both runtime and
1062 // code size.
1063 return true; // No actual code is needed for unreachable.
1064}
1065
Chandler Carruth0539c072012-03-31 12:42:41 +00001066bool CallAnalyzer::visitInstruction(Instruction &I) {
Chandler Carruthda7513a2012-05-04 00:58:03 +00001067 // Some instructions are free. All of the free intrinsics can also be
1068 // handled by SROA, etc.
Chandler Carruthb8cf5102013-01-21 12:05:16 +00001069 if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
Chandler Carruthda7513a2012-05-04 00:58:03 +00001070 return true;
1071
Chandler Carruth0539c072012-03-31 12:42:41 +00001072 // We found something we don't understand or can't handle. Mark any SROA-able
1073 // values in the operand list as no longer viable.
1074 for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
1075 disableSROA(*OI);
1076
1077 return false;
1078}
1079
Chandler Carruth0539c072012-03-31 12:42:41 +00001080/// \brief Analyze a basic block for its contribution to the inline cost.
1081///
1082/// This method walks the analyzer over every instruction in the given basic
1083/// block and accounts for their cost during inlining at this callsite. It
1084/// aborts early if the threshold has been exceeded or an impossible to inline
1085/// construct has been detected. It returns false if inlining is no longer
1086/// viable, and true if inlining remains viable.
Hal Finkel57f03dd2014-09-07 13:49:57 +00001087bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
1088 SmallPtrSetImpl<const Value *> &EphValues) {
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001089 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
Chandler Carruth6b4cc8b2014-02-01 10:38:17 +00001090 // FIXME: Currently, the number of instructions in a function regardless of
1091 // our ability to simplify them during inline to constants or dead code,
1092 // are actually used by the vector bonus heuristic. As long as that's true,
1093 // we have to special case debug intrinsics here to prevent differences in
1094 // inlining due to debug symbols. Eventually, the number of unsimplified
1095 // instructions shouldn't factor into the cost computation, but until then,
1096 // hack around it here.
1097 if (isa<DbgInfoIntrinsic>(I))
1098 continue;
1099
Hal Finkel57f03dd2014-09-07 13:49:57 +00001100 // Skip ephemeral values.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001101 if (EphValues.count(&*I))
Hal Finkel57f03dd2014-09-07 13:49:57 +00001102 continue;
1103
Chandler Carruth0539c072012-03-31 12:42:41 +00001104 ++NumInstructions;
1105 if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
1106 ++NumVectorInstructions;
1107
Sanjay Patele9434e82015-09-15 15:26:25 +00001108 // If the instruction is floating point, and the target says this operation
1109 // is expensive or the function has the "use-soft-float" attribute, this may
1110 // eventually become a library call. Treat the cost as such.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001111 if (I->getType()->isFloatingPointTy()) {
1112 bool hasSoftFloatAttr = false;
1113
Sanjay Patele9434e82015-09-15 15:26:25 +00001114 // If the function has the "use-soft-float" attribute, mark it as
1115 // expensive.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001116 if (F.hasFnAttribute("use-soft-float")) {
1117 Attribute Attr = F.getFnAttribute("use-soft-float");
1118 StringRef Val = Attr.getValueAsString();
1119 if (Val == "true")
1120 hasSoftFloatAttr = true;
1121 }
1122
1123 if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
1124 hasSoftFloatAttr)
1125 Cost += InlineConstants::CallPenalty;
1126 }
1127
Chandler Carruth0539c072012-03-31 12:42:41 +00001128 // If the instruction simplified to a constant, there is no cost to this
1129 // instruction. Visit the instructions using our InstVisitor to account for
1130 // all of the per-instruction logic. The visit tree returns true if we
1131 // consumed the instruction in any way, and false if the instruction's base
1132 // cost should count against inlining.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001133 if (Base::visit(&*I))
Chandler Carruth0539c072012-03-31 12:42:41 +00001134 ++NumInstructionsSimplified;
1135 else
1136 Cost += InlineConstants::InstrCost;
1137
1138 // If the visit this instruction detected an uninlinable pattern, abort.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001139 if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
Reid Kleckner223de262015-04-14 20:38:14 +00001140 HasIndirectBr || HasFrameEscape)
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001141 return false;
1142
1143 // If the caller is a recursive function then we don't want to inline
1144 // functions which allocate a lot of stack space because it would increase
1145 // the caller stack usage dramatically.
1146 if (IsCallerRecursive &&
1147 AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
Chandler Carruth0539c072012-03-31 12:42:41 +00001148 return false;
1149
Chandler Carrutha004f222015-05-27 02:49:05 +00001150    // Check if we've passed the maximum possible threshold so we don't spin in
1151 // huge basic blocks that will never inline.
1152 if (Cost > Threshold)
Chandler Carruth0539c072012-03-31 12:42:41 +00001153 return false;
1154 }
1155
1156 return true;
1157}
1158
1159/// \brief Compute the base pointer and cumulative constant offsets for V.
1160///
1161/// This strips all constant offsets off of V, leaving it the base pointer, and
1162/// accumulates the total constant offset applied in the returned constant. It
1163/// returns null if V is not a pointer, and returns the constant '0' if there are
1164/// no constant offsets applied.
1165ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001166 if (!V->getType()->isPointerTy())
Craig Topper353eda42014-04-24 06:44:33 +00001167 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001168
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001169 const DataLayout &DL = F.getParent()->getDataLayout();
1170 unsigned IntPtrWidth = DL.getPointerSizeInBits();
Chandler Carruth0539c072012-03-31 12:42:41 +00001171 APInt Offset = APInt::getNullValue(IntPtrWidth);
1172
1173 // Even though we don't look through PHI nodes, we could be called on an
1174 // instruction in an unreachable block, which may be on a cycle.
1175 SmallPtrSet<Value *, 4> Visited;
1176 Visited.insert(V);
1177 do {
1178 if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
1179 if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
Craig Topper353eda42014-04-24 06:44:33 +00001180 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001181 V = GEP->getPointerOperand();
1182 } else if (Operator::getOpcode(V) == Instruction::BitCast) {
1183 V = cast<Operator>(V)->getOperand(0);
1184 } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
Sanjoy Das5ce32722016-04-08 00:48:30 +00001185 if (GA->isInterposable())
Chandler Carruth0539c072012-03-31 12:42:41 +00001186 break;
1187 V = GA->getAliasee();
1188 } else {
1189 break;
1190 }
1191 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
David Blaikie70573dc2014-11-19 07:49:26 +00001192 } while (Visited.insert(V).second);
Chandler Carruth0539c072012-03-31 12:42:41 +00001193
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001194 Type *IntPtrTy = DL.getIntPtrType(V->getContext());
Chandler Carruth0539c072012-03-31 12:42:41 +00001195 return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
1196}
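// Illustrative example of the stripping above (hypothetical IR, not from the
// original source): given
//   %a = alloca [16 x i32]
//   %p = getelementptr inbounds [16 x i32], [16 x i32]* %a, i64 0, i64 4
//   %q = bitcast i32* %p to i8*
// passing %q here walks back through the bitcast and the inbounds GEP, leaves
// V pointing at %a, and returns a ConstantInt holding the byte offset 16
// (4 x i32 elements at 4 bytes each).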
1197
1198/// \brief Analyze a call site for potential inlining.
1199///
1200/// Returns true if inlining this call is viable, and false if it is not
1201/// viable. It computes the cost and adjusts the threshold based on numerous
1202/// factors and heuristics. If this method returns false but the computed cost
1203/// is below the computed threshold, then inlining was forcibly disabled by
Bob Wilson266802d2012-11-19 07:04:30 +00001204/// some artifact of the routine.
Chandler Carruth0539c072012-03-31 12:42:41 +00001205bool CallAnalyzer::analyzeCall(CallSite CS) {
Chandler Carruth7ae90d42012-04-11 10:15:10 +00001206 ++NumCallsAnalyzed;
1207
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001208 // Perform some tweaks to the cost and threshold based on the direct
1209 // callsite information.
Chandler Carruth0539c072012-03-31 12:42:41 +00001210
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001211 // We want to more aggressively inline vector-dense kernels, so up the
1212 // threshold, and we'll lower it if the % of vector instructions gets too
Chandler Carrutha004f222015-05-27 02:49:05 +00001213 // low. Note that these bonuses are some what arbitrary and evolved over time
1214 // by accident as much as because they are principled bonuses.
1215 //
1216 // FIXME: It would be nice to remove all such bonuses. At least it would be
1217 // nice to base the bonus values on something more scientific.
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001218 assert(NumInstructions == 0);
1219 assert(NumVectorInstructions == 0);
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001220
1221 // Update the threshold based on callsite properties
1222 updateThreshold(CS, F);
1223
Chandler Carrutha004f222015-05-27 02:49:05 +00001224 FiftyPercentVectorBonus = 3 * Threshold / 2;
1225 TenPercentVectorBonus = 3 * Threshold / 4;
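  // Worked example (assuming Threshold is still the default 225 after
  // updateThreshold): FiftyPercentVectorBonus = 3 * 225 / 2 = 337 and
  // TenPercentVectorBonus = 3 * 225 / 4 = 168, in integer arithmetic; any
  // unearned portion of the bonus is subtracted back off near the end of
  // this function.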
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001226 const DataLayout &DL = F.getParent()->getDataLayout();
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001227
Chandler Carrutha004f222015-05-27 02:49:05 +00001228 // Track whether the post-inlining function would have more than one basic
1229 // block. A single basic block is often intended for inlining. Balloon the
1230 // threshold by 50% until we pass the single-BB phase.
1231 bool SingleBB = true;
1232 int SingleBBBonus = Threshold / 2;
1233
1234 // Speculatively apply all possible bonuses to Threshold. If cost exceeds
1235  // this Threshold at any point (and the cost can never decrease), we can
1236  // stop processing the rest of the function body.
1237 Threshold += (SingleBBBonus + FiftyPercentVectorBonus);
1238
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001239 // Give out bonuses per argument, as the instructions setting them up will
1240 // be gone after inlining.
1241 for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001242 if (CS.isByValArgument(I)) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001243 // We approximate the number of loads and stores needed by dividing the
1244 // size of the byval type by the target's pointer size.
1245 PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001246 unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
1247 unsigned PointerSize = DL.getPointerSizeInBits();
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001248 // Ceiling division.
1249 unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001250
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001251 // If it generates more than 8 stores it is likely to be expanded as an
1252 // inline memcpy so we take that as an upper bound. Otherwise we assume
1253 // one load and one store per word copied.
1254 // FIXME: The maxStoresPerMemcpy setting from the target should be used
1255 // here instead of a magic number of 8, but it's not available via
1256 // DataLayout.
1257 NumStores = std::min(NumStores, 8U);
1258
1259 Cost -= 2 * NumStores * InlineConstants::InstrCost;
1260 } else {
1261 // For non-byval arguments subtract off one instruction per call
1262 // argument.
1263 Cost -= InlineConstants::InstrCost;
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001264 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001265 }
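  // Worked example for the byval case above (illustrative numbers only): a
  // byval argument of type [64 x i8] on a target with 64-bit pointers gives
  // TypeSize = 512 and PointerSize = 64, so NumStores = 512 / 64 = 8 (which
  // is also the memcpy cap), and the call site is credited
  // 2 * 8 * InlineConstants::InstrCost for the argument copy that inlining
  // makes unnecessary.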
1266
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001267  // If there is only one call of the function, and it has local linkage,
1268 // the cost of inlining it drops dramatically.
Chad Rosier567556a2016-04-28 14:47:23 +00001269 bool OnlyOneCallAndLocalLinkage =
1270 F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
James Molloy4f6fb952012-12-20 16:04:27 +00001271 if (OnlyOneCallAndLocalLinkage)
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001272 Cost += InlineConstants::LastCallToStaticBonus;
1273
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001274 // If this function uses the coldcc calling convention, prefer not to inline
1275 // it.
1276 if (F.getCallingConv() == CallingConv::Cold)
1277 Cost += InlineConstants::ColdccPenalty;
1278
1279 // Check if we're done. This can happen due to bonuses and penalties.
1280 if (Cost > Threshold)
1281 return false;
1282
Chandler Carruth0539c072012-03-31 12:42:41 +00001283 if (F.empty())
1284 return true;
1285
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001286 Function *Caller = CS.getInstruction()->getParent()->getParent();
1287 // Check if the caller function is recursive itself.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001288 for (User *U : Caller->users()) {
1289 CallSite Site(U);
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001290 if (!Site)
1291 continue;
1292 Instruction *I = Site.getInstruction();
1293 if (I->getParent()->getParent() == Caller) {
1294 IsCallerRecursive = true;
1295 break;
1296 }
1297 }
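  // Sketch of what the scan above catches (hypothetical C, illustration
  // only):
  //   void caller(int n) { if (n) caller(n - 1); callee(); }
  // Scanning caller's users finds the "caller(n - 1)" call site, whose
  // enclosing function is caller itself, so IsCallerRecursive is set and
  // stack-hungry callees are rejected by the alloca-size check in
  // analyzeBlock.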
1298
Chandler Carruth0539c072012-03-31 12:42:41 +00001299 // Populate our simplified values by mapping from function arguments to call
1300 // arguments with known important simplifications.
1301 CallSite::arg_iterator CAI = CS.arg_begin();
1302 for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
1303 FAI != FAE; ++FAI, ++CAI) {
1304 assert(CAI != CS.arg_end());
1305 if (Constant *C = dyn_cast<Constant>(CAI))
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001306 SimplifiedValues[&*FAI] = C;
Chandler Carruth0539c072012-03-31 12:42:41 +00001307
1308 Value *PtrArg = *CAI;
1309 if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001310 ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());
Chandler Carruth0539c072012-03-31 12:42:41 +00001311
1312 // We can SROA any pointer arguments derived from alloca instructions.
1313 if (isa<AllocaInst>(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001314 SROAArgValues[&*FAI] = PtrArg;
Chandler Carruth0539c072012-03-31 12:42:41 +00001315 SROAArgCosts[PtrArg] = 0;
1316 }
1317 }
1318 }
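  // Example of the mapping built above (hypothetical IR): for a call site
  //   call i32 @callee(i32 7, i32* %p)
  // the formal parameter bound to 7 is recorded in SimplifiedValues, which
  // lets the instruction visitor fold uses of that parameter and lets the
  // successor selection below prune branches the constant decides.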
1319 NumConstantArgs = SimplifiedValues.size();
1320 NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
1321 NumAllocaArgs = SROAArgValues.size();
1322
Hal Finkel57f03dd2014-09-07 13:49:57 +00001323 // FIXME: If a caller has multiple calls to a callee, we end up recomputing
1324 // the ephemeral values multiple times (and they're completely determined by
1325 // the callee, so this is purely duplicate work).
1326 SmallPtrSet<const Value *, 32> EphValues;
Sean Silvaab6a6832016-07-23 04:22:50 +00001327 CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);
Hal Finkel57f03dd2014-09-07 13:49:57 +00001328
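  // Ephemeral values are those feeding only @llvm.assume, e.g. (sketch):
  //   %c = icmp sgt i32 %n, 0
  //   call void @llvm.assume(i1 %c)
  // The icmp produces no code in the final function, so analyzeBlock skips
  // it rather than charging it InstrCost.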
Chandler Carruth0539c072012-03-31 12:42:41 +00001329 // The worklist of live basic blocks in the callee *after* inlining. We avoid
1330 // adding basic blocks of the callee which can be proven to be dead for this
1331 // particular call site in order to get more accurate cost estimates. This
1332 // requires a somewhat heavyweight iteration pattern: we need to walk the
1333 // basic blocks in a breadth-first order as we insert live successors. To
1334  // accomplish this, we use a small-size optimized SetVector, favoring small
1335  // iteration counts because we exit once we cross our threshold.
1336 typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
Chad Rosier567556a2016-04-28 14:47:23 +00001337 SmallPtrSet<BasicBlock *, 16>>
1338 BBSetVector;
Chandler Carruth0539c072012-03-31 12:42:41 +00001339 BBSetVector BBWorklist;
1340 BBWorklist.insert(&F.getEntryBlock());
1341  // Note that we *must not* cache the size: this loop grows the worklist.
1342 for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
1343 // Bail out the moment we cross the threshold. This means we'll under-count
1344    // the cost, but only when under-counting doesn't matter.
Chandler Carrutha004f222015-05-27 02:49:05 +00001345 if (Cost > Threshold)
Chandler Carruth0539c072012-03-31 12:42:41 +00001346 break;
1347
1348 BasicBlock *BB = BBWorklist[Idx];
1349 if (BB->empty())
Chandler Carruth4d1d34f2012-03-14 23:19:53 +00001350 continue;
Dan Gohman4552e3c2009-10-13 18:30:07 +00001351
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001352 // Disallow inlining a blockaddress. A blockaddress only has defined
1353 // behavior for an indirect branch in the same function, and we do not
1354 // currently support inlining indirect branches. But, the inliner may not
1355 // see an indirect branch that ends up being dead code at a particular call
1356 // site. If the blockaddress escapes the function, e.g., via a global
1357 // variable, inlining may lead to an invalid cross-function reference.
1358 if (BB->hasAddressTaken())
1359 return false;
1360
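    // For instance (hypothetical C, illustration only): a function using a
    // computed goto such as
    //   static void *l = &&lbl; goto *l; lbl: ...
    // takes a block's address, and the static variable lets that address
    // escape; inlining the function could then leave a blockaddress
    // referring across function boundaries, which is invalid.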
Chandler Carruth0539c072012-03-31 12:42:41 +00001361 // Analyze the cost of this block. If we blow through the threshold, this
1362    // returns false, and we can bail out.
Easwaran Ramand295b002016-04-13 21:20:22 +00001363 if (!analyzeBlock(BB, EphValues))
1364 return false;
Eric Christopher46308e62011-02-01 01:16:32 +00001365
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001366 TerminatorInst *TI = BB->getTerminator();
1367
Chandler Carruth0539c072012-03-31 12:42:41 +00001368 // Add in the live successors by first checking whether we have terminator
1369 // that may be simplified based on the values simplified by this call.
1370 if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
1371 if (BI->isConditional()) {
1372 Value *Cond = BI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001373 if (ConstantInt *SimpleCond =
1374 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001375 BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
1376 continue;
Eric Christopher46308e62011-02-01 01:16:32 +00001377 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001378 }
1379 } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
1380 Value *Cond = SI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001381 if (ConstantInt *SimpleCond =
1382 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001383 BBWorklist.insert(SI->findCaseValue(SimpleCond).getCaseSuccessor());
1384 continue;
1385 }
1386 }
Eric Christopher46308e62011-02-01 01:16:32 +00001387
Chandler Carruth0539c072012-03-31 12:42:41 +00001388 // If we're unable to select a particular successor, just count all of
1389 // them.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001390 for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
1391 ++TIdx)
Chandler Carruth0539c072012-03-31 12:42:41 +00001392 BBWorklist.insert(TI->getSuccessor(TIdx));
1393
1394    // If we had any successors at this point, then post-inlining is likely to
1395 // have them as well. Note that we assume any basic blocks which existed
1396 // due to branches or switches which folded above will also fold after
1397 // inlining.
1398 if (SingleBB && TI->getNumSuccessors() > 1) {
1399 // Take off the bonus we applied to the threshold.
1400 Threshold -= SingleBBBonus;
1401 SingleBB = false;
Eric Christopher46308e62011-02-01 01:16:32 +00001402 }
1403 }
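  // To illustrate the dead-block pruning in the loop above (hypothetical
  // IR): for
  //   define i32 @callee(i1 %flag) { ... br i1 %flag, label %a, label %b }
  // a call site passing "i1 true" has the constant recorded in
  // SimplifiedValues, so only %a is enqueued and %b's cost is never counted
  // against this call site.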
Andrew Trickcaa500b2011-10-01 01:27:56 +00001404
Chandler Carruthcb5beb32013-12-12 11:59:26 +00001405 // If this is a noduplicate call, we can still inline as long as
James Molloy4f6fb952012-12-20 16:04:27 +00001406  // inlining this would cause the removal of the callee (so the instruction
1407 // is not actually duplicated, just moved).
1408 if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
1409 return false;
1410
Chandler Carrutha004f222015-05-27 02:49:05 +00001411 // We applied the maximum possible vector bonus at the beginning. Now,
1412 // subtract the excess bonus, if any, from the Threshold before
1413 // comparing against Cost.
1414 if (NumVectorInstructions <= NumInstructions / 10)
1415 Threshold -= FiftyPercentVectorBonus;
1416 else if (NumVectorInstructions <= NumInstructions / 2)
1417 Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus);
Chandler Carruth0539c072012-03-31 12:42:41 +00001418
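  // Note on the clamp below: if penalties have driven Threshold to zero or
  // below (e.g. a coldcc callee), the comparison degenerates to
  // Cost < 1, so inlining proceeds only when the accumulated bonuses fully
  // pay for the body.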
Hans Wennborg00ab73d2016-02-05 20:32:42 +00001419 return Cost < std::max(1, Threshold);
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001420}
1421
Manman Ren49d684e2012-09-12 05:06:18 +00001422#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
Chandler Carruth0539c072012-03-31 12:42:41 +00001423/// \brief Dump stats about this call's analysis.
Yaron Kereneb2a2542016-01-29 20:50:44 +00001424LLVM_DUMP_METHOD void CallAnalyzer::dump() {
Eric Christophera13839f2014-02-26 23:27:16 +00001425#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
Chandler Carruth0539c072012-03-31 12:42:41 +00001426 DEBUG_PRINT_STAT(NumConstantArgs);
1427 DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
1428 DEBUG_PRINT_STAT(NumAllocaArgs);
1429 DEBUG_PRINT_STAT(NumConstantPtrCmps);
1430 DEBUG_PRINT_STAT(NumConstantPtrDiffs);
1431 DEBUG_PRINT_STAT(NumInstructionsSimplified);
Chandler Carrutha004f222015-05-27 02:49:05 +00001432 DEBUG_PRINT_STAT(NumInstructions);
Chandler Carruth0539c072012-03-31 12:42:41 +00001433 DEBUG_PRINT_STAT(SROACostSavings);
1434 DEBUG_PRINT_STAT(SROACostSavingsLost);
James Molloy4f6fb952012-12-20 16:04:27 +00001435 DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
Chandler Carruth394e34f2014-01-31 22:32:32 +00001436 DEBUG_PRINT_STAT(Cost);
1437 DEBUG_PRINT_STAT(Threshold);
Chandler Carruth0539c072012-03-31 12:42:41 +00001438#undef DEBUG_PRINT_STAT
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001439}
Manman Renc3366cc2012-09-06 19:55:56 +00001440#endif
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001441
Akira Hatanaka5af7ace2015-11-13 01:44:32 +00001442/// \brief Test whether two functions both have, or both lack, the given
1443/// attribute.
Chad Rosier567556a2016-04-28 14:47:23 +00001444template <typename AttrKind>
Akira Hatanaka5af7ace2015-11-13 01:44:32 +00001445static bool attributeMatches(Function *F1, Function *F2, AttrKind Attr) {
1446 return F1->getFnAttribute(Attr) == F2->getFnAttribute(Attr);
1447}
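// Usage sketch (hypothetical call site, not in the original source):
//   attributeMatches(Caller, Callee, "target-cpu")
// deduces AttrKind as a string and compares the two "target-cpu" attributes
// returned by Function::getFnAttribute for equality.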
1448
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001449/// \brief Test that there are no attribute conflicts between Caller and Callee
1450/// that prevent inlining.
1451static bool functionsHaveCompatibleAttributes(Function *Caller,
Eric Christopher4371b132015-07-02 01:11:47 +00001452 Function *Callee,
1453 TargetTransformInfo &TTI) {
Eric Christopherd566fb12015-07-29 22:09:48 +00001454 return TTI.areInlineCompatible(Caller, Callee) &&
Akira Hatanaka1cb242e2015-12-22 23:57:37 +00001455 AttributeFuncs::areInlineCompatible(*Caller, *Callee);
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001456}
1457
Sean Silvaab6a6832016-07-23 04:22:50 +00001458InlineCost llvm::getInlineCost(
1459 CallSite CS, int DefaultThreshold, TargetTransformInfo &CalleeTTI,
1460 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
1461 ProfileSummaryInfo *PSI) {
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001462 return getInlineCost(CS, CS.getCalledFunction(), DefaultThreshold, CalleeTTI,
Sean Silvaab6a6832016-07-23 04:22:50 +00001463 GetAssumptionCache, PSI);
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001464}
1465
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001466int llvm::computeThresholdFromOptLevels(unsigned OptLevel,
1467 unsigned SizeOptLevel) {
1468 if (OptLevel > 2)
1469 return OptAggressiveThreshold;
1470 if (SizeOptLevel == 1) // -Os
1471 return OptSizeThreshold;
1472 if (SizeOptLevel == 2) // -Oz
1473 return OptMinSizeThreshold;
1474 return DefaultInlineThreshold;
1475}
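// Usage sketch: a -O3 pipeline would call computeThresholdFromOptLevels(3, 0)
// and get OptAggressiveThreshold, while -O2 -Os maps to
// computeThresholdFromOptLevels(2, 1) and gets OptSizeThreshold.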
1476
1477int llvm::getDefaultInlineThreshold() { return DefaultInlineThreshold; }
1478
Sean Silvaab6a6832016-07-23 04:22:50 +00001479InlineCost llvm::getInlineCost(
1480 CallSite CS, Function *Callee, int DefaultThreshold,
1481 TargetTransformInfo &CalleeTTI,
1482 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
1483 ProfileSummaryInfo *PSI) {
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001484
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001485 // Cannot inline indirect calls.
1486 if (!Callee)
1487 return llvm::InlineCost::getNever();
1488
1489 // Calls to functions with always-inline attributes should be inlined
1490 // whenever possible.
Peter Collingbourne68a88972014-05-19 18:25:54 +00001491 if (CS.hasFnAttr(Attribute::AlwaysInline)) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001492 if (isInlineViable(*Callee))
1493 return llvm::InlineCost::getAlways();
1494 return llvm::InlineCost::getNever();
1495 }
1496
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001497 // Never inline functions with conflicting attributes (unless callee has
1498 // always-inline attribute).
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001499 if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee, CalleeTTI))
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001500 return llvm::InlineCost::getNever();
1501
Paul Robinsondcbe35b2013-11-18 21:44:03 +00001502 // Don't inline this call if the caller has the optnone attribute.
1503 if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone))
1504 return llvm::InlineCost::getNever();
1505
Sanjoy Das5ce32722016-04-08 00:48:30 +00001506 // Don't inline functions which can be interposed at link-time. Don't inline
1507 // functions marked noinline or call sites marked noinline.
1508  // Note: inlining non-exact non-interposable functions is fine, since we know
1509 // we have *a* correct implementation of the source level function.
Chad Rosier567556a2016-04-28 14:47:23 +00001510 if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
1511 CS.isNoInline())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001512 return llvm::InlineCost::getNever();
1513
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001514 DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
Chad Rosier567556a2016-04-28 14:47:23 +00001515 << "...\n");
Andrew Trickcaa500b2011-10-01 01:27:56 +00001516
Sean Silvaab6a6832016-07-23 04:22:50 +00001517 CallAnalyzer CA(CalleeTTI, GetAssumptionCache, PSI, *Callee, DefaultThreshold, CS);
Chandler Carruth0539c072012-03-31 12:42:41 +00001518 bool ShouldInline = CA.analyzeCall(CS);
Dan Gohman4552e3c2009-10-13 18:30:07 +00001519
Chandler Carruth0539c072012-03-31 12:42:41 +00001520 DEBUG(CA.dump());
1521
1522 // Check if there was a reason to force inlining or no inlining.
1523 if (!ShouldInline && CA.getCost() < CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001524 return InlineCost::getNever();
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001525 if (ShouldInline && CA.getCost() >= CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001526 return InlineCost::getAlways();
Andrew Trickcaa500b2011-10-01 01:27:56 +00001527
Chandler Carruth0539c072012-03-31 12:42:41 +00001528 return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
Dan Gohman4552e3c2009-10-13 18:30:07 +00001529}
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001530
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001531bool llvm::isInlineViable(Function &F) {
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001532 bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001533 for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001534 // Disallow inlining of functions which contain indirect branches or
1535 // blockaddresses.
1536 if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001537 return false;
1538
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001539 for (auto &II : *BI) {
1540 CallSite CS(&II);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001541 if (!CS)
1542 continue;
1543
1544 // Disallow recursive calls.
1545 if (&F == CS.getCalledFunction())
1546 return false;
1547
1548 // Disallow calls which expose returns-twice to a function not previously
1549 // attributed as such.
1550 if (!ReturnsTwice && CS.isCall() &&
1551 cast<CallInst>(CS.getInstruction())->canReturnTwice())
1552 return false;
Reid Kleckner223de262015-04-14 20:38:14 +00001553
Reid Kleckner60381792015-07-07 22:25:32 +00001554 // Disallow inlining functions that call @llvm.localescape. Doing this
Reid Kleckner223de262015-04-14 20:38:14 +00001555 // correctly would require major changes to the inliner.
1556 if (CS.getCalledFunction() &&
1557 CS.getCalledFunction()->getIntrinsicID() ==
Reid Kleckner60381792015-07-07 22:25:32 +00001558 llvm::Intrinsic::localescape)
Reid Kleckner223de262015-04-14 20:38:14 +00001559 return false;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001560 }
1561 }
1562
1563 return true;
1564}