//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

// Threshold to use when optsize is specified (and there is no
// -inline-threshold).
const int OptSizeThreshold = 75;

// Threshold to use when -Oz is specified (and there is no -inline-threshold).
const int OptMinSizeThreshold = 25;

// Threshold to use when -O[34] is specified (and there is no
// -inline-threshold).
const int OptAggressiveThreshold = 275;
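
// Each of these is only a default: as noted above, it applies when no
// explicit -inline-threshold is given on the command line; see
// updateThreshold() below for how the optsize and minsize defaults are
// selected.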

static cl::opt<int> DefaultInlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

// We introduce this threshold to help performance of instrumentation-based
// PGO before we actually hook up the inliner with analysis passes such as
// BPI and BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(225),
    cl::desc("Threshold for inlining functions with cold attribute"));

namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// The cache of @llvm.assume intrinsics.
  AssumptionCacheTracker *ACT;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  // The called function.
  Function &F;

  // The candidate callsite being analyzed. Please do not use this to do
  // analysis in the caller function; we want the inline cost query to be
  // easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int FiftyPercentVectorBonus, TenPercentVectorBonus;
  int VectorBonus;

  // While we walk the potentially-inlined instructions, we build up and
  // maintain a mapping of simplified values specific to this callsite. The
  // idea is to propagate any special information we have about arguments to
  // this call through the inlinable section of the function, and account for
  // likely simplifications post-inlining. The most important aspect we track
  // is CFG-altering simplifications -- when we prove a basic block dead, that
  // can cause dramatic shifts in the cost of inlining a function.
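  // For example, when the call site passes the constant 42 for a parameter,
  // mapping that parameter to 42 lets us fold a branch like 'if (x != 42)'
  // and avoid charging for the block it proves dead.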
  DenseMap<Value *, Constant *> SimplifiedValues;

  // Keep track of the values which map back (through function arguments) to
  // allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  // The mapping of caller Alloca values to their accumulated cost savings. If
  // we have to disable SROA for one of the allocas, this tells us how much
  // cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  // Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPOffsetConstant(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non-null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *);
  void visit(Module &);
  void visit(Function *);
  void visit(Function &);
  void visit(BasicBlock *);
  void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI, AssumptionCacheTracker *ACT,
               ProfileSummaryInfo *PSI, Function &Callee, int Threshold,
               CallSite CSArg)
      : TTI(TTI), ACT(ACT), PSI(PSI), F(Callee), CandidateCS(CSArg),
        Threshold(Threshold), Cost(0), IsCallerRecursive(false),
        IsRecursiveCall(false), ExposesReturnsTwice(false),
        HasDynamicAlloca(false), ContainsNoDuplicateCall(false),
        HasReturn(false), HasIndirectBr(false), HasFrameEscape(false),
        AllocatedSize(0), NumInstructions(0), NumVectorInstructions(0),
        FiftyPercentVectorBonus(0), TenPercentVectorBonus(0), VectorBonus(0),
        NumConstantArgs(0), NumConstantOffsetPtrArgs(0), NumAllocaArgs(0),
        NumConstantPtrCmps(0), NumConstantPtrDiffs(0),
        NumInstructionsSimplified(0), SROACostSavings(0),
        SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace
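
// A minimal usage sketch (mirroring the recursive use inside visitCallSite
// below): build a CallAnalyzer for the callee and candidate call site, run
// analyzeCall(), and weigh the accumulated cost against the threshold.
//
//   CallAnalyzer CA(TTI, ACT, PSI, *Callee, Threshold, CS);
//   if (CA.analyzeCall(CS) && CA.getCost() < CA.getThreshold())
//     ; // inlining looks profitable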

/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

/// \brief Check whether a GEP's indices are all constant.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPOffsetConstant(GetElementPtrInst &GEP) {
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
      return false;

  return true;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
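///
/// For example, a GEP indexing field 1 of '{ i32, i32 }' adds that field's
/// offset from the struct layout (4 bytes on common targets) to Offset.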
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero())
      continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = dyn_cast<StructType>(*GTI)) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      const DataLayout &DL = F.getParent()->getDataLayout();
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded, and disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return false;
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here; we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  if (isGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return false;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getBitCast(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offsets through casts.
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getPtrToInt(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getIntToPtr(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getCast(I.getOpcode(), COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  Constant *COp = dyn_cast<Constant>(Operand);
  if (!COp)
    COp = SimplifiedValues.lookup(Operand);
  if (COp) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    if (Constant *C = ConstantFoldInstOperands(&I, COp, DL)) {
      SimplifiedValues[&I] = C;
      return true;
    }
  }

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  unsigned ArgNo = A->getArgNo();
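  // CallSite attribute indices are 1-based; index 0 refers to the return
  // value, so shift the 0-based argument number up by one.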
  return CandidateCS.paramHasAttr(ArgNo + 1, Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca-derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in the scenario below inlining hot_call_X() may be
  // beneficial:
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In the future, we should elaborate on this based on BPI and BFI in
  // more general threshold-adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();
  if (DefaultInlineThreshold.getNumOccurrences() > 0) {
    // An explicitly specified -inline-threshold overrides the threshold passed
    // to CallAnalyzer's constructor.
    Threshold = DefaultInlineThreshold;
  } else {
    // If -inline-threshold is not given, listen to the optsize and minsize
    // attributes when they would decrease the threshold.
    if (Caller->optForMinSize() && OptMinSizeThreshold < Threshold)
      Threshold = OptMinSizeThreshold;
    else if (Caller->optForSize() && OptSizeThreshold < Threshold)
      Threshold = OptSizeThreshold;
  }
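
  // For example, a caller compiled for minsize with the default threshold of
  // 225 ends up with Threshold == OptMinSizeThreshold (25) at this point.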

  // Listen to the inlinehint attribute or profile-based hotness information
  // when it would increase the threshold and the caller does not need to
  // minimize its size.
  bool InlineHint = Callee.hasFnAttribute(Attribute::InlineHint) ||
                    PSI->isHotFunction(&Callee);
  if (InlineHint && HintThreshold > Threshold && !Caller->optForMinSize())
    Threshold = HintThreshold;

  bool ColdCallee = PSI->isColdFunction(&Callee);
  // A command line argument for DefaultInlineThreshold will override the
  // default ColdThreshold. If we have -inline-threshold but no
  // -inlinecold-threshold, do not use the default cold threshold even if it
  // is smaller.
  if ((DefaultInlineThreshold.getNumOccurrences() == 0 ||
       ColdThreshold.getNumOccurrences() > 0) &&
      ColdCallee && ColdThreshold < Threshold)
    Threshold = ColdThreshold;

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();
}

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  if (Constant *CLHS = dyn_cast<Constant>(LHS)) {
    if (Constant *CRHS = dyn_cast<Constant>(RHS))
      if (Constant *C =
              ConstantExpr::getCompare(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  Value *SimpleV = nullptr;
  if (auto FI = dyn_cast<FPMathOperator>(&I))
    SimpleV =
        SimplifyFPBinOp(I.getOpcode(), LHS, RHS, FI->getFastMathFlags(), DL);
  else
    SimpleV = SimplifyBinOp(I.getOpcode(), LHS, RHS, DL);

  if (Constant *C = dyn_cast_or_null<Constant>(SimpleV)) {
    SimplifiedValues[&I] = C;
    return true;
  }

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  Constant *C = dyn_cast<Constant>(I.getAggregateOperand());
  if (!C)
    C = SimplifiedValues.lookup(I.getAggregateOperand());
  if (C) {
    SimplifiedValues[&I] = ConstantExpr::getExtractValue(C, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  Constant *AggC = dyn_cast<Constant>(I.getAggregateOperand());
  if (!AggC)
    AggC = SimplifiedValues.lookup(I.getAggregateOperand());
  Constant *InsertedC = dyn_cast<Constant>(I.getInsertedValueOperand());
  if (!InsertedC)
    InsertedC = SimplifiedValues.lookup(I.getInsertedValueOperand());
  if (AggC && InsertedC) {
    SimplifiedValues[&I] =
        ConstantExpr::getInsertValue(AggC, InsertedC, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
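///
/// For example, if the sole argument maps to the constant -3.0, a call to
/// fabs() constant-folds here to 3.0 and the call site becomes free.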
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
       ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}

bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        return Base::visitCallSite(CS);

      case Intrinsic::load_relative:
        // This is normally lowered to 4 LLVM instructions.
        Cost += 3 * InlineConstants::InstrCost;
        return false;

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        // SROA can usually chew through these intrinsics, but they aren't
        // free.
        return false;
      case Intrinsic::localescape:
        HasFrameEscape = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getParent()->getParent()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F)
    return Base::visitCallSite(CS);

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
  CallAnalyzer CA(TTI, ACT, PSI, *F, InlineConstants::IndirectCallThreshold,
                  CS);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // threshold to get the bonus we want to apply, but don't go below zero.
    Cost -= std::max(0, CA.getThreshold() - CA.getCost());
  }

  return Base::visitCallSite(CS);
}

bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Otherwise, we need to accumulate a cost proportional to the number of
  // distinct successor blocks. This fan-out in the CFG cannot be represented
  // for free even if we can represent the core switch as a jumptable that
  // takes a single instruction.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.
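  // For example, a switch with a default destination and three distinct case
  // targets accumulates a cost of 3 * InlineConstants::InstrCost below.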
999 SmallPtrSet<BasicBlock *, 8> SuccessorBlocks;
1000 SuccessorBlocks.insert(SI.getDefaultDest());
1001 for (auto I = SI.case_begin(), E = SI.case_end(); I != E; ++I)
1002 SuccessorBlocks.insert(I.getCaseSuccessor());
1003 // Add cost corresponding to the number of distinct destinations. The first
1004 // we model as free because of fallthrough.
1005 Cost += (SuccessorBlocks.size() - 1) * InlineConstants::InstrCost;
1006 return false;
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001007}
1008
1009bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
1010 // We never want to inline functions that contain an indirectbr. This is
1011 // incorrect because all the blockaddress's (in static global initializers
1012 // for example) would be referring to the original function, and this
1013 // indirect jump would jump from the inlined copy of the function into the
1014 // original function which is extremely undefined behavior.
1015 // FIXME: This logic isn't really right; we can safely inline functions with
1016 // indirectbr's as long as no other function or global references the
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001017 // blockaddress of a block within the current function.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001018 HasIndirectBr = true;
1019 return false;
1020}
1021
1022bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
1023 // FIXME: It's not clear that a single instruction is an accurate model for
1024 // the inline cost of a resume instruction.
1025 return false;
1026}
1027
David Majnemer654e1302015-07-31 17:58:14 +00001028bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
1029 // FIXME: It's not clear that a single instruction is an accurate model for
1030 // the inline cost of a cleanupret instruction.
1031 return false;
1032}
1033
1034bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
1035 // FIXME: It's not clear that a single instruction is an accurate model for
Joseph Tremoulet8220bcc2015-08-23 00:26:33 +00001036 // the inline cost of a catchret instruction.
David Majnemer654e1302015-07-31 17:58:14 +00001037 return false;
1038}
1039
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001040bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
1041 // FIXME: It might be reasonably to discount the cost of instructions leading
1042 // to unreachable as they have the lowest possible impact on both runtime and
1043 // code size.
1044 return true; // No actual code is needed for unreachable.
1045}
1046
Chandler Carruth0539c072012-03-31 12:42:41 +00001047bool CallAnalyzer::visitInstruction(Instruction &I) {
Chandler Carruthda7513a2012-05-04 00:58:03 +00001048 // Some instructions are free. All of the free intrinsics can also be
1049 // handled by SROA, etc.
Chandler Carruthb8cf5102013-01-21 12:05:16 +00001050 if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
Chandler Carruthda7513a2012-05-04 00:58:03 +00001051 return true;
1052
Chandler Carruth0539c072012-03-31 12:42:41 +00001053 // We found something we don't understand or can't handle. Mark any SROA-able
1054 // values in the operand list as no longer viable.
1055 for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
1056 disableSROA(*OI);
1057
1058 return false;
1059}
1060
Chandler Carruth0539c072012-03-31 12:42:41 +00001061/// \brief Analyze a basic block for its contribution to the inline cost.
1062///
1063/// This method walks the analyzer over every instruction in the given basic
1064/// block and accounts for their cost during inlining at this callsite. It
1065/// aborts early if the threshold has been exceeded or an impossible to inline
1066/// construct has been detected. It returns false if inlining is no longer
1067/// viable, and true if inlining remains viable.
Hal Finkel57f03dd2014-09-07 13:49:57 +00001068bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
1069 SmallPtrSetImpl<const Value *> &EphValues) {
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001070 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
Chandler Carruth6b4cc8b2014-02-01 10:38:17 +00001071 // FIXME: Currently, the number of instructions in a function regardless of
1072 // our ability to simplify them during inline to constants or dead code,
1073 // are actually used by the vector bonus heuristic. As long as that's true,
1074 // we have to special case debug intrinsics here to prevent differences in
1075 // inlining due to debug symbols. Eventually, the number of unsimplified
1076 // instructions shouldn't factor into the cost computation, but until then,
1077 // hack around it here.
1078 if (isa<DbgInfoIntrinsic>(I))
1079 continue;
1080
Hal Finkel57f03dd2014-09-07 13:49:57 +00001081 // Skip ephemeral values.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001082 if (EphValues.count(&*I))
Hal Finkel57f03dd2014-09-07 13:49:57 +00001083 continue;
1084
Chandler Carruth0539c072012-03-31 12:42:41 +00001085 ++NumInstructions;
1086 if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
1087 ++NumVectorInstructions;
1088
Sanjay Patele9434e82015-09-15 15:26:25 +00001089 // If the instruction is floating point, and the target says this operation
1090 // is expensive or the function has the "use-soft-float" attribute, this may
1091 // eventually become a library call. Treat the cost as such.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001092 if (I->getType()->isFloatingPointTy()) {
1093 bool hasSoftFloatAttr = false;
1094
Sanjay Patele9434e82015-09-15 15:26:25 +00001095 // If the function has the "use-soft-float" attribute, mark it as
1096 // expensive.
Cameron Esfahani17177d12015-02-05 02:09:33 +00001097 if (F.hasFnAttribute("use-soft-float")) {
1098 Attribute Attr = F.getFnAttribute("use-soft-float");
1099 StringRef Val = Attr.getValueAsString();
1100 if (Val == "true")
1101 hasSoftFloatAttr = true;
1102 }
1103
1104 if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
1105 hasSoftFloatAttr)
1106 Cost += InlineConstants::CallPenalty;
1107 }

    // If the instruction simplified to a constant, there is no cost to this
    // instruction. Visit the instructions using our InstVisitor to account for
    // all of the per-instruction logic. The visit tree returns true if we
    // consumed the instruction in any way, and false if the instruction's base
    // cost should count against inlining.
    if (Base::visit(&*I))
      ++NumInstructionsSimplified;
    else
      Cost += InlineConstants::InstrCost;

    // If visiting this instruction detected an uninlinable pattern, abort.
    if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
        HasIndirectBr || HasFrameEscape)
      return false;

    // If the caller is a recursive function then we don't want to inline
    // functions which allocate a lot of stack space because it would increase
    // the caller stack usage dramatically.
    if (IsCallerRecursive &&
        AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
      return false;

    // Check if we've passed the maximum possible threshold so we don't spin in
    // huge basic blocks that will never inline.
    if (Cost > Threshold)
      return false;
  }

  return true;
}

/// \brief Compute the base pointer and cumulative constant offsets for V.
///
/// This strips all constant offsets off of V, leaving it the base pointer, and
/// accumulates the total constant offset applied in the returned constant. It
/// returns null if V is not a pointer, and returns the constant '0' if there
/// are no constant offsets applied.
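///
/// For example (a sketch): given V = 'getelementptr inbounds i8, i8* %base,
/// i64 4', this returns a ConstantInt of value 4 and updates V to %base.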
ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
  if (!V->getType()->isPointerTy())
    return nullptr;

  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  APInt Offset = APInt::getNullValue(IntPtrWidth);

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<Value *, 4> Visited;
  Visited.insert(V);
  do {
    if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
      if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
        return nullptr;
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
      if (GA->isInterposable())
        break;
      V = GA->getAliasee();
    } else {
      break;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  Type *IntPtrTy = DL.getIntPtrType(V->getContext());
  return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
}

/// \brief Analyze a call site for potential inlining.
///
/// Returns true if inlining this call is viable, and false if it is not
/// viable. It computes the cost and adjusts the threshold based on numerous
/// factors and heuristics. If this method returns false but the computed cost
/// is below the computed threshold, then inlining was forcibly disabled by
/// some artifact of the routine.
bool CallAnalyzer::analyzeCall(CallSite CS) {
  ++NumCallsAnalyzed;

  // Perform some tweaks to the cost and threshold based on the direct
  // callsite information.

  // We want to more aggressively inline vector-dense kernels, so up the
  // threshold, and we'll lower it if the % of vector instructions gets too
  // low. Note that these bonuses are somewhat arbitrary and evolved over time
  // by accident as much as because they are principled bonuses.
  //
  // FIXME: It would be nice to remove all such bonuses. At least it would be
  // nice to base the bonus values on something more scientific.
  assert(NumInstructions == 0);
  assert(NumVectorInstructions == 0);

  // Update the threshold based on callsite properties.
  updateThreshold(CS, F);

  FiftyPercentVectorBonus = 3 * Threshold / 2;
  TenPercentVectorBonus = 3 * Threshold / 4;
  const DataLayout &DL = F.getParent()->getDataLayout();

  // Track whether the post-inlining function would have more than one basic
  // block. A function consisting of a single basic block is frequently
  // intended to be inlined. Balloon the threshold by 50% until we pass the
  // single-BB phase.
  bool SingleBB = true;
  int SingleBBBonus = Threshold / 2;

  // Speculatively apply all possible bonuses to Threshold. Once Cost exceeds
  // this Threshold it can never come back under it (Cost cannot decrease), so
  // at that point we can stop processing the rest of the function body.
  Threshold += (SingleBBBonus + FiftyPercentVectorBonus);
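  // Worked example (with the default threshold of 225): SingleBBBonus = 112
  // and FiftyPercentVectorBonus = 337, so the speculative threshold becomes
  // 225 + 112 + 337 = 674 until any excess bonus is subtracted back off below.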

  // Give out bonuses per argument, as the instructions setting them up will
  // be gone after inlining.
  for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
    if (CS.isByValArgument(I)) {
      // We approximate the number of loads and stores needed by dividing the
      // size of the byval type by the target's pointer size.
      PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
      unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
      unsigned PointerSize = DL.getPointerSizeInBits();
      // Ceiling division.
      unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;

      // If it generates more than 8 stores it is likely to be expanded as an
      // inline memcpy so we take that as an upper bound. Otherwise we assume
      // one load and one store per word copied.
      // FIXME: The maxStoresPerMemcpy setting from the target should be used
      // here instead of a magic number of 8, but it's not available via
      // DataLayout.
      NumStores = std::min(NumStores, 8U);

      Cost -= 2 * NumStores * InlineConstants::InstrCost;
    } else {
      // For non-byval arguments subtract off one instruction per call
      // argument.
      Cost -= InlineConstants::InstrCost;
    }
  }
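  // Worked example (illustrative): a byval argument of type [64 x i8] with
  // 64-bit pointers gives TypeSize = 512 and PointerSize = 64, so NumStores =
  // 8 and the loop above credits 2 * 8 * InlineConstants::InstrCost back
  // against the cost.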

  // If there is only one call of the function, and it has internal linkage,
  // the cost of inlining it drops dramatically.
  bool OnlyOneCallAndLocalLinkage =
      F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
  if (OnlyOneCallAndLocalLinkage)
    Cost += InlineConstants::LastCallToStaticBonus;

  // If this function uses the coldcc calling convention, prefer not to inline
  // it.
  if (F.getCallingConv() == CallingConv::Cold)
    Cost += InlineConstants::ColdccPenalty;

  // Check if we're done. This can happen due to bonuses and penalties.
  if (Cost > Threshold)
    return false;

  if (F.empty())
    return true;

  Function *Caller = CS.getInstruction()->getParent()->getParent();
  // Check if the caller function is recursive itself.
  for (User *U : Caller->users()) {
    CallSite Site(U);
    if (!Site)
      continue;
    Instruction *I = Site.getInstruction();
    if (I->getParent()->getParent() == Caller) {
      IsCallerRecursive = true;
      break;
    }
  }

  // Populate our simplified values by mapping from function arguments to call
  // arguments with known important simplifications.
  CallSite::arg_iterator CAI = CS.arg_begin();
  for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
       FAI != FAE; ++FAI, ++CAI) {
    assert(CAI != CS.arg_end());
    if (Constant *C = dyn_cast<Constant>(CAI))
      SimplifiedValues[&*FAI] = C;

    Value *PtrArg = *CAI;
    if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
      ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());

      // We can SROA any pointer arguments derived from alloca instructions.
      if (isa<AllocaInst>(PtrArg)) {
        SROAArgValues[&*FAI] = PtrArg;
        SROAArgCosts[PtrArg] = 0;
      }
    }
  }
  NumConstantArgs = SimplifiedValues.size();
  NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
  NumAllocaArgs = SROAArgValues.size();
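
  // For example (a sketch): given a call 'foo(i32 42, i8* %buf)' where %buf
  // is an alloca in the caller, the first formal argument maps to the
  // constant 42 in SimplifiedValues and the second becomes an SROA candidate.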

  // FIXME: If a caller has multiple calls to a callee, we end up recomputing
  // the ephemeral values multiple times (and they're completely determined by
  // the callee, so this is purely duplicate work).
  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(&F, &ACT->getAssumptionCache(F),
                                      EphValues);

  // The worklist of live basic blocks in the callee *after* inlining. We avoid
  // adding basic blocks of the callee which can be proven to be dead for this
  // particular call site in order to get more accurate cost estimates. This
  // requires a somewhat heavyweight iteration pattern: we need to walk the
  // basic blocks in a breadth-first order as we insert live successors. To
  // accomplish this, we use a small-size optimized SetVector, preferring small
  // iteration counts because we exit early once we cross the threshold.
  typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
                    SmallPtrSet<BasicBlock *, 16>>
      BBSetVector;
  BBSetVector BBWorklist;
  BBWorklist.insert(&F.getEntryBlock());
  // Note that we *must not* cache the size: this loop grows the worklist.
  for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
    // Bail out the moment we cross the threshold. This means we'll under-count
    // the cost, but only when undercounting doesn't matter.
    if (Cost > Threshold)
      break;

    BasicBlock *BB = BBWorklist[Idx];
    if (BB->empty())
      continue;

    // Disallow inlining a blockaddress. A blockaddress only has defined
    // behavior for an indirect branch in the same function, and we do not
    // currently support inlining indirect branches. But, the inliner may not
    // see an indirect branch that ends up being dead code at a particular call
    // site. If the blockaddress escapes the function, e.g., via a global
    // variable, inlining may lead to an invalid cross-function reference.
    if (BB->hasAddressTaken())
      return false;

    // Analyze the cost of this block. If we blow through the threshold, this
    // returns false, and we can bail out.
    if (!analyzeBlock(BB, EphValues))
      return false;

    TerminatorInst *TI = BB->getTerminator();

    // Add in the live successors by first checking whether we have a
    // terminator that may be simplified based on the values simplified by
    // this call.
    if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
      if (BI->isConditional()) {
        Value *Cond = BI->getCondition();
        if (ConstantInt *SimpleCond =
                dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
          BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
          continue;
        }
      }
    } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
      Value *Cond = SI->getCondition();
      if (ConstantInt *SimpleCond =
              dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
        BBWorklist.insert(SI->findCaseValue(SimpleCond).getCaseSuccessor());
        continue;
      }
    }
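    // For example (a sketch): if the callee ends a block with 'br i1 %c,
    // label %t, label %f' and %c was simplified to true via a constant
    // argument, only %t is added to the worklist; %f is treated as dead for
    // this call site.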

    // If we're unable to select a particular successor, just count all of
    // them.
    for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
         ++TIdx)
      BBWorklist.insert(TI->getSuccessor(TIdx));

    // If we had any successors at this point, then post-inlining is likely to
    // have them as well. Note that we assume any basic blocks which existed
    // due to branches or switches which folded above will also fold after
    // inlining.
    if (SingleBB && TI->getNumSuccessors() > 1) {
      // Take off the bonus we applied to the threshold.
      Threshold -= SingleBBBonus;
      SingleBB = false;
    }
  }

  // If this is a noduplicate call, we can still inline as long as
  // inlining this would cause the removal of the caller (so the instruction
  // is not actually duplicated, just moved).
  if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
    return false;

  // We applied the maximum possible vector bonus at the beginning. Now,
  // subtract the excess bonus, if any, from the Threshold before
  // comparing against Cost.
  if (NumVectorInstructions <= NumInstructions / 10)
    Threshold -= FiftyPercentVectorBonus;
  else if (NumVectorInstructions <= NumInstructions / 2)
    Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus);

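  // Note: the comparison below is strict and the threshold is clamped to at
  // least 1, so when Threshold is zero or negative, inlining is approved only
  // if bonuses have driven Cost below 1.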
  return Cost < std::max(1, Threshold);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// \brief Dump stats about this call's analysis.
LLVM_DUMP_METHOD void CallAnalyzer::dump() {
#define DEBUG_PRINT_STAT(x) dbgs() << "      " #x ": " << x << "\n"
  DEBUG_PRINT_STAT(NumConstantArgs);
  DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
  DEBUG_PRINT_STAT(NumAllocaArgs);
  DEBUG_PRINT_STAT(NumConstantPtrCmps);
  DEBUG_PRINT_STAT(NumConstantPtrDiffs);
  DEBUG_PRINT_STAT(NumInstructionsSimplified);
  DEBUG_PRINT_STAT(NumInstructions);
  DEBUG_PRINT_STAT(SROACostSavings);
  DEBUG_PRINT_STAT(SROACostSavingsLost);
  DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
  DEBUG_PRINT_STAT(Cost);
  DEBUG_PRINT_STAT(Threshold);
#undef DEBUG_PRINT_STAT
}
#endif

/// \brief Test whether two functions either both have or both lack the given
/// attribute.
template <typename AttrKind>
static bool attributeMatches(Function *F1, Function *F2, AttrKind Attr) {
  return F1->getFnAttribute(Attr) == F2->getFnAttribute(Attr);
}

/// \brief Test that there are no attribute conflicts between Caller and Callee
/// that prevent inlining.
static bool functionsHaveCompatibleAttributes(Function *Caller,
                                              Function *Callee,
                                              TargetTransformInfo &TTI) {
  return TTI.areInlineCompatible(Caller, Callee) &&
         AttributeFuncs::areInlineCompatible(*Caller, *Callee);
}

InlineCost llvm::getInlineCost(CallSite CS, int DefaultThreshold,
                               TargetTransformInfo &CalleeTTI,
                               AssumptionCacheTracker *ACT,
                               ProfileSummaryInfo *PSI) {
  return getInlineCost(CS, CS.getCalledFunction(), DefaultThreshold, CalleeTTI,
                       ACT, PSI);
}

int llvm::computeThresholdFromOptLevels(unsigned OptLevel,
                                        unsigned SizeOptLevel) {
  if (OptLevel > 2)
    return OptAggressiveThreshold;
  if (SizeOptLevel == 1) // -Os
    return OptSizeThreshold;
  if (SizeOptLevel == 2) // -Oz
    return OptMinSizeThreshold;
  return DefaultInlineThreshold;
}
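
// For example, -O3 with no size optimization (OptLevel = 3, SizeOptLevel = 0)
// yields OptAggressiveThreshold (275), while -Oz (SizeOptLevel = 2) yields
// OptMinSizeThreshold (25).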

int llvm::getDefaultInlineThreshold() { return DefaultInlineThreshold; }

InlineCost llvm::getInlineCost(CallSite CS, Function *Callee,
                               int DefaultThreshold,
                               TargetTransformInfo &CalleeTTI,
                               AssumptionCacheTracker *ACT,
                               ProfileSummaryInfo *PSI) {

  // Cannot inline indirect calls.
  if (!Callee)
    return llvm::InlineCost::getNever();

  // Calls to functions with always-inline attributes should be inlined
  // whenever possible.
  if (CS.hasFnAttr(Attribute::AlwaysInline)) {
    if (isInlineViable(*Callee))
      return llvm::InlineCost::getAlways();
    return llvm::InlineCost::getNever();
  }

  // Never inline functions with conflicting attributes (unless callee has
  // always-inline attribute).
  if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee, CalleeTTI))
    return llvm::InlineCost::getNever();

  // Don't inline this call if the caller has the optnone attribute.
  if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone))
    return llvm::InlineCost::getNever();

  // Don't inline functions which can be interposed at link-time. Don't inline
  // functions marked noinline or call sites marked noinline.
  // Note: inlining non-exact non-interposable functions is fine, since we know
  // we have *a* correct implementation of the source level function.
  if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
      CS.isNoInline())
    return llvm::InlineCost::getNever();

  DEBUG(llvm::dbgs() << "      Analyzing call of " << Callee->getName()
                     << "...\n");

  CallAnalyzer CA(CalleeTTI, ACT, PSI, *Callee, DefaultThreshold, CS);
  bool ShouldInline = CA.analyzeCall(CS);

  DEBUG(CA.dump());

  // Check if there was a reason to force inlining or no inlining.
  if (!ShouldInline && CA.getCost() < CA.getThreshold())
    return InlineCost::getNever();
  if (ShouldInline && CA.getCost() >= CA.getThreshold())
    return InlineCost::getAlways();

  return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
}

bool llvm::isInlineViable(Function &F) {
  bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
  for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
    // Disallow inlining of functions which contain indirect branches or
    // blockaddresses.
    if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
      return false;

    for (auto &II : *BI) {
      CallSite CS(&II);
      if (!CS)
        continue;

      // Disallow recursive calls.
      if (&F == CS.getCalledFunction())
        return false;

      // Disallow calls which expose returns-twice to a function not previously
      // attributed as such.
      if (!ReturnsTwice && CS.isCall() &&
          cast<CallInst>(CS.getInstruction())->canReturnTwice())
        return false;

      // Disallow inlining functions that call @llvm.localescape. Doing this
      // correctly would require major changes to the inliner.
      if (CS.getCalledFunction() &&
          CS.getCalledFunction()->getIntrinsicID() ==
              llvm::Intrinsic::localescape)
        return false;
    }
  }

  return true;
}