//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

// Threshold to use when optsize is specified (and there is no
// -inline-threshold).
const int OptSizeThreshold = 75;

// Threshold to use when -Oz is specified (and there is no -inline-threshold).
const int OptMinSizeThreshold = 25;

// Threshold to use when -O[34] is specified (and there is no
// -inline-threshold).
const int OptAggressiveThreshold = 275;

static cl::opt<int> DefaultInlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

// We introduce this threshold to help performance of instrumentation-based
// PGO before we actually hook up the inliner with analysis passes such as
// BPI and BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(225),
    cl::desc("Threshold for inlining functions with cold attribute"));

namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// Getter for the cache of @llvm.assume intrinsics.
  std::function<AssumptionCache &(Function &)> &GetAssumptionCache;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  // The called function.
  Function &F;

  // The candidate callsite being analyzed. Please do not use this to do
  // analysis in the caller function; we want the inline cost query to be
  // easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int FiftyPercentVectorBonus, TenPercentVectorBonus;
  int VectorBonus;

  // While we walk the potentially-inlined instructions, we build up and
  // maintain a mapping of simplified values specific to this callsite. The
  // idea is to propagate any special information we have about arguments to
  // this call through the inlinable section of the function, and account for
  // likely simplifications post-inlining. The most important aspect we track
  // is CFG altering simplifications -- when we prove a basic block dead, that
  // can cause dramatic shifts in the cost of inlining a function.
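  //
  // An illustrative sketch (not tied to any specific test case): if a call
  // site passes the constant 'true' for a flag parameter, a callee block
  // guarded by a branch on that flag may be proven dead here, and the
  // instructions in that block then never count against the inline cost.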
  DenseMap<Value *, Constant *> SimplifiedValues;

  // Keep track of the values which map back (through function arguments) to
  // allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  // The mapping of caller Alloca values to their accumulated cost savings. If
  // we have to disable SROA for one of the allocas, this tells us how much
  // cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  // Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPOffsetConstant(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
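  ///
  /// For example (an illustrative case, not an exhaustive list): if the call
  /// site passes an argument marked 'nonnull', or passes the address of a
  /// caller-local alloca, comparisons of that value against null inside the
  /// callee can be folded to constants during this analysis.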
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
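  ///
  /// For example, with the default settings above, a callee carrying the
  /// inlinehint attribute is analyzed with a threshold of 325 rather than
  /// 225, unless the caller is itself optimizing for minimum size.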
159 void updateThreshold(CallSite CS, Function &Callee);
160
Easwaran Raman9a3fc172016-04-08 21:28:02 +0000161 /// Return true if size growth is allowed when inlining the callee at CS.
162 bool allowSizeGrowth(CallSite CS);
163
Chandler Carruth0539c072012-03-31 12:42:41 +0000164 // Custom analysis routines.
Hal Finkel57f03dd2014-09-07 13:49:57 +0000165 bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);
Chandler Carruth0539c072012-03-31 12:42:41 +0000166
167 // Disable several entry points to the visitor so we don't accidentally use
168 // them by declaring but not defining them here.
Chad Rosier567556a2016-04-28 14:47:23 +0000169 void visit(Module *);
170 void visit(Module &);
171 void visit(Function *);
172 void visit(Function &);
173 void visit(BasicBlock *);
174 void visit(BasicBlock &);
Chandler Carruth0539c072012-03-31 12:42:41 +0000175
176 // Provide base case for our instruction visit.
177 bool visitInstruction(Instruction &I);
178
179 // Our visit overrides.
180 bool visitAlloca(AllocaInst &I);
181 bool visitPHI(PHINode &I);
182 bool visitGetElementPtr(GetElementPtrInst &I);
183 bool visitBitCast(BitCastInst &I);
184 bool visitPtrToInt(PtrToIntInst &I);
185 bool visitIntToPtr(IntToPtrInst &I);
186 bool visitCastInst(CastInst &I);
187 bool visitUnaryInstruction(UnaryInstruction &I);
Matt Arsenault727aa342013-07-20 04:09:00 +0000188 bool visitCmpInst(CmpInst &I);
Chandler Carruth0539c072012-03-31 12:42:41 +0000189 bool visitSub(BinaryOperator &I);
190 bool visitBinaryOperator(BinaryOperator &I);
191 bool visitLoad(LoadInst &I);
192 bool visitStore(StoreInst &I);
Chandler Carruth753e21d2012-12-28 14:23:32 +0000193 bool visitExtractValue(ExtractValueInst &I);
194 bool visitInsertValue(InsertValueInst &I);
Chandler Carruth0539c072012-03-31 12:42:41 +0000195 bool visitCallSite(CallSite CS);
Chandler Carruth0814d2a2013-12-13 07:59:56 +0000196 bool visitReturnInst(ReturnInst &RI);
197 bool visitBranchInst(BranchInst &BI);
198 bool visitSwitchInst(SwitchInst &SI);
199 bool visitIndirectBrInst(IndirectBrInst &IBI);
200 bool visitResumeInst(ResumeInst &RI);
David Majnemer654e1302015-07-31 17:58:14 +0000201 bool visitCleanupReturnInst(CleanupReturnInst &RI);
202 bool visitCatchReturnInst(CatchReturnInst &RI);
Chandler Carruth0814d2a2013-12-13 07:59:56 +0000203 bool visitUnreachableInst(UnreachableInst &I);
Chandler Carruth0539c072012-03-31 12:42:41 +0000204
205public:
Sean Silvaab6a6832016-07-23 04:22:50 +0000206 CallAnalyzer(const TargetTransformInfo &TTI,
207 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman71069cf2016-06-09 22:23:21 +0000208 ProfileSummaryInfo *PSI, Function &Callee, int Threshold,
209 CallSite CSArg)
Sean Silvaab6a6832016-07-23 04:22:50 +0000210 : TTI(TTI), GetAssumptionCache(GetAssumptionCache), PSI(PSI), F(Callee),
211 CandidateCS(CSArg), Threshold(Threshold), Cost(0),
212 IsCallerRecursive(false), IsRecursiveCall(false),
213 ExposesReturnsTwice(false), HasDynamicAlloca(false),
214 ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
215 HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
216 NumVectorInstructions(0), FiftyPercentVectorBonus(0),
217 TenPercentVectorBonus(0), VectorBonus(0), NumConstantArgs(0),
218 NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), NumConstantPtrCmps(0),
219 NumConstantPtrDiffs(0), NumInstructionsSimplified(0),
220 SROACostSavings(0), SROACostSavingsLost(0) {}
Chandler Carruth0539c072012-03-31 12:42:41 +0000221
222 bool analyzeCall(CallSite CS);
223
224 int getThreshold() { return Threshold; }
225 int getCost() { return Cost; }
226
227 // Keep a bunch of stats about the cost savings found so we can print them
228 // out when debugging.
229 unsigned NumConstantArgs;
230 unsigned NumConstantOffsetPtrArgs;
231 unsigned NumAllocaArgs;
232 unsigned NumConstantPtrCmps;
233 unsigned NumConstantPtrDiffs;
234 unsigned NumInstructionsSimplified;
235 unsigned SROACostSavings;
236 unsigned SROACostSavingsLost;
237
238 void dump();
239};
240
241} // namespace
242
243/// \brief Test whether the given value is an Alloca-derived function argument.
244bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
245 return SROAArgValues.count(V);
Owen Andersona08318a2010-09-09 16:56:42 +0000246}
247
/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

/// \brief Check whether a GEP's indices are all constant.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPOffsetConstant(GetElementPtrInst &GEP) {
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
      return false;

  return true;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
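///
/// For example (an illustrative sketch, assuming 64-bit pointers), given
///   %p = getelementptr inbounds {i32, i32}, {i32, i32}* %b, i64 1, i32 1
/// the accumulated offset is 8 (one struct) + 4 (second field) = 12 bytes.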
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero())
      continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = dyn_cast<StructType>(*GTI)) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
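  // For example (illustrative), if the call site passes a constant 8 for %n,
  //   %buf = alloca i32, i32 %n
  // has a known size here and is accounted as 8 * 4 = 32 bytes of statically
  // allocated stack.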
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      const DataLayout &DL = F.getParent()->getDataLayout();
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded, and disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return false;
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here, we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  if (isGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return false;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getBitCast(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offsets through casts.
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getPtrToInt(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getIntToPtr(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getCast(I.getOpcode(), COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}
543
544bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
545 Value *Operand = I.getOperand(0);
Jakub Staszak7b9e0b92013-03-07 20:01:19 +0000546 Constant *COp = dyn_cast<Constant>(Operand);
547 if (!COp)
548 COp = SimplifiedValues.lookup(Operand);
Mehdi Aminia28d91d2015-03-10 02:37:25 +0000549 if (COp) {
550 const DataLayout &DL = F.getParent()->getDataLayout();
Manuel Jacobe9024592016-01-21 06:33:22 +0000551 if (Constant *C = ConstantFoldInstOperands(&I, COp, DL)) {
Chandler Carruth0539c072012-03-31 12:42:41 +0000552 SimplifiedValues[&I] = C;
553 return true;
554 }
Mehdi Aminia28d91d2015-03-10 02:37:25 +0000555 }
Chandler Carruth0539c072012-03-31 12:42:41 +0000556
557 // Disable any SROA on the argument to arbitrary unary operators.
558 disableSROA(Operand);
559
560 return false;
561}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  unsigned ArgNo = A->getArgNo();
  return CandidateCS.paramHasAttr(ArgNo + 1, Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in the scenario below, inlining hot_call_X() may
  // be beneficial:
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In future, we should elaborate this based on BPI and BFI in more
  // general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();
  if (DefaultInlineThreshold.getNumOccurrences() > 0) {
    // An explicitly specified -inline-threshold overrides the threshold passed
    // to CallAnalyzer's constructor.
    Threshold = DefaultInlineThreshold;
  } else {
    // If -inline-threshold is not given, listen to the optsize and minsize
    // attributes when they would decrease the threshold.
    if (Caller->optForMinSize() && OptMinSizeThreshold < Threshold)
      Threshold = OptMinSizeThreshold;
    else if (Caller->optForSize() && OptSizeThreshold < Threshold)
      Threshold = OptSizeThreshold;
  }

  bool HotCallsite = false;
  uint64_t TotalWeight;
  if (CS.getInstruction()->extractProfTotalWeight(TotalWeight) &&
      PSI->isHotCount(TotalWeight))
    HotCallsite = true;

  // Listen to the inlinehint attribute or profile-based hotness information
  // when it would increase the threshold and the caller does not need to
  // minimize its size.
  bool InlineHint = Callee.hasFnAttribute(Attribute::InlineHint) ||
                    PSI->isHotFunction(&Callee) || HotCallsite;
  if (InlineHint && HintThreshold > Threshold && !Caller->optForMinSize())
    Threshold = HintThreshold;

  bool ColdCallee = PSI->isColdFunction(&Callee);
  // A command line argument for DefaultInlineThreshold overrides the default
  // ColdThreshold. If we have -inline-threshold but no -inlinecold-threshold,
  // do not use the default cold threshold even if it is smaller.
  if ((DefaultInlineThreshold.getNumOccurrences() == 0 ||
       ColdThreshold.getNumOccurrences() > 0) &&
      ColdCallee && ColdThreshold < Threshold)
    Threshold = ColdThreshold;

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();
}

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  if (Constant *CLHS = dyn_cast<Constant>(LHS)) {
    if (Constant *CRHS = dyn_cast<Constant>(RHS))
      if (Constant *C =
              ConstantExpr::getCompare(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
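  // For example (illustrative), if both operands trace back to the same base
  // pointer with recorded constant offsets 4 and 8, an equality icmp folds to
  // false here and the comparison becomes free.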
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
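  // For example (illustrative), subtracting two ptrtoints of pointers off the
  // same base with recorded constant offsets 12 and 4 folds to the constant 8.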
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  Value *SimpleV = nullptr;
  if (auto FI = dyn_cast<FPMathOperator>(&I))
    SimpleV =
        SimplifyFPBinOp(I.getOpcode(), LHS, RHS, FI->getFastMathFlags(), DL);
  else
    SimpleV = SimplifyBinOp(I.getOpcode(), LHS, RHS, DL);

  if (Constant *C = dyn_cast_or_null<Constant>(SimpleV)) {
    SimplifiedValues[&I] = C;
    return true;
  }

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  Constant *C = dyn_cast<Constant>(I.getAggregateOperand());
  if (!C)
    C = SimplifiedValues.lookup(I.getAggregateOperand());
  if (C) {
    SimplifiedValues[&I] = ConstantExpr::getExtractValue(C, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  Constant *AggC = dyn_cast<Constant>(I.getAggregateOperand());
  if (!AggC)
    AggC = SimplifiedValues.lookup(I.getAggregateOperand());
  Constant *InsertedC = dyn_cast<Constant>(I.getInsertedValueOperand());
  if (!InsertedC)
    InsertedC = SimplifiedValues.lookup(I.getInsertedValueOperand());
  if (AggC && InsertedC) {
    SimplifiedValues[&I] =
        ConstantExpr::getInsertValue(AggC, InsertedC, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
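///
/// For example (an illustrative case): a call to @llvm.fabs.f64 whose
/// argument has been simplified to a floating-point constant is constant
/// folded here and contributes no cost.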
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
       ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}

bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        return Base::visitCallSite(CS);

      case Intrinsic::load_relative:
        // This is normally lowered to 4 LLVM instructions.
        Cost += 3 * InlineConstants::InstrCost;
        return false;

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        // SROA can usually chew through these intrinsics, but they aren't free.
        return false;
      case Intrinsic::localescape:
        HasFrameEscape = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getParent()->getParent()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F)
    return Base::visitCallSite(CS);

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
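  // Illustrative scenario: if the caller passed @f as a function-pointer
  // argument and the callee invokes it through that parameter, the lookup
  // above resolves this indirect call to @f, and we analyze it here as if
  // inlining @f directly at this site.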
  CallAnalyzer CA(TTI, GetAssumptionCache, PSI, *F,
                  InlineConstants::IndirectCallThreshold, CS);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // threshold to get the bonus we want to apply, but don't go below zero.
    Cost -= std::max(0, CA.getThreshold() - CA.getCost());
  }

  return Base::visitCallSite(CS);
}

bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
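  // For example (illustrative), 'br i1 true, ...' -- or a conditional branch
  // whose condition was simplified to a constant earlier in this analysis --
  // is modeled as free.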
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Otherwise, we need to accumulate a cost proportional to the number of
  // distinct successor blocks. This fan-out in the CFG cannot be represented
  // for free even if we can represent the core switch as a jumptable that
  // takes a single instruction.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.
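  //
  // For example (illustrative), a switch with a default and three distinct
  // case destinations has four successor blocks and is charged
  // 3 * InlineConstants::InstrCost below.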
  SmallPtrSet<BasicBlock *, 8> SuccessorBlocks;
  SuccessorBlocks.insert(SI.getDefaultDest());
  for (auto I = SI.case_begin(), E = SI.case_end(); I != E; ++I)
    SuccessorBlocks.insert(I.getCaseSuccessor());
  // Add cost corresponding to the number of distinct destinations. The first
  // we model as free because of fallthrough.
  Cost += (SuccessorBlocks.size() - 1) * InlineConstants::InstrCost;
  return false;
}

bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
  // We never want to inline functions that contain an indirectbr. This is
  // incorrect because all the blockaddresses (in static global initializers
  // for example) would be referring to the original function, and this
  // indirect jump would jump from the inlined copy of the function into the
  // original function which is extremely undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions with
  // indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function.
  HasIndirectBr = true;
  return false;
}

bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a resume instruction.
  return false;
}

bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a cleanupret instruction.
  return false;
}

bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a catchret instruction.
  return false;
}

bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
  // FIXME: It might be reasonable to discount the cost of instructions leading
  // to unreachable as they have the lowest possible impact on both runtime and
  // code size.
  return true; // No actual code is needed for unreachable.
}

bool CallAnalyzer::visitInstruction(Instruction &I) {
  // Some instructions are free. All of the free intrinsics can also be
  // handled by SROA, etc.
  if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
    return true;

  // We found something we don't understand or can't handle. Mark any SROA-able
  // values in the operand list as no longer viable.
  for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
    disableSROA(*OI);

  return false;
}

/// \brief Analyze a basic block for its contribution to the inline cost.
///
/// This method walks the analyzer over every instruction in the given basic
/// block and accounts for their cost during inlining at this callsite. It
/// aborts early if the threshold has been exceeded or an impossible to inline
/// construct has been detected. It returns false if inlining is no longer
/// viable, and true if inlining remains viable.
bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
                                SmallPtrSetImpl<const Value *> &EphValues) {
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
    // FIXME: Currently, the number of instructions in a function, regardless
    // of our ability to simplify them during inlining to constants or dead
    // code, is actually used by the vector bonus heuristic. As long as that's
    // true, we have to special case debug intrinsics here to prevent
    // differences in inlining due to debug symbols. Eventually, the number of
    // unsimplified instructions shouldn't factor into the cost computation,
    // but until then, hack around it here.
    if (isa<DbgInfoIntrinsic>(I))
      continue;

    // Skip ephemeral values.
    if (EphValues.count(&*I))
      continue;

    ++NumInstructions;
    if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
      ++NumVectorInstructions;

    // If the instruction is floating point, and the target says this operation
    // is expensive or the function has the "use-soft-float" attribute, this may
    // eventually become a library call. Treat the cost as such.
    if (I->getType()->isFloatingPointTy()) {
      bool hasSoftFloatAttr = false;

      // If the function has the "use-soft-float" attribute, mark it as
      // expensive.
      if (F.hasFnAttribute("use-soft-float")) {
        Attribute Attr = F.getFnAttribute("use-soft-float");
        StringRef Val = Attr.getValueAsString();
        if (Val == "true")
          hasSoftFloatAttr = true;
      }

      if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
          hasSoftFloatAttr)
        Cost += InlineConstants::CallPenalty;
    }

    // If the instruction simplified to a constant, there is no cost to this
    // instruction. Visit the instructions using our InstVisitor to account for
    // all of the per-instruction logic. The visit tree returns true if we
    // consumed the instruction in any way, and false if the instruction's base
    // cost should count against inlining.
    if (Base::visit(&*I))
      ++NumInstructionsSimplified;
    else
      Cost += InlineConstants::InstrCost;

    // If visiting this instruction detected an uninlinable pattern, abort.
    if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
        HasIndirectBr || HasFrameEscape)
      return false;

    // If the caller is a recursive function then we don't want to inline
    // functions which allocate a lot of stack space because it would increase
    // the caller stack usage dramatically.
    if (IsCallerRecursive &&
        AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
      return false;

    // Check if we've passed the maximum possible threshold so we don't spin in
    // huge basic blocks that will never inline.
    if (Cost > Threshold)
      return false;
  }

  return true;
}
1147
1148/// \brief Compute the base pointer and cumulative constant offsets for V.
1149///
1150/// This strips all constant offsets off of V, leaving it the base pointer, and
1151/// accumulates the total constant offset applied in the returned constant. It
1152/// returns null if V is not a pointer, and returns the constant '0' if there are
1153/// no constant offsets applied.
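/// For example (illustrative IR, not taken from this file): given
///   %p = getelementptr inbounds [4 x i32], [4 x i32]* %a, i64 0, i64 2
/// this strips V down to %a and returns the offset 8 (two 4-byte elements)
/// as an IntPtr-typed ConstantInt.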
1154ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001155 if (!V->getType()->isPointerTy())
Craig Topper353eda42014-04-24 06:44:33 +00001156 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001157
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001158 const DataLayout &DL = F.getParent()->getDataLayout();
1159 unsigned IntPtrWidth = DL.getPointerSizeInBits();
Chandler Carruth0539c072012-03-31 12:42:41 +00001160 APInt Offset = APInt::getNullValue(IntPtrWidth);
1161
1162 // Even though we don't look through PHI nodes, we could be called on an
1163 // instruction in an unreachable block, which may be on a cycle.
1164 SmallPtrSet<Value *, 4> Visited;
1165 Visited.insert(V);
1166 do {
1167 if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
1168 if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
Craig Topper353eda42014-04-24 06:44:33 +00001169 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001170 V = GEP->getPointerOperand();
1171 } else if (Operator::getOpcode(V) == Instruction::BitCast) {
1172 V = cast<Operator>(V)->getOperand(0);
1173 } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
Sanjoy Das5ce32722016-04-08 00:48:30 +00001174 if (GA->isInterposable())
Chandler Carruth0539c072012-03-31 12:42:41 +00001175 break;
1176 V = GA->getAliasee();
1177 } else {
1178 break;
1179 }
1180 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
David Blaikie70573dc2014-11-19 07:49:26 +00001181 } while (Visited.insert(V).second);
Chandler Carruth0539c072012-03-31 12:42:41 +00001182
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001183 Type *IntPtrTy = DL.getIntPtrType(V->getContext());
Chandler Carruth0539c072012-03-31 12:42:41 +00001184 return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
1185}
1186
1187/// \brief Analyze a call site for potential inlining.
1188///
1189/// Returns true if inlining this call is viable, and false if it is not
1190/// viable. It computes the cost and adjusts the threshold based on numerous
1191/// factors and heuristics. If this method returns false but the computed cost
1192/// is below the computed threshold, then inlining was forcibly disabled by
Bob Wilson266802d2012-11-19 07:04:30 +00001193/// some artifact of the routine.
Chandler Carruth0539c072012-03-31 12:42:41 +00001194bool CallAnalyzer::analyzeCall(CallSite CS) {
Chandler Carruth7ae90d42012-04-11 10:15:10 +00001195 ++NumCallsAnalyzed;
1196
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001197 // Perform some tweaks to the cost and threshold based on the direct
1198 // callsite information.
Chandler Carruth0539c072012-03-31 12:42:41 +00001199
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001200 // We want to more aggressively inline vector-dense kernels, so up the
1201 // threshold, and we'll lower it if the % of vector instructions gets too
Chandler Carrutha004f222015-05-27 02:49:05 +00001202  // low. Note that these bonuses are somewhat arbitrary and evolved over time
1203 // by accident as much as because they are principled bonuses.
1204 //
1205 // FIXME: It would be nice to remove all such bonuses. At least it would be
1206 // nice to base the bonus values on something more scientific.
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001207 assert(NumInstructions == 0);
1208 assert(NumVectorInstructions == 0);
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001209
1210 // Update the threshold based on callsite properties
1211 updateThreshold(CS, F);
1212
Chandler Carrutha004f222015-05-27 02:49:05 +00001213 FiftyPercentVectorBonus = 3 * Threshold / 2;
1214 TenPercentVectorBonus = 3 * Threshold / 4;
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001215 const DataLayout &DL = F.getParent()->getDataLayout();
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001216
Chandler Carrutha004f222015-05-27 02:49:05 +00001217 // Track whether the post-inlining function would have more than one basic
1218 // block. A single basic block is often intended for inlining. Balloon the
1219 // threshold by 50% until we pass the single-BB phase.
1220 bool SingleBB = true;
1221 int SingleBBBonus = Threshold / 2;
1222
1223 // Speculatively apply all possible bonuses to Threshold. If cost exceeds
1224 // this Threshold any time, and cost cannot decrease, we can stop processing
1225 // the rest of the function body.
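  // (Worked example, assuming updateThreshold kept the default of 225: the
  // speculative starting threshold is 225 + 112 + 337 = 674, and the bonuses
  // are clawed back below once their assumptions prove false.)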
1226 Threshold += (SingleBBBonus + FiftyPercentVectorBonus);
1227
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001228 // Give out bonuses per argument, as the instructions setting them up will
1229 // be gone after inlining.
1230 for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001231 if (CS.isByValArgument(I)) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001232 // We approximate the number of loads and stores needed by dividing the
1233 // size of the byval type by the target's pointer size.
1234 PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001235 unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
1236 unsigned PointerSize = DL.getPointerSizeInBits();
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001237 // Ceiling division.
1238 unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001239
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001240 // If it generates more than 8 stores it is likely to be expanded as an
1241 // inline memcpy so we take that as an upper bound. Otherwise we assume
1242 // one load and one store per word copied.
1243 // FIXME: The maxStoresPerMemcpy setting from the target should be used
1244 // here instead of a magic number of 8, but it's not available via
1245 // DataLayout.
1246 NumStores = std::min(NumStores, 8U);
1247
1248 Cost -= 2 * NumStores * InlineConstants::InstrCost;
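      // (Worked example: a 128-byte byval struct with 8-byte pointers needs
      // ceil(1024 / 64) = 16 word copies, capped at 8, so the credit is
      // 2 * 8 * InstrCost.)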
1249 } else {
1250 // For non-byval arguments subtract off one instruction per call
1251 // argument.
1252 Cost -= InlineConstants::InstrCost;
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001253 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001254 }
1255
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001256 // If there is only one call of the function, and it has internal linkage,
1257 // the cost of inlining it drops dramatically.
Chad Rosier567556a2016-04-28 14:47:23 +00001258 bool OnlyOneCallAndLocalLinkage =
1259 F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
James Molloy4f6fb952012-12-20 16:04:27 +00001260 if (OnlyOneCallAndLocalLinkage)
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001261 Cost += InlineConstants::LastCallToStaticBonus;
1262
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001263 // If this function uses the coldcc calling convention, prefer not to inline
1264 // it.
1265 if (F.getCallingConv() == CallingConv::Cold)
1266 Cost += InlineConstants::ColdccPenalty;
1267
1268 // Check if we're done. This can happen due to bonuses and penalties.
1269 if (Cost > Threshold)
1270 return false;
1271
Chandler Carruth0539c072012-03-31 12:42:41 +00001272 if (F.empty())
1273 return true;
1274
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001275 Function *Caller = CS.getInstruction()->getParent()->getParent();
1276 // Check if the caller function is recursive itself.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001277 for (User *U : Caller->users()) {
1278 CallSite Site(U);
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001279 if (!Site)
1280 continue;
1281 Instruction *I = Site.getInstruction();
1282 if (I->getParent()->getParent() == Caller) {
1283 IsCallerRecursive = true;
1284 break;
1285 }
1286 }
1287
Chandler Carruth0539c072012-03-31 12:42:41 +00001288 // Populate our simplified values by mapping from function arguments to call
1289 // arguments with known important simplifications.
1290 CallSite::arg_iterator CAI = CS.arg_begin();
1291 for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
1292 FAI != FAE; ++FAI, ++CAI) {
1293 assert(CAI != CS.arg_end());
1294 if (Constant *C = dyn_cast<Constant>(CAI))
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001295 SimplifiedValues[&*FAI] = C;
Chandler Carruth0539c072012-03-31 12:42:41 +00001296
1297 Value *PtrArg = *CAI;
1298 if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001299 ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());
Chandler Carruth0539c072012-03-31 12:42:41 +00001300
1301 // We can SROA any pointer arguments derived from alloca instructions.
1302 if (isa<AllocaInst>(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001303 SROAArgValues[&*FAI] = PtrArg;
Chandler Carruth0539c072012-03-31 12:42:41 +00001304 SROAArgCosts[PtrArg] = 0;
1305 }
1306 }
1307 }
1308 NumConstantArgs = SimplifiedValues.size();
1309 NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
1310 NumAllocaArgs = SROAArgValues.size();
1311
Hal Finkel57f03dd2014-09-07 13:49:57 +00001312 // FIXME: If a caller has multiple calls to a callee, we end up recomputing
1313 // the ephemeral values multiple times (and they're completely determined by
1314 // the callee, so this is purely duplicate work).
1315 SmallPtrSet<const Value *, 32> EphValues;
Sean Silvaab6a6832016-07-23 04:22:50 +00001316 CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);
Hal Finkel57f03dd2014-09-07 13:49:57 +00001317
Chandler Carruth0539c072012-03-31 12:42:41 +00001318 // The worklist of live basic blocks in the callee *after* inlining. We avoid
1319 // adding basic blocks of the callee which can be proven to be dead for this
1320 // particular call site in order to get more accurate cost estimates. This
1321 // requires a somewhat heavyweight iteration pattern: we need to walk the
1322 // basic blocks in a breadth-first order as we insert live successors. To
1323 // accomplish this, prioritizing for small iterations because we exit after
1324 // crossing our threshold, we use a small-size optimized SetVector.
1325 typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
Chad Rosier567556a2016-04-28 14:47:23 +00001326 SmallPtrSet<BasicBlock *, 16>>
1327 BBSetVector;
Chandler Carruth0539c072012-03-31 12:42:41 +00001328 BBSetVector BBWorklist;
1329 BBWorklist.insert(&F.getEntryBlock());
1330 // Note that we *must not* cache the size, this loop grows the worklist.
1331 for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
1332 // Bail out the moment we cross the threshold. This means we'll under-count
1333 // the cost, but only when undercounting doesn't matter.
Chandler Carrutha004f222015-05-27 02:49:05 +00001334 if (Cost > Threshold)
Chandler Carruth0539c072012-03-31 12:42:41 +00001335 break;
1336
1337 BasicBlock *BB = BBWorklist[Idx];
1338 if (BB->empty())
Chandler Carruth4d1d34f2012-03-14 23:19:53 +00001339 continue;
Dan Gohman4552e3c2009-10-13 18:30:07 +00001340
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001341 // Disallow inlining a blockaddress. A blockaddress only has defined
1342 // behavior for an indirect branch in the same function, and we do not
1343 // currently support inlining indirect branches. But, the inliner may not
1344 // see an indirect branch that ends up being dead code at a particular call
1345 // site. If the blockaddress escapes the function, e.g., via a global
1346 // variable, inlining may lead to an invalid cross-function reference.
1347 if (BB->hasAddressTaken())
1348 return false;
1349
Chandler Carruth0539c072012-03-31 12:42:41 +00001350 // Analyze the cost of this block. If we blow through the threshold, this
1351    // returns false, and we can bail out.
Easwaran Ramand295b002016-04-13 21:20:22 +00001352 if (!analyzeBlock(BB, EphValues))
1353 return false;
Eric Christopher46308e62011-02-01 01:16:32 +00001354
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001355 TerminatorInst *TI = BB->getTerminator();
1356
Chandler Carruth0539c072012-03-31 12:42:41 +00001357 // Add in the live successors by first checking whether we have terminator
1358 // that may be simplified based on the values simplified by this call.
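    // (E.g., if a constant call argument fully determines a conditional
    // branch, only the taken successor is enqueued and the dead arm is never
    // costed.)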
1359 if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
1360 if (BI->isConditional()) {
1361 Value *Cond = BI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001362 if (ConstantInt *SimpleCond =
1363 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001364 BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
1365 continue;
Eric Christopher46308e62011-02-01 01:16:32 +00001366 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001367 }
1368 } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
1369 Value *Cond = SI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001370 if (ConstantInt *SimpleCond =
1371 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001372 BBWorklist.insert(SI->findCaseValue(SimpleCond).getCaseSuccessor());
1373 continue;
1374 }
1375 }
Eric Christopher46308e62011-02-01 01:16:32 +00001376
Chandler Carruth0539c072012-03-31 12:42:41 +00001377 // If we're unable to select a particular successor, just count all of
1378 // them.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001379 for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
1380 ++TIdx)
Chandler Carruth0539c072012-03-31 12:42:41 +00001381 BBWorklist.insert(TI->getSuccessor(TIdx));
1382
1383    // If we had any successors at this point, then post-inlining is likely to
1384 // have them as well. Note that we assume any basic blocks which existed
1385 // due to branches or switches which folded above will also fold after
1386 // inlining.
1387 if (SingleBB && TI->getNumSuccessors() > 1) {
1388 // Take off the bonus we applied to the threshold.
1389 Threshold -= SingleBBBonus;
1390 SingleBB = false;
Eric Christopher46308e62011-02-01 01:16:32 +00001391 }
1392 }
Andrew Trickcaa500b2011-10-01 01:27:56 +00001393
Chandler Carruthcb5beb32013-12-12 11:59:26 +00001394 // If this is a noduplicate call, we can still inline as long as
James Molloy4f6fb952012-12-20 16:04:27 +00001395 // inlining this would cause the removal of the caller (so the instruction
1396 // is not actually duplicated, just moved).
1397 if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
1398 return false;
1399
Chandler Carrutha004f222015-05-27 02:49:05 +00001400 // We applied the maximum possible vector bonus at the beginning. Now,
1401 // subtract the excess bonus, if any, from the Threshold before
1402 // comparing against Cost.
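  // (At or below 10% vector instructions the entire FiftyPercentVectorBonus
  // is revoked; above 10% and up to 50% only TenPercentVectorBonus survives;
  // above 50% the full bonus stands.)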
1403 if (NumVectorInstructions <= NumInstructions / 10)
1404 Threshold -= FiftyPercentVectorBonus;
1405 else if (NumVectorInstructions <= NumInstructions / 2)
1406 Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus);
Chandler Carruth0539c072012-03-31 12:42:41 +00001407
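  // Clamping the threshold at 1 means that once bonuses and penalties have
  // driven Threshold to zero or below, only a call whose net Cost is
  // non-positive (a pure size win) still compares as profitable.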
Hans Wennborg00ab73d2016-02-05 20:32:42 +00001408 return Cost < std::max(1, Threshold);
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001409}
1410
Manman Ren49d684e2012-09-12 05:06:18 +00001411#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
Chandler Carruth0539c072012-03-31 12:42:41 +00001412/// \brief Dump stats about this call's analysis.
Yaron Kereneb2a2542016-01-29 20:50:44 +00001413LLVM_DUMP_METHOD void CallAnalyzer::dump() {
Eric Christophera13839f2014-02-26 23:27:16 +00001414#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
Chandler Carruth0539c072012-03-31 12:42:41 +00001415 DEBUG_PRINT_STAT(NumConstantArgs);
1416 DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
1417 DEBUG_PRINT_STAT(NumAllocaArgs);
1418 DEBUG_PRINT_STAT(NumConstantPtrCmps);
1419 DEBUG_PRINT_STAT(NumConstantPtrDiffs);
1420 DEBUG_PRINT_STAT(NumInstructionsSimplified);
Chandler Carrutha004f222015-05-27 02:49:05 +00001421 DEBUG_PRINT_STAT(NumInstructions);
Chandler Carruth0539c072012-03-31 12:42:41 +00001422 DEBUG_PRINT_STAT(SROACostSavings);
1423 DEBUG_PRINT_STAT(SROACostSavingsLost);
James Molloy4f6fb952012-12-20 16:04:27 +00001424 DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
Chandler Carruth394e34f2014-01-31 22:32:32 +00001425 DEBUG_PRINT_STAT(Cost);
1426 DEBUG_PRINT_STAT(Threshold);
Chandler Carruth0539c072012-03-31 12:42:41 +00001427#undef DEBUG_PRINT_STAT
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001428}
Manman Renc3366cc2012-09-06 19:55:56 +00001429#endif
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001430
Akira Hatanaka5af7ace2015-11-13 01:44:32 +00001431/// \brief Test whether two functions either both have or both lack the given
1432/// attribute.
Chad Rosier567556a2016-04-28 14:47:23 +00001433template <typename AttrKind>
Akira Hatanaka5af7ace2015-11-13 01:44:32 +00001434static bool attributeMatches(Function *F1, Function *F2, AttrKind Attr) {
1435 return F1->getFnAttribute(Attr) == F2->getFnAttribute(Attr);
1436}
1437
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001438/// \brief Test that there are no attribute conflicts between Caller and Callee
1439/// that prevent inlining.
1440static bool functionsHaveCompatibleAttributes(Function *Caller,
Eric Christopher4371b132015-07-02 01:11:47 +00001441 Function *Callee,
1442 TargetTransformInfo &TTI) {
Eric Christopherd566fb12015-07-29 22:09:48 +00001443 return TTI.areInlineCompatible(Caller, Callee) &&
Akira Hatanaka1cb242e2015-12-22 23:57:37 +00001444 AttributeFuncs::areInlineCompatible(*Caller, *Callee);
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001445}
1446
Sean Silvaab6a6832016-07-23 04:22:50 +00001447InlineCost llvm::getInlineCost(
1448 CallSite CS, int DefaultThreshold, TargetTransformInfo &CalleeTTI,
1449 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
1450 ProfileSummaryInfo *PSI) {
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001451 return getInlineCost(CS, CS.getCalledFunction(), DefaultThreshold, CalleeTTI,
Sean Silvaab6a6832016-07-23 04:22:50 +00001452 GetAssumptionCache, PSI);
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001453}
1454
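// Derive an inline threshold from the -O and size optimization levels using
// the constants defined at the top of this file: -O3 and above map to the
// aggressive threshold, -Os and -Oz to the size-oriented ones, and anything
// else falls back to DefaultInlineThreshold.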
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001455int llvm::computeThresholdFromOptLevels(unsigned OptLevel,
1456 unsigned SizeOptLevel) {
1457 if (OptLevel > 2)
1458 return OptAggressiveThreshold;
1459 if (SizeOptLevel == 1) // -Os
1460 return OptSizeThreshold;
1461 if (SizeOptLevel == 2) // -Oz
1462 return OptMinSizeThreshold;
1463 return DefaultInlineThreshold;
1464}
1465
1466int llvm::getDefaultInlineThreshold() { return DefaultInlineThreshold; }
1467
Sean Silvaab6a6832016-07-23 04:22:50 +00001468InlineCost llvm::getInlineCost(
1469 CallSite CS, Function *Callee, int DefaultThreshold,
1470 TargetTransformInfo &CalleeTTI,
1471 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
1472 ProfileSummaryInfo *PSI) {
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001473
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001474 // Cannot inline indirect calls.
1475 if (!Callee)
1476 return llvm::InlineCost::getNever();
1477
1478 // Calls to functions with always-inline attributes should be inlined
1479 // whenever possible.
Peter Collingbourne68a88972014-05-19 18:25:54 +00001480 if (CS.hasFnAttr(Attribute::AlwaysInline)) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001481 if (isInlineViable(*Callee))
1482 return llvm::InlineCost::getAlways();
1483 return llvm::InlineCost::getNever();
1484 }
1485
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001486 // Never inline functions with conflicting attributes (unless callee has
1487 // always-inline attribute).
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001488 if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee, CalleeTTI))
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001489 return llvm::InlineCost::getNever();
1490
Paul Robinsondcbe35b2013-11-18 21:44:03 +00001491 // Don't inline this call if the caller has the optnone attribute.
1492 if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone))
1493 return llvm::InlineCost::getNever();
1494
Sanjoy Das5ce32722016-04-08 00:48:30 +00001495 // Don't inline functions which can be interposed at link-time. Don't inline
1496 // functions marked noinline or call sites marked noinline.
1497  // Note: inlining non-exact non-interposable functions is fine, since we know
1498 // we have *a* correct implementation of the source level function.
Chad Rosier567556a2016-04-28 14:47:23 +00001499 if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
1500 CS.isNoInline())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001501 return llvm::InlineCost::getNever();
1502
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001503 DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
Chad Rosier567556a2016-04-28 14:47:23 +00001504 << "...\n");
Andrew Trickcaa500b2011-10-01 01:27:56 +00001505
Sean Silvaab6a6832016-07-23 04:22:50 +00001506  CallAnalyzer CA(CalleeTTI, GetAssumptionCache, PSI, *Callee,
                  DefaultThreshold, CS);
Chandler Carruth0539c072012-03-31 12:42:41 +00001507 bool ShouldInline = CA.analyzeCall(CS);
Dan Gohman4552e3c2009-10-13 18:30:07 +00001508
Chandler Carruth0539c072012-03-31 12:42:41 +00001509 DEBUG(CA.dump());
1510
1511 // Check if there was a reason to force inlining or no inlining.
1512 if (!ShouldInline && CA.getCost() < CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001513 return InlineCost::getNever();
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001514 if (ShouldInline && CA.getCost() >= CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001515 return InlineCost::getAlways();
Andrew Trickcaa500b2011-10-01 01:27:56 +00001516
Chandler Carruth0539c072012-03-31 12:42:41 +00001517 return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
Dan Gohman4552e3c2009-10-13 18:30:07 +00001518}
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001519
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001520bool llvm::isInlineViable(Function &F) {
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001521 bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001522 for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001523 // Disallow inlining of functions which contain indirect branches or
1524 // blockaddresses.
1525 if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001526 return false;
1527
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001528 for (auto &II : *BI) {
1529 CallSite CS(&II);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001530 if (!CS)
1531 continue;
1532
1533 // Disallow recursive calls.
1534 if (&F == CS.getCalledFunction())
1535 return false;
1536
1537 // Disallow calls which expose returns-twice to a function not previously
1538 // attributed as such.
1539 if (!ReturnsTwice && CS.isCall() &&
1540 cast<CallInst>(CS.getInstruction())->canReturnTwice())
1541 return false;
Reid Kleckner223de262015-04-14 20:38:14 +00001542
Reid Kleckner60381792015-07-07 22:25:32 +00001543 // Disallow inlining functions that call @llvm.localescape. Doing this
Reid Kleckner223de262015-04-14 20:38:14 +00001544 // correctly would require major changes to the inliner.
1545 if (CS.getCalledFunction() &&
1546 CS.getCalledFunction()->getIntrinsicID() ==
Reid Kleckner60381792015-07-07 22:25:32 +00001547 llvm::Intrinsic::localescape)
Reid Kleckner223de262015-04-14 20:38:14 +00001548 return false;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001549 }
1550 }
1551
1552 return true;
1553}