//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

// Threshold to use when optsize is specified (and there is no
// -inline-threshold).
const int OptSizeThreshold = 75;

// Threshold to use when -Oz is specified (and there is no -inline-threshold).
const int OptMinSizeThreshold = 25;

// Threshold to use when -O[34] is specified (and there is no
// -inline-threshold).
const int OptAggressiveThreshold = 275;

static cl::opt<int> DefaultInlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

// We introduce this threshold to help performance of instrumentation-based
// PGO before we actually hook up the inliner with analysis passes such as BPI
// and BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(225),
    cl::desc("Threshold for inlining functions with cold attribute"));
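
// Note (added commentary, not from the upstream source): these knobs interact
// in updateThreshold() below. Roughly: an explicit -inline-threshold
// suppresses the optsize/minsize clamping, a minsize (-Oz) caller is clamped
// to OptMinSizeThreshold (25), an optsize (-Os) caller to OptSizeThreshold
// (75), and the inlinehint/cold adjustments are applied afterwards.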

namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// The cache of @llvm.assume intrinsics.
  AssumptionCacheTracker *ACT;

  // The called function.
  Function &F;

  // The candidate callsite being analyzed. Please do not use this to do
  // analysis in the caller function; we want the inline cost query to be
  // easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int FiftyPercentVectorBonus, TenPercentVectorBonus;
  int VectorBonus;

  // While we walk the potentially-inlined instructions, we build up and
  // maintain a mapping of simplified values specific to this callsite. The
  // idea is to propagate any special information we have about arguments to
  // this call through the inlinable section of the function, and account for
  // likely simplifications post-inlining. The most important aspect we track
  // is CFG altering simplifications -- when we prove a basic block dead, that
  // can cause dramatic shifts in the cost of inlining a function.
  DenseMap<Value *, Constant *> SimplifiedValues;

  // Keep track of the values which map back (through function arguments) to
  // allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  // The mapping of caller Alloca values to their accumulated cost savings. If
  // we have to disable SROA for one of the allocas, this tells us how much
  // cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  // Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt> > ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPOffsetConstant(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *); void visit(Module &);
  void visit(Function *); void visit(Function &);
  void visit(BasicBlock *); void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI, AssumptionCacheTracker *ACT,
               Function &Callee, int Threshold, CallSite CSArg)
      : TTI(TTI), ACT(ACT), F(Callee), CandidateCS(CSArg), Threshold(Threshold),
        Cost(0), IsCallerRecursive(false), IsRecursiveCall(false),
        ExposesReturnsTwice(false), HasDynamicAlloca(false),
        ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
        HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
        NumVectorInstructions(0), FiftyPercentVectorBonus(0),
        TenPercentVectorBonus(0), VectorBonus(0), NumConstantArgs(0),
        NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), NumConstantPtrCmps(0),
        NumConstantPtrDiffs(0), NumInstructionsSimplified(0),
        SROACostSavings(0), SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

/// \brief Check whether a GEP's indices are all constant.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPOffsetConstant(GetElementPtrInst &GEP) {
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
      return false;

  return true;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero()) continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = dyn_cast<StructType>(*GTI)) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}
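
// Worked example (added commentary): for an inbounds GEP like
//   getelementptr inbounds {i32, i64}, {i32, i64}* %p, i32 0, i32 1
// on a typical 64-bit target, the struct-index branch above reads the field
// offset of element 1 from the StructLayout (8 with the usual alignment of
// i64), so the loop leaves Offset == 8.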

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca, and handle that case.
  if (I.isArrayAllocation()) {
    if (Constant *Size = SimplifiedValues.lookup(I.getArraySize())) {
      ConstantInt *AllocSize = dyn_cast<ConstantInt>(Size);
      assert(AllocSize && "Allocation size not a constant int?");
      Type *Ty = I.getAllocatedType();
      AllocatedSize += Ty->getPrimitiveSizeInBits() * AllocSize->getZExtValue();
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    Type *Ty = I.getAllocatedType();
    AllocatedSize += DL.getTypeAllocSize(Ty);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate = lookupSROAArgAndCost(I.getPointerOperand(),
                                            SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded, and disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return false;
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here, we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  if (isGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return false;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getBitCast(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offsets through casts
  std::pair<Value *, APInt> BaseAndOffset
    = ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getPtrToInt(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset
      = ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getIntToPtr(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getCast(I.getOpcode(), COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  Constant *COp = dyn_cast<Constant>(Operand);
  if (!COp)
    COp = SimplifiedValues.lookup(Operand);
  if (COp) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    if (Constant *C = ConstantFoldInstOperands(&I, COp, DL)) {
      SimplifiedValues[&I] = C;
      return true;
    }
  }

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  unsigned ArgNo = A->getArgNo();
  return CandidateCS.paramHasAttr(ArgNo+1, Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in the scenario below, inlining hot_call_X() may
  // be beneficial:
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In the future, we should elaborate on this based on BPI and BFI in
  // more general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  // If -inline-threshold is not given, listen to the optsize and minsize
  // attributes when they would decrease the threshold.
  Function *Caller = CS.getCaller();

  if (DefaultInlineThreshold.getNumOccurrences() == 0) {
    if (Caller->optForMinSize() && OptMinSizeThreshold < Threshold)
      Threshold = OptMinSizeThreshold;
    else if (Caller->optForSize() && OptSizeThreshold < Threshold)
      Threshold = OptSizeThreshold;
  }

  // If profile information is available, use it to adjust the threshold of
  // hot and cold functions.
  // FIXME: The heuristics used below for determining hotness and coldness are
  // based on preliminary SPEC tuning and may not be optimal. Replace this with
  // a well-tuned heuristic based on *callsite* hotness and not callee hotness.
  uint64_t FunctionCount = 0, MaxFunctionCount = 0;
  bool HasPGOCounts = false;
  if (Callee.getEntryCount() && Callee.getParent()->getMaximumFunctionCount()) {
    HasPGOCounts = true;
    FunctionCount = Callee.getEntryCount().getValue();
    MaxFunctionCount = Callee.getParent()->getMaximumFunctionCount().getValue();
  }

  // Listen to the inlinehint attribute or profile-based hotness information
  // when it would increase the threshold and the caller does not need to
  // minimize its size.
  bool InlineHint =
      Callee.hasFnAttribute(Attribute::InlineHint) ||
      (HasPGOCounts &&
       FunctionCount >= (uint64_t)(0.3 * (double)MaxFunctionCount));
  if (InlineHint && HintThreshold > Threshold && !Caller->optForMinSize())
    Threshold = HintThreshold;

  // Listen to the cold attribute or profile-based coldness information
  // when it would decrease the threshold.
  bool ColdCallee =
      Callee.hasFnAttribute(Attribute::Cold) ||
      (HasPGOCounts &&
       FunctionCount <= (uint64_t)(0.01 * (double)MaxFunctionCount));
  // Command line argument for DefaultInlineThreshold will override the default
  // ColdThreshold. If we have -inline-threshold but no -inlinecold-threshold,
  // do not use the default cold threshold even if it is smaller.
  if ((DefaultInlineThreshold.getNumOccurrences() == 0 ||
       ColdThreshold.getNumOccurrences() > 0) &&
      ColdCallee && ColdThreshold < Threshold)
    Threshold = ColdThreshold;

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();
}

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  if (Constant *CLHS = dyn_cast<Constant>(LHS)) {
    if (Constant *CRHS = dyn_cast<Constant>(RHS))
      if (Constant *C = ConstantExpr::getCompare(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}
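
// Example of the common-base folding above (added commentary): if %a and %b
// are both tracked in ConstantOffsetPtrs as (%base, 0) and (%base, 4), then
// an `icmp ult` of %a and %b folds to true via the constant offsets and the
// compare is modeled as free after inlining.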

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  Value *SimpleV = nullptr;
  if (auto FI = dyn_cast<FPMathOperator>(&I))
    SimpleV =
        SimplifyFPBinOp(I.getOpcode(), LHS, RHS, FI->getFastMathFlags(), DL);
  else
    SimpleV = SimplifyBinOp(I.getOpcode(), LHS, RHS, DL);

  if (Constant *C = dyn_cast_or_null<Constant>(SimpleV)) {
    SimplifiedValues[&I] = C;
    return true;
  }

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  Constant *C = dyn_cast<Constant>(I.getAggregateOperand());
  if (!C)
    C = SimplifiedValues.lookup(I.getAggregateOperand());
  if (C) {
    SimplifiedValues[&I] = ConstantExpr::getExtractValue(C, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  Constant *AggC = dyn_cast<Constant>(I.getAggregateOperand());
  if (!AggC)
    AggC = SimplifiedValues.lookup(I.getAggregateOperand());
  Constant *InsertedC = dyn_cast<Constant>(I.getInsertedValueOperand());
  if (!InsertedC)
    InsertedC = SimplifiedValues.lookup(I.getInsertedValueOperand());
  if (AggC && InsertedC) {
    SimplifiedValues[&I] = ConstantExpr::getInsertValue(AggC, InsertedC,
                                                        I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
       I != E; ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}
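
// Illustrative example (added commentary): if both arguments of a call to a
// foldable intrinsic such as @llvm.pow.f64 have simplified to constants,
// ConstantFoldCall above folds the call to its constant result and the call
// site is modeled as free.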

bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() &&
      cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        return Base::visitCallSite(CS);

      case Intrinsic::load_relative:
        // This is normally lowered to 4 LLVM instructions.
        Cost += 3 * InlineConstants::InstrCost;
        return false;

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        // SROA can usually chew through these intrinsics, but they aren't free.
        return false;
      case Intrinsic::localescape:
        HasFrameEscape = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getParent()->getParent()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F)
    return Base::visitCallSite(CS);

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
  CallAnalyzer CA(TTI, ACT, *F, InlineConstants::IndirectCallThreshold, CS);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // threshold to get the bonus we want to apply, but don't go below zero.
    Cost -= std::max(0, CA.getThreshold() - CA.getCost());
  }

  return Base::visitCallSite(CS);
}

bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Otherwise, we need to accumulate a cost proportional to the number of
  // distinct successor blocks. This fan-out in the CFG cannot be represented
  // for free even if we can represent the core switch as a jumptable that
  // takes a single instruction.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.
  SmallPtrSet<BasicBlock *, 8> SuccessorBlocks;
  SuccessorBlocks.insert(SI.getDefaultDest());
  for (auto I = SI.case_begin(), E = SI.case_end(); I != E; ++I)
    SuccessorBlocks.insert(I.getCaseSuccessor());
  // Add cost corresponding to the number of distinct destinations. The first
  // we model as free because of fallthrough.
  Cost += (SuccessorBlocks.size() - 1) * InlineConstants::InstrCost;
  return false;
}
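
// Cost example (added commentary): a switch whose cases reach three distinct
// blocks plus a distinct default block has four successors, so it accrues
// 3 * InlineConstants::InstrCost; the first destination is modeled as free
// fallthrough per the comment above.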

bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
  // We never want to inline functions that contain an indirectbr. This is
  // incorrect because all the blockaddresses (in static global initializers
  // for example) would be referring to the original function, and this
  // indirect jump would jump from the inlined copy of the function into the
  // original function which is extremely undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions with
  // indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function.
  HasIndirectBr = true;
  return false;
}

bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a resume instruction.
  return false;
}

bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a cleanupret instruction.
  return false;
}

bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a catchret instruction.
  return false;
}

bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
  // FIXME: It might be reasonable to discount the cost of instructions
  // leading to unreachable, as they have the lowest possible impact on both
  // runtime and code size.
  return true; // No actual code is needed for unreachable.
}

bool CallAnalyzer::visitInstruction(Instruction &I) {
  // Some instructions are free. All of the free intrinsics can also be
  // handled by SROA, etc.
  if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
    return true;

  // We found something we don't understand or can't handle. Mark any SROA-able
  // values in the operand list as no longer viable.
  for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
    disableSROA(*OI);

  return false;
}


/// \brief Analyze a basic block for its contribution to the inline cost.
///
/// This method walks the analyzer over every instruction in the given basic
/// block and accounts for their cost during inlining at this callsite. It
/// aborts early if the threshold has been exceeded or an impossible-to-inline
/// construct has been detected. It returns false if inlining is no longer
/// viable, and true if inlining remains viable.
bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
                                SmallPtrSetImpl<const Value *> &EphValues) {
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
    // FIXME: Currently, the number of instructions in a function, regardless
    // of our ability to simplify them during inlining to constants or dead
    // code, is actually used by the vector bonus heuristic. As long as that's
    // true, we have to special case debug intrinsics here to prevent
    // differences in inlining due to debug symbols. Eventually, the number of
    // unsimplified instructions shouldn't factor into the cost computation,
    // but until then, hack around it here.
    if (isa<DbgInfoIntrinsic>(I))
      continue;

    // Skip ephemeral values.
    if (EphValues.count(&*I))
      continue;

    ++NumInstructions;
    if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
      ++NumVectorInstructions;

    // If the instruction is floating point, and the target says this operation
    // is expensive or the function has the "use-soft-float" attribute, this may
    // eventually become a library call. Treat the cost as such.
    if (I->getType()->isFloatingPointTy()) {
      bool hasSoftFloatAttr = false;

      // If the function has the "use-soft-float" attribute, mark it as
      // expensive.
      if (F.hasFnAttribute("use-soft-float")) {
        Attribute Attr = F.getFnAttribute("use-soft-float");
        StringRef Val = Attr.getValueAsString();
        if (Val == "true")
          hasSoftFloatAttr = true;
      }

      if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
          hasSoftFloatAttr)
        Cost += InlineConstants::CallPenalty;
    }

    // If the instruction simplified to a constant, there is no cost to this
    // instruction. Visit the instructions using our InstVisitor to account for
    // all of the per-instruction logic. The visit tree returns true if we
    // consumed the instruction in any way, and false if the instruction's base
    // cost should count against inlining.
    if (Base::visit(&*I))
      ++NumInstructionsSimplified;
    else
      Cost += InlineConstants::InstrCost;

    // If visiting this instruction detected an uninlinable pattern, abort.
    if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
        HasIndirectBr || HasFrameEscape)
      return false;

    // If the caller is a recursive function then we don't want to inline
    // functions which allocate a lot of stack space because it would increase
    // the caller stack usage dramatically.
    if (IsCallerRecursive &&
        AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
      return false;

    // Check if we've passed the maximum possible threshold so we don't spin in
    // huge basic blocks that will never inline.
    if (Cost > Threshold)
      return false;
  }

  return true;
}

/// \brief Compute the base pointer and cumulative constant offsets for V.
///
/// This strips all constant offsets off of V, leaving it the base pointer, and
/// accumulates the total constant offset applied in the returned constant. It
/// returns null if V is not a pointer, and returns the constant '0' if there
/// are no constant offsets applied.
ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
  if (!V->getType()->isPointerTy())
    return nullptr;

  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  APInt Offset = APInt::getNullValue(IntPtrWidth);

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<Value *, 4> Visited;
  Visited.insert(V);
  do {
    if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
      if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
        return nullptr;
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
      if (GA->isInterposable())
        break;
      V = GA->getAliasee();
    } else {
      break;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  Type *IntPtrTy = DL.getIntPtrType(V->getContext());
  return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
}
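
// A small illustration of what this computes: given IR such as
//   %a = alloca [4 x i32]
//   %p = getelementptr inbounds [4 x i32], [4 x i32]* %a, i32 0, i32 2
// calling this on %p rewrites V to point at %a and returns the constant 8
// (two i32 elements past the base).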

/// \brief Analyze a call site for potential inlining.
///
/// Returns true if inlining this call is viable, and false if it is not
/// viable. It computes the cost and adjusts the threshold based on numerous
/// factors and heuristics. If this method returns false but the computed cost
/// is below the computed threshold, then inlining was forcibly disabled by
/// some artifact of the routine.
bool CallAnalyzer::analyzeCall(CallSite CS) {
  ++NumCallsAnalyzed;

  // Perform some tweaks to the cost and threshold based on the direct
  // callsite information.

  // We want to more aggressively inline vector-dense kernels, so up the
  // threshold, and we'll lower it if the % of vector instructions gets too
  // low. Note that these bonuses are somewhat arbitrary and evolved over time
  // by accident as much as because they are principled bonuses.
  //
  // FIXME: It would be nice to remove all such bonuses. At least it would be
  // nice to base the bonus values on something more scientific.
  assert(NumInstructions == 0);
  assert(NumVectorInstructions == 0);

  // Update the threshold based on callsite properties.
  updateThreshold(CS, F);

  FiftyPercentVectorBonus = 3 * Threshold / 2;
  TenPercentVectorBonus = 3 * Threshold / 4;
  const DataLayout &DL = F.getParent()->getDataLayout();

  // Track whether the post-inlining function would have more than one basic
  // block. A single basic block is often intended for inlining. Balloon the
  // threshold by 50% until we pass the single-BB phase.
  bool SingleBB = true;
  int SingleBBBonus = Threshold / 2;

  // Speculatively apply all possible bonuses to Threshold. If cost exceeds
  // this Threshold any time, and cost cannot decrease, we can stop processing
  // the rest of the function body.
  Threshold += (SingleBBBonus + FiftyPercentVectorBonus);
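
  // Concretely, starting from the default threshold of 225: SingleBBBonus =
  // 112 and FiftyPercentVectorBonus = 337, so the speculative ceiling becomes
  // 225 + 112 + 337 = 674 until the bonuses are withdrawn below once the
  // callee's real shape is known.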

  // Give out bonuses per argument, as the instructions setting them up will
  // be gone after inlining.
  for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
    if (CS.isByValArgument(I)) {
      // We approximate the number of loads and stores needed by dividing the
      // size of the byval type by the target's pointer size.
      PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
      unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
      unsigned PointerSize = DL.getPointerSizeInBits();
      // Ceiling division.
      unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;

      // If it generates more than 8 stores it is likely to be expanded as an
      // inline memcpy so we take that as an upper bound. Otherwise we assume
      // one load and one store per word copied.
      // FIXME: The maxStoresPerMemcpy setting from the target should be used
      // here instead of a magic number of 8, but it's not available via
      // DataLayout.
      NumStores = std::min(NumStores, 8U);

      Cost -= 2 * NumStores * InlineConstants::InstrCost;
    } else {
      // For non-byval arguments subtract off one instruction per call
      // argument.
      Cost -= InlineConstants::InstrCost;
    }
  }
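
  // Worked example: a byval argument of type [32 x i64] on a 64-bit target
  // gives TypeSize = 2048 and PointerSize = 64, so NumStores computes to 32
  // and saturates at 8; with InstrCost at its usual value of 5, the callee is
  // credited 2 * 8 * 5 = 80 cost units for the copy that inlining removes.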

  // If there is only one call of the function, and it has internal linkage,
  // the cost of inlining it drops dramatically.
  bool OnlyOneCallAndLocalLinkage = F.hasLocalLinkage() && F.hasOneUse() &&
                                    &F == CS.getCalledFunction();
  if (OnlyOneCallAndLocalLinkage)
    Cost += InlineConstants::LastCallToStaticBonus;
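
  // LastCallToStaticBonus is a large negative value (-15000 at the time of
  // writing): the whole function body goes away once its sole call is
  // inlined, so this all but guarantees the inline happens.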

  // If this function uses the coldcc calling convention, prefer not to inline
  // it.
  if (F.getCallingConv() == CallingConv::Cold)
    Cost += InlineConstants::ColdccPenalty;

  // Check if we're done. This can happen due to bonuses and penalties.
  if (Cost > Threshold)
    return false;

  if (F.empty())
    return true;

  Function *Caller = CS.getInstruction()->getParent()->getParent();
  // Check if the caller function is recursive itself.
  for (User *U : Caller->users()) {
    CallSite Site(U);
    if (!Site)
      continue;
    Instruction *I = Site.getInstruction();
    if (I->getParent()->getParent() == Caller) {
      IsCallerRecursive = true;
      break;
    }
  }
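
  // Note this only catches direct self-recursion: a use of Caller that is
  // itself a call sitting inside Caller's own body. Mutual recursion through
  // an intermediate function is not detected here.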

  // Populate our simplified values by mapping from function arguments to call
  // arguments with known important simplifications.
  CallSite::arg_iterator CAI = CS.arg_begin();
  for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
       FAI != FAE; ++FAI, ++CAI) {
    assert(CAI != CS.arg_end());
    if (Constant *C = dyn_cast<Constant>(CAI))
      SimplifiedValues[&*FAI] = C;

    Value *PtrArg = *CAI;
    if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
      ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());

      // We can SROA any pointer arguments derived from alloca instructions.
      if (isa<AllocaInst>(PtrArg)) {
        SROAArgValues[&*FAI] = PtrArg;
        SROAArgCosts[PtrArg] = 0;
      }
    }
  }
  NumConstantArgs = SimplifiedValues.size();
  NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
  NumAllocaArgs = SROAArgValues.size();
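
  // For a call like 'f(42, &local)', where 'local' is an alloca in the
  // caller, the first formal argument is seeded into SimplifiedValues as the
  // constant 42 and the second becomes an SROA candidate with a starting cost
  // of zero; later per-instruction simplifications build on both maps.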

  // FIXME: If a caller has multiple calls to a callee, we end up recomputing
  // the ephemeral values multiple times (and they're completely determined by
  // the callee, so this is purely duplicate work).
  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(&F, &ACT->getAssumptionCache(F), EphValues);
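
  // Ephemeral values are those live only to feed @llvm.assume-style
  // intrinsics; they disappear after inlining, so analyzeBlock excludes them
  // from the instruction counts.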
Hal Finkel57f03dd2014-09-07 13:49:57 +00001315
Chandler Carruth0539c072012-03-31 12:42:41 +00001316 // The worklist of live basic blocks in the callee *after* inlining. We avoid
1317 // adding basic blocks of the callee which can be proven to be dead for this
1318 // particular call site in order to get more accurate cost estimates. This
1319 // requires a somewhat heavyweight iteration pattern: we need to walk the
1320 // basic blocks in a breadth-first order as we insert live successors. To
1321 // accomplish this, prioritizing for small iterations because we exit after
1322 // crossing our threshold, we use a small-size optimized SetVector.
1323 typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
1324 SmallPtrSet<BasicBlock *, 16> > BBSetVector;
1325 BBSetVector BBWorklist;
1326 BBWorklist.insert(&F.getEntryBlock());
1327 // Note that we *must not* cache the size, this loop grows the worklist.
1328 for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
    // Bail out the moment we cross the threshold. This means we'll under-count
    // the cost, but only when undercounting doesn't matter.
    if (Cost > Threshold)
      break;

    BasicBlock *BB = BBWorklist[Idx];
    if (BB->empty())
      continue;

    // Disallow inlining a blockaddress. A blockaddress only has defined
    // behavior for an indirect branch in the same function, and we do not
    // currently support inlining indirect branches. But, the inliner may not
    // see an indirect branch that ends up being dead code at a particular call
    // site. If the blockaddress escapes the function, e.g., via a global
    // variable, inlining may lead to an invalid cross-function reference.
    if (BB->hasAddressTaken())
      return false;

    // Analyze the cost of this block. If we blow through the threshold, this
    // returns false, and we can bail out.
    if (!analyzeBlock(BB, EphValues))
      return false;

    TerminatorInst *TI = BB->getTerminator();

    // Add in the live successors by first checking whether we have a
    // terminator that may be simplified based on the values simplified by
    // this call.
    if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
      if (BI->isConditional()) {
        Value *Cond = BI->getCondition();
        if (ConstantInt *SimpleCond
              = dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
          BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
          continue;
        }
      }
    } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
      Value *Cond = SI->getCondition();
      if (ConstantInt *SimpleCond
            = dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
        BBWorklist.insert(SI->findCaseValue(SimpleCond).getCaseSuccessor());
        continue;
      }
    }
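
    // For instance, if a constant argument folded a conditional branch to a
    // known direction above, only the taken successor is enqueued here and
    // the dead arm never contributes to Cost.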
Eric Christopher46308e62011-02-01 01:16:32 +00001373
Chandler Carruth0539c072012-03-31 12:42:41 +00001374 // If we're unable to select a particular successor, just count all of
1375 // them.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001376 for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
1377 ++TIdx)
Chandler Carruth0539c072012-03-31 12:42:41 +00001378 BBWorklist.insert(TI->getSuccessor(TIdx));
1379
1380 // If we had any successors at this point, than post-inlining is likely to
1381 // have them as well. Note that we assume any basic blocks which existed
1382 // due to branches or switches which folded above will also fold after
1383 // inlining.
1384 if (SingleBB && TI->getNumSuccessors() > 1) {
1385 // Take off the bonus we applied to the threshold.
1386 Threshold -= SingleBBBonus;
1387 SingleBB = false;
Eric Christopher46308e62011-02-01 01:16:32 +00001388 }
1389 }

  // If this is a noduplicate call, we can still inline as long as
  // inlining this would cause the removal of the callee (so the instruction
  // is not actually duplicated, just moved).
  if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
    return false;

  // We applied the maximum possible vector bonus at the beginning. Now,
  // subtract the excess bonus, if any, from the Threshold before
  // comparing against Cost.
  if (NumVectorInstructions <= NumInstructions / 10)
    Threshold -= FiftyPercentVectorBonus;
  else if (NumVectorInstructions <= NumInstructions / 2)
    Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus);
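
  // Continuing the default-threshold example: a callee that is at most 10%
  // vector instructions loses the full 337-point bonus, one between 10% and
  // 50% loses 337 - 168 = 169 points, and a majority-vector callee keeps the
  // whole bonus.
  //
  // The clamp at std::max(1, Threshold) below lets a call whose net cost
  // dropped to zero or below (a pure win from the argument bonuses) be
  // inlined even when the threshold itself has been driven to zero or
  // negative.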

  return Cost < std::max(1, Threshold);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// \brief Dump stats about this call's analysis.
LLVM_DUMP_METHOD void CallAnalyzer::dump() {
#define DEBUG_PRINT_STAT(x) dbgs() << "      " #x ": " << x << "\n"
  DEBUG_PRINT_STAT(NumConstantArgs);
  DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
  DEBUG_PRINT_STAT(NumAllocaArgs);
  DEBUG_PRINT_STAT(NumConstantPtrCmps);
  DEBUG_PRINT_STAT(NumConstantPtrDiffs);
  DEBUG_PRINT_STAT(NumInstructionsSimplified);
  DEBUG_PRINT_STAT(NumInstructions);
  DEBUG_PRINT_STAT(SROACostSavings);
  DEBUG_PRINT_STAT(SROACostSavingsLost);
  DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
  DEBUG_PRINT_STAT(Cost);
  DEBUG_PRINT_STAT(Threshold);
#undef DEBUG_PRINT_STAT
}
#endif

/// \brief Test whether two functions either both have or both lack the given
/// attribute.
template<typename AttrKind>
static bool attributeMatches(Function *F1, Function *F2, AttrKind Attr) {
  return F1->getFnAttribute(Attr) == F2->getFnAttribute(Attr);
}

/// \brief Test that there are no attribute conflicts between Caller and Callee
/// that prevent inlining.
static bool functionsHaveCompatibleAttributes(Function *Caller,
                                              Function *Callee,
                                              TargetTransformInfo &TTI) {
  return TTI.areInlineCompatible(Caller, Callee) &&
         AttributeFuncs::areInlineCompatible(*Caller, *Callee);
}

InlineCost llvm::getInlineCost(CallSite CS, int DefaultThreshold,
                               TargetTransformInfo &CalleeTTI,
                               AssumptionCacheTracker *ACT) {
  return getInlineCost(CS, CS.getCalledFunction(), DefaultThreshold, CalleeTTI,
                       ACT);
}

int llvm::computeThresholdFromOptLevels(unsigned OptLevel,
                                        unsigned SizeOptLevel) {
  if (OptLevel > 2)
    return OptAggressiveThreshold;
  if (SizeOptLevel == 1) // -Os
    return OptSizeThreshold;
  if (SizeOptLevel == 2) // -Oz
    return OptMinSizeThreshold;
  return DefaultInlineThreshold;
}

int llvm::getDefaultInlineThreshold() { return DefaultInlineThreshold; }

InlineCost llvm::getInlineCost(CallSite CS, Function *Callee,
                               int DefaultThreshold,
                               TargetTransformInfo &CalleeTTI,
                               AssumptionCacheTracker *ACT) {

  // Cannot inline indirect calls.
  if (!Callee)
    return llvm::InlineCost::getNever();

  // Calls to functions with always-inline attributes should be inlined
  // whenever possible.
  if (CS.hasFnAttr(Attribute::AlwaysInline)) {
    if (isInlineViable(*Callee))
      return llvm::InlineCost::getAlways();
    return llvm::InlineCost::getNever();
  }

  // Never inline functions with conflicting attributes (unless callee has
  // always-inline attribute).
  if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee, CalleeTTI))
    return llvm::InlineCost::getNever();

  // Don't inline this call if the caller has the optnone attribute.
  if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone))
    return llvm::InlineCost::getNever();

  // Don't inline functions which can be interposed at link-time. Don't inline
  // functions marked noinline or call sites marked noinline.
  // Note: inlining non-exact non-interposable functions is fine, since we
  // know we have *a* correct implementation of the source level function.
  if (Callee->isInterposable() ||
      Callee->hasFnAttribute(Attribute::NoInline) || CS.isNoInline())
    return llvm::InlineCost::getNever();

  DEBUG(llvm::dbgs() << "      Analyzing call of " << Callee->getName()
                     << "...\n");

  CallAnalyzer CA(CalleeTTI, ACT, *Callee, DefaultThreshold, CS);
  bool ShouldInline = CA.analyzeCall(CS);

  DEBUG(CA.dump());

  // Check if there was a reason to force inlining or no inlining.
  if (!ShouldInline && CA.getCost() < CA.getThreshold())
    return InlineCost::getNever();
  if (ShouldInline && CA.getCost() >= CA.getThreshold())
    return InlineCost::getAlways();
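
  // These two cases propagate analyzeCall's verdict rather than the raw
  // numbers: a cost below threshold that still failed analysis means some
  // structural property (an indirect branch, a blockaddress, etc.) vetoed
  // inlining outright, and the mirror case reports a forced inline.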

  return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
}

bool llvm::isInlineViable(Function &F) {
  bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
  for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
    // Disallow inlining of functions which contain indirect branches or
    // blockaddresses.
    if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
      return false;

    for (auto &II : *BI) {
      CallSite CS(&II);
      if (!CS)
        continue;

      // Disallow recursive calls.
      if (&F == CS.getCalledFunction())
        return false;

      // Disallow calls which expose returns-twice to a function not previously
      // attributed as such.
      if (!ReturnsTwice && CS.isCall() &&
          cast<CallInst>(CS.getInstruction())->canReturnTwice())
        return false;

      // Disallow inlining functions that call @llvm.localescape. Doing this
      // correctly would require major changes to the inliner.
      if (CS.getCalledFunction() &&
          CS.getCalledFunction()->getIntrinsicID() ==
              llvm::Intrinsic::localescape)
        return false;
    }
  }

  return true;
}