//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// The cache of @llvm.assume intrinsics.
  AssumptionCacheTracker *ACT;

  // The called function.
  Function &F;

  // The candidate callsite being analyzed. Please do not use this to do
  // analysis in the caller function; we want the inline cost query to be
  // easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int FiftyPercentVectorBonus, TenPercentVectorBonus;
  int VectorBonus;

  // While we walk the potentially-inlined instructions, we build up and
  // maintain a mapping of simplified values specific to this callsite. The
  // idea is to propagate any special information we have about arguments to
  // this call through the inlinable section of the function, and account for
  // likely simplifications post-inlining. The most important aspect we track
  // is CFG altering simplifications -- when we prove a basic block dead, that
  // can cause dramatic shifts in the cost of inlining a function.
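  //
  // For example, if a call site passes a literal constant for a flag
  // parameter, conditional branches on that flag inside the callee can fold
  // to a single successor, and the blocks that become unreachable for this
  // call site no longer contribute to the inline cost computed here.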
  DenseMap<Value *, Constant *> SimplifiedValues;

  // Keep track of the values which map back (through function arguments) to
  // allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  // The mapping of caller Alloca values to their accumulated cost savings. If
  // we have to disable SROA for one of the allocas, this tells us how much
  // cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  // Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt> > ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPOffsetConstant(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *); void visit(Module &);
  void visit(Function *); void visit(Function &);
  void visit(BasicBlock *); void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI, AssumptionCacheTracker *ACT,
               Function &Callee, int Threshold, CallSite CSArg)
      : TTI(TTI), ACT(ACT), F(Callee), CandidateCS(CSArg), Threshold(Threshold),
        Cost(0), IsCallerRecursive(false), IsRecursiveCall(false),
        ExposesReturnsTwice(false), HasDynamicAlloca(false),
        ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
        HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
        NumVectorInstructions(0), FiftyPercentVectorBonus(0),
        TenPercentVectorBonus(0), VectorBonus(0), NumConstantArgs(0),
        NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), NumConstantPtrCmps(0),
        NumConstantPtrDiffs(0), NumInstructionsSimplified(0),
        SROACostSavings(0), SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

/// \brief Check whether a GEP's indices are all constant.
///
/// Respects any simplified values known during the analysis of this callsite.
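///
/// For example, a GEP whose indices are all integer literals is trivially
/// constant-offset; an index that is only known to be constant through
/// SimplifiedValues (e.g. a formal argument bound to a constant at this call
/// site) counts as constant here as well.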
bool CallAnalyzer::isGEPOffsetConstant(GetElementPtrInst &GEP) {
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
      return false;

  return true;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
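///
/// As a rough example: for a GEP indexing field 1 of a '{ i32, i64 }' struct,
/// the StructLayout from DataLayout supplies the field offset (commonly 8
/// bytes with natural alignment), which is added into Offset; array-style
/// indices instead contribute index * allocation-size of the indexed type.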
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero()) continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = dyn_cast<StructType>(*GTI)) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca, and handle that case.
  if (I.isArrayAllocation()) {
    if (Constant *Size = SimplifiedValues.lookup(I.getArraySize())) {
      ConstantInt *AllocSize = dyn_cast<ConstantInt>(Size);
      assert(AllocSize && "Allocation size not a constant int?");
      Type *Ty = I.getAllocatedType();
      AllocatedSize += Ty->getPrimitiveSizeInBits() * AllocSize->getZExtValue();
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    Type *Ty = I.getAllocatedType();
    AllocatedSize += DL.getTypeAllocSize(Ty);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate = lookupSROAArgAndCost(I.getPointerOperand(),
                                            SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded, and disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return false;
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here, we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  if (isGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return false;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getBitCast(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offsets through casts
  std::pair<Value *, APInt> BaseAndOffset
    = ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getPtrToInt(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset
      = ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getIntToPtr(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getCast(I.getOpcode(), COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  Constant *COp = dyn_cast<Constant>(Operand);
  if (!COp)
    COp = SimplifiedValues.lookup(Operand);
  if (COp) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    if (Constant *C = ConstantFoldInstOperands(I.getOpcode(), I.getType(),
                                               COp, DL)) {
      SimplifiedValues[&I] = C;
      return true;
    }
  }

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  unsigned ArgNo = A->getArgNo();
  return CandidateCS.paramHasAttr(ArgNo + 1, Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  if (Constant *CLHS = dyn_cast<Constant>(LHS)) {
    if (Constant *CRHS = dyn_cast<Constant>(RHS))
      if (Constant *C = ConstantExpr::getCompare(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  Value *SimpleV = nullptr;
  if (auto FI = dyn_cast<FPMathOperator>(&I))
    SimpleV =
        SimplifyFPBinOp(I.getOpcode(), LHS, RHS, FI->getFastMathFlags(), DL);
  else
    SimpleV = SimplifyBinOp(I.getOpcode(), LHS, RHS, DL);

  if (Constant *C = dyn_cast_or_null<Constant>(SimpleV)) {
    SimplifiedValues[&I] = C;
    return true;
  }

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  Constant *C = dyn_cast<Constant>(I.getAggregateOperand());
  if (!C)
    C = SimplifiedValues.lookup(I.getAggregateOperand());
  if (C) {
    SimplifiedValues[&I] = ConstantExpr::getExtractValue(C, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  Constant *AggC = dyn_cast<Constant>(I.getAggregateOperand());
  if (!AggC)
    AggC = SimplifiedValues.lookup(I.getAggregateOperand());
  Constant *InsertedC = dyn_cast<Constant>(I.getInsertedValueOperand());
  if (!InsertedC)
    InsertedC = SimplifiedValues.lookup(I.getInsertedValueOperand());
  if (AggC && InsertedC) {
    SimplifiedValues[&I] = ConstantExpr::getInsertValue(AggC, InsertedC,
                                                        I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
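///
/// For example, a call to a constant-foldable intrinsic (say, llvm.ctpop) or a
/// libm routine LLVM knows how to fold, whose arguments are all constants at
/// this call site, is recorded in SimplifiedValues and treated as free.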
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
       I != E; ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}

bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() &&
      cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        return Base::visitCallSite(CS);

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        // SROA can usually chew through these intrinsics, but they aren't free.
        return false;
      case Intrinsic::localescape:
        HasFrameEscape = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getParent()->getParent()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F)
    return Base::visitCallSite(CS);

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
  CallAnalyzer CA(TTI, ACT, *F, InlineConstants::IndirectCallThreshold, CS);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // bonus we want to apply, but don't go below zero.
    Cost -= std::max(0, InlineConstants::IndirectCallThreshold - CA.getCost());
  }

  return Base::visitCallSite(CS);
}

bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
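  // For example, 'br i1 true, ...' or a conditional branch whose condition
  // SimplifiedValues maps to a constant is treated as free here.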
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Otherwise, we need to accumulate a cost proportional to the number of
  // distinct successor blocks. This fan-out in the CFG cannot be represented
  // for free even if we can represent the core switch as a jumptable that
  // takes a single instruction.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.
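  //
  // For example, a switch whose default and three case labels all branch to
  // distinct blocks has four distinct successors and accumulates
  // 3 * InlineConstants::InstrCost below.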
  SmallPtrSet<BasicBlock *, 8> SuccessorBlocks;
  SuccessorBlocks.insert(SI.getDefaultDest());
  for (auto I = SI.case_begin(), E = SI.case_end(); I != E; ++I)
    SuccessorBlocks.insert(I.getCaseSuccessor());
  // Add cost corresponding to the number of distinct destinations. The first
  // we model as free because of fallthrough.
  Cost += (SuccessorBlocks.size() - 1) * InlineConstants::InstrCost;
  return false;
}

bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
  // We never want to inline functions that contain an indirectbr. This is
  // incorrect because all the blockaddresses (in static global initializers
  // for example) would be referring to the original function, and this
  // indirect jump would jump from the inlined copy of the function into the
  // original function which is extremely undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions with
  // indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function.
  HasIndirectBr = true;
  return false;
}

bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a resume instruction.
  return false;
}

bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
  // FIXME: It might be reasonable to discount the cost of instructions leading
  // to unreachable as they have the lowest possible impact on both runtime and
  // code size.
  return true; // No actual code is needed for unreachable.
}

bool CallAnalyzer::visitInstruction(Instruction &I) {
  // Some instructions are free. All of the free intrinsics can also be
  // handled by SROA, etc.
  if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
    return true;

  // We found something we don't understand or can't handle. Mark any SROA-able
  // values in the operand list as no longer viable.
  for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
    disableSROA(*OI);

  return false;
}


/// \brief Analyze a basic block for its contribution to the inline cost.
///
/// This method walks the analyzer over every instruction in the given basic
/// block and accounts for their cost during inlining at this callsite. It
/// aborts early if the threshold has been exceeded or an impossible to inline
/// construct has been detected. It returns false if inlining is no longer
/// viable, and true if inlining remains viable.
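///
/// The "ephemeral" values in EphValues are values that are only used, directly
/// or transitively, by @llvm.assume calls; they are skipped so that assumptions
/// do not inflate the apparent size of the callee.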
bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
                                SmallPtrSetImpl<const Value *> &EphValues) {
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
    // FIXME: Currently, the number of instructions in a function regardless of
    // our ability to simplify them during inline to constants or dead code,
    // are actually used by the vector bonus heuristic. As long as that's true,
    // we have to special case debug intrinsics here to prevent differences in
    // inlining due to debug symbols. Eventually, the number of unsimplified
    // instructions shouldn't factor into the cost computation, but until then,
    // hack around it here.
    if (isa<DbgInfoIntrinsic>(I))
      continue;

    // Skip ephemeral values.
    if (EphValues.count(I))
      continue;

    ++NumInstructions;
    if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
      ++NumVectorInstructions;

    // If the instruction is floating point, and the target says this operation
    // is expensive or the function has the "use-soft-float" attribute, this
    // may eventually become a library call. Treat the cost as such.
    if (I->getType()->isFloatingPointTy()) {
      bool hasSoftFloatAttr = false;

      // If the function has the "use-soft-float" attribute, mark it as
      // expensive.
      if (F.hasFnAttribute("use-soft-float")) {
        Attribute Attr = F.getFnAttribute("use-soft-float");
        StringRef Val = Attr.getValueAsString();
        if (Val == "true")
          hasSoftFloatAttr = true;
      }

      if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
          hasSoftFloatAttr)
        Cost += InlineConstants::CallPenalty;
    }

    // If the instruction simplified to a constant, there is no cost to this
    // instruction. Visit the instructions using our InstVisitor to account for
    // all of the per-instruction logic. The visit tree returns true if we
    // consumed the instruction in any way, and false if the instruction's base
    // cost should count against inlining.
    if (Base::visit(I))
      ++NumInstructionsSimplified;
    else
      Cost += InlineConstants::InstrCost;

    // If visiting this instruction detected an uninlinable pattern, abort.
    if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
        HasIndirectBr || HasFrameEscape)
      return false;

    // If the caller is a recursive function then we don't want to inline
    // functions which allocate a lot of stack space because it would increase
    // the caller stack usage dramatically.
    if (IsCallerRecursive &&
        AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
      return false;

    // Check if we've passed the maximum possible threshold so we don't spin in
    // huge basic blocks that will never inline.
    if (Cost > Threshold)
      return false;
  }

  return true;
}

/// \brief Compute the base pointer and cumulative constant offsets for V.
///
/// This strips all constant offsets off of V, leaving it the base pointer, and
/// accumulates the total constant offset applied in the returned constant. It
/// returns 0 if V is not a pointer, and returns the constant '0' if there are
/// no constant offsets applied.
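///
/// For example, if V is a bitcast of an inbounds GEP four bytes into a caller
/// alloca, V is rewritten to the alloca itself and the returned constant holds
/// the value 4 (as a pointer-width integer).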
ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
  if (!V->getType()->isPointerTy())
    return nullptr;

  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  APInt Offset = APInt::getNullValue(IntPtrWidth);

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<Value *, 4> Visited;
  Visited.insert(V);
  do {
    if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
      if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
        return nullptr;
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
      if (GA->mayBeOverridden())
        break;
      V = GA->getAliasee();
    } else {
      break;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  Type *IntPtrTy = DL.getIntPtrType(V->getContext());
  return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
}

/// \brief Analyze a call site for potential inlining.
///
/// Returns true if inlining this call is viable, and false if it is not
/// viable. It computes the cost and adjusts the threshold based on numerous
/// factors and heuristics. If this method returns false but the computed cost
/// is below the computed threshold, then inlining was forcibly disabled by
/// some artifact of the routine.
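///
/// As a rough sketch of the bonus math below, assuming the commonly used
/// default threshold of 225: FiftyPercentVectorBonus is 337, the
/// single-basic-block bonus is 112, so the speculative threshold used while
/// walking the body starts at 225 + 112 + 337 = 674.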
bool CallAnalyzer::analyzeCall(CallSite CS) {
  ++NumCallsAnalyzed;

  // Perform some tweaks to the cost and threshold based on the direct
  // callsite information.

  // We want to more aggressively inline vector-dense kernels, so up the
  // threshold, and we'll lower it if the % of vector instructions gets too
  // low. Note that these bonuses are somewhat arbitrary and evolved over time
  // by accident as much as because they are principled bonuses.
  //
  // FIXME: It would be nice to remove all such bonuses. At least it would be
  // nice to base the bonus values on something more scientific.
  assert(NumInstructions == 0);
  assert(NumVectorInstructions == 0);
  FiftyPercentVectorBonus = 3 * Threshold / 2;
  TenPercentVectorBonus = 3 * Threshold / 4;
  const DataLayout &DL = F.getParent()->getDataLayout();

  // Track whether the post-inlining function would have more than one basic
  // block. A single basic block is often intended for inlining. Balloon the
  // threshold by 50% until we pass the single-BB phase.
  bool SingleBB = true;
  int SingleBBBonus = Threshold / 2;

  // Speculatively apply all possible bonuses to Threshold. If cost exceeds
  // this Threshold any time, and cost cannot decrease, we can stop processing
  // the rest of the function body.
  Threshold += (SingleBBBonus + FiftyPercentVectorBonus);

  // Give out bonuses per argument, as the instructions setting them up will
  // be gone after inlining.
  for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
    if (CS.isByValArgument(I)) {
      // We approximate the number of loads and stores needed by dividing the
      // size of the byval type by the target's pointer size.
      PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
      unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
      unsigned PointerSize = DL.getPointerSizeInBits();
      // Ceiling division.
      unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;
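      // For example, a 32-byte byval struct on a target with 64-bit pointers
      // yields NumStores = 4, which forgives roughly eight instructions'
      // worth of cost below.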
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001093
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001094 // If it generates more than 8 stores it is likely to be expanded as an
1095 // inline memcpy so we take that as an upper bound. Otherwise we assume
1096 // one load and one store per word copied.
1097 // FIXME: The maxStoresPerMemcpy setting from the target should be used
1098 // here instead of a magic number of 8, but it's not available via
1099 // DataLayout.
1100 NumStores = std::min(NumStores, 8U);
1101
1102 Cost -= 2 * NumStores * InlineConstants::InstrCost;
1103 } else {
1104 // For non-byval arguments subtract off one instruction per call
1105 // argument.
1106 Cost -= InlineConstants::InstrCost;
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001107 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001108 }
1109
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001110 // If there is only one call of the function, and it has internal linkage,
1111 // the cost of inlining it drops dramatically.
James Molloy4f6fb952012-12-20 16:04:27 +00001112 bool OnlyOneCallAndLocalLinkage = F.hasLocalLinkage() && F.hasOneUse() &&
1113 &F == CS.getCalledFunction();
1114 if (OnlyOneCallAndLocalLinkage)
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001115 Cost += InlineConstants::LastCallToStaticBonus;
1116
1117 // If the instruction after the call, or if the normal destination of the
1118 // invoke is an unreachable instruction, the function is noreturn. As such,
1119 // there is little point in inlining this unless there is literally zero
1120 // cost.
1121 Instruction *Instr = CS.getInstruction();
1122 if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
1123 if (isa<UnreachableInst>(II->getNormalDest()->begin()))
Chandler Carrutha004f222015-05-27 02:49:05 +00001124 Threshold = 0;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001125 } else if (isa<UnreachableInst>(++BasicBlock::iterator(Instr)))
Chandler Carrutha004f222015-05-27 02:49:05 +00001126 Threshold = 0;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001127
1128 // If this function uses the coldcc calling convention, prefer not to inline
1129 // it.
1130 if (F.getCallingConv() == CallingConv::Cold)
1131 Cost += InlineConstants::ColdccPenalty;
1132
1133 // Check if we're done. This can happen due to bonuses and penalties.
1134 if (Cost > Threshold)
1135 return false;
1136
Chandler Carruth0539c072012-03-31 12:42:41 +00001137 if (F.empty())
1138 return true;
1139
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001140 Function *Caller = CS.getInstruction()->getParent()->getParent();
1141 // Check if the caller function is recursive itself.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001142 for (User *U : Caller->users()) {
1143 CallSite Site(U);
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001144 if (!Site)
1145 continue;
1146 Instruction *I = Site.getInstruction();
1147 if (I->getParent()->getParent() == Caller) {
1148 IsCallerRecursive = true;
1149 break;
1150 }
1151 }
1152
Chandler Carruth0539c072012-03-31 12:42:41 +00001153 // Populate our simplified values by mapping from function arguments to call
1154 // arguments with known important simplifications.
1155 CallSite::arg_iterator CAI = CS.arg_begin();
1156 for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
1157 FAI != FAE; ++FAI, ++CAI) {
1158 assert(CAI != CS.arg_end());
1159 if (Constant *C = dyn_cast<Constant>(CAI))
1160 SimplifiedValues[FAI] = C;
1161
1162 Value *PtrArg = *CAI;
1163 if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
1164 ConstantOffsetPtrs[FAI] = std::make_pair(PtrArg, C->getValue());
1165
1166 // We can SROA any pointer arguments derived from alloca instructions.
1167 if (isa<AllocaInst>(PtrArg)) {
1168 SROAArgValues[FAI] = PtrArg;
1169 SROAArgCosts[PtrArg] = 0;
1170 }
1171 }
1172 }
1173 NumConstantArgs = SimplifiedValues.size();
1174 NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
1175 NumAllocaArgs = SROAArgValues.size();
1176
Hal Finkel57f03dd2014-09-07 13:49:57 +00001177 // FIXME: If a caller has multiple calls to a callee, we end up recomputing
1178 // the ephemeral values multiple times (and they're completely determined by
1179 // the callee, so this is purely duplicate work).
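// Ephemeral values are values used only (transitively) by @llvm.assume
// calls; they generate no real code once inlined, so analyzeBlock skips them
// when accumulating cost.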
1180 SmallPtrSet<const Value *, 32> EphValues;
Bjorn Steinbrink6f972a12015-02-12 21:04:22 +00001181 CodeMetrics::collectEphemeralValues(&F, &ACT->getAssumptionCache(F), EphValues);
Hal Finkel57f03dd2014-09-07 13:49:57 +00001182
Chandler Carruth0539c072012-03-31 12:42:41 +00001183 // The worklist of live basic blocks in the callee *after* inlining. We avoid
1184 // adding basic blocks of the callee which can be proven to be dead for this
1185 // particular call site in order to get more accurate cost estimates. This
1186 // requires a somewhat heavyweight iteration pattern: we need to walk the
1187 // basic blocks in a breadth-first order as we insert live successors. To
1188 // accomplish this, and because we bail out as soon as we cross our threshold
1189 // (so the worklist usually stays small), we use a small-size optimized SetVector.
1190 typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
1191 SmallPtrSet<BasicBlock *, 16> > BBSetVector;
1192 BBSetVector BBWorklist;
1193 BBWorklist.insert(&F.getEntryBlock());
1194 // Note that we *must not* cache the size, this loop grows the worklist.
1195 for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
1196 // Bail out the moment we cross the threshold. This means we'll under-count
1197 // the cost, but only when undercounting doesn't matter.
Chandler Carrutha004f222015-05-27 02:49:05 +00001198 if (Cost > Threshold)
Chandler Carruth0539c072012-03-31 12:42:41 +00001199 break;
1200
1201 BasicBlock *BB = BBWorklist[Idx];
1202 if (BB->empty())
Chandler Carruth4d1d34f2012-03-14 23:19:53 +00001203 continue;
Dan Gohman4552e3c2009-10-13 18:30:07 +00001204
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001205 // Disallow inlining a blockaddress. A blockaddress only has defined
1206 // behavior for an indirect branch in the same function, and we do not
1207 // currently support inlining indirect branches. But, the inliner may not
1208 // see an indirect branch that ends up being dead code at a particular call
1209 // site. If the blockaddress escapes the function, e.g., via a global
1210 // variable, inlining may lead to an invalid cross-function reference.
1211 if (BB->hasAddressTaken())
1212 return false;
1213
Chandler Carruth0539c072012-03-31 12:42:41 +00001214 // Analyze the cost of this block. If we blow through the threshold, this
1215 // returns false, and we can bail out.
Hal Finkel57f03dd2014-09-07 13:49:57 +00001216 if (!analyzeBlock(BB, EphValues)) {
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001217 if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
Reid Kleckner223de262015-04-14 20:38:14 +00001218 HasIndirectBr || HasFrameEscape)
Chandler Carruth0539c072012-03-31 12:42:41 +00001219 return false;
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001220
1221 // If the caller is a recursive function then we don't want to inline
1222 // functions which allocate a lot of stack space because it would increase
1223 // the caller stack usage dramatically.
1224 if (IsCallerRecursive &&
1225 AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
1226 return false;
1227
Chandler Carruth0539c072012-03-31 12:42:41 +00001228 break;
Eric Christopher46308e62011-02-01 01:16:32 +00001229 }
Eric Christopher46308e62011-02-01 01:16:32 +00001230
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001231 TerminatorInst *TI = BB->getTerminator();
1232
Chandler Carruth0539c072012-03-31 12:42:41 +00001233 // Add in the live successors by first checking whether we have a terminator
1234 // that may be simplified based on the values simplified by this call.
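// For example, if a constant call argument makes the branch condition fold,
// only the taken successor is pushed onto the worklist and the dead arm is
// never costed.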
1235 if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
1236 if (BI->isConditional()) {
1237 Value *Cond = BI->getCondition();
1238 if (ConstantInt *SimpleCond
1239 = dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
1240 BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
1241 continue;
Eric Christopher46308e62011-02-01 01:16:32 +00001242 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001243 }
1244 } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
1245 Value *Cond = SI->getCondition();
1246 if (ConstantInt *SimpleCond
1247 = dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
1248 BBWorklist.insert(SI->findCaseValue(SimpleCond).getCaseSuccessor());
1249 continue;
1250 }
1251 }
Eric Christopher46308e62011-02-01 01:16:32 +00001252
Chandler Carruth0539c072012-03-31 12:42:41 +00001253 // If we're unable to select a particular successor, just count all of
1254 // them.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001255 for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
1256 ++TIdx)
Chandler Carruth0539c072012-03-31 12:42:41 +00001257 BBWorklist.insert(TI->getSuccessor(TIdx));
1258
1259 // If we had any successors at this point, then post-inlining is likely to
1260 // have them as well. Note that we assume any basic blocks which existed
1261 // due to branches or switches which folded above will also fold after
1262 // inlining.
1263 if (SingleBB && TI->getNumSuccessors() > 1) {
1264 // Take off the bonus we applied to the threshold.
1265 Threshold -= SingleBBBonus;
1266 SingleBB = false;
Eric Christopher46308e62011-02-01 01:16:32 +00001267 }
1268 }
Andrew Trickcaa500b2011-10-01 01:27:56 +00001269
Chandler Carruthcb5beb32013-12-12 11:59:26 +00001270 // If this is a noduplicate call, we can still inline as long as
James Molloy4f6fb952012-12-20 16:04:27 +00001271 // inlining it would cause the removal of the function being inlined (so the
1272 // noduplicate call is not actually duplicated, just moved).
1273 if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
1274 return false;
1275
Chandler Carrutha004f222015-05-27 02:49:05 +00001276 // We applied the maximum possible vector bonus at the beginning. Now,
1277 // subtract the excess bonus, if any, from the Threshold before
1278 // comparing against Cost.
1279 if (NumVectorInstructions <= NumInstructions / 10)
1280 Threshold -= FiftyPercentVectorBonus;
1281 else if (NumVectorInstructions <= NumInstructions / 2)
1282 Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus);
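// In other words, a callee that is barely vectorized (at most 10% vector
// instructions) keeps none of the vector bonus, a moderately vectorized one
// keeps only the smaller ten-percent bonus, and anything above 50% keeps the
// full bonus that was applied up front.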
Chandler Carruth0539c072012-03-31 12:42:41 +00001283
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001284 return Cost < Threshold;
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001285}
1286
Manman Ren49d684e2012-09-12 05:06:18 +00001287#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
Chandler Carruth0539c072012-03-31 12:42:41 +00001288/// \brief Dump stats about this call's analysis.
1289void CallAnalyzer::dump() {
Eric Christophera13839f2014-02-26 23:27:16 +00001290#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
Chandler Carruth0539c072012-03-31 12:42:41 +00001291 DEBUG_PRINT_STAT(NumConstantArgs);
1292 DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
1293 DEBUG_PRINT_STAT(NumAllocaArgs);
1294 DEBUG_PRINT_STAT(NumConstantPtrCmps);
1295 DEBUG_PRINT_STAT(NumConstantPtrDiffs);
1296 DEBUG_PRINT_STAT(NumInstructionsSimplified);
Chandler Carrutha004f222015-05-27 02:49:05 +00001297 DEBUG_PRINT_STAT(NumInstructions);
Chandler Carruth0539c072012-03-31 12:42:41 +00001298 DEBUG_PRINT_STAT(SROACostSavings);
1299 DEBUG_PRINT_STAT(SROACostSavingsLost);
James Molloy4f6fb952012-12-20 16:04:27 +00001300 DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
Chandler Carruth394e34f2014-01-31 22:32:32 +00001301 DEBUG_PRINT_STAT(Cost);
1302 DEBUG_PRINT_STAT(Threshold);
Chandler Carruth0539c072012-03-31 12:42:41 +00001303#undef DEBUG_PRINT_STAT
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001304}
Manman Renc3366cc2012-09-06 19:55:56 +00001305#endif
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001306
Chandler Carruth4319e292013-01-21 11:39:18 +00001307INITIALIZE_PASS_BEGIN(InlineCostAnalysis, "inline-cost", "Inline Cost Analysis",
1308 true, true)
Chandler Carruth705b1852015-01-31 03:43:40 +00001309INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
Chandler Carruth66b31302015-01-04 12:03:27 +00001310INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
Chandler Carruth4319e292013-01-21 11:39:18 +00001311INITIALIZE_PASS_END(InlineCostAnalysis, "inline-cost", "Inline Cost Analysis",
1312 true, true)
1313
1314char InlineCostAnalysis::ID = 0;
1315
Rafael Espindola339430f2014-02-25 23:25:17 +00001316InlineCostAnalysis::InlineCostAnalysis() : CallGraphSCCPass(ID) {}
Chandler Carruth4319e292013-01-21 11:39:18 +00001317
1318InlineCostAnalysis::~InlineCostAnalysis() {}
1319
1320void InlineCostAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
1321 AU.setPreservesAll();
Chandler Carruth66b31302015-01-04 12:03:27 +00001322 AU.addRequired<AssumptionCacheTracker>();
Chandler Carruth705b1852015-01-31 03:43:40 +00001323 AU.addRequired<TargetTransformInfoWrapperPass>();
Chandler Carruth4319e292013-01-21 11:39:18 +00001324 CallGraphSCCPass::getAnalysisUsage(AU);
1325}
1326
1327bool InlineCostAnalysis::runOnSCC(CallGraphSCC &SCC) {
Chandler Carruthfdb9c572015-02-01 12:01:35 +00001328 TTIWP = &getAnalysis<TargetTransformInfoWrapperPass>();
Chandler Carruth66b31302015-01-04 12:03:27 +00001329 ACT = &getAnalysis<AssumptionCacheTracker>();
Chandler Carruth4319e292013-01-21 11:39:18 +00001330 return false;
1331}
1332
1333InlineCost InlineCostAnalysis::getInlineCost(CallSite CS, int Threshold) {
David Chisnallc1c9cda2012-04-06 17:27:41 +00001334 return getInlineCost(CS, CS.getCalledFunction(), Threshold);
1335}
Dan Gohman4552e3c2009-10-13 18:30:07 +00001336
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001337 /// \brief Test whether two functions either both have or both lack the given
1338 /// attribute.
Akira Hatanakaf99e1912015-04-13 18:43:38 +00001339template<typename AttrKind>
1340static bool attributeMatches(Function *F1, Function *F2, AttrKind Attr) {
1341 return F1->getFnAttribute(Attr) == F2->getFnAttribute(Attr);
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001342}
1343
1344/// \brief Test that there are no attribute conflicts between Caller and Callee
1345/// that prevent inlining.
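/// The target hook typically rejects pairs whose subtarget features are
/// incompatible (e.g. a callee built for ISA extensions the caller lacks),
/// while the sanitizer attributes must match exactly so that instrumented and
/// uninstrumented code are not mixed by inlining.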
1346static bool functionsHaveCompatibleAttributes(Function *Caller,
Eric Christopher4371b132015-07-02 01:11:47 +00001347 Function *Callee,
1348 TargetTransformInfo &TTI) {
Eric Christopherd566fb12015-07-29 22:09:48 +00001349 return TTI.areInlineCompatible(Caller, Callee) &&
Akira Hatanakaf99e1912015-04-13 18:43:38 +00001350 attributeMatches(Caller, Callee, Attribute::SanitizeAddress) &&
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001351 attributeMatches(Caller, Callee, Attribute::SanitizeMemory) &&
1352 attributeMatches(Caller, Callee, Attribute::SanitizeThread);
1353}
1354
Chandler Carruth4319e292013-01-21 11:39:18 +00001355InlineCost InlineCostAnalysis::getInlineCost(CallSite CS, Function *Callee,
David Chisnallc1c9cda2012-04-06 17:27:41 +00001356 int Threshold) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001357 // Cannot inline indirect calls.
1358 if (!Callee)
1359 return llvm::InlineCost::getNever();
1360
1361 // Calls to functions with always-inline attributes should be inlined
1362 // whenever possible.
Peter Collingbourne68a88972014-05-19 18:25:54 +00001363 if (CS.hasFnAttr(Attribute::AlwaysInline)) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001364 if (isInlineViable(*Callee))
1365 return llvm::InlineCost::getAlways();
1366 return llvm::InlineCost::getNever();
1367 }
1368
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001369 // Never inline functions with conflicting attributes (unless callee has
1370 // always-inline attribute).
Eric Christopher4371b132015-07-02 01:11:47 +00001371 if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee,
1372 TTIWP->getTTI(*Callee)))
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001373 return llvm::InlineCost::getNever();
1374
Paul Robinsondcbe35b2013-11-18 21:44:03 +00001375 // Don't inline this call if the caller has the optnone attribute.
1376 if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone))
1377 return llvm::InlineCost::getNever();
1378
Dan Gohman4552e3c2009-10-13 18:30:07 +00001379 // Don't inline functions which can be redefined at link-time to mean
Eric Christopherb1a382d2010-03-25 04:49:10 +00001380 // something else. Don't inline functions marked noinline or call sites
1381 // marked noinline.
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001382 if (Callee->mayBeOverridden() ||
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001383 Callee->hasFnAttribute(Attribute::NoInline) || CS.isNoInline())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001384 return llvm::InlineCost::getNever();
1385
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001386 DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
1387 << "...\n");
Andrew Trickcaa500b2011-10-01 01:27:56 +00001388
Philip Reames9b5c9582015-06-26 20:51:17 +00001389 CallAnalyzer CA(TTIWP->getTTI(*Callee), ACT, *Callee, Threshold, CS);
Chandler Carruth0539c072012-03-31 12:42:41 +00001390 bool ShouldInline = CA.analyzeCall(CS);
Dan Gohman4552e3c2009-10-13 18:30:07 +00001391
Chandler Carruth0539c072012-03-31 12:42:41 +00001392 DEBUG(CA.dump());
1393
1394 // Check if there was a reason to force inlining or no inlining.
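// analyzeCall() can answer 'no' for structural reasons (indirect branches,
// recursion, an escaped blockaddress) even while the running cost is still
// below the threshold, and can answer 'yes' regardless of cost (e.g. for an
// empty callee); report such forced answers as never/always so clients do not
// misread the raw numbers.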
1395 if (!ShouldInline && CA.getCost() < CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001396 return InlineCost::getNever();
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001397 if (ShouldInline && CA.getCost() >= CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001398 return InlineCost::getAlways();
Andrew Trickcaa500b2011-10-01 01:27:56 +00001399
Chandler Carruth0539c072012-03-31 12:42:41 +00001400 return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
Dan Gohman4552e3c2009-10-13 18:30:07 +00001401}
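
// Illustrative sketch only (hypothetical client, not code in this file): a
// pass holding an InlineCostAnalysis would typically consume the result along
// these lines, where 'DefaultThreshold' stands in for a caller-chosen knob:
//
//   InlineCost IC = ICA->getInlineCost(CS, DefaultThreshold);
//   if (IC.isAlways())
//     return true;        // forced inline
//   if (IC.isNever())
//     return false;       // forced no-inline
//   return (bool)IC;      // variable cost: inline iff Cost < Threshold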
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001402
Chandler Carruth4319e292013-01-21 11:39:18 +00001403bool InlineCostAnalysis::isInlineViable(Function &F) {
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001404 bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001405 for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001406 // Disallow inlining of functions which contain indirect branches or
1407 // blockaddresses.
1408 if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001409 return false;
1410
1411 for (BasicBlock::iterator II = BI->begin(), IE = BI->end(); II != IE;
1412 ++II) {
1413 CallSite CS(II);
1414 if (!CS)
1415 continue;
1416
1417 // Disallow recursive calls.
1418 if (&F == CS.getCalledFunction())
1419 return false;
1420
1421 // Disallow calls which expose returns-twice to a function not previously
1422 // attributed as such.
1423 if (!ReturnsTwice && CS.isCall() &&
1424 cast<CallInst>(CS.getInstruction())->canReturnTwice())
1425 return false;
Reid Kleckner223de262015-04-14 20:38:14 +00001426
Reid Kleckner60381792015-07-07 22:25:32 +00001427 // Disallow inlining functions that call @llvm.localescape. Doing this
Reid Kleckner223de262015-04-14 20:38:14 +00001428 // correctly would require major changes to the inliner.
1429 if (CS.getCalledFunction() &&
1430 CS.getCalledFunction()->getIntrinsicID() ==
Reid Kleckner60381792015-07-07 22:25:32 +00001431 llvm::Intrinsic::localescape)
Reid Kleckner223de262015-04-14 20:38:14 +00001432 return false;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001433 }
1434 }
1435
1436 return true;
1437}