//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// The cache of @llvm.assume intrinsics.
  AssumptionCacheTracker *ACT;

  // The called function.
  Function &F;

  // The candidate callsite being analyzed. Please do not use this to do
  // analysis in the caller function; we want the inline cost query to be
  // easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  int Threshold;
  int Cost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasFrameEscape;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int FiftyPercentVectorBonus, TenPercentVectorBonus;
  int VectorBonus;

  // While we walk the potentially-inlined instructions, we build up and
  // maintain a mapping of simplified values specific to this callsite. The
  // idea is to propagate any special information we have about arguments to
  // this call through the inlinable section of the function, and account for
  // likely simplifications post-inlining. The most important aspect we track
  // is CFG altering simplifications -- when we prove a basic block dead, that
  // can cause dramatic shifts in the cost of inlining a function.
  DenseMap<Value *, Constant *> SimplifiedValues;

  // Keep track of the values which map back (through function arguments) to
  // allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  // The mapping of caller Alloca values to their accumulated cost savings. If
  // we have to disable SROA for one of the allocas, this tells us how much
  // cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  // Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt> > ConstantOffsetPtrs;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  bool isGEPOffsetConstant(GetElementPtrInst &GEP);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *); void visit(Module &);
  void visit(Function *); void visit(Function &);
  void visit(BasicBlock *); void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI, AssumptionCacheTracker *ACT,
               Function &Callee, int Threshold, CallSite CSArg)
      : TTI(TTI), ACT(ACT), F(Callee), CandidateCS(CSArg), Threshold(Threshold),
        Cost(0), IsCallerRecursive(false), IsRecursiveCall(false),
        ExposesReturnsTwice(false), HasDynamicAlloca(false),
        ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
        HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
        NumVectorInstructions(0), FiftyPercentVectorBonus(0),
        TenPercentVectorBonus(0), VectorBonus(0), NumConstantArgs(0),
        NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), NumConstantPtrCmps(0),
        NumConstantPtrDiffs(0), NumInstructionsSimplified(0),
        SROACostSavings(0), SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

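// A sketch of how this class is driven (illustrative only -- the real entry
// points are the getInlineCost() functions declared in InlineCost.h, and the
// exact setup below is an assumption for exposition):
//
//   CallAnalyzer CA(CalleeTTI, &ACT, *Callee, Threshold, CS);
//   if (CA.analyzeCall(CS)) {
//     // Inlining looks viable; CA.getCost() and CA.getThreshold() carry
//     // the final numbers behind the decision.
//   }
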
/// \brief Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// \brief Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
}

/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// \brief Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

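// An illustrative walk-through of the bookkeeping above (hypothetical
// scenario): a caller alloca passed as a callee argument becomes an SROA
// candidate. Each simple load or store through it accrues
// InlineConstants::InstrCost into SROACostSavings instead of Cost. If the
// pointer later escapes through an operation we can't handle, disableSROA()
// adds the accumulated savings back into Cost and records them in
// SROACostSavingsLost, as if every access had been charged from the start.
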
/// \brief Check whether a GEP's indices are all constant.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPOffsetConstant(GetElementPtrInst &GEP) {
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
      return false;

  return true;
}

/// \brief Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero()) continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = dyn_cast<StructType>(*GTI)) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}

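// Worked example (hypothetical IR, assuming a 64-bit DataLayout):
//
//   %p = getelementptr inbounds {i32, i32, i64}* %base, i32 0, i32 2
//
// The struct index 2 adds the StructLayout field offset, 8 bytes here (the
// i64 sits after two i32s). A sequential index instead adds
// index * getTypeAllocSize(element); e.g. index 3 into [10 x i32] adds
// 3 * 4 = 12 to Offset.
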
bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca, and handle that case.
  if (I.isArrayAllocation()) {
    if (Constant *Size = SimplifiedValues.lookup(I.getArraySize())) {
      ConstantInt *AllocSize = dyn_cast<ConstantInt>(Size);
      assert(AllocSize && "Allocation size not a constant int?");
      Type *Ty = I.getAllocatedType();
      AllocatedSize += Ty->getPrimitiveSizeInBits() * AllocSize->getZExtValue();
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    Type *Ty = I.getAllocatedType();
    AllocatedSize += DL.getTypeAllocSize(Ty);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}

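// Examples (hypothetical IR): "%a = alloca [16 x i32]" is static, adds its
// 64 bytes to AllocatedSize, and is modeled as free. For
// "%b = alloca i32, i32 %n", if %n simplified to a constant (say a constant
// call argument), inlining would make it static and it is costed above;
// otherwise HasDynamicAlloca aborts the whole analysis.
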
bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We should potentially be tracking values through phi nodes,
  // especially when they collapse to a single value due to deleted CFG edges
  // during inlining.

  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate = lookupSROAArgAndCost(I.getPointerOperand(),
                                            SROAArg, CostIt);

  // Try to fold GEPs of constant-offset call site argument pointers. This
  // requires target data and inbounds GEPs.
  if (I.isInBounds()) {
    // Check if we have a base + offset for the pointer.
    Value *Ptr = I.getPointerOperand();
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Ptr);
    if (BaseAndOffset.first) {
      // Check if the offset of this GEP is constant, and if so accumulate it
      // into Offset.
      if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second)) {
        // Non-constant GEPs aren't folded; they also disable SROA.
        if (SROACandidate)
          disableSROA(CostIt);
        return false;
      }

      // Add the result as a new mapping to Base + Offset.
      ConstantOffsetPtrs[&I] = BaseAndOffset;

      // Also handle SROA candidates here; we already know that the GEP is
      // all-constant indexed.
      if (SROACandidate)
        SROAArgValues[&I] = SROAArg;

      return true;
    }
  }

  if (isGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return false;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getBitCast(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offsets through casts.
  std::pair<Value *, APInt> BaseAndOffset
    = ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getPtrToInt(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize >= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset
      = ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getIntToPtr(COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (IntegerSize <= DL.getPointerSizeInBits()) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

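// Example of the base/offset tracking through these casts (hypothetical IR,
// 64-bit pointers):
//
//   %i = ptrtoint i32* %p to i64   ; inherits %p's (base, offset) pair
//   %q = inttoptr i64 %i to i32*   ; the round-trip keeps the same pair
//
// Because the integer is pointer-sized, later GEPs and comparisons on %q
// can be folded exactly as they would be on %p.
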
bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  Constant *COp = dyn_cast<Constant>(I.getOperand(0));
  if (!COp)
    COp = SimplifiedValues.lookup(I.getOperand(0));
  if (COp)
    if (Constant *C = ConstantExpr::getCast(I.getOpcode(), COp, I.getType())) {
      SimplifiedValues[&I] = C;
      return true;
    }

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  Constant *COp = dyn_cast<Constant>(Operand);
  if (!COp)
    COp = SimplifiedValues.lookup(Operand);
  if (COp) {
    const DataLayout &DL = F.getParent()->getDataLayout();
    if (Constant *C = ConstantFoldInstOperands(I.getOpcode(), I.getType(),
                                               COp, DL)) {
      SimplifiedValues[&I] = C;
      return true;
    }
  }

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  unsigned ArgNo = A->getArgNo();
  return CandidateCS.paramHasAttr(ArgNo+1, Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

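// Example: for "define void @callee(i8* %p)" reached from a call site
// "call void @callee(i8* nonnull %x)" -- or with %x derived from a caller
// alloca -- a callee-side "icmp eq i8* %p, null" is known false, which lets
// visitCmpInst below fold the comparison and any branch guarded by it.
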
bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  if (Constant *CLHS = dyn_cast<Constant>(LHS)) {
    if (Constant *CRHS = dyn_cast<Constant>(RHS))
      if (Constant *C = ConstantExpr::getCompare(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        return true;
      }
  }

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }

  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

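// Worked example of the common-base folding above (hypothetical IR):
//
//   %a = getelementptr inbounds i32* %arg, i64 1   ; (base %arg, offset 4)
//   %b = getelementptr inbounds i32* %arg, i64 2   ; (base %arg, offset 8)
//   %c = icmp ult i32* %a, %b
//
// Both sides share the base %arg, so the icmp folds to "4 ult 8", i.e.
// true, without knowing what %arg points at.
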
bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  const DataLayout &DL = F.getParent()->getDataLayout();
  if (!isa<Constant>(LHS))
    if (Constant *SimpleLHS = SimplifiedValues.lookup(LHS))
      LHS = SimpleLHS;
  if (!isa<Constant>(RHS))
    if (Constant *SimpleRHS = SimplifiedValues.lookup(RHS))
      RHS = SimpleRHS;
  Value *SimpleV = nullptr;
  if (auto FI = dyn_cast<FPMathOperator>(&I))
    SimpleV =
        SimplifyFPBinOp(I.getOpcode(), LHS, RHS, FI->getFastMathFlags(), DL);
  else
    SimpleV = SimplifyBinOp(I.getOpcode(), LHS, RHS, DL);

  if (Constant *C = dyn_cast_or_null<Constant>(SimpleV)) {
    SimplifiedValues[&I] = C;
    return true;
  }

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  Constant *C = dyn_cast<Constant>(I.getAggregateOperand());
  if (!C)
    C = SimplifiedValues.lookup(I.getAggregateOperand());
  if (C) {
    SimplifiedValues[&I] = ConstantExpr::getExtractValue(C, I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  Constant *AggC = dyn_cast<Constant>(I.getAggregateOperand());
  if (!AggC)
    AggC = SimplifiedValues.lookup(I.getAggregateOperand());
  Constant *InsertedC = dyn_cast<Constant>(I.getInsertedValueOperand());
  if (!InsertedC)
    InsertedC = SimplifiedValues.lookup(I.getInsertedValueOperand());
  if (AggC && InsertedC) {
    SimplifiedValues[&I] = ConstantExpr::getInsertValue(AggC, InsertedC,
                                                        I.getIndices());
    return true;
  }

  // SROA can look through these but give them a cost.
  return false;
}

/// \brief Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
       I != E; ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}

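// For instance (a hypothetical callee body, assuming @llvm.sqrt.f64 is on
// canConstantFoldCallTo's list): in "%r = call double @llvm.sqrt.f64(double
// %x)", if %x was mapped to the constant 4.0 through SimplifiedValues,
// ConstantFoldCall yields 2.0 and the call is recorded as free.
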
bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() &&
      cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        return Base::visitCallSite(CS);

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        // SROA can usually chew through these intrinsics, but they aren't free.
        return false;
      case Intrinsic::localescape:
        HasFrameEscape = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getParent()->getParent()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F)
    return Base::visitCallSite(CS);

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
  CallAnalyzer CA(TTI, ACT, *F, InlineConstants::IndirectCallThreshold, CS);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // bonus we want to apply, but don't go below zero.
    Cost -= std::max(0, InlineConstants::IndirectCallThreshold - CA.getCost());
  }

  return Base::visitCallSite(CS);
}

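// Example of the indirect-call bonus (hypothetical scenario): for
// "call void %fp()" where %fp was simplified to the known function @impl,
// the nested CallAnalyzer pretends to inline @impl with
// IndirectCallThreshold as its budget. If that succeeds at cost C, this
// call site's Cost drops by max(0, IndirectCallThreshold - C), rewarding
// the likely devirtualization without letting the bonus go negative.
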
bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Otherwise, we need to accumulate a cost proportional to the number of
  // distinct successor blocks. This fan-out in the CFG cannot be represented
  // for free even if we can represent the core switch as a jumptable that
  // takes a single instruction.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.
  SmallPtrSet<BasicBlock *, 8> SuccessorBlocks;
  SuccessorBlocks.insert(SI.getDefaultDest());
  for (auto I = SI.case_begin(), E = SI.case_end(); I != E; ++I)
    SuccessorBlocks.insert(I.getCaseSuccessor());
  // Add cost corresponding to the number of distinct destinations. The first
  // we model as free because of fallthrough.
  Cost += (SuccessorBlocks.size() - 1) * InlineConstants::InstrCost;
  return false;
}

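// Worked example: a switch whose default and cases reach four distinct
// blocks adds (4 - 1) * InlineConstants::InstrCost, with the first
// destination modeled as free fallthrough. A switch whose condition
// simplifies to a constant costs nothing at all.
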
bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
  // We never want to inline functions that contain an indirectbr. This is
  // incorrect because all the blockaddress's (in static global initializers
  // for example) would be referring to the original function, and this
  // indirect jump would jump from the inlined copy of the function into the
  // original function which is extremely undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions with
  // indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function.
  HasIndirectBr = true;
  return false;
}

bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a resume instruction.
  return false;
}

bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a cleanupret instruction.
  return false;
}

bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a catchret instruction.
  return false;
}

bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
  // FIXME: It might be reasonable to discount the cost of instructions leading
  // to unreachable as they have the lowest possible impact on both runtime and
  // code size.
  return true; // No actual code is needed for unreachable.
}

bool CallAnalyzer::visitInstruction(Instruction &I) {
  // Some instructions are free. All of the free intrinsics can also be
  // handled by SROA, etc.
  if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
    return true;

  // We found something we don't understand or can't handle. Mark any SROA-able
  // values in the operand list as no longer viable.
  for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
    disableSROA(*OI);

  return false;
}


/// \brief Analyze a basic block for its contribution to the inline cost.
///
/// This method walks the analyzer over every instruction in the given basic
/// block and accounts for their cost during inlining at this callsite. It
/// aborts early if the threshold has been exceeded or an impossible-to-inline
/// construct has been detected. It returns false if inlining is no longer
/// viable, and true if inlining remains viable.
bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
                                SmallPtrSetImpl<const Value *> &EphValues) {
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
    // FIXME: Currently, the number of instructions in a function, regardless
    // of our ability to simplify them during inlining to constants or dead
    // code, is actually used by the vector bonus heuristic. As long as that's
    // true, we have to special case debug intrinsics here to prevent
    // differences in inlining due to debug symbols. Eventually, the number of
    // unsimplified instructions shouldn't factor into the cost computation,
    // but until then, hack around it here.
    if (isa<DbgInfoIntrinsic>(I))
      continue;

    // Skip ephemeral values.
    if (EphValues.count(&*I))
      continue;

    ++NumInstructions;
    if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
      ++NumVectorInstructions;

    // If the instruction is floating point, and the target says this
    // operation is expensive or the function has the "use-soft-float"
    // attribute, it may eventually become a library call. Treat the cost
    // as such.
    if (I->getType()->isFloatingPointTy()) {
      bool hasSoftFloatAttr = false;

      // If the function has the "use-soft-float" attribute, mark it as
      // expensive.
      if (F.hasFnAttribute("use-soft-float")) {
        Attribute Attr = F.getFnAttribute("use-soft-float");
        StringRef Val = Attr.getValueAsString();
        if (Val == "true")
          hasSoftFloatAttr = true;
      }

      if (TTI.getFPOpCost(I->getType()) == TargetTransformInfo::TCC_Expensive ||
          hasSoftFloatAttr)
        Cost += InlineConstants::CallPenalty;
    }

    // If the instruction simplified to a constant, there is no cost to this
    // instruction. Visit the instructions using our InstVisitor to account for
    // all of the per-instruction logic. The visit tree returns true if we
    // consumed the instruction in any way, and false if the instruction's base
    // cost should count against inlining.
    if (Base::visit(&*I))
      ++NumInstructionsSimplified;
    else
      Cost += InlineConstants::InstrCost;

    // If the visit of this instruction detected an uninlinable pattern, abort.
    if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
        HasIndirectBr || HasFrameEscape)
      return false;

    // If the caller is a recursive function then we don't want to inline
    // functions which allocate a lot of stack space because it would increase
    // the caller stack usage dramatically.
    if (IsCallerRecursive &&
        AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
      return false;

    // Check if we've passed the maximum possible threshold so we don't spin in
    // huge basic blocks that will never inline.
    if (Cost > Threshold)
      return false;
  }

  return true;
}

/// \brief Compute the base pointer and cumulative constant offsets for V.
///
/// This strips all constant offsets off of V, leaving it the base pointer, and
/// accumulates the total constant offset applied in the returned constant. It
/// returns 0 if V is not a pointer, and returns the constant '0' if there are
/// no constant offsets applied.
ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
  if (!V->getType()->isPointerTy())
    return nullptr;

  const DataLayout &DL = F.getParent()->getDataLayout();
  unsigned IntPtrWidth = DL.getPointerSizeInBits();
  APInt Offset = APInt::getNullValue(IntPtrWidth);

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<Value *, 4> Visited;
  Visited.insert(V);
  do {
    if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
      if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
        return nullptr;
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
      if (GA->mayBeOverridden())
        break;
      V = GA->getAliasee();
    } else {
      break;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  Type *IntPtrTy = DL.getIntPtrType(V->getContext());
  return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
}

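// Example (hypothetical IR, 64-bit pointers): starting from
//
//   %a = getelementptr inbounds [8 x i32]* %base, i64 0, i64 2
//   %v = bitcast i32* %a to i8*
//
// walking %v strips the bitcast and the inbounds GEP, rewrites V to %base,
// and returns the accumulated offset (2 * 4 = 8) as a pointer-sized
// ConstantInt.
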
/// \brief Analyze a call site for potential inlining.
///
/// Returns true if inlining this call is viable, and false if it is not
/// viable. It computes the cost and adjusts the threshold based on numerous
/// factors and heuristics. If this method returns false but the computed cost
/// is below the computed threshold, then inlining was forcibly disabled by
/// some artifact of the routine.
bool CallAnalyzer::analyzeCall(CallSite CS) {
  ++NumCallsAnalyzed;

  // Perform some tweaks to the cost and threshold based on the direct
  // callsite information.

  // We want to more aggressively inline vector-dense kernels, so up the
  // threshold, and we'll lower it if the % of vector instructions gets too
  // low. Note that these bonuses are somewhat arbitrary and evolved over time
  // by accident as much as because they are principled bonuses.
  //
  // FIXME: It would be nice to remove all such bonuses. At least it would be
  // nice to base the bonus values on something more scientific.
  assert(NumInstructions == 0);
  assert(NumVectorInstructions == 0);
  FiftyPercentVectorBonus = 3 * Threshold / 2;
  TenPercentVectorBonus = 3 * Threshold / 4;
  const DataLayout &DL = F.getParent()->getDataLayout();

  // Track whether the post-inlining function would have more than one basic
  // block. A single basic block is often intended for inlining. Balloon the
  // threshold by 50% until we pass the single-BB phase.
  bool SingleBB = true;
  int SingleBBBonus = Threshold / 2;

  // Speculatively apply all possible bonuses to Threshold. If cost exceeds
  // this Threshold any time, and cost cannot decrease, we can stop processing
  // the rest of the function body.
  Threshold += (SingleBBBonus + FiftyPercentVectorBonus);

  // Give out bonuses per argument, as the instructions setting them up will
  // be gone after inlining.
  for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
    if (CS.isByValArgument(I)) {
      // We approximate the number of loads and stores needed by dividing the
      // size of the byval type by the target's pointer size.
      PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
      unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
      unsigned PointerSize = DL.getPointerSizeInBits();
      // Ceiling division.
      unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001109 // If it generates more than 8 stores it is likely to be expanded as an
1110 // inline memcpy so we take that as an upper bound. Otherwise we assume
1111 // one load and one store per word copied.
1112 // FIXME: The maxStoresPerMemcpy setting from the target should be used
1113 // here instead of a magic number of 8, but it's not available via
1114 // DataLayout.
1115 NumStores = std::min(NumStores, 8U);
1116
1117 Cost -= 2 * NumStores * InlineConstants::InstrCost;
1118 } else {
1119 // For non-byval arguments subtract off one instruction per call
1120 // argument.
1121 Cost -= InlineConstants::InstrCost;
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001122 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001123 }
1124
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001125 // If there is only one call of the function, and it has internal linkage,
1126 // the cost of inlining it drops dramatically.
James Molloy4f6fb952012-12-20 16:04:27 +00001127 bool OnlyOneCallAndLocalLinkage = F.hasLocalLinkage() && F.hasOneUse() &&
1128 &F == CS.getCalledFunction();
1129 if (OnlyOneCallAndLocalLinkage)
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001130 Cost += InlineConstants::LastCallToStaticBonus;

  // If the instruction after the call, or the normal destination of the
  // invoke, is an unreachable instruction, the function is effectively
  // noreturn. As such, there is little point in inlining this unless there is
  // literally zero cost.
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->begin()))
      Threshold = 0;
  } else if (isa<UnreachableInst>(++BasicBlock::iterator(Instr)))
    Threshold = 0;
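  // e.g. IR of the form "call void @f() / unreachable": control never comes
  // back, so with Threshold forced to 0 only a zero-or-negative Cost can
  // still justify inlining @f at this site.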

  // If this function uses the coldcc calling convention, prefer not to inline
  // it.
  if (F.getCallingConv() == CallingConv::Cold)
    Cost += InlineConstants::ColdccPenalty;

  // Check if we're done. This can happen due to bonuses and penalties.
  if (Cost > Threshold)
    return false;

  if (F.empty())
    return true;

  Function *Caller = CS.getInstruction()->getParent()->getParent();
  // Check if the caller function is recursive itself.
  for (User *U : Caller->users()) {
    CallSite Site(U);
    if (!Site)
      continue;
    Instruction *I = Site.getInstruction();
    if (I->getParent()->getParent() == Caller) {
      IsCallerRecursive = true;
      break;
    }
  }

  // Populate our simplified values by mapping from function arguments to call
  // arguments with known important simplifications.
  CallSite::arg_iterator CAI = CS.arg_begin();
  for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
       FAI != FAE; ++FAI, ++CAI) {
    assert(CAI != CS.arg_end());
    if (Constant *C = dyn_cast<Constant>(CAI))
      SimplifiedValues[&*FAI] = C;

    Value *PtrArg = *CAI;
    if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
      ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());

      // We can SROA any pointer arguments derived from alloca instructions.
      if (isa<AllocaInst>(PtrArg)) {
        SROAArgValues[&*FAI] = PtrArg;
        SROAArgCosts[PtrArg] = 0;
      }
    }
  }
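  // e.g. for a callee f(i32 %x) analyzed at the call f(i32 42),
  // SimplifiedValues maps %x to 42, which lets later comparisons and
  // branches on %x constant-fold during the walk below.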
  NumConstantArgs = SimplifiedValues.size();
  NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
  NumAllocaArgs = SROAArgValues.size();

  // FIXME: If a caller has multiple calls to a callee, we end up recomputing
  // the ephemeral values multiple times (and they're completely determined by
  // the callee, so this is purely duplicate work).
  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(&F, &ACT->getAssumptionCache(F),
                                      EphValues);
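  // Ephemeral values exist only to feed @llvm.assume and similar intrinsics;
  // they emit no real code after inlining, so the per-block analysis does not
  // charge for them.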

  // The worklist of live basic blocks in the callee *after* inlining. We avoid
  // adding basic blocks of the callee which can be proven to be dead for this
  // particular call site in order to get more accurate cost estimates. This
  // requires a somewhat heavyweight iteration pattern: we need to walk the
  // basic blocks in a breadth-first order as we insert live successors. To
  // accomplish this while keeping iterations small (we exit once we cross our
  // threshold), we use a small-size optimized SetVector.
  typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
                    SmallPtrSet<BasicBlock *, 16> > BBSetVector;
  BBSetVector BBWorklist;
  BBWorklist.insert(&F.getEntryBlock());
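  // For example, if the callee opens with "if (Flag)" and this call site
  // passes a constant Flag, only the reachable arm's blocks ever enter the
  // worklist, so the dead arm contributes nothing to Cost.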
  // Note that we *must not* cache the size, this loop grows the worklist.
  for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
    // Bail out the moment we cross the threshold. This means we'll under-count
    // the cost, but only when undercounting doesn't matter.
    if (Cost > Threshold)
      break;

    BasicBlock *BB = BBWorklist[Idx];
    if (BB->empty())
      continue;

    // Disallow inlining a blockaddress. A blockaddress only has defined
    // behavior for an indirect branch in the same function, and we do not
    // currently support inlining indirect branches. But, the inliner may not
    // see an indirect branch that ends up being dead code at a particular call
    // site. If the blockaddress escapes the function, e.g., via a global
    // variable, inlining may lead to an invalid cross-function reference.
    if (BB->hasAddressTaken())
      return false;

    // Analyze the cost of this block. If we blow through the threshold, this
    // returns false, and we can bail out.
    if (!analyzeBlock(BB, EphValues)) {
      if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
          HasIndirectBr || HasFrameEscape)
        return false;

      // If the caller is a recursive function then we don't want to inline
      // functions which allocate a lot of stack space because it would
      // increase the caller stack usage dramatically.
      if (IsCallerRecursive &&
          AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller)
        return false;

      break;
    }

    TerminatorInst *TI = BB->getTerminator();

    // Add in the live successors by first checking whether we have a
    // terminator that may be simplified based on the values simplified by
    // this call.
    if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
      if (BI->isConditional()) {
        Value *Cond = BI->getCondition();
        if (ConstantInt *SimpleCond
              = dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
          BBWorklist.insert(BI->getSuccessor(SimpleCond->isZero() ? 1 : 0));
          continue;
        }
      }
    } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
      Value *Cond = SI->getCondition();
      if (ConstantInt *SimpleCond
            = dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
        BBWorklist.insert(SI->findCaseValue(SimpleCond).getCaseSuccessor());
        continue;
      }
    }
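    // e.g. "br i1 %c, label %then, label %else" where %c simplified to true
    // enqueues only %then; a switch whose condition simplified to a constant
    // enqueues just the matching case's successor.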

    // If we're unable to select a particular successor, just count all of
    // them.
    for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
         ++TIdx)
      BBWorklist.insert(TI->getSuccessor(TIdx));

    // If we had any successors at this point, then post-inlining is likely to
    // have them as well. Note that we assume any basic blocks which existed
    // due to branches or switches which folded above will also fold after
    // inlining.
    if (SingleBB && TI->getNumSuccessors() > 1) {
      // Take off the bonus we applied to the threshold.
      Threshold -= SingleBBBonus;
      SingleBB = false;
    }
  }

  // If this function contains a noduplicate call, we can still inline as long
  // as inlining causes the callee to be removed (its only call site is this
  // one, so the noduplicate call is moved, not duplicated).
  if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
    return false;

  // We applied the maximum possible vector bonus at the beginning. Now,
  // subtract the excess bonus, if any, from the Threshold before
  // comparing against Cost.
  if (NumVectorInstructions <= NumInstructions / 10)
    Threshold -= FiftyPercentVectorBonus;
  else if (NumVectorInstructions <= NumInstructions / 2)
    Threshold -= (FiftyPercentVectorBonus - TenPercentVectorBonus);
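  // Illustrative arithmetic (again assuming a starting Threshold of 225, so
  // FiftyPercentVectorBonus = 337 and TenPercentVectorBonus = 168): a callee
  // with <= 10% vector instructions forfeits all 337; one between 10% and 50%
  // forfeits 337 - 168 = 169, keeping a net bonus of 168.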

  return Cost <= std::max(0, Threshold);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// \brief Dump stats about this call's analysis.
void CallAnalyzer::dump() {
#define DEBUG_PRINT_STAT(x) dbgs() << "      " #x ": " << x << "\n"
  DEBUG_PRINT_STAT(NumConstantArgs);
  DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
  DEBUG_PRINT_STAT(NumAllocaArgs);
  DEBUG_PRINT_STAT(NumConstantPtrCmps);
  DEBUG_PRINT_STAT(NumConstantPtrDiffs);
  DEBUG_PRINT_STAT(NumInstructionsSimplified);
  DEBUG_PRINT_STAT(NumInstructions);
  DEBUG_PRINT_STAT(SROACostSavings);
  DEBUG_PRINT_STAT(SROACostSavingsLost);
  DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
  DEBUG_PRINT_STAT(Cost);
  DEBUG_PRINT_STAT(Threshold);
#undef DEBUG_PRINT_STAT
}
#endif

INITIALIZE_PASS_BEGIN(InlineCostAnalysis, "inline-cost", "Inline Cost Analysis",
                      true, true)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_END(InlineCostAnalysis, "inline-cost", "Inline Cost Analysis",
                    true, true)

char InlineCostAnalysis::ID = 0;

InlineCostAnalysis::InlineCostAnalysis() : CallGraphSCCPass(ID) {}

InlineCostAnalysis::~InlineCostAnalysis() {}

void InlineCostAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<AssumptionCacheTracker>();
  AU.addRequired<TargetTransformInfoWrapperPass>();
  CallGraphSCCPass::getAnalysisUsage(AU);
}

bool InlineCostAnalysis::runOnSCC(CallGraphSCC &SCC) {
  TTIWP = &getAnalysis<TargetTransformInfoWrapperPass>();
  ACT = &getAnalysis<AssumptionCacheTracker>();
  return false;
}

InlineCost InlineCostAnalysis::getInlineCost(CallSite CS, int Threshold) {
  return getInlineCost(CS, CS.getCalledFunction(), Threshold);
}

/// \brief Test that two functions either both have or both lack the given
/// attribute.
template <typename AttrKind>
static bool attributeMatches(Function *F1, Function *F2, AttrKind Attr) {
  return F1->getFnAttribute(Attr) == F2->getFnAttribute(Attr);
}

/// \brief Test that there are no attribute conflicts between Caller and
/// Callee that prevent inlining.
static bool functionsHaveCompatibleAttributes(Function *Caller,
                                              Function *Callee,
                                              TargetTransformInfo &TTI) {
  return TTI.areInlineCompatible(Caller, Callee) &&
         attributeMatches(Caller, Callee, Attribute::SanitizeAddress) &&
         attributeMatches(Caller, Callee, Attribute::SanitizeMemory) &&
         attributeMatches(Caller, Callee, Attribute::SanitizeThread);
}
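// e.g. inlining an uninstrumented callee into a sanitize_address caller (or
// vice versa) would mix instrumented and uninstrumented memory accesses in
// one function, so mismatched sanitizer attributes block inlining outright.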

InlineCost InlineCostAnalysis::getInlineCost(CallSite CS, Function *Callee,
                                             int Threshold) {
  // Cannot inline indirect calls.
  if (!Callee)
    return llvm::InlineCost::getNever();

  // Calls to functions with always-inline attributes should be inlined
  // whenever possible.
  if (CS.hasFnAttr(Attribute::AlwaysInline)) {
    if (isInlineViable(*Callee))
      return llvm::InlineCost::getAlways();
    return llvm::InlineCost::getNever();
  }

  // Never inline functions with conflicting attributes (unless callee has
  // always-inline attribute).
  if (!functionsHaveCompatibleAttributes(CS.getCaller(), Callee,
                                         TTIWP->getTTI(*Callee)))
    return llvm::InlineCost::getNever();

  // Don't inline this call if the caller has the optnone attribute.
  if (CS.getCaller()->hasFnAttribute(Attribute::OptimizeNone))
    return llvm::InlineCost::getNever();

  // Don't inline functions which can be redefined at link-time to mean
  // something else. Don't inline functions marked noinline or call sites
  // marked noinline.
  if (Callee->mayBeOverridden() ||
      Callee->hasFnAttribute(Attribute::NoInline) || CS.isNoInline())
    return llvm::InlineCost::getNever();

  DEBUG(llvm::dbgs() << "      Analyzing call of " << Callee->getName()
                     << "...\n");

  CallAnalyzer CA(TTIWP->getTTI(*Callee), ACT, *Callee, Threshold, CS);
  bool ShouldInline = CA.analyzeCall(CS);

  DEBUG(CA.dump());

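  // analyzeCall's boolean can veto inlining even when the accumulated Cost is
  // still under the Threshold (e.g. on a blockaddress, indirect branch, or
  // dynamic alloca); the explicit checks below turn such vetoes into hard
  // never/always answers instead of a plain cost comparison.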
  // Check if there was a reason to force inlining or no inlining.
  if (!ShouldInline && CA.getCost() < CA.getThreshold())
    return InlineCost::getNever();
  if (ShouldInline && CA.getCost() >= CA.getThreshold())
    return InlineCost::getAlways();

  return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
}

bool InlineCostAnalysis::isInlineViable(Function &F) {
  bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
  for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
    // Disallow inlining of functions which contain indirect branches or
    // blockaddresses.
    if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
      return false;

    for (auto &II : *BI) {
      CallSite CS(&II);
      if (!CS)
        continue;

      // Disallow recursive calls.
      if (&F == CS.getCalledFunction())
        return false;

      // Disallow calls which expose returns-twice to a function not
      // previously attributed as such.
      if (!ReturnsTwice && CS.isCall() &&
          cast<CallInst>(CS.getInstruction())->canReturnTwice())
        return false;
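      // The canonical returns-twice function is setjmp: duplicating such a
      // call into a caller never marked returns_twice could resume control in
      // a frame the optimizers assumed is entered only once.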

      // Disallow inlining functions that call @llvm.localescape. Doing this
      // correctly would require major changes to the inliner.
      if (CS.getCalledFunction() &&
          CS.getCalledFunction()->getIntrinsicID() ==
              llvm::Intrinsic::localescape)
        return false;
    }
  }

  return true;
}