//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

static cl::opt<int> InlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325),
    cl::desc("Threshold for inlining functions with inline hint"));

static cl::opt<int>
    ColdCallSiteThreshold("inline-cold-callsite-threshold", cl::Hidden,
                          cl::init(45),
                          cl::desc("Threshold for inlining cold callsites"));

// We introduce this threshold to help performance of instrumentation based
// PGO before we actually hook up inliner with analysis passes such as BPI and
// BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(45),
    cl::desc("Threshold for inlining functions with cold attribute"));

static cl::opt<int>
    HotCallSiteThreshold("hot-callsite-threshold", cl::Hidden, cl::init(3000),
                         cl::ZeroOrMore,
                         cl::desc("Threshold for hot callsites"));

static cl::opt<int> LocallyHotCallSiteThreshold(
    "locally-hot-callsite-threshold", cl::Hidden, cl::init(525), cl::ZeroOrMore,
    cl::desc("Threshold for locally hot callsites"));

static cl::opt<int> ColdCallSiteRelFreq(
    "cold-callsite-rel-freq", cl::Hidden, cl::init(2), cl::ZeroOrMore,
    cl::desc("Maximum block frequency, expressed as a percentage of caller's "
             "entry frequency, for a callsite to be cold in the absence of "
             "profile information."));

static cl::opt<int> HotCallSiteRelFreq(
    "hot-callsite-rel-freq", cl::Hidden, cl::init(60), cl::ZeroOrMore,
    cl::desc("Minimum block frequency, expressed as a multiple of caller's "
             "entry frequency, for a callsite to be hot in the absence of "
             "profile information."));

static cl::opt<bool> OptComputeFullInlineCost(
    "inline-cost-full", cl::Hidden, cl::init(false),
    cl::desc("Compute the full inline cost of a call site even when the cost "
             "exceeds the threshold."));

namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// Getter for the cache of @llvm.assume intrinsics.
  std::function<AssumptionCache &(Function &)> &GetAssumptionCache;

  /// Getter for BlockFrequencyInfo.
  Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  /// The called function.
  Function &F;

  // Cache the DataLayout since we use it a lot.
  const DataLayout &DL;

  /// The OptimizationRemarkEmitter available for this compilation.
  OptimizationRemarkEmitter *ORE;

  /// The candidate callsite being analyzed. Please do not use this to do
  /// analysis in the caller function; we want the inline cost query to be
  /// easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  /// Tunable parameters that control the analysis.
  const InlineParams &Params;

  int Threshold;
  int Cost;
  bool ComputeFullInlineCost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasUninlineableIntrinsic;
  bool UsesVarArgs;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int VectorBonus, TenPercentVectorBonus;
  // Bonus to be applied when the callee has only one reachable basic block.
  int SingleBBBonus;

  /// While we walk the potentially-inlined instructions, we build up and
  /// maintain a mapping of simplified values specific to this callsite. The
  /// idea is to propagate any special information we have about arguments to
  /// this call through the inlinable section of the function, and account for
  /// likely simplifications post-inlining. The most important aspect we track
  /// is CFG altering simplifications -- when we prove a basic block dead, that
  /// can cause dramatic shifts in the cost of inlining a function.
  DenseMap<Value *, Constant *> SimplifiedValues;

  /// Keep track of the values which map back (through function arguments) to
  /// allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  /// The mapping of caller Alloca values to their accumulated cost savings. If
  /// we have to disable SROA for one of the allocas, this tells us how much
  /// cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  /// Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  /// Keep track of dead blocks due to the constant arguments.
  SetVector<BasicBlock *> DeadBlocks;

  /// The mapping of the blocks to their known unique successors due to the
  /// constant arguments.
  DenseMap<BasicBlock *, BasicBlock *> KnownSuccessors;

  /// Model the elimination of repeated loads that is expected to happen
  /// whenever we simplify away the stores that would otherwise clobber them
  /// and force the values to be re-loaded.
  bool EnableLoadElimination;
  SmallPtrSet<Value *, 16> LoadAddrSet;
  int LoadEliminationCost;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void findDeadBlocks(BasicBlock *CurrBB, BasicBlock *NextBB);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  void disableLoadElimination();
  bool isGEPFree(GetElementPtrInst &GEP);
  bool canFoldInboundsGEP(GetElementPtrInst &I);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  template <typename Callable>
  bool simplifyInstruction(Instruction &I, Callable Evaluate);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  /// Return true if \p CS is a cold callsite.
  bool isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI);

  /// Return a higher threshold if \p CS is a hot callsite.
  Optional<int> getHotCallSiteThreshold(CallSite CS,
                                        BlockFrequencyInfo *CallerBFI);

  // Custom analysis routines.
  bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *);
  void visit(Module &);
  void visit(Function *);
  void visit(Function &);
  void visit(BasicBlock *);
  void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSelectInst(SelectInst &SI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI,
               std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
               Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI,
               ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE,
               Function &Callee, CallSite CSArg, const InlineParams &Params)
      : TTI(TTI), GetAssumptionCache(GetAssumptionCache), GetBFI(GetBFI),
        PSI(PSI), F(Callee), DL(F.getParent()->getDataLayout()), ORE(ORE),
        CandidateCS(CSArg), Params(Params), Threshold(Params.DefaultThreshold),
        Cost(0), ComputeFullInlineCost(OptComputeFullInlineCost ||
                                       Params.ComputeFullInlineCost || ORE),
        IsCallerRecursive(false), IsRecursiveCall(false),
        ExposesReturnsTwice(false), HasDynamicAlloca(false),
        ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
        HasUninlineableIntrinsic(false), UsesVarArgs(false), AllocatedSize(0),
        NumInstructions(0), NumVectorInstructions(0), VectorBonus(0),
        SingleBBBonus(0), EnableLoadElimination(true), LoadEliminationCost(0),
        NumConstantArgs(0), NumConstantOffsetPtrArgs(0), NumAllocaArgs(0),
        NumConstantPtrCmps(0), NumConstantPtrDiffs(0),
        NumInstructionsSimplified(0), SROACostSavings(0),
        SROACostSavingsLost(0) {}

  bool analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

/// Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
  disableLoadElimination();
}

/// If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}
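
// For example (illustrative numbers): if an inlined callee performs four
// simple loads through a pointer that maps back to a caller alloca, each call
// to accumulateSROACost() above credits InlineConstants::InstrCost to that
// alloca, for 4 * InstrCost of anticipated savings. Should a later
// instruction escape the pointer, disableSROA() re-adds those savings to
// Cost, leaving the estimate as if SROA had never been assumed.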

void CallAnalyzer::disableLoadElimination() {
  if (EnableLoadElimination) {
    Cost += LoadEliminationCost;
    LoadEliminationCost = 0;
    EnableLoadElimination = false;
  }
}

/// Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  unsigned IntPtrWidth = DL.getIndexTypeSizeInBits(GEP.getType());
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero())
      continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = GTI.getStructTypeOrNull()) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}
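
// Worked example, assuming a typical 64-bit DataLayout: for
//   %s = type { i32, [4 x i8], i64 }
//   %g = getelementptr inbounds %s, %s* %p, i32 0, i32 2
// the struct index contributes getStructLayout(%s)->getElementOffset(2), a
// constant 8 bytes here, so %g maps to %p's base plus (offset + 8) in
// ConstantOffsetPtrs rather than being treated as arbitrary pointer math.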

/// Use TTI to check whether a GEP is free.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPFree(GetElementPtrInst &GEP) {
  SmallVector<Value *, 4> Operands;
  Operands.push_back(GEP.getOperand(0));
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (Constant *SimpleOp = SimplifiedValues.lookup(*I))
      Operands.push_back(SimpleOp);
    else
      Operands.push_back(*I);
  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&GEP, Operands);
}

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}
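
// For example, "%buf = alloca [16 x i32]" is static and adds 64 bytes to
// AllocatedSize while remaining inlinable, whereas "alloca i32, i32 %n" with
// a %n that never simplifies to a constant sets HasDynamicAlloca and rejects
// the inlining outright.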

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  // FIXME: Pointer sizes may differ between different address spaces, so do we
  // need to use correct address space in the call to getPointerSizeInBits here?
  // Or could we skip the getPointerSizeInBits call completely? As far as I can
  // see the ZeroOffset is used as a dummy value, so we can probably use any
  // bit width for the ZeroOffset?
  APInt ZeroOffset = APInt::getNullValue(DL.getPointerSizeInBits(0));
  bool CheckSROA = I.getType()->isPointerTy();

  // Track the constant or pointer with constant offset we've seen so far.
  Constant *FirstC = nullptr;
  std::pair<Value *, APInt> FirstBaseAndOffset = {nullptr, ZeroOffset};
  Value *FirstV = nullptr;

  for (unsigned i = 0, e = I.getNumIncomingValues(); i != e; ++i) {
    BasicBlock *Pred = I.getIncomingBlock(i);
    // If the incoming block is dead, skip the incoming block.
    if (DeadBlocks.count(Pred))
      continue;
    // If the parent block of phi is not the known successor of the incoming
    // block, skip the incoming block.
    BasicBlock *KnownSuccessor = KnownSuccessors[Pred];
    if (KnownSuccessor && KnownSuccessor != I.getParent())
      continue;

    Value *V = I.getIncomingValue(i);
    // If the incoming value is this phi itself, skip the incoming value.
    if (&I == V)
      continue;

    Constant *C = dyn_cast<Constant>(V);
    if (!C)
      C = SimplifiedValues.lookup(V);

    std::pair<Value *, APInt> BaseAndOffset = {nullptr, ZeroOffset};
    if (!C && CheckSROA)
      BaseAndOffset = ConstantOffsetPtrs.lookup(V);

    if (!C && !BaseAndOffset.first)
      // The incoming value is neither a constant nor a pointer with constant
      // offset, exit early.
      return true;

    if (FirstC) {
      if (FirstC == C)
        // If we've seen a constant incoming value before and it is the same
        // constant we see this time, continue checking the next incoming value.
        continue;
      // Otherwise early exit because we either see a different constant or saw
      // a constant before but we have a pointer with constant offset this time.
      return true;
    }

    if (FirstV) {
      // The same logic as above, but check pointer with constant offset here.
      if (FirstBaseAndOffset == BaseAndOffset)
        continue;
      return true;
    }

    if (C) {
      // This is the 1st time we've seen a constant, record it.
      FirstC = C;
      continue;
    }

    // The remaining case is that this is the 1st time we've seen a pointer with
    // constant offset, record it.
    FirstV = V;
    FirstBaseAndOffset = BaseAndOffset;
  }

  // Check if we can map phi to a constant.
  if (FirstC) {
    SimplifiedValues[&I] = FirstC;
    return true;
  }

  // Check if we can map phi to a pointer with constant offset.
  if (FirstBaseAndOffset.first) {
    ConstantOffsetPtrs[&I] = FirstBaseAndOffset;

    Value *SROAArg;
    DenseMap<Value *, int>::iterator CostIt;
    if (lookupSROAArgAndCost(FirstV, SROAArg, CostIt))
      SROAArgValues[&I] = SROAArg;
  }

  return true;
}
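
// For example, "%x = phi i32 [ 4, %a ], [ 4, %b ]" (with both predecessors
// live) records the constant 4 in SimplifiedValues, and a pointer phi whose
// live incoming values all share one (base, constant offset) pair inherits
// that pair in ConstantOffsetPtrs, keeping SROA viable across the merge.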

/// Check whether we can fold GEPs of constant-offset call site argument
/// pointers. This requires target data and inbounds GEPs.
///
/// \return true if the specified GEP can be folded.
bool CallAnalyzer::canFoldInboundsGEP(GetElementPtrInst &I) {
  // Check if we have a base + offset for the pointer.
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getPointerOperand());
  if (!BaseAndOffset.first)
    return false;

  // Check if the offset of this GEP is constant, and if so accumulate it
  // into Offset.
  if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second))
    return false;

  // Add the result as a new mapping to Base + Offset.
  ConstantOffsetPtrs[&I] = BaseAndOffset;

  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Lambda to check whether a GEP's indices are all constant.
  auto IsGEPOffsetConstant = [&](GetElementPtrInst &GEP) {
    for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
      if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
        return false;
    return true;
  };

  if ((I.isInBounds() && canFoldInboundsGEP(I)) || IsGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return isGEPFree(I);
}

/// Simplify \p I if its operands are constants and update SimplifiedValues.
/// \p Evaluate is a callable specific to instruction type that evaluates the
/// instruction when all the operands are constants.
template <typename Callable>
bool CallAnalyzer::simplifyInstruction(Instruction &I, Callable Evaluate) {
  SmallVector<Constant *, 2> COps;
  for (Value *Op : I.operands()) {
    Constant *COp = dyn_cast<Constant>(Op);
    if (!COp)
      COp = SimplifiedValues.lookup(Op);
    if (!COp)
      return false;
    COps.push_back(COp);
  }
  auto *C = Evaluate(COps);
  if (!C)
    return false;
  SimplifiedValues[&I] = C;
  return true;
}
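
// Typical usage, as in the cast visitors below: fold the instruction once all
// of its operands are known constants, e.g.
//   simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
//     return ConstantExpr::getBitCast(COps[0], I.getType());
//   });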

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getBitCast(COps[0], I.getType());
      }))
    return true;

  // Track base/offsets through casts.
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getPtrToInt(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  unsigned AS = I.getOperand(0)->getType()->getPointerAddressSpace();
  if (IntegerSize >= DL.getPointerSizeInBits(AS)) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getIntToPtr(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  if (IntegerSize <= DL.getPointerTypeSizeInBits(I.getType())) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCast(I.getOpcode(), COps[0], I.getType());
      }))
    return true;

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  // If this is a floating-point cast, and the target says this operation
  // is expensive, this may eventually become a library call. Treat the cost
  // as such.
  switch (I.getOpcode()) {
  case Instruction::FPTrunc:
  case Instruction::FPExt:
  case Instruction::UIToFP:
  case Instruction::SIToFP:
  case Instruction::FPToUI:
  case Instruction::FPToSI:
    if (TTI.getFPOpCost(I.getType()) == TargetTransformInfo::TCC_Expensive)
      Cost += InlineConstants::CallPenalty;
    break;
  default:
    break;
  }

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantFoldInstOperands(&I, COps[0], DL);
      }))
    return true;

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  return CandidateCS.paramHasAttr(A->getArgNo(), Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in the scenario below, inlining hot_call_X() may
  // be beneficial:
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In future, we should elaborate this based on BPI and BFI in more
  // general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

bool CallAnalyzer::isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI) {
  // If global profile summary is available, then callsite's coldness is
  // determined based on that.
  if (PSI && PSI->hasProfileSummary())
    return PSI->isColdCallSite(CS, CallerBFI);

  // Otherwise we need BFI to be available.
  if (!CallerBFI)
    return false;

  // Determine if the callsite is cold relative to caller's entry. We could
  // potentially cache the computation of scaled entry frequency, but the added
  // complexity is not worth it unless this scaling shows up high in the
  // profiles.
  const BranchProbability ColdProb(ColdCallSiteRelFreq, 100);
  auto CallSiteBB = CS.getInstruction()->getParent();
  auto CallSiteFreq = CallerBFI->getBlockFreq(CallSiteBB);
  auto CallerEntryFreq =
      CallerBFI->getBlockFreq(&(CS.getCaller()->getEntryBlock()));
  return CallSiteFreq < CallerEntryFreq * ColdProb;
}
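
// With the default cold-callsite-rel-freq of 2, a callsite block must run at
// less than 2% of the caller's entry frequency to be considered cold when no
// profile summary is available.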

Optional<int>
CallAnalyzer::getHotCallSiteThreshold(CallSite CS,
                                      BlockFrequencyInfo *CallerBFI) {

  // If global profile summary is available, then callsite's hotness is
  // determined based on that.
  if (PSI && PSI->hasProfileSummary() && PSI->isHotCallSite(CS, CallerBFI))
    return Params.HotCallSiteThreshold;

  // Otherwise we need BFI to be available and to have a locally hot callsite
  // threshold.
  if (!CallerBFI || !Params.LocallyHotCallSiteThreshold)
    return None;

  // Determine if the callsite is hot relative to caller's entry. We could
  // potentially cache the computation of scaled entry frequency, but the added
  // complexity is not worth it unless this scaling shows up high in the
  // profiles.
  auto CallSiteBB = CS.getInstruction()->getParent();
  auto CallSiteFreq = CallerBFI->getBlockFreq(CallSiteBB).getFrequency();
  auto CallerEntryFreq = CallerBFI->getEntryFreq();
  if (CallSiteFreq >= CallerEntryFreq * HotCallSiteRelFreq)
    return Params.LocallyHotCallSiteThreshold;

  // Otherwise treat it normally.
  return None;
}
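
// With the default hot-callsite-rel-freq of 60, a callsite block running at
// 60x or more of the caller's entry frequency is treated as locally hot and,
// when a locally hot threshold is configured (the flag above defaults to
// 525), that threshold is returned instead of the base one.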

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();

  // return min(A, B) if B is valid.
  auto MinIfValid = [](int A, Optional<int> B) {
    return B ? std::min(A, B.getValue()) : A;
  };

  // return max(A, B) if B is valid.
  auto MaxIfValid = [](int A, Optional<int> B) {
    return B ? std::max(A, B.getValue()) : A;
  };

  // Various bonus percentages. These are multiplied by Threshold to get the
  // bonus values.
  // SingleBBBonus: This bonus is applied if the callee has a single reachable
  // basic block at the given callsite context. This is speculatively applied
  // and withdrawn if more than one basic block is seen.
  //
  // Vector bonuses: We want to more aggressively inline vector-dense kernels
  // and apply this bonus based on the percentage of vector instructions. A
  // bonus is applied if the vector instructions exceed 50% and half that amount
  // is applied if it exceeds 10%. Note that these bonuses are somewhat
  // arbitrary and evolved over time by accident as much as because they are
  // principled bonuses.
  // FIXME: It would be nice to base the bonus values on something more
  // scientific.
  //
  // LastCallToStaticBonus: This large bonus is applied to ensure the inlining
  // of the last call to a static function as inlining such functions is
  // guaranteed to reduce code size.
  //
  // These bonus percentages may be set to 0 based on properties of the caller
  // and the callsite.
  int SingleBBBonusPercent = 50;
  int VectorBonusPercent = 150;
  int LastCallToStaticBonus = InlineConstants::LastCallToStaticBonus;

  // Lambda to set all the above bonus and bonus percentages to 0.
  auto DisallowAllBonuses = [&]() {
    SingleBBBonusPercent = 0;
    VectorBonusPercent = 0;
    LastCallToStaticBonus = 0;
  };

  // Use the OptMinSizeThreshold or OptSizeThreshold knobs if they are available
  // and reduce the threshold if the caller has the necessary attribute.
  if (Caller->optForMinSize()) {
    Threshold = MinIfValid(Threshold, Params.OptMinSizeThreshold);
    // For minsize, we want to disable the single BB bonus and the vector
    // bonuses, but not the last-call-to-static bonus. Inlining the last call to
    // a static function will, at the minimum, eliminate the parameter setup and
    // call/return instructions.
    SingleBBBonusPercent = 0;
    VectorBonusPercent = 0;
  } else if (Caller->optForSize())
    Threshold = MinIfValid(Threshold, Params.OptSizeThreshold);

  // Adjust the threshold based on inlinehint attribute and profile based
  // hotness information if the caller does not have MinSize attribute.
  if (!Caller->optForMinSize()) {
    if (Callee.hasFnAttribute(Attribute::InlineHint))
      Threshold = MaxIfValid(Threshold, Params.HintThreshold);

    // FIXME: After switching to the new passmanager, simplify the logic below
    // by checking only the callsite hotness/coldness as we will reliably
    // have local profile information.
    //
    // Callsite hotness and coldness can be determined if sample profile is
    // used (which adds hotness metadata to calls) or if caller's
    // BlockFrequencyInfo is available.
    BlockFrequencyInfo *CallerBFI = GetBFI ? &((*GetBFI)(*Caller)) : nullptr;
    auto HotCallSiteThreshold = getHotCallSiteThreshold(CS, CallerBFI);
    if (!Caller->optForSize() && HotCallSiteThreshold) {
      LLVM_DEBUG(dbgs() << "Hot callsite.\n");
      // FIXME: This should update the threshold only if it exceeds the
      // current threshold, but AutoFDO + ThinLTO currently relies on this
      // behavior to prevent inlining of hot callsites during ThinLTO
      // compile phase.
      Threshold = HotCallSiteThreshold.getValue();
    } else if (isColdCallSite(CS, CallerBFI)) {
      LLVM_DEBUG(dbgs() << "Cold callsite.\n");
      // Do not apply bonuses for a cold callsite including the
      // LastCallToStatic bonus. While this bonus might result in code size
      // reduction, it can cause the size of a non-cold caller to increase
      // preventing it from being inlined.
      DisallowAllBonuses();
      Threshold = MinIfValid(Threshold, Params.ColdCallSiteThreshold);
    } else if (PSI) {
      // Use callee's global profile information only if we have no way of
      // determining this via callsite information.
      if (PSI->isFunctionEntryHot(&Callee)) {
        LLVM_DEBUG(dbgs() << "Hot callee.\n");
        // If callsite hotness can not be determined, we may still know
        // that the callee is hot and treat it as a weaker hint for threshold
        // increase.
        Threshold = MaxIfValid(Threshold, Params.HintThreshold);
      } else if (PSI->isFunctionEntryCold(&Callee)) {
        LLVM_DEBUG(dbgs() << "Cold callee.\n");
        // Do not apply bonuses for a cold callee including the
        // LastCallToStatic bonus. While this bonus might result in code size
        // reduction, it can cause the size of a non-cold caller to increase
        // preventing it from being inlined.
        DisallowAllBonuses();
        Threshold = MinIfValid(Threshold, Params.ColdThreshold);
      }
    }
  }

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();

  SingleBBBonus = Threshold * SingleBBBonusPercent / 100;
  VectorBonus = Threshold * VectorBonusPercent / 100;

  bool OnlyOneCallAndLocalLinkage =
      F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
  // If there is only one call of the function, and it has internal linkage,
  // the cost of inlining it drops dramatically. It may seem odd to update
  // Cost in updateThreshold, but the bonus depends on the logic in this method.
  if (OnlyOneCallAndLocalLinkage)
    Cost -= LastCallToStaticBonus;
}
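
// Example with the default threshold of 225 and a threshold multiplier of 1:
// SingleBBBonus = 225 * 50 / 100 = 112 and VectorBonus = 225 * 150 / 100 =
// 337, both granted speculatively and withdrawn if the callee turns out to
// have multiple blocks or few vector instructions.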

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCompare(I.getPredicate(), COps[0], COps[1]);
      }))
    return true;

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null.
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}
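
// For example, with an alloca-derived argument %a, "icmp eq i32* %a, null"
// folds to false (and icmp ne to true) through isKnownNonNullInCallee, and a
// comparison of two pointers that share a base in ConstantOffsetPtrs folds to
// a constant computed from their offsets alone.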

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}
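
// For example, if %p and %q were both traced to the same base with constant
// offsets 8 and 4, a subtraction of their ptrtoint values folds here to the
// constant 4 and is counted in NumConstantPtrDiffs.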
1056
1057bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
1058 Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
Haicheng Wu6d14dfe2017-12-22 17:09:09 +00001059 Constant *CLHS = dyn_cast<Constant>(LHS);
1060 if (!CLHS)
1061 CLHS = SimplifiedValues.lookup(LHS);
1062 Constant *CRHS = dyn_cast<Constant>(RHS);
1063 if (!CRHS)
1064 CRHS = SimplifiedValues.lookup(RHS);
Michael Zolotukhin4e8598e2015-02-06 20:02:51 +00001065
Haicheng Wu6d14dfe2017-12-22 17:09:09 +00001066 Value *SimpleV = nullptr;
1067 if (auto FI = dyn_cast<FPMathOperator>(&I))
1068 SimpleV = SimplifyFPBinOp(I.getOpcode(), CLHS ? CLHS : LHS,
1069 CRHS ? CRHS : RHS, FI->getFastMathFlags(), DL);
1070 else
1071 SimpleV =
1072 SimplifyBinOp(I.getOpcode(), CLHS ? CLHS : LHS, CRHS ? CRHS : RHS, DL);
1073
1074 if (Constant *C = dyn_cast_or_null<Constant>(SimpleV))
1075 SimplifiedValues[&I] = C;
1076
1077 if (SimpleV)
Chandler Carruth0539c072012-03-31 12:42:41 +00001078 return true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001079
1080 // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
1081 disableSROA(LHS);
1082 disableSROA(RHS);
1083
Eli Friedman39ed9a62017-12-22 02:08:08 +00001084 // If the instruction is floating point, and the target says this operation
1085 // is expensive, this may eventually become a library call. Treat the cost
1086 // as such.
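  // (For example, on a soft-float target a 'float' fadd is typically lowered
  // to a library call such as __addsf3; the exact lowering is
  // target-dependent.)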
  if (I.getType()->isFloatingPointTy() &&
      TTI.getFPOpCost(I.getType()) == TargetTransformInfo::TCC_Expensive)
    Cost += InlineConstants::CallPenalty;

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  // If the data is already loaded from this address and hasn't been clobbered
  // by any stores or calls, this load is likely to be redundant and can be
  // eliminated.
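  // For example, a second unordered load of the same address with no
  // intervening store or call is likely to fold away after inlining, so it
  // is treated as free here.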
  if (EnableLoadElimination &&
      !LoadAddrSet.insert(I.getPointerOperand()).second && I.isUnordered()) {
    LoadEliminationCost += InlineConstants::InstrCost;
    return true;
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  // The store can potentially clobber loads and prevent repeated loads from
  // being eliminated.
  // FIXME:
  // 1. We can probably keep an initial set of eliminatable loads subtracted
  // from the cost even when we finally see a store. We just need to disable
  // *further* accumulation of elimination savings.
  // 2. We should probably at some point thread MemorySSA for the callee into
  // this and then use that to actually compute *really* precise savings.
  disableLoadElimination();
  return false;
}

bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
  // Constant folding for extract value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getExtractValue(COps[0], I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
  // Constant folding for insert value is trivial.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getInsertValue(/*AggregateOperand*/ COps[0],
                                            /*InsertedValueOperand*/ COps[1],
                                            I.getIndices());
      }))
    return true;

  // SROA can look through these but give them a cost.
  return false;
}

/// Try to simplify a call site.
///
/// Takes a concrete function and callsite and tries to actually simplify it by
/// analyzing the arguments and call itself with instsimplify. Returns true if
/// it has simplified the callsite to some other entity (a constant), making it
/// free.
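/// For example, a call to @llvm.ctpop.i32 whose operand has been simplified
/// to the constant 255 constant-folds to 8 and becomes free.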
bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
  // FIXME: Using the instsimplify logic directly for this is inefficient
  // because we have to continually rebuild the argument list even when no
  // simplifications can be performed. Until that is fixed with remapping
  // inside of instsimplify, directly constant fold calls here.
  if (!canConstantFoldCallTo(CS, F))
    return false;

  // Try to re-map the arguments to constants.
  SmallVector<Constant *, 4> ConstantArgs;
  ConstantArgs.reserve(CS.arg_size());
  for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
       ++I) {
    Constant *C = dyn_cast<Constant>(*I);
    if (!C)
      C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
    if (!C)
      return false; // This argument doesn't map to a constant.

    ConstantArgs.push_back(C);
  }
  if (Constant *C = ConstantFoldCall(CS, F, ConstantArgs)) {
    SimplifiedValues[CS.getInstruction()] = C;
    return true;
  }

  return false;
}

bool CallAnalyzer::visitCallSite(CallSite CS) {
  if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
    // This aborts the entire analysis.
    ExposesReturnsTwice = true;
    return false;
  }
  if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
    ContainsNoDuplicateCall = true;

  if (Function *F = CS.getCalledFunction()) {
    // When we have a concrete function, first try to simplify it directly.
    if (simplifyCallSite(F, CS))
      return true;

    // Next check if it is an intrinsic we know about.
    // FIXME: Lift this into part of the InstVisitor.
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default:
        if (!CS.onlyReadsMemory() && !isAssumeLikeIntrinsic(II))
          disableLoadElimination();
        return Base::visitCallSite(CS);

      case Intrinsic::load_relative:
        // This is normally lowered to 4 LLVM instructions.
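        // Charge 3 of them here; the 4th is added by the generic
        // per-instruction cost in analyzeBlock when this visit returns false.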
        Cost += 3 * InlineConstants::InstrCost;
        return false;

      case Intrinsic::memset:
      case Intrinsic::memcpy:
      case Intrinsic::memmove:
        disableLoadElimination();
        // SROA can usually chew through these intrinsics, but they aren't free.
        return false;
      case Intrinsic::icall_branch_funnel:
      case Intrinsic::localescape:
        HasUninlineableIntrinsic = true;
        return false;
      case Intrinsic::vastart:
      case Intrinsic::vaend:
        UsesVarArgs = true;
        return false;
      }
    }

    if (F == CS.getInstruction()->getFunction()) {
      // This flag will fully abort the analysis, so don't bother with anything
      // else.
      IsRecursiveCall = true;
      return false;
    }

    if (TTI.isLoweredToCall(F)) {
      // We account for the average 1 instruction per call argument setup
      // here.
      Cost += CS.arg_size() * InlineConstants::InstrCost;

      // Everything other than inline ASM will also have a significant cost
      // merely from making the call.
      if (!isa<InlineAsm>(CS.getCalledValue()))
        Cost += InlineConstants::CallPenalty;
    }

    if (!CS.onlyReadsMemory())
      disableLoadElimination();
    return Base::visitCallSite(CS);
  }

  // Otherwise we're in a very special case -- an indirect function call. See
  // if we can be particularly clever about this.
  Value *Callee = CS.getCalledValue();

  // First, pay the price of the argument setup. We account for the average
  // 1 instruction per call argument setup here.
  Cost += CS.arg_size() * InlineConstants::InstrCost;

  // Next, check if this happens to be an indirect function call to a known
  // function in this inline context. If not, we've done all we can.
  Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
  if (!F) {
    if (!CS.onlyReadsMemory())
      disableLoadElimination();
    return Base::visitCallSite(CS);
  }

  // If we have a constant that we are calling as a function, we can peer
  // through it and see the function target. This happens not infrequently
  // during devirtualization and so we want to give it a hefty bonus for
  // inlining, but cap that bonus in the event that inlining wouldn't pan
  // out. Pretend to inline the function, with a custom threshold.
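  // For example, if the nested analysis below concludes a cost of 100 against
  // a threshold of 400, this call site receives a 300 credit toward its cost.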
  auto IndirectCallParams = Params;
  IndirectCallParams.DefaultThreshold = InlineConstants::IndirectCallThreshold;
  CallAnalyzer CA(TTI, GetAssumptionCache, GetBFI, PSI, ORE, *F, CS,
                  IndirectCallParams);
  if (CA.analyzeCall(CS)) {
    // We were able to inline the indirect call! Subtract the cost from the
    // threshold to get the bonus we want to apply, but don't go below zero.
    Cost -= std::max(0, CA.getThreshold() - CA.getCost());
  }

  if (!F->onlyReadsMemory())
    disableLoadElimination();
  return Base::visitCallSite(CS);
}

bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
  // At least one return instruction will be free after inlining.
  bool Free = !HasReturn;
  HasReturn = true;
  return Free;
}

bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
  // We model unconditional branches as essentially free -- they really
  // shouldn't exist at all, but handling them makes the behavior of the
  // inliner more regular and predictable. Interestingly, conditional branches
  // which will fold away are also free.
  return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
         dyn_cast_or_null<ConstantInt>(
             SimplifiedValues.lookup(BI.getCondition()));
}

bool CallAnalyzer::visitSelectInst(SelectInst &SI) {
  bool CheckSROA = SI.getType()->isPointerTy();
  Value *TrueVal = SI.getTrueValue();
  Value *FalseVal = SI.getFalseValue();

  Constant *TrueC = dyn_cast<Constant>(TrueVal);
  if (!TrueC)
    TrueC = SimplifiedValues.lookup(TrueVal);
  Constant *FalseC = dyn_cast<Constant>(FalseVal);
  if (!FalseC)
    FalseC = SimplifiedValues.lookup(FalseVal);
  Constant *CondC =
      dyn_cast_or_null<Constant>(SimplifiedValues.lookup(SI.getCondition()));

  if (!CondC) {
    // Select C, X, X => X
    if (TrueC == FalseC && TrueC) {
      SimplifiedValues[&SI] = TrueC;
      return true;
    }

    if (!CheckSROA)
      return Base::visitSelectInst(SI);

    std::pair<Value *, APInt> TrueBaseAndOffset =
        ConstantOffsetPtrs.lookup(TrueVal);
    std::pair<Value *, APInt> FalseBaseAndOffset =
        ConstantOffsetPtrs.lookup(FalseVal);
    if (TrueBaseAndOffset == FalseBaseAndOffset && TrueBaseAndOffset.first) {
      ConstantOffsetPtrs[&SI] = TrueBaseAndOffset;

      Value *SROAArg;
      DenseMap<Value *, int>::iterator CostIt;
      if (lookupSROAArgAndCost(TrueVal, SROAArg, CostIt))
        SROAArgValues[&SI] = SROAArg;
      return true;
    }

    return Base::visitSelectInst(SI);
  }

  // Select condition is a constant.
  Value *SelectedV = CondC->isAllOnesValue()
                         ? TrueVal
                         : (CondC->isNullValue()) ? FalseVal : nullptr;
  if (!SelectedV) {
    // Condition is a vector constant that is not all 1s or all 0s. If all
    // operands are constants, ConstantExpr::getSelect() can handle the cases
    // such as select vectors.
    if (TrueC && FalseC) {
      if (auto *C = ConstantExpr::getSelect(CondC, TrueC, FalseC)) {
        SimplifiedValues[&SI] = C;
        return true;
      }
    }
    return Base::visitSelectInst(SI);
  }

  // Condition is either all 1s or all 0s. SI can be simplified.
  if (Constant *SelectedC = dyn_cast<Constant>(SelectedV)) {
    SimplifiedValues[&SI] = SelectedC;
    return true;
  }

  if (!CheckSROA)
    return true;

  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(SelectedV);
  if (BaseAndOffset.first) {
    ConstantOffsetPtrs[&SI] = BaseAndOffset;

    Value *SROAArg;
    DenseMap<Value *, int>::iterator CostIt;
    if (lookupSROAArgAndCost(SelectedV, SROAArg, CostIt))
      SROAArgValues[&SI] = SROAArg;
  }

  return true;
}

bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
  // We model unconditional switches as free, see the comments on handling
  // branches.
  if (isa<ConstantInt>(SI.getCondition()))
    return true;
  if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
    if (isa<ConstantInt>(V))
      return true;

  // Assume the most general case where the switch is lowered into
  // either a jump table, bit test, or a balanced binary tree consisting of
  // case clusters without merging adjacent clusters with the same
  // destination. We do not consider the switches that are lowered with a mix
  // of jump table/bit test/binary search tree. The cost of the switch is
  // proportional to the size of the tree or the size of jump table range.
  //
  // NB: We convert large switches which are just used to initialize large phi
  // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
  // inlining those. It will prevent inlining in cases where the optimization
  // does not (yet) fire.

  // Maximum valid cost increase in this function.
  int CostUpperBound = INT_MAX - InlineConstants::InstrCost - 1;

  // Exit early for a large switch, assuming one case needs at least one
  // instruction.
  // FIXME: This is not true for a bit test, but ignore such cases for now to
  // save compile-time.
  int64_t CostLowerBound =
      std::min((int64_t)CostUpperBound,
               (int64_t)SI.getNumCases() * InlineConstants::InstrCost + Cost);

  if (CostLowerBound > Threshold && !ComputeFullInlineCost) {
    Cost = CostLowerBound;
    return false;
  }

  unsigned JumpTableSize = 0;
  unsigned NumCaseCluster =
      TTI.getEstimatedNumberOfCaseClusters(SI, JumpTableSize);

  // If suitable for a jump table, consider the cost for the table size and
  // branch to destination.
  if (JumpTableSize) {
    int64_t JTCost = (int64_t)JumpTableSize * InlineConstants::InstrCost +
                     4 * InlineConstants::InstrCost;

    Cost = std::min((int64_t)CostUpperBound, JTCost + Cost);
    return false;
  }

  // When forming a binary search tree, the number of nodes is the same as the
  // number of comparisons when lowered. For a given number of clusters, n, we
  // can define a recursive function, f(n), to find the number of nodes in the
  // tree. The recursion is:
  //   f(n) = 1 + f(n/2) + f(n - n/2), when n > 3,
  //   f(n) = n, when n <= 3.
  // This leads to a binary tree where each leaf is either f(2) or f(3) when
  // n > 3. So the number of comparisons from leaves is n, while the number
  // from non-leaf nodes is:
  //   2^(log2(n) - 1) - 1
  //     = 2^log2(n) * 2^-1 - 1
  //     = n / 2 - 1.
  // Combining comparisons from leaf and non-leaf nodes gives the simple
  // closed form:
  //   n + n / 2 - 1 = n * 3 / 2 - 1
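  // For example, n = 8 gives 8 leaf comparisons plus 8 / 2 - 1 = 3 non-leaf
  // comparisons, matching 8 * 3 / 2 - 1 = 11.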
  if (NumCaseCluster <= 3) {
    // Suppose a comparison includes one compare and one conditional branch.
    Cost += NumCaseCluster * 2 * InlineConstants::InstrCost;
    return false;
  }

  int64_t ExpectedNumberOfCompare = 3 * (int64_t)NumCaseCluster / 2 - 1;
  int64_t SwitchCost =
      ExpectedNumberOfCompare * 2 * InlineConstants::InstrCost;

  Cost = std::min((int64_t)CostUpperBound, SwitchCost + Cost);
  return false;
}

bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
  // We never want to inline functions that contain an indirectbr. This is
  // incorrect because all the blockaddress's (in static global initializers
  // for example) would be referring to the original function, and this
  // indirect jump would jump from the inlined copy of the function into the
  // original function which is extremely undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions with
  // indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function.
  HasIndirectBr = true;
  return false;
}

bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a resume instruction.
  return false;
}

bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a cleanupret instruction.
  return false;
}

bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
  // FIXME: It's not clear that a single instruction is an accurate model for
  // the inline cost of a catchret instruction.
  return false;
}

bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
  // FIXME: It might be reasonable to discount the cost of instructions leading
  // to unreachable as they have the lowest possible impact on both runtime and
  // code size.
  return true; // No actual code is needed for unreachable.
}

bool CallAnalyzer::visitInstruction(Instruction &I) {
  // Some instructions are free. All of the free intrinsics can also be
  // handled by SROA, etc.
  if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
    return true;

  // We found something we don't understand or can't handle. Mark any SROA-able
  // values in the operand list as no longer viable.
  for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
    disableSROA(*OI);

  return false;
}

/// Analyze a basic block for its contribution to the inline cost.
///
/// This method walks the analyzer over every instruction in the given basic
/// block and accounts for their cost during inlining at this callsite. It
/// aborts early if the threshold has been exceeded or an impossible-to-inline
/// construct has been detected. It returns false if inlining is no longer
/// viable, and true if inlining remains viable.
bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
                                SmallPtrSetImpl<const Value *> &EphValues) {
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
    // FIXME: Currently, the number of instructions in a function, regardless
    // of our ability to simplify them during inlining to constants or dead
    // code, is actually used by the vector bonus heuristic. As long as that's
    // true, we have to special case debug intrinsics here to prevent
    // differences in inlining due to debug symbols. Eventually, the number of
    // unsimplified instructions shouldn't factor into the cost computation,
    // but until then, hack around it here.
    if (isa<DbgInfoIntrinsic>(I))
      continue;

    // Skip ephemeral values.
    if (EphValues.count(&*I))
      continue;

    ++NumInstructions;
    if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
      ++NumVectorInstructions;

    // If the instruction simplified to a constant, there is no cost to this
    // instruction. Visit the instructions using our InstVisitor to account for
    // all of the per-instruction logic. The visit tree returns true if we
    // consumed the instruction in any way, and false if the instruction's base
    // cost should count against inlining.
    if (Base::visit(&*I))
      ++NumInstructionsSimplified;
    else
      Cost += InlineConstants::InstrCost;

    using namespace ore;
    // If visiting this instruction detected an uninlinable pattern, abort.
    if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
        HasIndirectBr || HasUninlineableIntrinsic || UsesVarArgs) {
      if (ORE)
        ORE->emit([&]() {
          return OptimizationRemarkMissed(DEBUG_TYPE, "NeverInline",
                                          CandidateCS.getInstruction())
                 << NV("Callee", &F)
                 << " has uninlinable pattern and cost is not fully computed";
        });
      return false;
    }

    // If the caller is a recursive function then we don't want to inline
    // functions which allocate a lot of stack space because it would increase
    // the caller stack usage dramatically.
    if (IsCallerRecursive &&
        AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller) {
      if (ORE)
        ORE->emit([&]() {
          return OptimizationRemarkMissed(DEBUG_TYPE, "NeverInline",
                                          CandidateCS.getInstruction())
                 << NV("Callee", &F)
                 << " is recursive and allocates too much stack space. Cost is "
                    "not fully computed";
        });
      return false;
    }

    // Check if we've passed the maximum possible threshold so we don't spin in
    // huge basic blocks that will never inline.
    if (Cost >= Threshold && !ComputeFullInlineCost)
      return false;
  }

  return true;
}

/// Compute the base pointer and cumulative constant offsets for V.
///
/// This strips all constant offsets off of V, leaving it the base pointer, and
/// accumulates the total constant offset applied in the returned constant. It
/// returns 0 if V is not a pointer, and returns the constant '0' if there are
/// no constant offsets applied.
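/// For example, for a chain of inbounds GEPs adding 4 and then 8 to a pointer
/// argument %p, V is rewritten to %p and the constant 12 is returned.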
ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
  if (!V->getType()->isPointerTy())
    return nullptr;

  unsigned AS = V->getType()->getPointerAddressSpace();
  unsigned IntPtrWidth = DL.getIndexSizeInBits(AS);
  APInt Offset = APInt::getNullValue(IntPtrWidth);

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<Value *, 4> Visited;
  Visited.insert(V);
  do {
    if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
      if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
        return nullptr;
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
      if (GA->isInterposable())
        break;
      V = GA->getAliasee();
    } else {
      break;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  Type *IntPtrTy = DL.getIntPtrType(V->getContext(), AS);
  return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
}

/// Find dead blocks due to deleted CFG edges during inlining.
///
/// If we know the successor of the current block, \p CurrBB, has to be \p
/// NextBB, the other successors of \p CurrBB are dead if these successors have
/// no live incoming CFG edges. If one block is found to be dead, we can
/// continue growing the dead block list by checking the successors of the dead
/// blocks to see if all their incoming edges are dead or not.
void CallAnalyzer::findDeadBlocks(BasicBlock *CurrBB, BasicBlock *NextBB) {
  auto IsEdgeDead = [&](BasicBlock *Pred, BasicBlock *Succ) {
    // A CFG edge is dead if the predecessor is dead or the predecessor has a
    // known successor which is not the one under examination.
    return (DeadBlocks.count(Pred) ||
            (KnownSuccessors[Pred] && KnownSuccessors[Pred] != Succ));
  };

  auto IsNewlyDead = [&](BasicBlock *BB) {
    // If all the edges to a block are dead, the block is also dead.
    return (!DeadBlocks.count(BB) &&
            llvm::all_of(predecessors(BB),
                         [&](BasicBlock *P) { return IsEdgeDead(P, BB); }));
  };

  for (BasicBlock *Succ : successors(CurrBB)) {
    if (Succ == NextBB || !IsNewlyDead(Succ))
      continue;
    SmallVector<BasicBlock *, 4> NewDead;
    NewDead.push_back(Succ);
    while (!NewDead.empty()) {
      BasicBlock *Dead = NewDead.pop_back_val();
      if (DeadBlocks.insert(Dead))
        // Continue growing the dead block list.
        for (BasicBlock *S : successors(Dead))
          if (IsNewlyDead(S))
            NewDead.push_back(S);
    }
  }
}

/// Analyze a call site for potential inlining.
///
/// Returns true if inlining this call is viable, and false if it is not
/// viable. It computes the cost and adjusts the threshold based on numerous
/// factors and heuristics. If this method returns false but the computed cost
/// is below the computed threshold, then inlining was forcibly disabled by
/// some artifact of the routine.
bool CallAnalyzer::analyzeCall(CallSite CS) {
  ++NumCallsAnalyzed;

  // Perform some tweaks to the cost and threshold based on the direct
  // callsite information.

  // We want to more aggressively inline vector-dense kernels, so up the
  // threshold, and we'll lower it if the % of vector instructions gets too
  // low. Note that these bonuses are somewhat arbitrary and evolved over time
  // by accident as much as because they are principled bonuses.
  //
  // FIXME: It would be nice to remove all such bonuses. At least it would be
  // nice to base the bonus values on something more scientific.
  assert(NumInstructions == 0);
  assert(NumVectorInstructions == 0);

  // Update the threshold based on callsite properties.
  updateThreshold(CS, F);

  // Speculatively apply all possible bonuses to Threshold. If cost exceeds
  // this Threshold any time, and cost cannot decrease, we can stop processing
  // the rest of the function body.
  Threshold += (SingleBBBonus + VectorBonus);

  // Give out bonuses for the callsite, as the instructions setting them up
  // will be gone after inlining.
  Cost -= getCallsiteCost(CS, DL);

  // If this function uses the coldcc calling convention, prefer not to inline
  // it.
  if (F.getCallingConv() == CallingConv::Cold)
    Cost += InlineConstants::ColdccPenalty;

  // Check if we're done. This can happen due to bonuses and penalties.
  if (Cost >= Threshold && !ComputeFullInlineCost)
    return false;

  if (F.empty())
    return true;

  Function *Caller = CS.getInstruction()->getFunction();
  // Check if the caller function is recursive itself.
  for (User *U : Caller->users()) {
    CallSite Site(U);
    if (!Site)
      continue;
    Instruction *I = Site.getInstruction();
    if (I->getFunction() == Caller) {
      IsCallerRecursive = true;
      break;
    }
  }

  // Populate our simplified values by mapping from function arguments to call
  // arguments with known important simplifications.
  CallSite::arg_iterator CAI = CS.arg_begin();
  for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
       FAI != FAE; ++FAI, ++CAI) {
    assert(CAI != CS.arg_end());
    if (Constant *C = dyn_cast<Constant>(CAI))
      SimplifiedValues[&*FAI] = C;

    Value *PtrArg = *CAI;
    if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
      ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());

      // We can SROA any pointer arguments derived from alloca instructions.
      if (isa<AllocaInst>(PtrArg)) {
        SROAArgValues[&*FAI] = PtrArg;
        SROAArgCosts[PtrArg] = 0;
      }
    }
  }
  NumConstantArgs = SimplifiedValues.size();
  NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
  NumAllocaArgs = SROAArgValues.size();

  // FIXME: If a caller has multiple calls to a callee, we end up recomputing
  // the ephemeral values multiple times (and they're completely determined by
  // the callee, so this is purely duplicate work).
  SmallPtrSet<const Value *, 32> EphValues;
  CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);

  // The worklist of live basic blocks in the callee *after* inlining. We avoid
  // adding basic blocks of the callee which can be proven to be dead for this
  // particular call site in order to get more accurate cost estimates. This
  // requires a somewhat heavyweight iteration pattern: we need to walk the
  // basic blocks in a breadth-first order as we insert live successors. To
  // accomplish this, prioritizing for small iterations because we exit after
  // crossing our threshold, we use a small-size optimized SetVector.
  typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
                    SmallPtrSet<BasicBlock *, 16>>
      BBSetVector;
  BBSetVector BBWorklist;
  BBWorklist.insert(&F.getEntryBlock());
  bool SingleBB = true;
  // Note that we *must not* cache the size, this loop grows the worklist.
  for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
    // Bail out the moment we cross the threshold. This means we'll under-count
    // the cost, but only when undercounting doesn't matter.
    if (Cost >= Threshold && !ComputeFullInlineCost)
      break;

    BasicBlock *BB = BBWorklist[Idx];
    if (BB->empty())
      continue;

    // Disallow inlining a blockaddress. A blockaddress only has defined
    // behavior for an indirect branch in the same function, and we do not
    // currently support inlining indirect branches. But, the inliner may not
    // see an indirect branch that ends up being dead code at a particular call
    // site. If the blockaddress escapes the function, e.g., via a global
    // variable, inlining may lead to an invalid cross-function reference.
    if (BB->hasAddressTaken())
      return false;

    // Analyze the cost of this block. If we blow through the threshold, this
    // returns false, and we can bail out.
    if (!analyzeBlock(BB, EphValues))
      return false;

    TerminatorInst *TI = BB->getTerminator();

    // Add in the live successors by first checking whether we have a
    // terminator that may be simplified based on the values simplified by
    // this call.
    if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
      if (BI->isConditional()) {
        Value *Cond = BI->getCondition();
        if (ConstantInt *SimpleCond =
                dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
          BasicBlock *NextBB = BI->getSuccessor(SimpleCond->isZero() ? 1 : 0);
          BBWorklist.insert(NextBB);
          KnownSuccessors[BB] = NextBB;
          findDeadBlocks(BB, NextBB);
          continue;
        }
      }
    } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
      Value *Cond = SI->getCondition();
      if (ConstantInt *SimpleCond =
              dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
        BasicBlock *NextBB = SI->findCaseValue(SimpleCond)->getCaseSuccessor();
        BBWorklist.insert(NextBB);
        KnownSuccessors[BB] = NextBB;
        findDeadBlocks(BB, NextBB);
        continue;
      }
    }

    // If we're unable to select a particular successor, just count all of
    // them.
    for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
         ++TIdx)
      BBWorklist.insert(TI->getSuccessor(TIdx));

    // If we had any successors at this point, then post-inlining is likely to
    // have them as well. Note that we assume any basic blocks which existed
    // due to branches or switches which folded above will also fold after
    // inlining.
    if (SingleBB && TI->getNumSuccessors() > 1) {
      // Take off the bonus we applied to the threshold.
      Threshold -= SingleBBBonus;
      SingleBB = false;
    }
  }

  bool OnlyOneCallAndLocalLinkage =
      F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
  // If this is a noduplicate call, we can still inline as long as
  // inlining this would cause the removal of the callee (so the instruction
  // is not actually duplicated, just moved).
  if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
    return false;

  // We applied the maximum possible vector bonus at the beginning. Now,
  // subtract the excess bonus, if any, from the Threshold before
  // comparing against Cost.
  if (NumVectorInstructions <= NumInstructions / 10)
    Threshold -= VectorBonus;
  else if (NumVectorInstructions <= NumInstructions / 2)
    Threshold -= VectorBonus / 2;

  return Cost < std::max(1, Threshold);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// Dump stats about this call's analysis.
LLVM_DUMP_METHOD void CallAnalyzer::dump() {
#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
  DEBUG_PRINT_STAT(NumConstantArgs);
  DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
  DEBUG_PRINT_STAT(NumAllocaArgs);
  DEBUG_PRINT_STAT(NumConstantPtrCmps);
  DEBUG_PRINT_STAT(NumConstantPtrDiffs);
  DEBUG_PRINT_STAT(NumInstructionsSimplified);
  DEBUG_PRINT_STAT(NumInstructions);
  DEBUG_PRINT_STAT(SROACostSavings);
  DEBUG_PRINT_STAT(SROACostSavingsLost);
  DEBUG_PRINT_STAT(LoadEliminationCost);
  DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
  DEBUG_PRINT_STAT(Cost);
  DEBUG_PRINT_STAT(Threshold);
#undef DEBUG_PRINT_STAT
}
#endif

/// Test that there are no attribute conflicts between Caller and Callee
/// that prevent inlining.
static bool functionsHaveCompatibleAttributes(Function *Caller,
                                              Function *Callee,
                                              TargetTransformInfo &TTI) {
  return TTI.areInlineCompatible(Caller, Callee) &&
         AttributeFuncs::areInlineCompatible(*Caller, *Callee);
}

int llvm::getCallsiteCost(CallSite CS, const DataLayout &DL) {
  int Cost = 0;
  for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
    if (CS.isByValArgument(I)) {
      // We approximate the number of loads and stores needed by dividing the
      // size of the byval type by the target's pointer size.
      PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
      unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
      unsigned AS = PTy->getAddressSpace();
      unsigned PointerSize = DL.getPointerSizeInBits(AS);
      // Ceiling division.
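      // For example, a 96-bit byval type with 64-bit pointers needs
      // (96 + 63) / 64 = 2 word-sized copies (each a load and a store).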
      unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;

      // If it generates more than 8 stores it is likely to be expanded as an
      // inline memcpy so we take that as an upper bound. Otherwise we assume
      // one load and one store per word copied.
      // FIXME: The maxStoresPerMemcpy setting from the target should be used
      // here instead of a magic number of 8, but it's not available via
      // DataLayout.
      NumStores = std::min(NumStores, 8U);

      Cost += 2 * NumStores * InlineConstants::InstrCost;
    } else {
      // For non-byval arguments subtract off one instruction per call
      // argument.
      Cost += InlineConstants::InstrCost;
    }
  }
  // The call instruction also disappears after inlining.
  Cost += InlineConstants::InstrCost + InlineConstants::CallPenalty;
  return Cost;
}

InlineCost llvm::getInlineCost(
    CallSite CS, const InlineParams &Params, TargetTransformInfo &CalleeTTI,
    std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
    Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
    ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE) {
  return getInlineCost(CS, CS.getCalledFunction(), Params, CalleeTTI,
                       GetAssumptionCache, GetBFI, PSI, ORE);
}

InlineCost llvm::getInlineCost(
    CallSite CS, Function *Callee, const InlineParams &Params,
    TargetTransformInfo &CalleeTTI,
    std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
    Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
    ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE) {

  // Cannot inline indirect calls.
  if (!Callee)
    return llvm::InlineCost::getNever();

  // Never inline calls with byval arguments that do not have the alloca
  // address space. Since byval arguments can be replaced with a copy to an
  // alloca, the inlined code would need to be adjusted to handle that the
  // argument is in the alloca address space (so it is a little bit complicated
  // to solve).
  unsigned AllocaAS = Callee->getParent()->getDataLayout().getAllocaAddrSpace();
  for (unsigned I = 0, E = CS.arg_size(); I != E; ++I)
    if (CS.isByValArgument(I)) {
      PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
      if (PTy->getAddressSpace() != AllocaAS)
        return llvm::InlineCost::getNever();
    }

  // Calls to functions with always-inline attributes should be inlined
  // whenever possible.
  if (CS.hasFnAttr(Attribute::AlwaysInline)) {
    if (isInlineViable(*Callee))
      return llvm::InlineCost::getAlways();
    return llvm::InlineCost::getNever();
  }

  // Never inline functions with conflicting attributes (unless callee has
  // always-inline attribute).
  Function *Caller = CS.getCaller();
  if (!functionsHaveCompatibleAttributes(Caller, Callee, CalleeTTI))
    return llvm::InlineCost::getNever();

  // Don't inline this call if the caller has the optnone attribute.
  if (Caller->hasFnAttribute(Attribute::OptimizeNone))
    return llvm::InlineCost::getNever();

  // Don't inline functions which can be interposed at link-time. Don't inline
  // functions marked noinline or call sites marked noinline.
  // Note: inlining non-exact non-interposable functions is fine, since we know
  // we have *a* correct implementation of the source level function.
  if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
      CS.isNoInline())
    return llvm::InlineCost::getNever();

  LLVM_DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
                          << "... (caller:" << Caller->getName() << ")\n");

  CallAnalyzer CA(CalleeTTI, GetAssumptionCache, GetBFI, PSI, ORE, *Callee, CS,
                  Params);
  bool ShouldInline = CA.analyzeCall(CS);

  LLVM_DEBUG(CA.dump());

  // Check if there was a reason to force inlining or no inlining.
  if (!ShouldInline && CA.getCost() < CA.getThreshold())
    return InlineCost::getNever();
  if (ShouldInline && CA.getCost() >= CA.getThreshold())
    return InlineCost::getAlways();

  return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
}

bool llvm::isInlineViable(Function &F) {
  bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
  for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
    // Disallow inlining of functions which contain indirect branches or
    // blockaddresses.
    if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
      return false;

    for (auto &II : *BI) {
      CallSite CS(&II);
      if (!CS)
        continue;

      // Disallow recursive calls.
      if (&F == CS.getCalledFunction())
        return false;

      // Disallow calls which expose returns-twice to a function not previously
      // attributed as such.
      if (!ReturnsTwice && CS.isCall() &&
          cast<CallInst>(CS.getInstruction())->canReturnTwice())
        return false;

      if (CS.getCalledFunction())
        switch (CS.getCalledFunction()->getIntrinsicID()) {
        default:
          break;
        // Disallow inlining of @llvm.icall.branch.funnel because the current
        // backend can't separate call targets from call arguments.
        case llvm::Intrinsic::icall_branch_funnel:
        // Disallow inlining functions that call @llvm.localescape. Doing this
        // correctly would require major changes to the inliner.
        case llvm::Intrinsic::localescape:
        // Disallow inlining of functions that access VarArgs.
        case llvm::Intrinsic::vastart:
        case llvm::Intrinsic::vaend:
          return false;
        }
    }
  }

  return true;
}

// APIs to create InlineParams based on command line flags and/or other
// parameters.

InlineParams llvm::getInlineParams(int Threshold) {
  InlineParams Params;

  // This field is the threshold to use for a callee by default. This is
  // derived from one or more of:
  //  * optimization or size-optimization levels,
  //  * a value passed to createFunctionInliningPass function, or
  //  * the -inline-threshold flag.
  // If the -inline-threshold flag is explicitly specified, that is used
  // irrespective of anything else.
  if (InlineThreshold.getNumOccurrences() > 0)
    Params.DefaultThreshold = InlineThreshold;
  else
    Params.DefaultThreshold = Threshold;

  // Set the HintThreshold knob from the -inlinehint-threshold.
  Params.HintThreshold = HintThreshold;

  // Set the HotCallSiteThreshold knob from the -hot-callsite-threshold.
  Params.HotCallSiteThreshold = HotCallSiteThreshold;

  // If the -locally-hot-callsite-threshold is explicitly specified, use it to
  // populate LocallyHotCallSiteThreshold. Later, we populate
  // Params.LocallyHotCallSiteThreshold from -locally-hot-callsite-threshold if
  // we know that optimization level is O3 (in the getInlineParams variant that
  // takes the opt and size levels).
  // FIXME: Remove this check (and make the assignment unconditional) after
  // addressing size regression issues at O2.
  if (LocallyHotCallSiteThreshold.getNumOccurrences() > 0)
    Params.LocallyHotCallSiteThreshold = LocallyHotCallSiteThreshold;

  // Set the ColdCallSiteThreshold knob from the
  // -inline-cold-callsite-threshold.
  Params.ColdCallSiteThreshold = ColdCallSiteThreshold;

  // Set the OptMinSizeThreshold and OptSizeThreshold params only if the
  // -inlinehint-threshold commandline option is not explicitly given. If that
  // option is present, then its value applies even for callees with size and
  // minsize attributes.
  // If the -inline-threshold is not specified, set the ColdThreshold from the
  // -inlinecold-threshold even if it is not explicitly passed. If
  // -inline-threshold is specified, then -inlinecold-threshold needs to be
  // explicitly specified to set the ColdThreshold knob.
  if (InlineThreshold.getNumOccurrences() == 0) {
    Params.OptMinSizeThreshold = InlineConstants::OptMinSizeThreshold;
    Params.OptSizeThreshold = InlineConstants::OptSizeThreshold;
    Params.ColdThreshold = ColdThreshold;
  } else if (ColdThreshold.getNumOccurrences() > 0) {
    Params.ColdThreshold = ColdThreshold;
  }
  return Params;
}

InlineParams llvm::getInlineParams() {
  return getInlineParams(InlineThreshold);
}

// Compute the default threshold for inlining based on the opt level and the
// size opt level.
static int computeThresholdFromOptLevels(unsigned OptLevel,
                                         unsigned SizeOptLevel) {
  if (OptLevel > 2)
    return InlineConstants::OptAggressiveThreshold;
  if (SizeOptLevel == 1) // -Os
    return InlineConstants::OptSizeThreshold;
  if (SizeOptLevel == 2) // -Oz
    return InlineConstants::OptMinSizeThreshold;
  return InlineThreshold;
}

InlineParams llvm::getInlineParams(unsigned OptLevel, unsigned SizeOptLevel) {
  auto Params =
      getInlineParams(computeThresholdFromOptLevels(OptLevel, SizeOptLevel));
  // At O3, use the value of -locally-hot-callsite-threshold option to populate
  // Params.LocallyHotCallSiteThreshold. Below O3, this flag has effect only
  // when it is specified explicitly.
  if (OptLevel > 2)
    Params.LocallyHotCallSiteThreshold = LocallyHotCallSiteThreshold;
  return Params;
}