blob: b0cb29203a5a40ed82c2dbe12a9dbeeeadcf4da2 [file] [log] [blame]
Dan Gohman4552e3c2009-10-13 18:30:07 +00001//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file implements inline cost analysis.
11//
12//===----------------------------------------------------------------------===//
13
14#include "llvm/Analysis/InlineCost.h"
Chandler Carruthed0881b2012-12-03 16:50:05 +000015#include "llvm/ADT/STLExtras.h"
16#include "llvm/ADT/SetVector.h"
17#include "llvm/ADT/SmallPtrSet.h"
18#include "llvm/ADT/SmallVector.h"
19#include "llvm/ADT/Statistic.h"
Daniel Jasperaec2fa32016-12-19 08:22:17 +000020#include "llvm/Analysis/AssumptionCache.h"
Easwaran Raman12585b02017-01-20 22:44:04 +000021#include "llvm/Analysis/BlockFrequencyInfo.h"
Hal Finkel57f03dd2014-09-07 13:49:57 +000022#include "llvm/Analysis/CodeMetrics.h"
Chandler Carruthd9903882015-01-14 11:23:27 +000023#include "llvm/Analysis/ConstantFolding.h"
Haicheng Wu3739e142017-12-14 14:36:18 +000024#include "llvm/Analysis/CFG.h"
Chandler Carruth0539c072012-03-31 12:42:41 +000025#include "llvm/Analysis/InstructionSimplify.h"
Easwaran Raman71069cf2016-06-09 22:23:21 +000026#include "llvm/Analysis/ProfileSummaryInfo.h"
Chandler Carruth42f3dce2013-01-21 11:55:09 +000027#include "llvm/Analysis/TargetTransformInfo.h"
Haicheng Wua4461512017-12-15 14:34:41 +000028#include "llvm/Analysis/ValueTracking.h"
Chandler Carruth219b89b2014-03-04 11:01:28 +000029#include "llvm/IR/CallSite.h"
Chandler Carruth9fb823b2013-01-02 11:36:10 +000030#include "llvm/IR/CallingConv.h"
31#include "llvm/IR/DataLayout.h"
Chandler Carruth03eb0de2014-03-04 10:40:04 +000032#include "llvm/IR/GetElementPtrTypeIterator.h"
Chandler Carruth9fb823b2013-01-02 11:36:10 +000033#include "llvm/IR/GlobalAlias.h"
Chandler Carruth7da14f12014-03-06 03:23:41 +000034#include "llvm/IR/InstVisitor.h"
Chandler Carruth9fb823b2013-01-02 11:36:10 +000035#include "llvm/IR/IntrinsicInst.h"
36#include "llvm/IR/Operator.h"
Chandler Carruth0539c072012-03-31 12:42:41 +000037#include "llvm/Support/Debug.h"
Chandler Carruth0539c072012-03-31 12:42:41 +000038#include "llvm/Support/raw_ostream.h"
Eric Christopher2dfbd7e2011-02-05 00:49:15 +000039
Dan Gohman4552e3c2009-10-13 18:30:07 +000040using namespace llvm;
41
Chandler Carruthf1221bd2014-04-22 02:48:03 +000042#define DEBUG_TYPE "inline-cost"
43
Chandler Carruth7ae90d42012-04-11 10:15:10 +000044STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");
45
Easwaran Raman1c57cc22016-08-10 00:48:04 +000046static cl::opt<int> InlineThreshold(
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +000047 "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
48 cl::desc("Control the amount of inlining to perform (default = 225)"));
49
50static cl::opt<int> HintThreshold(
51 "inlinehint-threshold", cl::Hidden, cl::init(325),
52 cl::desc("Threshold for inlining functions with inline hint"));
53
Easwaran Raman12585b02017-01-20 22:44:04 +000054static cl::opt<int>
55 ColdCallSiteThreshold("inline-cold-callsite-threshold", cl::Hidden,
56 cl::init(45),
57 cl::desc("Threshold for inlining cold callsites"));
58
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +000059// We introduce this threshold to help performance of instrumentation based
60// PGO before we actually hook up inliner with analysis passes such as BPI and
61// BFI.
62static cl::opt<int> ColdThreshold(
Easwaran Ramanc103ef82017-05-11 21:36:28 +000063 "inlinecold-threshold", cl::Hidden, cl::init(45),
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +000064 cl::desc("Threshold for inlining functions with cold attribute"));
65
Dehao Chende39cb92016-08-05 20:28:41 +000066static cl::opt<int>
67 HotCallSiteThreshold("hot-callsite-threshold", cl::Hidden, cl::init(3000),
68 cl::ZeroOrMore,
69 cl::desc("Threshold for hot callsites "));
70
Easwaran Raman974d4ee2017-08-03 22:23:33 +000071static cl::opt<int> LocallyHotCallSiteThreshold(
72 "locally-hot-callsite-threshold", cl::Hidden, cl::init(525), cl::ZeroOrMore,
73 cl::desc("Threshold for locally hot callsites "));
74
Easwaran Ramanc5fa6352017-06-27 23:11:18 +000075static cl::opt<int> ColdCallSiteRelFreq(
76 "cold-callsite-rel-freq", cl::Hidden, cl::init(2), cl::ZeroOrMore,
77 cl::desc("Maxmimum block frequency, expressed as a percentage of caller's "
78 "entry frequency, for a callsite to be cold in the absence of "
79 "profile information."));
80
Easwaran Raman974d4ee2017-08-03 22:23:33 +000081static cl::opt<int> HotCallSiteRelFreq(
82 "hot-callsite-rel-freq", cl::Hidden, cl::init(60), cl::ZeroOrMore,
Easwaran Ramanff77cc72017-08-04 17:15:17 +000083 cl::desc("Minimum block frequency, expressed as a multiple of caller's "
Easwaran Raman974d4ee2017-08-03 22:23:33 +000084 "entry frequency, for a callsite to be hot in the absence of "
85 "profile information."));
86
Easwaran Raman4924bb02017-09-13 20:16:02 +000087static cl::opt<bool> OptComputeFullInlineCost(
Haicheng Wu0812c5b2017-08-21 20:00:09 +000088 "inline-cost-full", cl::Hidden, cl::init(false),
89 cl::desc("Compute the full inline cost of a call site even when the cost "
90 "exceeds the threshold."));
91
Chandler Carruth0539c072012-03-31 12:42:41 +000092namespace {
Chandler Carrutha3089552012-03-14 07:32:53 +000093
Chandler Carruth0539c072012-03-31 12:42:41 +000094class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
95 typedef InstVisitor<CallAnalyzer, bool> Base;
96 friend class InstVisitor<CallAnalyzer, bool>;
Owen Andersona08318a2010-09-09 16:56:42 +000097
Chandler Carruth42f3dce2013-01-21 11:55:09 +000098 /// The TargetTransformInfo available for this compilation.
99 const TargetTransformInfo &TTI;
100
Daniel Jasperaec2fa32016-12-19 08:22:17 +0000101 /// Getter for the cache of @llvm.assume intrinsics.
102 std::function<AssumptionCache &(Function &)> &GetAssumptionCache;
103
Easwaran Raman12585b02017-01-20 22:44:04 +0000104 /// Getter for BlockFrequencyInfo
105 Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI;
106
Easwaran Raman71069cf2016-06-09 22:23:21 +0000107 /// Profile summary information.
108 ProfileSummaryInfo *PSI;
109
Piotr Padlewskif3d122c2016-09-30 21:05:49 +0000110 /// The called function.
Chandler Carruth0539c072012-03-31 12:42:41 +0000111 Function &F;
Owen Andersona08318a2010-09-09 16:56:42 +0000112
Eric Christopher85be8ca2017-04-15 06:14:50 +0000113 // Cache the DataLayout since we use it a lot.
114 const DataLayout &DL;
115
Haicheng Wu0812c5b2017-08-21 20:00:09 +0000116 /// The OptimizationRemarkEmitter available for this compilation.
117 OptimizationRemarkEmitter *ORE;
118
Piotr Padlewskif3d122c2016-09-30 21:05:49 +0000119 /// The candidate callsite being analyzed. Please do not use this to do
120 /// analysis in the caller function; we want the inline cost query to be
121 /// easily cacheable. Instead, use the cover function paramHasAttr.
Philip Reames9b5c9582015-06-26 20:51:17 +0000122 CallSite CandidateCS;
123
Piotr Padlewskif3d122c2016-09-30 21:05:49 +0000124 /// Tunable parameters that control the analysis.
Easwaran Raman1c57cc22016-08-10 00:48:04 +0000125 const InlineParams &Params;
126
Chandler Carruth0539c072012-03-31 12:42:41 +0000127 int Threshold;
128 int Cost;
Easwaran Raman4924bb02017-09-13 20:16:02 +0000129 bool ComputeFullInlineCost;
Owen Andersona08318a2010-09-09 16:56:42 +0000130
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +0000131 bool IsCallerRecursive;
132 bool IsRecursiveCall;
Chandler Carruth0539c072012-03-31 12:42:41 +0000133 bool ExposesReturnsTwice;
134 bool HasDynamicAlloca;
James Molloy4f6fb952012-12-20 16:04:27 +0000135 bool ContainsNoDuplicateCall;
Chandler Carruth0814d2a2013-12-13 07:59:56 +0000136 bool HasReturn;
137 bool HasIndirectBr;
Reid Kleckner223de262015-04-14 20:38:14 +0000138 bool HasFrameEscape;
James Molloy4f6fb952012-12-20 16:04:27 +0000139
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +0000140 /// Number of bytes allocated statically by the callee.
141 uint64_t AllocatedSize;
Chandler Carruth0539c072012-03-31 12:42:41 +0000142 unsigned NumInstructions, NumVectorInstructions;
Easwaran Raman51b809b2017-07-28 21:47:36 +0000143 int VectorBonus, TenPercentVectorBonus;
144 // Bonus to be applied when the callee has only one reachable basic block.
145 int SingleBBBonus;
Chandler Carruth0539c072012-03-31 12:42:41 +0000146
Piotr Padlewskif3d122c2016-09-30 21:05:49 +0000147 /// While we walk the potentially-inlined instructions, we build up and
148 /// maintain a mapping of simplified values specific to this callsite. The
149 /// idea is to propagate any special information we have about arguments to
150 /// this call through the inlinable section of the function, and account for
151 /// likely simplifications post-inlining. The most important aspect we track
152 /// is CFG altering simplifications -- when we prove a basic block dead, that
153 /// can cause dramatic shifts in the cost of inlining a function.
Chandler Carruth0539c072012-03-31 12:42:41 +0000154 DenseMap<Value *, Constant *> SimplifiedValues;
155
Piotr Padlewskif3d122c2016-09-30 21:05:49 +0000156 /// Keep track of the values which map back (through function arguments) to
157 /// allocas on the caller stack which could be simplified through SROA.
Chandler Carruth0539c072012-03-31 12:42:41 +0000158 DenseMap<Value *, Value *> SROAArgValues;
159
Piotr Padlewskif3d122c2016-09-30 21:05:49 +0000160 /// The mapping of caller Alloca values to their accumulated cost savings. If
161 /// we have to disable SROA for one of the allocas, this tells us how much
162 /// cost must be added.
Chandler Carruth0539c072012-03-31 12:42:41 +0000163 DenseMap<Value *, int> SROAArgCosts;
164
Piotr Padlewskif3d122c2016-09-30 21:05:49 +0000165 /// Keep track of values which map to a pointer base and constant offset.
Chad Rosier567556a2016-04-28 14:47:23 +0000166 DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;
Chandler Carruth0539c072012-03-31 12:42:41 +0000167
Haicheng Wu3739e142017-12-14 14:36:18 +0000168 /// Keep track of dead blocks due to the constant arguments.
169 SetVector<BasicBlock *> DeadBlocks;
170
171 /// The mapping of the blocks to their known unique successors due to the
172 /// constant arguments.
173 DenseMap<BasicBlock *, BasicBlock *> KnownSuccessors;
174
Haicheng Wua4461512017-12-15 14:34:41 +0000175 /// Model the elimination of repeated loads that is expected to happen
176 /// whenever we simplify away the stores that would otherwise cause them to be
177 /// loads.
178 bool EnableLoadElimination;
179 SmallPtrSet<Value *, 16> LoadAddrSet;
180 int LoadEliminationCost;
181
Chandler Carruth0539c072012-03-31 12:42:41 +0000182 // Custom simplification helper routines.
183 bool isAllocaDerivedArg(Value *V);
184 bool lookupSROAArgAndCost(Value *V, Value *&Arg,
185 DenseMap<Value *, int>::iterator &CostIt);
186 void disableSROA(DenseMap<Value *, int>::iterator CostIt);
187 void disableSROA(Value *V);
Haicheng Wu3739e142017-12-14 14:36:18 +0000188 void findDeadBlocks(BasicBlock *CurrBB, BasicBlock *NextBB);
Chandler Carruth0539c072012-03-31 12:42:41 +0000189 void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
190 int InstructionCost);
Haicheng Wua4461512017-12-15 14:34:41 +0000191 void disableLoadElimination();
Haicheng Wu201b1912017-01-20 18:51:22 +0000192 bool isGEPFree(GetElementPtrInst &GEP);
Evgeny Astigeevichd3558b52017-10-03 12:00:40 +0000193 bool canFoldInboundsGEP(GetElementPtrInst &I);
Chandler Carruth0539c072012-03-31 12:42:41 +0000194 bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
Chandler Carruth753e21d2012-12-28 14:23:32 +0000195 bool simplifyCallSite(Function *F, CallSite CS);
Easwaran Raman617f6362017-02-18 17:22:52 +0000196 template <typename Callable>
197 bool simplifyInstruction(Instruction &I, Callable Evaluate);
Chandler Carruth0539c072012-03-31 12:42:41 +0000198 ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);
199
Philip Reames9b5c9582015-06-26 20:51:17 +0000200 /// Return true if the given argument to the function being considered for
201 /// inlining has the given attribute set either at the call site or the
202 /// function declaration. Primarily used to inspect call site specific
203 /// attributes since these can be more precise than the ones on the callee
Easwaran Raman3676da42015-12-03 19:03:20 +0000204 /// itself.
Philip Reames9b5c9582015-06-26 20:51:17 +0000205 bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);
Chad Rosier567556a2016-04-28 14:47:23 +0000206
Philip Reames9b5c9582015-06-26 20:51:17 +0000207 /// Return true if the given value is known non null within the callee if
Easwaran Raman3676da42015-12-03 19:03:20 +0000208 /// inlined through this particular callsite.
Philip Reames9b5c9582015-06-26 20:51:17 +0000209 bool isKnownNonNullInCallee(Value *V);
210
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +0000211 /// Update Threshold based on callsite properties such as callee
212 /// attributes and callee hotness for PGO builds. The Callee is explicitly
213 /// passed to support analyzing indirect calls whose target is inferred by
214 /// analysis.
215 void updateThreshold(CallSite CS, Function &Callee);
216
Easwaran Raman9a3fc172016-04-08 21:28:02 +0000217 /// Return true if size growth is allowed when inlining the callee at CS.
218 bool allowSizeGrowth(CallSite CS);
219
Easwaran Ramanc5fa6352017-06-27 23:11:18 +0000220 /// Return true if \p CS is a cold callsite.
221 bool isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI);
222
Easwaran Raman974d4ee2017-08-03 22:23:33 +0000223 /// Return a higher threshold if \p CS is a hot callsite.
224 Optional<int> getHotCallSiteThreshold(CallSite CS,
225 BlockFrequencyInfo *CallerBFI);
226
Chandler Carruth0539c072012-03-31 12:42:41 +0000227 // Custom analysis routines.
Hal Finkel57f03dd2014-09-07 13:49:57 +0000228 bool analyzeBlock(BasicBlock *BB, SmallPtrSetImpl<const Value *> &EphValues);
Chandler Carruth0539c072012-03-31 12:42:41 +0000229
230 // Disable several entry points to the visitor so we don't accidentally use
231 // them by declaring but not defining them here.
Chad Rosier567556a2016-04-28 14:47:23 +0000232 void visit(Module *);
233 void visit(Module &);
234 void visit(Function *);
235 void visit(Function &);
236 void visit(BasicBlock *);
237 void visit(BasicBlock &);
Chandler Carruth0539c072012-03-31 12:42:41 +0000238
239 // Provide base case for our instruction visit.
240 bool visitInstruction(Instruction &I);
241
242 // Our visit overrides.
243 bool visitAlloca(AllocaInst &I);
244 bool visitPHI(PHINode &I);
245 bool visitGetElementPtr(GetElementPtrInst &I);
246 bool visitBitCast(BitCastInst &I);
247 bool visitPtrToInt(PtrToIntInst &I);
248 bool visitIntToPtr(IntToPtrInst &I);
249 bool visitCastInst(CastInst &I);
250 bool visitUnaryInstruction(UnaryInstruction &I);
Matt Arsenault727aa342013-07-20 04:09:00 +0000251 bool visitCmpInst(CmpInst &I);
Chandler Carruth0539c072012-03-31 12:42:41 +0000252 bool visitSub(BinaryOperator &I);
253 bool visitBinaryOperator(BinaryOperator &I);
254 bool visitLoad(LoadInst &I);
255 bool visitStore(StoreInst &I);
Chandler Carruth753e21d2012-12-28 14:23:32 +0000256 bool visitExtractValue(ExtractValueInst &I);
257 bool visitInsertValue(InsertValueInst &I);
Chandler Carruth0539c072012-03-31 12:42:41 +0000258 bool visitCallSite(CallSite CS);
Chandler Carruth0814d2a2013-12-13 07:59:56 +0000259 bool visitReturnInst(ReturnInst &RI);
260 bool visitBranchInst(BranchInst &BI);
Haicheng Wu3ec848b2017-09-27 14:44:56 +0000261 bool visitSelectInst(SelectInst &SI);
Chandler Carruth0814d2a2013-12-13 07:59:56 +0000262 bool visitSwitchInst(SwitchInst &SI);
263 bool visitIndirectBrInst(IndirectBrInst &IBI);
264 bool visitResumeInst(ResumeInst &RI);
David Majnemer654e1302015-07-31 17:58:14 +0000265 bool visitCleanupReturnInst(CleanupReturnInst &RI);
266 bool visitCatchReturnInst(CatchReturnInst &RI);
Chandler Carruth0814d2a2013-12-13 07:59:56 +0000267 bool visitUnreachableInst(UnreachableInst &I);
Chandler Carruth0539c072012-03-31 12:42:41 +0000268
269public:
Sean Silvaab6a6832016-07-23 04:22:50 +0000270 CallAnalyzer(const TargetTransformInfo &TTI,
Daniel Jasperaec2fa32016-12-19 08:22:17 +0000271 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman12585b02017-01-20 22:44:04 +0000272 Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI,
Haicheng Wu0812c5b2017-08-21 20:00:09 +0000273 ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE,
274 Function &Callee, CallSite CSArg, const InlineParams &Params)
Easwaran Raman12585b02017-01-20 22:44:04 +0000275 : TTI(TTI), GetAssumptionCache(GetAssumptionCache), GetBFI(GetBFI),
Haicheng Wu0812c5b2017-08-21 20:00:09 +0000276 PSI(PSI), F(Callee), DL(F.getParent()->getDataLayout()), ORE(ORE),
Eric Christopher85be8ca2017-04-15 06:14:50 +0000277 CandidateCS(CSArg), Params(Params), Threshold(Params.DefaultThreshold),
Easwaran Raman4924bb02017-09-13 20:16:02 +0000278 Cost(0), ComputeFullInlineCost(OptComputeFullInlineCost ||
279 Params.ComputeFullInlineCost || ORE),
280 IsCallerRecursive(false), IsRecursiveCall(false),
Eric Christopher85be8ca2017-04-15 06:14:50 +0000281 ExposesReturnsTwice(false), HasDynamicAlloca(false),
282 ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
283 HasFrameEscape(false), AllocatedSize(0), NumInstructions(0),
Easwaran Raman51b809b2017-07-28 21:47:36 +0000284 NumVectorInstructions(0), VectorBonus(0), SingleBBBonus(0),
Haicheng Wua4461512017-12-15 14:34:41 +0000285 EnableLoadElimination(true), LoadEliminationCost(0), NumConstantArgs(0),
286 NumConstantOffsetPtrArgs(0), NumAllocaArgs(0), NumConstantPtrCmps(0),
287 NumConstantPtrDiffs(0), NumInstructionsSimplified(0),
288 SROACostSavings(0), SROACostSavingsLost(0) {}
Chandler Carruth0539c072012-03-31 12:42:41 +0000289
290 bool analyzeCall(CallSite CS);
291
292 int getThreshold() { return Threshold; }
293 int getCost() { return Cost; }
294
295 // Keep a bunch of stats about the cost savings found so we can print them
296 // out when debugging.
297 unsigned NumConstantArgs;
298 unsigned NumConstantOffsetPtrArgs;
299 unsigned NumAllocaArgs;
300 unsigned NumConstantPtrCmps;
301 unsigned NumConstantPtrDiffs;
302 unsigned NumInstructionsSimplified;
303 unsigned SROACostSavings;
304 unsigned SROACostSavingsLost;
305
306 void dump();
307};
308
309} // namespace
310
311/// \brief Test whether the given value is an Alloca-derived function argument.
312bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
313 return SROAArgValues.count(V);
Owen Andersona08318a2010-09-09 16:56:42 +0000314}
315
Chandler Carruth0539c072012-03-31 12:42:41 +0000316/// \brief Lookup the SROA-candidate argument and cost iterator which V maps to.
317/// Returns false if V does not map to a SROA-candidate.
318bool CallAnalyzer::lookupSROAArgAndCost(
319 Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
320 if (SROAArgValues.empty() || SROAArgCosts.empty())
321 return false;
Chandler Carruth783b7192012-03-09 02:49:36 +0000322
Chandler Carruth0539c072012-03-31 12:42:41 +0000323 DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
324 if (ArgIt == SROAArgValues.end())
325 return false;
Chandler Carruth783b7192012-03-09 02:49:36 +0000326
Chandler Carruth0539c072012-03-31 12:42:41 +0000327 Arg = ArgIt->second;
328 CostIt = SROAArgCosts.find(Arg);
329 return CostIt != SROAArgCosts.end();
Chandler Carruth783b7192012-03-09 02:49:36 +0000330}
331
Chandler Carruth0539c072012-03-31 12:42:41 +0000332/// \brief Disable SROA for the candidate marked by this cost iterator.
Chandler Carruth783b7192012-03-09 02:49:36 +0000333///
Benjamin Kramerbde91762012-06-02 10:20:22 +0000334/// This marks the candidate as no longer viable for SROA, and adds the cost
Chandler Carruth0539c072012-03-31 12:42:41 +0000335/// savings associated with it back into the inline cost measurement.
336void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
337 // If we're no longer able to perform SROA we need to undo its cost savings
338 // and prevent subsequent analysis.
339 Cost += CostIt->second;
340 SROACostSavings -= CostIt->second;
341 SROACostSavingsLost += CostIt->second;
342 SROAArgCosts.erase(CostIt);
Haicheng Wua4461512017-12-15 14:34:41 +0000343 disableLoadElimination();
Chandler Carruth0539c072012-03-31 12:42:41 +0000344}
345
346/// \brief If 'V' maps to a SROA candidate, disable SROA for it.
347void CallAnalyzer::disableSROA(Value *V) {
348 Value *SROAArg;
349 DenseMap<Value *, int>::iterator CostIt;
350 if (lookupSROAArgAndCost(V, SROAArg, CostIt))
351 disableSROA(CostIt);
352}
353
354/// \brief Accumulate the given cost for a particular SROA candidate.
355void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
356 int InstructionCost) {
357 CostIt->second += InstructionCost;
358 SROACostSavings += InstructionCost;
359}
360
Haicheng Wua4461512017-12-15 14:34:41 +0000361void CallAnalyzer::disableLoadElimination() {
362 if (EnableLoadElimination) {
363 Cost += LoadEliminationCost;
Haicheng Wub3689ca2017-12-19 13:42:58 +0000364 LoadEliminationCost = 0;
Haicheng Wua4461512017-12-15 14:34:41 +0000365 EnableLoadElimination = false;
366 }
367}
368
Chandler Carruth0539c072012-03-31 12:42:41 +0000369/// \brief Accumulate a constant GEP offset into an APInt if possible.
370///
371/// Returns false if unable to compute the offset for any reason. Respects any
372/// simplified values known during the analysis of this callsite.
373bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +0000374 unsigned IntPtrWidth = DL.getPointerSizeInBits();
Chandler Carruth0539c072012-03-31 12:42:41 +0000375 assert(IntPtrWidth == Offset.getBitWidth());
376
377 for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
378 GTI != GTE; ++GTI) {
379 ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
380 if (!OpC)
381 if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
382 OpC = dyn_cast<ConstantInt>(SimpleOp);
383 if (!OpC)
Chandler Carruth783b7192012-03-09 02:49:36 +0000384 return false;
Chad Rosier567556a2016-04-28 14:47:23 +0000385 if (OpC->isZero())
386 continue;
Chandler Carruth783b7192012-03-09 02:49:36 +0000387
Chandler Carruth0539c072012-03-31 12:42:41 +0000388 // Handle a struct index, which adds its field offset to the pointer.
Peter Collingbourneab85225b2016-12-02 02:24:42 +0000389 if (StructType *STy = GTI.getStructTypeOrNull()) {
Chandler Carruth0539c072012-03-31 12:42:41 +0000390 unsigned ElementIdx = OpC->getZExtValue();
Mehdi Aminia28d91d2015-03-10 02:37:25 +0000391 const StructLayout *SL = DL.getStructLayout(STy);
Chandler Carruth0539c072012-03-31 12:42:41 +0000392 Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
393 continue;
Chandler Carruth783b7192012-03-09 02:49:36 +0000394 }
Chandler Carruth783b7192012-03-09 02:49:36 +0000395
Mehdi Aminia28d91d2015-03-10 02:37:25 +0000396 APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
Chandler Carruth0539c072012-03-31 12:42:41 +0000397 Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
398 }
399 return true;
400}
401
Haicheng Wu201b1912017-01-20 18:51:22 +0000402/// \brief Use TTI to check whether a GEP is free.
403///
404/// Respects any simplified values known during the analysis of this callsite.
405bool CallAnalyzer::isGEPFree(GetElementPtrInst &GEP) {
Evgeny Astigeevich61c1bd52017-07-27 12:49:27 +0000406 SmallVector<Value *, 4> Operands;
407 Operands.push_back(GEP.getOperand(0));
Haicheng Wu201b1912017-01-20 18:51:22 +0000408 for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
409 if (Constant *SimpleOp = SimplifiedValues.lookup(*I))
Evgeny Astigeevich61c1bd52017-07-27 12:49:27 +0000410 Operands.push_back(SimpleOp);
Haicheng Wu201b1912017-01-20 18:51:22 +0000411 else
Evgeny Astigeevich61c1bd52017-07-27 12:49:27 +0000412 Operands.push_back(*I);
413 return TargetTransformInfo::TCC_Free == TTI.getUserCost(&GEP, Operands);
Haicheng Wu201b1912017-01-20 18:51:22 +0000414}
415
Chandler Carruth0539c072012-03-31 12:42:41 +0000416bool CallAnalyzer::visitAlloca(AllocaInst &I) {
Eric Christopherbeb2cd62014-04-07 13:36:21 +0000417 // Check whether inlining will turn a dynamic alloca into a static
Sanjay Patel0f153422016-05-09 21:51:53 +0000418 // alloca and handle that case.
Eric Christopherbeb2cd62014-04-07 13:36:21 +0000419 if (I.isArrayAllocation()) {
Sanjay Patel0f153422016-05-09 21:51:53 +0000420 Constant *Size = SimplifiedValues.lookup(I.getArraySize());
421 if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
Eric Christopherbeb2cd62014-04-07 13:36:21 +0000422 Type *Ty = I.getAllocatedType();
Easwaran Raman22eb80a2016-06-27 22:31:53 +0000423 AllocatedSize = SaturatingMultiplyAdd(
424 AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
Eric Christopherbeb2cd62014-04-07 13:36:21 +0000425 return Base::visitAlloca(I);
426 }
427 }
Chandler Carruth0539c072012-03-31 12:42:41 +0000428
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +0000429 // Accumulate the allocated size.
430 if (I.isStaticAlloca()) {
431 Type *Ty = I.getAllocatedType();
Easwaran Raman22eb80a2016-06-27 22:31:53 +0000432 AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +0000433 }
434
Bob Wilsona5b0dc82012-11-19 07:04:35 +0000435 // We will happily inline static alloca instructions.
436 if (I.isStaticAlloca())
Chandler Carruth0539c072012-03-31 12:42:41 +0000437 return Base::visitAlloca(I);
438
439 // FIXME: This is overly conservative. Dynamic allocas are inefficient for
440 // a variety of reasons, and so we would like to not inline them into
441 // functions which don't currently have a dynamic alloca. This simply
442 // disables inlining altogether in the presence of a dynamic alloca.
443 HasDynamicAlloca = true;
444 return false;
445}
446
447bool CallAnalyzer::visitPHI(PHINode &I) {
Chandler Carruth0539c072012-03-31 12:42:41 +0000448 // FIXME: We need to propagate SROA *disabling* through phi nodes, even
449 // though we don't want to propagate it's bonuses. The idea is to disable
450 // SROA if it *might* be used in an inappropriate manner.
451
452 // Phi nodes are always zero-cost.
Haicheng Wu3739e142017-12-14 14:36:18 +0000453
454 APInt ZeroOffset = APInt::getNullValue(DL.getPointerSizeInBits());
455 bool CheckSROA = I.getType()->isPointerTy();
456
457 // Track the constant or pointer with constant offset we've seen so far.
458 Constant *FirstC = nullptr;
459 std::pair<Value *, APInt> FirstBaseAndOffset = {nullptr, ZeroOffset};
460 Value *FirstV = nullptr;
461
462 for (unsigned i = 0, e = I.getNumIncomingValues(); i != e; ++i) {
463 BasicBlock *Pred = I.getIncomingBlock(i);
464 // If the incoming block is dead, skip the incoming block.
465 if (DeadBlocks.count(Pred))
466 continue;
467 // If the parent block of phi is not the known successor of the incoming
468 // block, skip the incoming block.
469 BasicBlock *KnownSuccessor = KnownSuccessors[Pred];
470 if (KnownSuccessor && KnownSuccessor != I.getParent())
471 continue;
472
473 Value *V = I.getIncomingValue(i);
474 // If the incoming value is this phi itself, skip the incoming value.
475 if (&I == V)
476 continue;
477
478 Constant *C = dyn_cast<Constant>(V);
479 if (!C)
480 C = SimplifiedValues.lookup(V);
481
482 std::pair<Value *, APInt> BaseAndOffset = {nullptr, ZeroOffset};
483 if (!C && CheckSROA)
484 BaseAndOffset = ConstantOffsetPtrs.lookup(V);
485
486 if (!C && !BaseAndOffset.first)
487 // The incoming value is neither a constant nor a pointer with constant
488 // offset, exit early.
489 return true;
490
491 if (FirstC) {
492 if (FirstC == C)
493 // If we've seen a constant incoming value before and it is the same
494 // constant we see this time, continue checking the next incoming value.
495 continue;
496 // Otherwise early exit because we either see a different constant or saw
497 // a constant before but we have a pointer with constant offset this time.
498 return true;
499 }
500
501 if (FirstV) {
502 // The same logic as above, but check pointer with constant offset here.
503 if (FirstBaseAndOffset == BaseAndOffset)
504 continue;
505 return true;
506 }
507
508 if (C) {
509 // This is the 1st time we've seen a constant, record it.
510 FirstC = C;
511 continue;
512 }
513
514 // The remaining case is that this is the 1st time we've seen a pointer with
515 // constant offset, record it.
516 FirstV = V;
517 FirstBaseAndOffset = BaseAndOffset;
518 }
519
520 // Check if we can map phi to a constant.
521 if (FirstC) {
522 SimplifiedValues[&I] = FirstC;
523 return true;
524 }
525
526 // Check if we can map phi to a pointer with constant offset.
527 if (FirstBaseAndOffset.first) {
528 ConstantOffsetPtrs[&I] = FirstBaseAndOffset;
529
530 Value *SROAArg;
531 DenseMap<Value *, int>::iterator CostIt;
532 if (lookupSROAArgAndCost(FirstV, SROAArg, CostIt))
533 SROAArgValues[&I] = SROAArg;
534 }
535
Chandler Carruth0539c072012-03-31 12:42:41 +0000536 return true;
537}
538
Evgeny Astigeevichd3558b52017-10-03 12:00:40 +0000539/// \brief Check we can fold GEPs of constant-offset call site argument pointers.
540/// This requires target data and inbounds GEPs.
541///
542/// \return true if the specified GEP can be folded.
543bool CallAnalyzer::canFoldInboundsGEP(GetElementPtrInst &I) {
544 // Check if we have a base + offset for the pointer.
545 std::pair<Value *, APInt> BaseAndOffset =
546 ConstantOffsetPtrs.lookup(I.getPointerOperand());
547 if (!BaseAndOffset.first)
548 return false;
549
550 // Check if the offset of this GEP is constant, and if so accumulate it
551 // into Offset.
552 if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second))
553 return false;
554
555 // Add the result as a new mapping to Base + Offset.
556 ConstantOffsetPtrs[&I] = BaseAndOffset;
557
558 return true;
559}
560
Chandler Carruth0539c072012-03-31 12:42:41 +0000561bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
562 Value *SROAArg;
563 DenseMap<Value *, int>::iterator CostIt;
Chad Rosier567556a2016-04-28 14:47:23 +0000564 bool SROACandidate =
565 lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);
Chandler Carruth0539c072012-03-31 12:42:41 +0000566
Easwaran Ramana8b9cdc2017-02-25 00:10:22 +0000567 // Lambda to check whether a GEP's indices are all constant.
568 auto IsGEPOffsetConstant = [&](GetElementPtrInst &GEP) {
569 for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
570 if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
571 return false;
572 return true;
573 };
574
Evgeny Astigeevichd3558b52017-10-03 12:00:40 +0000575 if ((I.isInBounds() && canFoldInboundsGEP(I)) || IsGEPOffsetConstant(I)) {
Chandler Carruth0539c072012-03-31 12:42:41 +0000576 if (SROACandidate)
577 SROAArgValues[&I] = SROAArg;
578
579 // Constant GEPs are modeled as free.
580 return true;
581 }
582
583 // Variable GEPs will require math and will disable SROA.
584 if (SROACandidate)
585 disableSROA(CostIt);
Haicheng Wu201b1912017-01-20 18:51:22 +0000586 return isGEPFree(I);
Chandler Carruth783b7192012-03-09 02:49:36 +0000587}
588
Easwaran Raman617f6362017-02-18 17:22:52 +0000589/// Simplify \p I if its operands are constants and update SimplifiedValues.
590/// \p Evaluate is a callable specific to instruction type that evaluates the
591/// instruction when all the operands are constants.
592template <typename Callable>
593bool CallAnalyzer::simplifyInstruction(Instruction &I, Callable Evaluate) {
594 SmallVector<Constant *, 2> COps;
595 for (Value *Op : I.operands()) {
596 Constant *COp = dyn_cast<Constant>(Op);
597 if (!COp)
598 COp = SimplifiedValues.lookup(Op);
599 if (!COp)
600 return false;
601 COps.push_back(COp);
602 }
603 auto *C = Evaluate(COps);
604 if (!C)
605 return false;
606 SimplifiedValues[&I] = C;
607 return true;
608}
609
Chandler Carruth0539c072012-03-31 12:42:41 +0000610bool CallAnalyzer::visitBitCast(BitCastInst &I) {
611 // Propagate constants through bitcasts.
Easwaran Raman617f6362017-02-18 17:22:52 +0000612 if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
613 return ConstantExpr::getBitCast(COps[0], I.getType());
614 }))
615 return true;
Owen Andersona08318a2010-09-09 16:56:42 +0000616
Chandler Carruth0539c072012-03-31 12:42:41 +0000617 // Track base/offsets through casts
Chad Rosier567556a2016-04-28 14:47:23 +0000618 std::pair<Value *, APInt> BaseAndOffset =
619 ConstantOffsetPtrs.lookup(I.getOperand(0));
Chandler Carruth0539c072012-03-31 12:42:41 +0000620 // Casts don't change the offset, just wrap it up.
621 if (BaseAndOffset.first)
622 ConstantOffsetPtrs[&I] = BaseAndOffset;
623
624 // Also look for SROA candidates here.
625 Value *SROAArg;
626 DenseMap<Value *, int>::iterator CostIt;
627 if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
628 SROAArgValues[&I] = SROAArg;
629
630 // Bitcasts are always zero cost.
631 return true;
Owen Andersona08318a2010-09-09 16:56:42 +0000632}
633
Chandler Carruth0539c072012-03-31 12:42:41 +0000634bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
635 // Propagate constants through ptrtoint.
Easwaran Raman617f6362017-02-18 17:22:52 +0000636 if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
637 return ConstantExpr::getPtrToInt(COps[0], I.getType());
638 }))
639 return true;
Chandler Carruth0539c072012-03-31 12:42:41 +0000640
641 // Track base/offset pairs when converted to a plain integer provided the
642 // integer is large enough to represent the pointer.
643 unsigned IntegerSize = I.getType()->getScalarSizeInBits();
Mehdi Amini46a43552015-03-04 18:43:29 +0000644 if (IntegerSize >= DL.getPointerSizeInBits()) {
Chad Rosier567556a2016-04-28 14:47:23 +0000645 std::pair<Value *, APInt> BaseAndOffset =
646 ConstantOffsetPtrs.lookup(I.getOperand(0));
Chandler Carruth0539c072012-03-31 12:42:41 +0000647 if (BaseAndOffset.first)
648 ConstantOffsetPtrs[&I] = BaseAndOffset;
649 }
650
651 // This is really weird. Technically, ptrtoint will disable SROA. However,
652 // unless that ptrtoint is *used* somewhere in the live basic blocks after
653 // inlining, it will be nuked, and SROA should proceed. All of the uses which
654 // would block SROA would also block SROA if applied directly to a pointer,
655 // and so we can just add the integer in here. The only places where SROA is
656 // preserved either cannot fire on an integer, or won't in-and-of themselves
657 // disable SROA (ext) w/o some later use that we would see and disable.
658 Value *SROAArg;
659 DenseMap<Value *, int>::iterator CostIt;
660 if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
661 SROAArgValues[&I] = SROAArg;
662
Chandler Carruthb8cf5102013-01-21 12:05:16 +0000663 return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
Chandler Carruth4d1d34f2012-03-14 23:19:53 +0000664}
665
Chandler Carruth0539c072012-03-31 12:42:41 +0000666bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
667 // Propagate constants through ptrtoint.
Easwaran Raman617f6362017-02-18 17:22:52 +0000668 if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
669 return ConstantExpr::getIntToPtr(COps[0], I.getType());
670 }))
671 return true;
Dan Gohman4552e3c2009-10-13 18:30:07 +0000672
Chandler Carruth0539c072012-03-31 12:42:41 +0000673 // Track base/offset pairs when round-tripped through a pointer without
674 // modifications provided the integer is not too large.
675 Value *Op = I.getOperand(0);
676 unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
Mehdi Amini46a43552015-03-04 18:43:29 +0000677 if (IntegerSize <= DL.getPointerSizeInBits()) {
Chandler Carruth0539c072012-03-31 12:42:41 +0000678 std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
679 if (BaseAndOffset.first)
680 ConstantOffsetPtrs[&I] = BaseAndOffset;
681 }
Dan Gohman4552e3c2009-10-13 18:30:07 +0000682
Chandler Carruth0539c072012-03-31 12:42:41 +0000683 // "Propagate" SROA here in the same manner as we do for ptrtoint above.
684 Value *SROAArg;
685 DenseMap<Value *, int>::iterator CostIt;
686 if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
687 SROAArgValues[&I] = SROAArg;
Chandler Carruth4d1d34f2012-03-14 23:19:53 +0000688
Chandler Carruthb8cf5102013-01-21 12:05:16 +0000689 return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
Chandler Carruth0539c072012-03-31 12:42:41 +0000690}
691
692bool CallAnalyzer::visitCastInst(CastInst &I) {
693 // Propagate constants through ptrtoint.
Easwaran Raman617f6362017-02-18 17:22:52 +0000694 if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
695 return ConstantExpr::getCast(I.getOpcode(), COps[0], I.getType());
696 }))
697 return true;
Chandler Carruth0539c072012-03-31 12:42:41 +0000698
699 // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
700 disableSROA(I.getOperand(0));
701
Eli Friedman39ed9a62017-12-22 02:08:08 +0000702 // If this is a floating-point cast, and the target says this operation
703 // is expensive, this may eventually become a library call. Treat the cost
704 // as such.
705 switch (I.getOpcode()) {
706 case Instruction::FPTrunc:
707 case Instruction::FPExt:
708 case Instruction::UIToFP:
709 case Instruction::SIToFP:
710 case Instruction::FPToUI:
711 case Instruction::FPToSI:
712 if (TTI.getFPOpCost(I.getType()) == TargetTransformInfo::TCC_Expensive)
713 Cost += InlineConstants::CallPenalty;
714 default:
715 break;
716 }
717
Chandler Carruthb8cf5102013-01-21 12:05:16 +0000718 return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
Chandler Carruth0539c072012-03-31 12:42:41 +0000719}
720
721bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
722 Value *Operand = I.getOperand(0);
Easwaran Raman617f6362017-02-18 17:22:52 +0000723 if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
Easwaran Raman617f6362017-02-18 17:22:52 +0000724 return ConstantFoldInstOperands(&I, COps[0], DL);
725 }))
726 return true;
Chandler Carruth0539c072012-03-31 12:42:41 +0000727
728 // Disable any SROA on the argument to arbitrary unary operators.
729 disableSROA(Operand);
730
731 return false;
732}
733
Philip Reames9b5c9582015-06-26 20:51:17 +0000734bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
Reid Klecknerfb502d22017-04-14 20:19:02 +0000735 return CandidateCS.paramHasAttr(A->getArgNo(), Attr);
Philip Reames9b5c9582015-06-26 20:51:17 +0000736}
737
738bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
739 // Does the *call site* have the NonNull attribute set on an argument? We
740 // use the attribute on the call site to memoize any analysis done in the
741 // caller. This will also trip if the callee function has a non-null
742 // parameter attribute, but that's a less interesting case because hopefully
743 // the callee would already have been simplified based on that.
744 if (Argument *A = dyn_cast<Argument>(V))
745 if (paramHasAttr(A, Attribute::NonNull))
746 return true;
Chad Rosier567556a2016-04-28 14:47:23 +0000747
Philip Reames9b5c9582015-06-26 20:51:17 +0000748 // Is this an alloca in the caller? This is distinct from the attribute case
749 // above because attributes aren't updated within the inliner itself and we
750 // always want to catch the alloca derived case.
751 if (isAllocaDerivedArg(V))
752 // We can actually predict the result of comparisons between an
753 // alloca-derived value and null. Note that this fires regardless of
754 // SROA firing.
755 return true;
Chad Rosier567556a2016-04-28 14:47:23 +0000756
Philip Reames9b5c9582015-06-26 20:51:17 +0000757 return false;
758}
759
Easwaran Raman9a3fc172016-04-08 21:28:02 +0000760bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
761 // If the normal destination of the invoke or the parent block of the call
762 // site is unreachable-terminated, there is little point in inlining this
763 // unless there is literally zero cost.
764 // FIXME: Note that it is possible that an unreachable-terminated block has a
765 // hot entry. For example, in below scenario inlining hot_call_X() may be
766 // beneficial :
767 // main() {
768 // hot_call_1();
769 // ...
770 // hot_call_N()
771 // exit(0);
772 // }
773 // For now, we are not handling this corner case here as it is rare in real
774 // code. In future, we should elaborate this based on BPI and BFI in more
775 // general threshold adjusting heuristics in updateThreshold().
776 Instruction *Instr = CS.getInstruction();
777 if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
778 if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
779 return false;
780 } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
781 return false;
782
783 return true;
784}
785
Easwaran Ramanc5fa6352017-06-27 23:11:18 +0000786bool CallAnalyzer::isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI) {
787 // If global profile summary is available, then callsite's coldness is
788 // determined based on that.
Chandler Carruthbba762a2017-08-14 21:25:00 +0000789 if (PSI && PSI->hasProfileSummary())
Easwaran Ramanc5fa6352017-06-27 23:11:18 +0000790 return PSI->isColdCallSite(CS, CallerBFI);
Chandler Carruthbba762a2017-08-14 21:25:00 +0000791
792 // Otherwise we need BFI to be available.
Easwaran Ramanc5fa6352017-06-27 23:11:18 +0000793 if (!CallerBFI)
794 return false;
795
Chandler Carruthbba762a2017-08-14 21:25:00 +0000796 // Determine if the callsite is cold relative to caller's entry. We could
797 // potentially cache the computation of scaled entry frequency, but the added
798 // complexity is not worth it unless this scaling shows up high in the
799 // profiles.
Easwaran Ramanc5fa6352017-06-27 23:11:18 +0000800 const BranchProbability ColdProb(ColdCallSiteRelFreq, 100);
801 auto CallSiteBB = CS.getInstruction()->getParent();
802 auto CallSiteFreq = CallerBFI->getBlockFreq(CallSiteBB);
803 auto CallerEntryFreq =
804 CallerBFI->getBlockFreq(&(CS.getCaller()->getEntryBlock()));
805 return CallSiteFreq < CallerEntryFreq * ColdProb;
806}
807
Easwaran Raman974d4ee2017-08-03 22:23:33 +0000808Optional<int>
809CallAnalyzer::getHotCallSiteThreshold(CallSite CS,
810 BlockFrequencyInfo *CallerBFI) {
Chandler Carruthbba762a2017-08-14 21:25:00 +0000811
Easwaran Raman974d4ee2017-08-03 22:23:33 +0000812 // If global profile summary is available, then callsite's hotness is
813 // determined based on that.
Chandler Carruthbba762a2017-08-14 21:25:00 +0000814 if (PSI && PSI->hasProfileSummary() && PSI->isHotCallSite(CS, CallerBFI))
815 return Params.HotCallSiteThreshold;
Easwaran Raman974d4ee2017-08-03 22:23:33 +0000816
Chandler Carruthbba762a2017-08-14 21:25:00 +0000817 // Otherwise we need BFI to be available and to have a locally hot callsite
818 // threshold.
819 if (!CallerBFI || !Params.LocallyHotCallSiteThreshold)
Easwaran Raman974d4ee2017-08-03 22:23:33 +0000820 return None;
821
Chandler Carruthbba762a2017-08-14 21:25:00 +0000822 // Determine if the callsite is hot relative to caller's entry. We could
823 // potentially cache the computation of scaled entry frequency, but the added
824 // complexity is not worth it unless this scaling shows up high in the
825 // profiles.
Easwaran Raman974d4ee2017-08-03 22:23:33 +0000826 auto CallSiteBB = CS.getInstruction()->getParent();
827 auto CallSiteFreq = CallerBFI->getBlockFreq(CallSiteBB).getFrequency();
828 auto CallerEntryFreq = CallerBFI->getEntryFreq();
829 if (CallSiteFreq >= CallerEntryFreq * HotCallSiteRelFreq)
Chandler Carruthbba762a2017-08-14 21:25:00 +0000830 return Params.LocallyHotCallSiteThreshold;
831
832 // Otherwise treat it normally.
Easwaran Raman974d4ee2017-08-03 22:23:33 +0000833 return None;
834}
835
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +0000836void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
Easwaran Raman9a3fc172016-04-08 21:28:02 +0000837 // If no size growth is allowed for this inlining, set Threshold to 0.
838 if (!allowSizeGrowth(CS)) {
839 Threshold = 0;
840 return;
841 }
842
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +0000843 Function *Caller = CS.getCaller();
Easwaran Raman1c57cc22016-08-10 00:48:04 +0000844
845 // return min(A, B) if B is valid.
846 auto MinIfValid = [](int A, Optional<int> B) {
847 return B ? std::min(A, B.getValue()) : A;
848 };
849
Easwaran Raman0d58fca2016-08-11 03:58:05 +0000850 // return max(A, B) if B is valid.
851 auto MaxIfValid = [](int A, Optional<int> B) {
852 return B ? std::max(A, B.getValue()) : A;
853 };
854
Easwaran Raman51b809b2017-07-28 21:47:36 +0000855 // Various bonus percentages. These are multiplied by Threshold to get the
856 // bonus values.
857 // SingleBBBonus: This bonus is applied if the callee has a single reachable
858 // basic block at the given callsite context. This is speculatively applied
859 // and withdrawn if more than one basic block is seen.
860 //
861 // Vector bonuses: We want to more aggressively inline vector-dense kernels
862 // and apply this bonus based on the percentage of vector instructions. A
863 // bonus is applied if the vector instructions exceed 50% and half that amount
864 // is applied if it exceeds 10%. Note that these bonuses are some what
865 // arbitrary and evolved over time by accident as much as because they are
866 // principled bonuses.
867 // FIXME: It would be nice to base the bonus values on something more
868 // scientific.
869 //
870 // LstCallToStaticBonus: This large bonus is applied to ensure the inlining
871 // of the last call to a static function as inlining such functions is
872 // guaranteed to reduce code size.
873 //
874 // These bonus percentages may be set to 0 based on properties of the caller
875 // and the callsite.
876 int SingleBBBonusPercent = 50;
877 int VectorBonusPercent = 150;
878 int LastCallToStaticBonus = InlineConstants::LastCallToStaticBonus;
879
880 // Lambda to set all the above bonus and bonus percentages to 0.
881 auto DisallowAllBonuses = [&]() {
882 SingleBBBonusPercent = 0;
883 VectorBonusPercent = 0;
884 LastCallToStaticBonus = 0;
885 };
886
Easwaran Raman1c57cc22016-08-10 00:48:04 +0000887 // Use the OptMinSizeThreshold or OptSizeThreshold knob if they are available
888 // and reduce the threshold if the caller has the necessary attribute.
Easwaran Raman51b809b2017-07-28 21:47:36 +0000889 if (Caller->optForMinSize()) {
Easwaran Raman1c57cc22016-08-10 00:48:04 +0000890 Threshold = MinIfValid(Threshold, Params.OptMinSizeThreshold);
Easwaran Raman51b809b2017-07-28 21:47:36 +0000891 // For minsize, we want to disable the single BB bonus and the vector
892 // bonuses, but not the last-call-to-static bonus. Inlining the last call to
893 // a static function will, at the minimum, eliminate the parameter setup and
894 // call/return instructions.
895 SingleBBBonusPercent = 0;
896 VectorBonusPercent = 0;
897 } else if (Caller->optForSize())
Easwaran Raman1c57cc22016-08-10 00:48:04 +0000898 Threshold = MinIfValid(Threshold, Params.OptSizeThreshold);
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +0000899
Easwaran Ramane08b1392017-01-09 21:56:26 +0000900 // Adjust the threshold based on inlinehint attribute and profile based
901 // hotness information if the caller does not have MinSize attribute.
902 if (!Caller->optForMinSize()) {
903 if (Callee.hasFnAttribute(Attribute::InlineHint))
904 Threshold = MaxIfValid(Threshold, Params.HintThreshold);
Chandler Carruthbba762a2017-08-14 21:25:00 +0000905
906 // FIXME: After switching to the new passmanager, simplify the logic below
907 // by checking only the callsite hotness/coldness as we will reliably
908 // have local profile information.
909 //
910 // Callsite hotness and coldness can be determined if sample profile is
911 // used (which adds hotness metadata to calls) or if caller's
912 // BlockFrequencyInfo is available.
913 BlockFrequencyInfo *CallerBFI = GetBFI ? &((*GetBFI)(*Caller)) : nullptr;
914 auto HotCallSiteThreshold = getHotCallSiteThreshold(CS, CallerBFI);
915 if (!Caller->optForSize() && HotCallSiteThreshold) {
916 DEBUG(dbgs() << "Hot callsite.\n");
917 // FIXME: This should update the threshold only if it exceeds the
918 // current threshold, but AutoFDO + ThinLTO currently relies on this
919 // behavior to prevent inlining of hot callsites during ThinLTO
920 // compile phase.
921 Threshold = HotCallSiteThreshold.getValue();
922 } else if (isColdCallSite(CS, CallerBFI)) {
923 DEBUG(dbgs() << "Cold callsite.\n");
924 // Do not apply bonuses for a cold callsite including the
925 // LastCallToStatic bonus. While this bonus might result in code size
926 // reduction, it can cause the size of a non-cold caller to increase
927 // preventing it from being inlined.
928 DisallowAllBonuses();
929 Threshold = MinIfValid(Threshold, Params.ColdCallSiteThreshold);
930 } else if (PSI) {
931 // Use callee's global profile information only if we have no way of
932 // determining this via callsite information.
933 if (PSI->isFunctionEntryHot(&Callee)) {
934 DEBUG(dbgs() << "Hot callee.\n");
935 // If callsite hotness can not be determined, we may still know
936 // that the callee is hot and treat it as a weaker hint for threshold
937 // increase.
938 Threshold = MaxIfValid(Threshold, Params.HintThreshold);
939 } else if (PSI->isFunctionEntryCold(&Callee)) {
940 DEBUG(dbgs() << "Cold callee.\n");
941 // Do not apply bonuses for a cold callee including the
942 // LastCallToStatic bonus. While this bonus might result in code size
943 // reduction, it can cause the size of a non-cold caller to increase
944 // preventing it from being inlined.
945 DisallowAllBonuses();
946 Threshold = MinIfValid(Threshold, Params.ColdThreshold);
Easwaran Ramane08b1392017-01-09 21:56:26 +0000947 }
948 }
Dehao Chene1c7c572016-08-05 20:49:04 +0000949 }
Dehao Chen9232f982016-07-11 16:48:54 +0000950
Justin Lebar8650a4d2016-04-15 01:38:48 +0000951 // Finally, take the target-specific inlining threshold multiplier into
952 // account.
953 Threshold *= TTI.getInliningThresholdMultiplier();
Easwaran Raman51b809b2017-07-28 21:47:36 +0000954
955 SingleBBBonus = Threshold * SingleBBBonusPercent / 100;
956 VectorBonus = Threshold * VectorBonusPercent / 100;
957
958 bool OnlyOneCallAndLocalLinkage =
959 F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
960 // If there is only one call of the function, and it has internal linkage,
961 // the cost of inlining it drops dramatically. It may seem odd to update
962 // Cost in updateThreshold, but the bonus depends on the logic in this method.
963 if (OnlyOneCallAndLocalLinkage)
964 Cost -= LastCallToStaticBonus;
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +0000965}
966
Matt Arsenault727aa342013-07-20 04:09:00 +0000967bool CallAnalyzer::visitCmpInst(CmpInst &I) {
Chandler Carruth0539c072012-03-31 12:42:41 +0000968 Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
969 // First try to handle simplified comparisons.
Easwaran Raman617f6362017-02-18 17:22:52 +0000970 if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
971 return ConstantExpr::getCompare(I.getPredicate(), COps[0], COps[1]);
972 }))
973 return true;
Matt Arsenault727aa342013-07-20 04:09:00 +0000974
975 if (I.getOpcode() == Instruction::FCmp)
976 return false;
Chandler Carruth0539c072012-03-31 12:42:41 +0000977
978 // Otherwise look for a comparison between constant offset pointers with
979 // a common base.
980 Value *LHSBase, *RHSBase;
981 APInt LHSOffset, RHSOffset;
Benjamin Kramerd6f1f842014-03-02 13:30:33 +0000982 std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
Chandler Carruth0539c072012-03-31 12:42:41 +0000983 if (LHSBase) {
Benjamin Kramerd6f1f842014-03-02 13:30:33 +0000984 std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
Chandler Carruth0539c072012-03-31 12:42:41 +0000985 if (RHSBase && LHSBase == RHSBase) {
986 // We have common bases, fold the icmp to a constant based on the
987 // offsets.
988 Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
989 Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
990 if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
991 SimplifiedValues[&I] = C;
992 ++NumConstantPtrCmps;
993 return true;
994 }
995 }
996 }
997
998 // If the comparison is an equality comparison with null, we can simplify it
Philip Reames9b5c9582015-06-26 20:51:17 +0000999 // if we know the value (argument) can't be null
1000 if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
1001 isKnownNonNullInCallee(I.getOperand(0))) {
1002 bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
1003 SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
1004 : ConstantInt::getFalse(I.getType());
1005 return true;
1006 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001007 // Finally check for SROA candidates in comparisons.
1008 Value *SROAArg;
1009 DenseMap<Value *, int>::iterator CostIt;
1010 if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
1011 if (isa<ConstantPointerNull>(I.getOperand(1))) {
1012 accumulateSROACost(CostIt, InlineConstants::InstrCost);
1013 return true;
1014 }
1015
1016 disableSROA(CostIt);
1017 }
1018
1019 return false;
1020}
1021
1022bool CallAnalyzer::visitSub(BinaryOperator &I) {
1023 // Try to handle a special case: we can fold computing the difference of two
1024 // constant-related pointers.
1025 Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
1026 Value *LHSBase, *RHSBase;
1027 APInt LHSOffset, RHSOffset;
Benjamin Kramerd6f1f842014-03-02 13:30:33 +00001028 std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
Chandler Carruth0539c072012-03-31 12:42:41 +00001029 if (LHSBase) {
Benjamin Kramerd6f1f842014-03-02 13:30:33 +00001030 std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
Chandler Carruth0539c072012-03-31 12:42:41 +00001031 if (RHSBase && LHSBase == RHSBase) {
1032 // We have common bases, fold the subtract to a constant based on the
1033 // offsets.
1034 Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
1035 Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
1036 if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
1037 SimplifiedValues[&I] = C;
1038 ++NumConstantPtrDiffs;
1039 return true;
1040 }
1041 }
1042 }
1043
1044 // Otherwise, fall back to the generic logic for simplifying and handling
1045 // instructions.
1046 return Base::visitSub(I);
1047}
1048
1049bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
1050 Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
Haicheng Wu6d14dfe2017-12-22 17:09:09 +00001051 Constant *CLHS = dyn_cast<Constant>(LHS);
1052 if (!CLHS)
1053 CLHS = SimplifiedValues.lookup(LHS);
1054 Constant *CRHS = dyn_cast<Constant>(RHS);
1055 if (!CRHS)
1056 CRHS = SimplifiedValues.lookup(RHS);
Michael Zolotukhin4e8598e2015-02-06 20:02:51 +00001057
Haicheng Wu6d14dfe2017-12-22 17:09:09 +00001058 Value *SimpleV = nullptr;
1059 if (auto FI = dyn_cast<FPMathOperator>(&I))
1060 SimpleV = SimplifyFPBinOp(I.getOpcode(), CLHS ? CLHS : LHS,
1061 CRHS ? CRHS : RHS, FI->getFastMathFlags(), DL);
1062 else
1063 SimpleV =
1064 SimplifyBinOp(I.getOpcode(), CLHS ? CLHS : LHS, CRHS ? CRHS : RHS, DL);
1065
1066 if (Constant *C = dyn_cast_or_null<Constant>(SimpleV))
1067 SimplifiedValues[&I] = C;
1068
1069 if (SimpleV)
Chandler Carruth0539c072012-03-31 12:42:41 +00001070 return true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001071
1072 // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
1073 disableSROA(LHS);
1074 disableSROA(RHS);
1075
Eli Friedman39ed9a62017-12-22 02:08:08 +00001076 // If the instruction is floating point, and the target says this operation
1077 // is expensive, this may eventually become a library call. Treat the cost
1078 // as such.
1079 if (I.getType()->isFloatingPointTy() &&
1080 TTI.getFPOpCost(I.getType()) == TargetTransformInfo::TCC_Expensive)
1081 Cost += InlineConstants::CallPenalty;
1082
Chandler Carruth0539c072012-03-31 12:42:41 +00001083 return false;
1084}
1085
1086bool CallAnalyzer::visitLoad(LoadInst &I) {
1087 Value *SROAArg;
1088 DenseMap<Value *, int>::iterator CostIt;
Wei Mi6c428d62015-03-20 18:33:12 +00001089 if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001090 if (I.isSimple()) {
1091 accumulateSROACost(CostIt, InlineConstants::InstrCost);
1092 return true;
1093 }
1094
1095 disableSROA(CostIt);
1096 }
1097
Haicheng Wua4461512017-12-15 14:34:41 +00001098 // If the data is already loaded from this address and hasn't been clobbered
1099 // by any stores or calls, this load is likely to be redundant and can be
1100 // eliminated.
1101 if (EnableLoadElimination &&
Haicheng Wub3689ca2017-12-19 13:42:58 +00001102 !LoadAddrSet.insert(I.getPointerOperand()).second && I.isUnordered()) {
Haicheng Wua4461512017-12-15 14:34:41 +00001103 LoadEliminationCost += InlineConstants::InstrCost;
1104 return true;
1105 }
1106
Chandler Carruth0539c072012-03-31 12:42:41 +00001107 return false;
1108}
1109
1110bool CallAnalyzer::visitStore(StoreInst &I) {
1111 Value *SROAArg;
1112 DenseMap<Value *, int>::iterator CostIt;
Wei Mi6c428d62015-03-20 18:33:12 +00001113 if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001114 if (I.isSimple()) {
1115 accumulateSROACost(CostIt, InlineConstants::InstrCost);
1116 return true;
1117 }
1118
1119 disableSROA(CostIt);
1120 }
1121
Haicheng Wua4461512017-12-15 14:34:41 +00001122 // The store can potentially clobber loads and prevent repeated loads from
1123 // being eliminated.
1124 // FIXME:
1125 // 1. We can probably keep an initial set of eliminable loads subtracted
1126 // from the cost even when we finally see a store. We just need to disable
1127 // *further* accumulation of elimination savings.
1128 // 2. We should probably at some point thread MemorySSA for the callee into
1129 // this and then use that to actually compute *really* precise savings.
1130 disableLoadElimination();
Chandler Carruth0539c072012-03-31 12:42:41 +00001131 return false;
1132}
1133
Chandler Carruth753e21d2012-12-28 14:23:32 +00001134bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
1135 // Constant folding for extract value is trivial.
Easwaran Raman617f6362017-02-18 17:22:52 +00001136 if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
1137 return ConstantExpr::getExtractValue(COps[0], I.getIndices());
1138 }))
Chandler Carruth753e21d2012-12-28 14:23:32 +00001139 return true;
Chandler Carruth753e21d2012-12-28 14:23:32 +00001140
1141 // SROA can look through these but give them a cost.
1142 return false;
1143}
1144
1145bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
1146 // Constant folding for insert value is trivial.
Easwaran Raman617f6362017-02-18 17:22:52 +00001147 if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
1148 return ConstantExpr::getInsertValue(/*AggregateOperand*/ COps[0],
1149 /*InsertedValueOperand*/ COps[1],
1150 I.getIndices());
1151 }))
Chandler Carruth753e21d2012-12-28 14:23:32 +00001152 return true;
Chandler Carruth753e21d2012-12-28 14:23:32 +00001153
1154 // SROA can look through these but give them a cost.
1155 return false;
1156}
1157
1158/// \brief Try to simplify a call site.
1159///
1160/// Takes a concrete function and callsite and tries to actually simplify it by
1161/// analyzing the arguments and call itself with instsimplify. Returns true if
1162/// it has simplified the callsite to some other entity (a constant), making it
1163/// free.
1164bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
1165 // FIXME: Using the instsimplify logic directly for this is inefficient
1166 // because we have to continually rebuild the argument list even when no
1167 // simplifications can be performed. Until that is fixed with remapping
1168 // inside of instsimplify, directly constant fold calls here.
Andrew Kaylor647025f2017-06-09 23:18:11 +00001169 if (!canConstantFoldCallTo(CS, F))
Chandler Carruth753e21d2012-12-28 14:23:32 +00001170 return false;
1171
1172 // Try to re-map the arguments to constants.
1173 SmallVector<Constant *, 4> ConstantArgs;
1174 ConstantArgs.reserve(CS.arg_size());
Chad Rosier567556a2016-04-28 14:47:23 +00001175 for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
1176 ++I) {
Chandler Carruth753e21d2012-12-28 14:23:32 +00001177 Constant *C = dyn_cast<Constant>(*I);
1178 if (!C)
1179 C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
1180 if (!C)
1181 return false; // This argument doesn't map to a constant.
1182
1183 ConstantArgs.push_back(C);
1184 }
Andrew Kaylor647025f2017-06-09 23:18:11 +00001185 if (Constant *C = ConstantFoldCall(CS, F, ConstantArgs)) {
Chandler Carruth753e21d2012-12-28 14:23:32 +00001186 SimplifiedValues[CS.getInstruction()] = C;
1187 return true;
1188 }
1189
1190 return false;
1191}
1192
Chandler Carruth0539c072012-03-31 12:42:41 +00001193bool CallAnalyzer::visitCallSite(CallSite CS) {
Chandler Carruth37d25de2013-12-13 08:00:01 +00001194 if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001195 !F.hasFnAttribute(Attribute::ReturnsTwice)) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001196 // This aborts the entire analysis.
1197 ExposesReturnsTwice = true;
1198 return false;
1199 }
Chad Rosier567556a2016-04-28 14:47:23 +00001200 if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
James Molloy4f6fb952012-12-20 16:04:27 +00001201 ContainsNoDuplicateCall = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001202
Chandler Carruth0539c072012-03-31 12:42:41 +00001203 if (Function *F = CS.getCalledFunction()) {
Chandler Carruth753e21d2012-12-28 14:23:32 +00001204 // When we have a concrete function, first try to simplify it directly.
1205 if (simplifyCallSite(F, CS))
1206 return true;
1207
1208 // Next check if it is an intrinsic we know about.
1209 // FIXME: Lift this into part of the InstVisitor.
1210 if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
1211 switch (II->getIntrinsicID()) {
1212 default:
Haicheng Wua4461512017-12-15 14:34:41 +00001213 if (!CS.onlyReadsMemory() && !isAssumeLikeIntrinsic(II))
1214 disableLoadElimination();
Chandler Carruth753e21d2012-12-28 14:23:32 +00001215 return Base::visitCallSite(CS);
1216
Peter Collingbourne7dd8dbf2016-04-22 21:18:02 +00001217 case Intrinsic::load_relative:
1218 // This is normally lowered to 4 LLVM instructions.
1219 Cost += 3 * InlineConstants::InstrCost;
1220 return false;
1221
Chandler Carruth753e21d2012-12-28 14:23:32 +00001222 case Intrinsic::memset:
1223 case Intrinsic::memcpy:
1224 case Intrinsic::memmove:
Haicheng Wua4461512017-12-15 14:34:41 +00001225 disableLoadElimination();
Chandler Carruth753e21d2012-12-28 14:23:32 +00001226 // SROA can usually chew through these intrinsics, but they aren't free.
1227 return false;
Reid Kleckner60381792015-07-07 22:25:32 +00001228 case Intrinsic::localescape:
Reid Kleckner223de262015-04-14 20:38:14 +00001229 HasFrameEscape = true;
1230 return false;
Chandler Carruth753e21d2012-12-28 14:23:32 +00001231 }
1232 }
1233
Davide Italiano9d939c82017-11-30 22:10:35 +00001234 if (F == CS.getInstruction()->getFunction()) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001235 // This flag will fully abort the analysis, so don't bother with anything
1236 // else.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001237 IsRecursiveCall = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001238 return false;
1239 }
1240
Chandler Carruth0ba8db42013-01-22 11:26:02 +00001241 if (TTI.isLoweredToCall(F)) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001242 // We account for the average 1 instruction per call argument setup
1243 // here.
1244 Cost += CS.arg_size() * InlineConstants::InstrCost;
1245
1246 // Everything other than inline ASM will also have a significant cost
1247 // merely from making the call.
1248 if (!isa<InlineAsm>(CS.getCalledValue()))
1249 Cost += InlineConstants::CallPenalty;
1250 }
1251
Haicheng Wua4461512017-12-15 14:34:41 +00001252 if (!CS.onlyReadsMemory())
1253 disableLoadElimination();
Chandler Carruth0539c072012-03-31 12:42:41 +00001254 return Base::visitCallSite(CS);
1255 }
1256
1257 // Otherwise we're in a very special case -- an indirect function call. See
1258 // if we can be particularly clever about this.
1259 Value *Callee = CS.getCalledValue();
1260
1261 // First, pay the price of the argument setup. We account for the average
1262 // 1 instruction per call argument setup here.
1263 Cost += CS.arg_size() * InlineConstants::InstrCost;
1264
1265 // Next, check if this happens to be an indirect function call to a known
1266 // function in this inline context. If not, we've done all we can.
1267 Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
Haicheng Wua4461512017-12-15 14:34:41 +00001268 if (!F) {
1269 if (!CS.onlyReadsMemory())
1270 disableLoadElimination();
Chandler Carruth0539c072012-03-31 12:42:41 +00001271 return Base::visitCallSite(CS);
Haicheng Wua4461512017-12-15 14:34:41 +00001272 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001273
1274 // If we have a constant that we are calling as a function, we can peer
1275 // through it and see the function target. This happens not infrequently
1276 // during devirtualization and so we want to give it a hefty bonus for
1277 // inlining, but cap that bonus in the event that inlining wouldn't pan
1278 // out. Pretend to inline the function, with a custom threshold.
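  // Worked arithmetic (illustrative numbers only): if the nested analysis
  // below runs with a threshold of 100 and finishes with a cost of 40, we
  // credit max(0, 100 - 40) = 60 back to this call site's Cost, so cheap,
  // devirtualizable targets make this call site more attractive to inline.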
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001279 auto IndirectCallParams = Params;
1280 IndirectCallParams.DefaultThreshold = InlineConstants::IndirectCallThreshold;
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001281 CallAnalyzer CA(TTI, GetAssumptionCache, GetBFI, PSI, ORE, *F, CS,
Easwaran Raman12585b02017-01-20 22:44:04 +00001282 IndirectCallParams);
Chandler Carruth0539c072012-03-31 12:42:41 +00001283 if (CA.analyzeCall(CS)) {
1284 // We were able to inline the indirect call! Subtract the cost from the
Easwaran Raman6d90d9f2015-12-07 21:21:20 +00001285 // threshold to get the bonus we want to apply, but don't go below zero.
1286 Cost -= std::max(0, CA.getThreshold() - CA.getCost());
Chandler Carruth0539c072012-03-31 12:42:41 +00001287 }
1288
Haicheng Wua4461512017-12-15 14:34:41 +00001289 if (!F->onlyReadsMemory())
1290 disableLoadElimination();
Chandler Carruth0539c072012-03-31 12:42:41 +00001291 return Base::visitCallSite(CS);
1292}
1293
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001294bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
1295 // At least one return instruction will be free after inlining.
1296 bool Free = !HasReturn;
1297 HasReturn = true;
1298 return Free;
1299}
1300
1301bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
1302 // We model unconditional branches as essentially free -- they really
1303 // shouldn't exist at all, but handling them makes the behavior of the
1304 // inliner more regular and predictable. Interestingly, conditional branches
1305 // which will fold away are also free.
1306 return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
1307 dyn_cast_or_null<ConstantInt>(
1308 SimplifiedValues.lookup(BI.getCondition()));
1309}
1310
Haicheng Wu3ec848b2017-09-27 14:44:56 +00001311bool CallAnalyzer::visitSelectInst(SelectInst &SI) {
1312 bool CheckSROA = SI.getType()->isPointerTy();
1313 Value *TrueVal = SI.getTrueValue();
1314 Value *FalseVal = SI.getFalseValue();
1315
1316 Constant *TrueC = dyn_cast<Constant>(TrueVal);
1317 if (!TrueC)
1318 TrueC = SimplifiedValues.lookup(TrueVal);
1319 Constant *FalseC = dyn_cast<Constant>(FalseVal);
1320 if (!FalseC)
1321 FalseC = SimplifiedValues.lookup(FalseVal);
1322 Constant *CondC =
1323 dyn_cast_or_null<Constant>(SimplifiedValues.lookup(SI.getCondition()));
1324
1325 if (!CondC) {
1326 // Select C, X, X => X
1327 if (TrueC == FalseC && TrueC) {
1328 SimplifiedValues[&SI] = TrueC;
1329 return true;
1330 }
1331
1332 if (!CheckSROA)
1333 return Base::visitSelectInst(SI);
1334
1335 std::pair<Value *, APInt> TrueBaseAndOffset =
1336 ConstantOffsetPtrs.lookup(TrueVal);
1337 std::pair<Value *, APInt> FalseBaseAndOffset =
1338 ConstantOffsetPtrs.lookup(FalseVal);
1339 if (TrueBaseAndOffset == FalseBaseAndOffset && TrueBaseAndOffset.first) {
1340 ConstantOffsetPtrs[&SI] = TrueBaseAndOffset;
1341
1342 Value *SROAArg;
1343 DenseMap<Value *, int>::iterator CostIt;
1344 if (lookupSROAArgAndCost(TrueVal, SROAArg, CostIt))
1345 SROAArgValues[&SI] = SROAArg;
1346 return true;
1347 }
1348
1349 return Base::visitSelectInst(SI);
1350 }
1351
1352 // Select condition is a constant.
1353 Value *SelectedV = CondC->isAllOnesValue()
1354 ? TrueVal
1355 : (CondC->isNullValue()) ? FalseVal : nullptr;
1356 if (!SelectedV) {
1357 // Condition is a vector constant that is not all 1s or all 0s. If all
1358 // operands are constants, ConstantExpr::getSelect() can handle cases
1359 // such as vector selects.
1360 if (TrueC && FalseC) {
1361 if (auto *C = ConstantExpr::getSelect(CondC, TrueC, FalseC)) {
1362 SimplifiedValues[&SI] = C;
1363 return true;
1364 }
1365 }
1366 return Base::visitSelectInst(SI);
1367 }
1368
1369 // Condition is either all 1s or all 0s. SI can be simplified.
1370 if (Constant *SelectedC = dyn_cast<Constant>(SelectedV)) {
1371 SimplifiedValues[&SI] = SelectedC;
1372 return true;
1373 }
1374
1375 if (!CheckSROA)
1376 return true;
1377
1378 std::pair<Value *, APInt> BaseAndOffset =
1379 ConstantOffsetPtrs.lookup(SelectedV);
1380 if (BaseAndOffset.first) {
1381 ConstantOffsetPtrs[&SI] = BaseAndOffset;
1382
1383 Value *SROAArg;
1384 DenseMap<Value *, int>::iterator CostIt;
1385 if (lookupSROAArgAndCost(SelectedV, SROAArg, CostIt))
1386 SROAArgValues[&SI] = SROAArg;
1387 }
1388
1389 return true;
1390}
1391
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001392bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
1393 // We model unconditional switches as free, see the comments on handling
1394 // branches.
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001395 if (isa<ConstantInt>(SI.getCondition()))
1396 return true;
1397 if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
1398 if (isa<ConstantInt>(V))
1399 return true;
1400
Eric Christopher7ad02ee2017-06-28 21:10:31 +00001401 // Assume the most general case where the switch is lowered into
Jun Bum Lim2960d412017-06-02 20:42:54 +00001402 // either a jump table, bit test, or a balanced binary tree consisting of
1403 // case clusters without merging adjacent clusters with the same
1404 // destination. We do not consider the switches that are lowered with a mix
1405 // of jump table/bit test/binary search tree. The cost of the switch is
1406 // proportional to the size of the tree or the size of jump table range.
1407 //
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001408 // NB: We convert large switches which are just used to initialize large phi
1409 // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
1410 // inlining those. It will prevent inlining in cases where the optimization
1411 // does not (yet) fire.
Jun Bum Lim2960d412017-06-02 20:42:54 +00001412
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001413 // Maximum valid cost that Cost may be raised to in this function.
1414 int CostUpperBound = INT_MAX - InlineConstants::InstrCost - 1;
1415
Jun Bum Lim2960d412017-06-02 20:42:54 +00001416 // Exit early for a large switch, assuming one case needs at least one
1417 // instruction.
1418 // FIXME: This is not true for a bit test, but ignore such cases for now to
1419 // save compile-time.
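  // Rough example (numbers are illustrative): with InstrCost = 5, a switch
  // with 10,000 cases gives a lower bound of at least 50,000 plus the current
  // Cost, which exceeds any realistic Threshold, so we can give up before
  // counting case clusters.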
1420 int64_t CostLowerBound =
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001421 std::min((int64_t)CostUpperBound,
Jun Bum Lim2960d412017-06-02 20:42:54 +00001422 (int64_t)SI.getNumCases() * InlineConstants::InstrCost + Cost);
1423
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001424 if (CostLowerBound > Threshold && !ComputeFullInlineCost) {
Jun Bum Lim2960d412017-06-02 20:42:54 +00001425 Cost = CostLowerBound;
1426 return false;
1427 }
1428
1429 unsigned JumpTableSize = 0;
1430 unsigned NumCaseCluster =
1431 TTI.getEstimatedNumberOfCaseClusters(SI, JumpTableSize);
1432
1433 // If suitable for a jump table, consider the cost for the table size and
1434 // branch to destination.
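  // For instance (illustrative only): a 16-entry jump table is charged
  // (16 + 4) * InstrCost, the fixed 4 * InstrCost term approximating the
  // overhead of the bounds check and indirect jump sequence; the result is
  // still clamped to CostUpperBound below.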
1435 if (JumpTableSize) {
1436 int64_t JTCost = (int64_t)JumpTableSize * InlineConstants::InstrCost +
1437 4 * InlineConstants::InstrCost;
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001438
1439 Cost = std::min((int64_t)CostUpperBound, JTCost + Cost);
Jun Bum Lim2960d412017-06-02 20:42:54 +00001440 return false;
1441 }
1442
1443 // When considering forming a binary search tree, we should find the number
1444 // of nodes, which is the same as the number of comparisons when lowered. For
1445 // a given number of clusters, n, we can define a recursive function, f(n),
1446 // to find the number of nodes in the tree. The recursion is:
1447 // f(n) = 1 + f(n/2) + f(n - n/2), when n > 3,
1448 // and f(n) = n, when n <= 3.
1449 // This leads to a binary tree where the leaves are either f(2) or f(3)
1450 // when n > 3. So, the number of comparisons from leaves should be n, while
1451 // the number from non-leaf nodes should be:
1452 // 2^(log2(n) - 1) - 1
1453 // = 2^log2(n) * 2^-1 - 1
1454 // = n / 2 - 1.
1455 // Considering comparisons from leaf and non-leaf nodes, we can estimate the
1456 // number of comparisons in a simple closed form:
1457 // n + n / 2 - 1 = n * 3 / 2 - 1
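  // Worked example: for n = 8 clusters the estimate is 8 * 3 / 2 - 1 = 11
  // comparisons, each costed below as a compare plus a conditional branch,
  // i.e. 11 * 2 * InstrCost.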
1458 if (NumCaseCluster <= 3) {
1459 // Suppose a comparison includes one compare and one conditional branch.
1460 Cost += NumCaseCluster * 2 * InlineConstants::InstrCost;
1461 return false;
1462 }
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001463
1464 int64_t ExpectedNumberOfCompare = 3 * (int64_t)NumCaseCluster / 2 - 1;
1465 int64_t SwitchCost =
Jun Bum Lim2960d412017-06-02 20:42:54 +00001466 ExpectedNumberOfCompare * 2 * InlineConstants::InstrCost;
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001467
1468 Cost = std::min((int64_t)CostUpperBound, SwitchCost + Cost);
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001469 return false;
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001470}
1471
1472bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
1473 // We never want to inline functions that contain an indirectbr. This is
1474 // incorrect because all the blockaddress's (in static global initializers
1475 // for example) would be referring to the original function, and this
1476 // indirect jump would jump from the inlined copy of the function into the
1477 // original function which is extremely undefined behavior.
1478 // FIXME: This logic isn't really right; we can safely inline functions with
1479 // indirectbr's as long as no other function or global references the
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001480 // blockaddress of a block within the current function.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001481 HasIndirectBr = true;
1482 return false;
1483}
1484
1485bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
1486 // FIXME: It's not clear that a single instruction is an accurate model for
1487 // the inline cost of a resume instruction.
1488 return false;
1489}
1490
David Majnemer654e1302015-07-31 17:58:14 +00001491bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
1492 // FIXME: It's not clear that a single instruction is an accurate model for
1493 // the inline cost of a cleanupret instruction.
1494 return false;
1495}
1496
1497bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
1498 // FIXME: It's not clear that a single instruction is an accurate model for
Joseph Tremoulet8220bcc2015-08-23 00:26:33 +00001499 // the inline cost of a catchret instruction.
David Majnemer654e1302015-07-31 17:58:14 +00001500 return false;
1501}
1502
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001503bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
1504 // FIXME: It might be reasonable to discount the cost of instructions leading
1505 // to unreachable as they have the lowest possible impact on both runtime and
1506 // code size.
1507 return true; // No actual code is needed for unreachable.
1508}
1509
Chandler Carruth0539c072012-03-31 12:42:41 +00001510bool CallAnalyzer::visitInstruction(Instruction &I) {
Chandler Carruthda7513a2012-05-04 00:58:03 +00001511 // Some instructions are free. All of the free intrinsics can also be
1512 // handled by SROA, etc.
Chandler Carruthb8cf5102013-01-21 12:05:16 +00001513 if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
Chandler Carruthda7513a2012-05-04 00:58:03 +00001514 return true;
1515
Chandler Carruth0539c072012-03-31 12:42:41 +00001516 // We found something we don't understand or can't handle. Mark any SROA-able
1517 // values in the operand list as no longer viable.
1518 for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
1519 disableSROA(*OI);
1520
1521 return false;
1522}
1523
Chandler Carruth0539c072012-03-31 12:42:41 +00001524/// \brief Analyze a basic block for its contribution to the inline cost.
1525///
1526/// This method walks the analyzer over every instruction in the given basic
1527/// block and accounts for their cost during inlining at this callsite. It
1528/// aborts early if the threshold has been exceeded or an impossible to inline
1529/// construct has been detected. It returns false if inlining is no longer
1530/// viable, and true if inlining remains viable.
Hal Finkel57f03dd2014-09-07 13:49:57 +00001531bool CallAnalyzer::analyzeBlock(BasicBlock *BB,
1532 SmallPtrSetImpl<const Value *> &EphValues) {
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001533 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
Chandler Carruth6b4cc8b2014-02-01 10:38:17 +00001534 // FIXME: Currently, the number of instructions in a function regardless of
1535 // our ability to simplify them during inlining to constants or dead code,
1536 // is actually used by the vector bonus heuristic. As long as that's true,
1537 // we have to special case debug intrinsics here to prevent differences in
1538 // inlining due to debug symbols. Eventually, the number of unsimplified
1539 // instructions shouldn't factor into the cost computation, but until then,
1540 // hack around it here.
1541 if (isa<DbgInfoIntrinsic>(I))
1542 continue;
1543
Hal Finkel57f03dd2014-09-07 13:49:57 +00001544 // Skip ephemeral values.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001545 if (EphValues.count(&*I))
Hal Finkel57f03dd2014-09-07 13:49:57 +00001546 continue;
1547
Chandler Carruth0539c072012-03-31 12:42:41 +00001548 ++NumInstructions;
1549 if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
1550 ++NumVectorInstructions;
1551
1552 // If the instruction simplified to a constant, there is no cost to this
1553 // instruction. Visit the instructions using our InstVisitor to account for
1554 // all of the per-instruction logic. The visit tree returns true if we
1555 // consumed the instruction in any way, and false if the instruction's base
1556 // cost should count against inlining.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001557 if (Base::visit(&*I))
Chandler Carruth0539c072012-03-31 12:42:41 +00001558 ++NumInstructionsSimplified;
1559 else
1560 Cost += InlineConstants::InstrCost;
1561
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001562 using namespace ore;
Chandler Carruth0539c072012-03-31 12:42:41 +00001563 // If visiting this instruction detected an uninlinable pattern, abort.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001564 if (IsRecursiveCall || ExposesReturnsTwice || HasDynamicAlloca ||
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001565 HasIndirectBr || HasFrameEscape) {
1566 if (ORE)
Vivek Pandya95906582017-10-11 17:12:59 +00001567 ORE->emit([&]() {
1568 return OptimizationRemarkMissed(DEBUG_TYPE, "NeverInline",
1569 CandidateCS.getInstruction())
1570 << NV("Callee", &F)
1571 << " has uninlinable pattern and cost is not fully computed";
1572 });
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001573 return false;
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001574 }
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001575
1576 // If the caller is a recursive function then we don't want to inline
1577 // functions which allocate a lot of stack space because it would increase
1578 // the caller stack usage dramatically.
1579 if (IsCallerRecursive &&
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001580 AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller) {
1581 if (ORE)
Vivek Pandya95906582017-10-11 17:12:59 +00001582 ORE->emit([&]() {
1583 return OptimizationRemarkMissed(DEBUG_TYPE, "NeverInline",
1584 CandidateCS.getInstruction())
1585 << NV("Callee", &F)
1586 << " is recursive and allocates too much stack space. Cost is "
1587 "not fully computed";
1588 });
Chandler Carruth0539c072012-03-31 12:42:41 +00001589 return false;
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001590 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001591
Chandler Carrutha004f222015-05-27 02:49:05 +00001592 // Check if we've passed the maximum possible threshold so we don't spin in
1593 // huge basic blocks that will never inline.
Haicheng Wu61995362017-08-25 19:00:33 +00001594 if (Cost >= Threshold && !ComputeFullInlineCost)
Chandler Carruth0539c072012-03-31 12:42:41 +00001595 return false;
1596 }
1597
1598 return true;
1599}
1600
1601/// \brief Compute the base pointer and cumulative constant offsets for V.
1602///
1603/// This strips all constant offsets off of V, leaving it the base pointer, and
1604/// accumulates the total constant offset applied in the returned constant. It
1605 /// returns null if V is not a pointer, and returns the constant '0' if there are
1606/// no constant offsets applied.
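/// For example (hypothetical IR, not taken from this file): given
///   %q = getelementptr inbounds i8, i8* %base, i64 5
/// passing %q here rewrites V to %base and returns the IntPtrTy constant 5.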
1607ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001608 if (!V->getType()->isPointerTy())
Craig Topper353eda42014-04-24 06:44:33 +00001609 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001610
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001611 unsigned IntPtrWidth = DL.getPointerSizeInBits();
Chandler Carruth0539c072012-03-31 12:42:41 +00001612 APInt Offset = APInt::getNullValue(IntPtrWidth);
1613
1614 // Even though we don't look through PHI nodes, we could be called on an
1615 // instruction in an unreachable block, which may be on a cycle.
1616 SmallPtrSet<Value *, 4> Visited;
1617 Visited.insert(V);
1618 do {
1619 if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
1620 if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
Craig Topper353eda42014-04-24 06:44:33 +00001621 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001622 V = GEP->getPointerOperand();
1623 } else if (Operator::getOpcode(V) == Instruction::BitCast) {
1624 V = cast<Operator>(V)->getOperand(0);
1625 } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
Sanjoy Das5ce32722016-04-08 00:48:30 +00001626 if (GA->isInterposable())
Chandler Carruth0539c072012-03-31 12:42:41 +00001627 break;
1628 V = GA->getAliasee();
1629 } else {
1630 break;
1631 }
1632 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
David Blaikie70573dc2014-11-19 07:49:26 +00001633 } while (Visited.insert(V).second);
Chandler Carruth0539c072012-03-31 12:42:41 +00001634
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001635 Type *IntPtrTy = DL.getIntPtrType(V->getContext());
Chandler Carruth0539c072012-03-31 12:42:41 +00001636 return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
1637}
1638
Haicheng Wu3739e142017-12-14 14:36:18 +00001639/// \brief Find dead blocks due to deleted CFG edges during inlining.
1640///
1641/// If we know the successor of the current block, \p CurrBB, has to be \p
1642/// NextBB, the other successors of \p CurrBB are dead if these successors have
1643/// no live incoming CFG edges. If one block is found to be dead, we can
1644/// continue growing the dead block list by checking the successors of the dead
1645/// blocks to see if all their incoming edges are dead or not.
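///
/// A small illustrative CFG (hypothetical): Entry branches to A and B, and
/// both A and B branch to C. If Entry's condition folds so that its known
/// successor is A, then B is dead (its only incoming edge is dead), while C
/// stays live because the edge A -> C is still live.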
1646void CallAnalyzer::findDeadBlocks(BasicBlock *CurrBB, BasicBlock *NextBB) {
1647 auto IsEdgeDead = [&](BasicBlock *Pred, BasicBlock *Succ) {
1648 // A CFG edge is dead if the predecessor is dead or the predecessor has a
1649 // known successor which is not the one under exam.
1650 return (DeadBlocks.count(Pred) ||
1651 (KnownSuccessors[Pred] && KnownSuccessors[Pred] != Succ));
1652 };
1653
1654 auto IsNewlyDead = [&](BasicBlock *BB) {
1655 // If all the edges to a block are dead, the block is also dead.
1656 return (!DeadBlocks.count(BB) &&
1657 llvm::all_of(predecessors(BB),
1658 [&](BasicBlock *P) { return IsEdgeDead(P, BB); }));
1659 };
1660
1661 for (BasicBlock *Succ : successors(CurrBB)) {
1662 if (Succ == NextBB || !IsNewlyDead(Succ))
1663 continue;
1664 SmallVector<BasicBlock *, 4> NewDead;
1665 NewDead.push_back(Succ);
1666 while (!NewDead.empty()) {
1667 BasicBlock *Dead = NewDead.pop_back_val();
1668 if (DeadBlocks.insert(Dead))
1669 // Continue growing the dead block list.
1670 for (BasicBlock *S : successors(Dead))
1671 if (IsNewlyDead(S))
1672 NewDead.push_back(S);
1673 }
1674 }
1675}
1676
Chandler Carruth0539c072012-03-31 12:42:41 +00001677/// \brief Analyze a call site for potential inlining.
1678///
1679/// Returns true if inlining this call is viable, and false if it is not
1680/// viable. It computes the cost and adjusts the threshold based on numerous
1681/// factors and heuristics. If this method returns false but the computed cost
1682/// is below the computed threshold, then inlining was forcibly disabled by
Bob Wilson266802d2012-11-19 07:04:30 +00001683/// some artifact of the routine.
Chandler Carruth0539c072012-03-31 12:42:41 +00001684bool CallAnalyzer::analyzeCall(CallSite CS) {
Chandler Carruth7ae90d42012-04-11 10:15:10 +00001685 ++NumCallsAnalyzed;
1686
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001687 // Perform some tweaks to the cost and threshold based on the direct
1688 // callsite information.
Chandler Carruth0539c072012-03-31 12:42:41 +00001689
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001690 // We want to more aggressively inline vector-dense kernels, so up the
1691 // threshold, and we'll lower it if the % of vector instructions gets too
Chandler Carrutha004f222015-05-27 02:49:05 +00001692 // low. Note that these bonuses are somewhat arbitrary and evolved over time
1693 // by accident as much as because they are principled bonuses.
1694 //
1695 // FIXME: It would be nice to remove all such bonuses. At least it would be
1696 // nice to base the bonus values on something more scientific.
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001697 assert(NumInstructions == 0);
1698 assert(NumVectorInstructions == 0);
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001699
1700 // Update the threshold based on callsite properties
1701 updateThreshold(CS, F);
1702
Chandler Carrutha004f222015-05-27 02:49:05 +00001703 // Speculatively apply all possible bonuses to Threshold. If cost exceeds
1704 // this Threshold any time, and cost cannot decrease, we can stop processing
1705 // the rest of the function body.
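  // Sketch of the arithmetic with made-up values (the real bonus values are
  // computed elsewhere in this file): if Threshold is 225, SingleBBBonus 100,
  // and VectorBonus 300, Threshold is speculatively raised to 625 here; the
  // bonuses are taken back later if the callee turns out to have multiple
  // blocks or few vector instructions.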
Easwaran Raman51b809b2017-07-28 21:47:36 +00001706 Threshold += (SingleBBBonus + VectorBonus);
Chandler Carrutha004f222015-05-27 02:49:05 +00001707
Xinliang David Li351d9b02017-05-02 05:38:41 +00001708 // Give out bonuses for the callsite, as the instructions setting them up
1709 // will be gone after inlining.
1710 Cost -= getCallsiteCost(CS, DL);
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001711
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001712 // If this function uses the coldcc calling convention, prefer not to inline
1713 // it.
1714 if (F.getCallingConv() == CallingConv::Cold)
1715 Cost += InlineConstants::ColdccPenalty;
1716
1717 // Check if we're done. This can happen due to bonuses and penalties.
Haicheng Wu61995362017-08-25 19:00:33 +00001718 if (Cost >= Threshold && !ComputeFullInlineCost)
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001719 return false;
1720
Chandler Carruth0539c072012-03-31 12:42:41 +00001721 if (F.empty())
1722 return true;
1723
Davide Italiano9d939c82017-11-30 22:10:35 +00001724 Function *Caller = CS.getInstruction()->getFunction();
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001725 // Check if the caller function is recursive itself.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001726 for (User *U : Caller->users()) {
1727 CallSite Site(U);
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001728 if (!Site)
1729 continue;
1730 Instruction *I = Site.getInstruction();
Davide Italiano9d939c82017-11-30 22:10:35 +00001731 if (I->getFunction() == Caller) {
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001732 IsCallerRecursive = true;
1733 break;
1734 }
1735 }
1736
Chandler Carruth0539c072012-03-31 12:42:41 +00001737 // Populate our simplified values by mapping from function arguments to call
1738 // arguments with known important simplifications.
1739 CallSite::arg_iterator CAI = CS.arg_begin();
1740 for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
1741 FAI != FAE; ++FAI, ++CAI) {
1742 assert(CAI != CS.arg_end());
1743 if (Constant *C = dyn_cast<Constant>(CAI))
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001744 SimplifiedValues[&*FAI] = C;
Chandler Carruth0539c072012-03-31 12:42:41 +00001745
1746 Value *PtrArg = *CAI;
1747 if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001748 ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());
Chandler Carruth0539c072012-03-31 12:42:41 +00001749
1750 // We can SROA any pointer arguments derived from alloca instructions.
1751 if (isa<AllocaInst>(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001752 SROAArgValues[&*FAI] = PtrArg;
Chandler Carruth0539c072012-03-31 12:42:41 +00001753 SROAArgCosts[PtrArg] = 0;
1754 }
1755 }
1756 }
1757 NumConstantArgs = SimplifiedValues.size();
1758 NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
1759 NumAllocaArgs = SROAArgValues.size();
1760
Hal Finkel57f03dd2014-09-07 13:49:57 +00001761 // FIXME: If a caller has multiple calls to a callee, we end up recomputing
1762 // the ephemeral values multiple times (and they're completely determined by
1763 // the callee, so this is purely duplicate work).
1764 SmallPtrSet<const Value *, 32> EphValues;
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001765 CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);
Hal Finkel57f03dd2014-09-07 13:49:57 +00001766
Chandler Carruth0539c072012-03-31 12:42:41 +00001767 // The worklist of live basic blocks in the callee *after* inlining. We avoid
1768 // adding basic blocks of the callee which can be proven to be dead for this
1769 // particular call site in order to get more accurate cost estimates. This
1770 // requires a somewhat heavyweight iteration pattern: we need to walk the
1771 // basic blocks in a breadth-first order as we insert live successors. To
1772 // accomplish this, prioritizing for small iterations because we exit after
1773 // crossing our threshold, we use a small-size optimized SetVector.
1774 typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
Chad Rosier567556a2016-04-28 14:47:23 +00001775 SmallPtrSet<BasicBlock *, 16>>
1776 BBSetVector;
Chandler Carruth0539c072012-03-31 12:42:41 +00001777 BBSetVector BBWorklist;
1778 BBWorklist.insert(&F.getEntryBlock());
Easwaran Raman51b809b2017-07-28 21:47:36 +00001779 bool SingleBB = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001780 // Note that we *must not* cache the size, this loop grows the worklist.
1781 for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
1782 // Bail out the moment we cross the threshold. This means we'll under-count
1783 // the cost, but only when undercounting doesn't matter.
Haicheng Wu61995362017-08-25 19:00:33 +00001784 if (Cost >= Threshold && !ComputeFullInlineCost)
Chandler Carruth0539c072012-03-31 12:42:41 +00001785 break;
1786
1787 BasicBlock *BB = BBWorklist[Idx];
1788 if (BB->empty())
Chandler Carruth4d1d34f2012-03-14 23:19:53 +00001789 continue;
Dan Gohman4552e3c2009-10-13 18:30:07 +00001790
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001791 // Disallow inlining a blockaddress. A blockaddress only has defined
1792 // behavior for an indirect branch in the same function, and we do not
1793 // currently support inlining indirect branches. But, the inliner may not
1794 // see an indirect branch that ends up being dead code at a particular call
1795 // site. If the blockaddress escapes the function, e.g., via a global
1796 // variable, inlining may lead to an invalid cross-function reference.
1797 if (BB->hasAddressTaken())
1798 return false;
1799
Chandler Carruth0539c072012-03-31 12:42:41 +00001800 // Analyze the cost of this block. If we blow through the threshold, this
1801 // returns false, and we can bail out.
Easwaran Ramand295b002016-04-13 21:20:22 +00001802 if (!analyzeBlock(BB, EphValues))
1803 return false;
Eric Christopher46308e62011-02-01 01:16:32 +00001804
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001805 TerminatorInst *TI = BB->getTerminator();
1806
Chandler Carruth0539c072012-03-31 12:42:41 +00001807 // Add in the live successors by first checking whether we have terminator
1808 // that may be simplified based on the values simplified by this call.
1809 if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
1810 if (BI->isConditional()) {
1811 Value *Cond = BI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001812 if (ConstantInt *SimpleCond =
1813 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Haicheng Wu3739e142017-12-14 14:36:18 +00001814 BasicBlock *NextBB = BI->getSuccessor(SimpleCond->isZero() ? 1 : 0);
1815 BBWorklist.insert(NextBB);
1816 KnownSuccessors[BB] = NextBB;
1817 findDeadBlocks(BB, NextBB);
Chandler Carruth0539c072012-03-31 12:42:41 +00001818 continue;
Eric Christopher46308e62011-02-01 01:16:32 +00001819 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001820 }
1821 } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
1822 Value *Cond = SI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001823 if (ConstantInt *SimpleCond =
1824 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Haicheng Wu3739e142017-12-14 14:36:18 +00001825 BasicBlock *NextBB = SI->findCaseValue(SimpleCond)->getCaseSuccessor();
1826 BBWorklist.insert(NextBB);
1827 KnownSuccessors[BB] = NextBB;
1828 findDeadBlocks(BB, NextBB);
Chandler Carruth0539c072012-03-31 12:42:41 +00001829 continue;
1830 }
1831 }
Eric Christopher46308e62011-02-01 01:16:32 +00001832
Chandler Carruth0539c072012-03-31 12:42:41 +00001833 // If we're unable to select a particular successor, just count all of
1834 // them.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001835 for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
1836 ++TIdx)
Chandler Carruth0539c072012-03-31 12:42:41 +00001837 BBWorklist.insert(TI->getSuccessor(TIdx));
1838
1839 // If we had any successors at this point, then post-inlining is likely to
1840 // have them as well. Note that we assume any basic blocks which existed
1841 // due to branches or switches which folded above will also fold after
1842 // inlining.
1843 if (SingleBB && TI->getNumSuccessors() > 1) {
1844 // Take off the bonus we applied to the threshold.
1845 Threshold -= SingleBBBonus;
1846 SingleBB = false;
Eric Christopher46308e62011-02-01 01:16:32 +00001847 }
1848 }
Andrew Trickcaa500b2011-10-01 01:27:56 +00001849
Easwaran Raman51b809b2017-07-28 21:47:36 +00001850 bool OnlyOneCallAndLocalLinkage =
1851 F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
Chandler Carruthcb5beb32013-12-12 11:59:26 +00001852 // If this is a noduplicate call, we can still inline as long as
James Molloy4f6fb952012-12-20 16:04:27 +00001853 // inlining this would cause the removal of the caller (so the instruction
1854 // is not actually duplicated, just moved).
1855 if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
1856 return false;
1857
Chandler Carrutha004f222015-05-27 02:49:05 +00001858 // We applied the maximum possible vector bonus at the beginning. Now,
1859 // subtract the excess bonus, if any, from the Threshold before
1860 // comparing against Cost.
1861 if (NumVectorInstructions <= NumInstructions / 10)
Easwaran Raman51b809b2017-07-28 21:47:36 +00001862 Threshold -= VectorBonus;
Chandler Carrutha004f222015-05-27 02:49:05 +00001863 else if (NumVectorInstructions <= NumInstructions / 2)
Easwaran Raman51b809b2017-07-28 21:47:36 +00001864 Threshold -= VectorBonus / 2;
Chandler Carruth0539c072012-03-31 12:42:41 +00001865
Hans Wennborg00ab73d2016-02-05 20:32:42 +00001866 return Cost < std::max(1, Threshold);
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001867}
1868
Aaron Ballman615eb472017-10-15 14:32:27 +00001869#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
Chandler Carruth0539c072012-03-31 12:42:41 +00001870/// \brief Dump stats about this call's analysis.
Yaron Kereneb2a2542016-01-29 20:50:44 +00001871LLVM_DUMP_METHOD void CallAnalyzer::dump() {
Eric Christophera13839f2014-02-26 23:27:16 +00001872#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
Chandler Carruth0539c072012-03-31 12:42:41 +00001873 DEBUG_PRINT_STAT(NumConstantArgs);
1874 DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
1875 DEBUG_PRINT_STAT(NumAllocaArgs);
1876 DEBUG_PRINT_STAT(NumConstantPtrCmps);
1877 DEBUG_PRINT_STAT(NumConstantPtrDiffs);
1878 DEBUG_PRINT_STAT(NumInstructionsSimplified);
Chandler Carrutha004f222015-05-27 02:49:05 +00001879 DEBUG_PRINT_STAT(NumInstructions);
Chandler Carruth0539c072012-03-31 12:42:41 +00001880 DEBUG_PRINT_STAT(SROACostSavings);
1881 DEBUG_PRINT_STAT(SROACostSavingsLost);
Haicheng Wua4461512017-12-15 14:34:41 +00001882 DEBUG_PRINT_STAT(LoadEliminationCost);
James Molloy4f6fb952012-12-20 16:04:27 +00001883 DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
Chandler Carruth394e34f2014-01-31 22:32:32 +00001884 DEBUG_PRINT_STAT(Cost);
1885 DEBUG_PRINT_STAT(Threshold);
Chandler Carruth0539c072012-03-31 12:42:41 +00001886#undef DEBUG_PRINT_STAT
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001887}
Manman Renc3366cc2012-09-06 19:55:56 +00001888#endif
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001889
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001890/// \brief Test that there are no attribute conflicts between Caller and Callee
1891/// that prevent inlining.
1892static bool functionsHaveCompatibleAttributes(Function *Caller,
Eric Christopher4371b132015-07-02 01:11:47 +00001893 Function *Callee,
1894 TargetTransformInfo &TTI) {
Eric Christopherd566fb12015-07-29 22:09:48 +00001895 return TTI.areInlineCompatible(Caller, Callee) &&
Akira Hatanaka1cb242e2015-12-22 23:57:37 +00001896 AttributeFuncs::areInlineCompatible(*Caller, *Callee);
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001897}
1898
Xinliang David Li351d9b02017-05-02 05:38:41 +00001899int llvm::getCallsiteCost(CallSite CS, const DataLayout &DL) {
1900 int Cost = 0;
1901 for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
1902 if (CS.isByValArgument(I)) {
1903 // We approximate the number of loads and stores needed by dividing the
1904 // size of the byval type by the target's pointer size.
1905 PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
1906 unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
1907 unsigned PointerSize = DL.getPointerSizeInBits();
1908 // Ceiling division.
1909 unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;
1910
1911 // If it generates more than 8 stores it is likely to be expanded as an
1912 // inline memcpy so we take that as an upper bound. Otherwise we assume
1913 // one load and one store per word copied.
1914 // FIXME: The maxStoresPerMemcpy setting from the target should be used
1915 // here instead of a magic number of 8, but it's not available via
1916 // DataLayout.
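      // Worked example (illustrative): a 24-byte byval struct on a 64-bit
      // target gives TypeSize = 192 and PointerSize = 64, so NumStores =
      // ceil(192 / 64) = 3 and the copy is charged 2 * 3 * InstrCost below;
      // a much larger struct would be capped at 8 stores.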
1917 NumStores = std::min(NumStores, 8U);
1918
1919 Cost += 2 * NumStores * InlineConstants::InstrCost;
1920 } else {
1921 // For non-byval arguments subtract off one instruction per call
1922 // argument.
1923 Cost += InlineConstants::InstrCost;
1924 }
1925 }
1926 // The call instruction also disappears after inlining.
1927 Cost += InlineConstants::InstrCost + InlineConstants::CallPenalty;
1928 return Cost;
1929}
1930
Sean Silvaab6a6832016-07-23 04:22:50 +00001931InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001932 CallSite CS, const InlineParams &Params, TargetTransformInfo &CalleeTTI,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001933 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman12585b02017-01-20 22:44:04 +00001934 Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001935 ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE) {
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001936 return getInlineCost(CS, CS.getCalledFunction(), Params, CalleeTTI,
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001937 GetAssumptionCache, GetBFI, PSI, ORE);
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001938}
1939
Sean Silvaab6a6832016-07-23 04:22:50 +00001940InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001941 CallSite CS, Function *Callee, const InlineParams &Params,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001942 TargetTransformInfo &CalleeTTI,
1943 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman12585b02017-01-20 22:44:04 +00001944 Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001945 ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE) {
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001946
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001947 // Cannot inline indirect calls.
1948 if (!Callee)
1949 return llvm::InlineCost::getNever();
1950
1951 // Calls to functions with always-inline attributes should be inlined
1952 // whenever possible.
Peter Collingbourne68a88972014-05-19 18:25:54 +00001953 if (CS.hasFnAttr(Attribute::AlwaysInline)) {
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001954 if (isInlineViable(*Callee))
1955 return llvm::InlineCost::getAlways();
1956 return llvm::InlineCost::getNever();
1957 }
1958
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001959 // Never inline functions with conflicting attributes (unless callee has
1960 // always-inline attribute).
Chad Rosier5ce28f42017-08-02 14:50:27 +00001961 Function *Caller = CS.getCaller();
1962 if (!functionsHaveCompatibleAttributes(Caller, Callee, CalleeTTI))
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001963 return llvm::InlineCost::getNever();
1964
Paul Robinsondcbe35b2013-11-18 21:44:03 +00001965 // Don't inline this call if the caller has the optnone attribute.
Chad Rosier5ce28f42017-08-02 14:50:27 +00001966 if (Caller->hasFnAttribute(Attribute::OptimizeNone))
Paul Robinsondcbe35b2013-11-18 21:44:03 +00001967 return llvm::InlineCost::getNever();
1968
Sanjoy Das5ce32722016-04-08 00:48:30 +00001969 // Don't inline functions which can be interposed at link-time. Don't inline
1970 // functions marked noinline or call sites marked noinline.
Craig Topper107b1872016-12-09 02:18:04 +00001971 // Note: inlining non-exact non-interposable functions is fine, since we know
Sanjoy Das5ce32722016-04-08 00:48:30 +00001972 // we have *a* correct implementation of the source level function.
Chad Rosier567556a2016-04-28 14:47:23 +00001973 if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
1974 CS.isNoInline())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001975 return llvm::InlineCost::getNever();
1976
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001977 DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
Chad Rosier4eb18742017-08-21 19:56:46 +00001978 << "... (caller:" << Caller->getName() << ")\n");
Andrew Trickcaa500b2011-10-01 01:27:56 +00001979
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001980 CallAnalyzer CA(CalleeTTI, GetAssumptionCache, GetBFI, PSI, ORE, *Callee, CS,
Easwaran Raman12585b02017-01-20 22:44:04 +00001981 Params);
Chandler Carruth0539c072012-03-31 12:42:41 +00001982 bool ShouldInline = CA.analyzeCall(CS);
Dan Gohman4552e3c2009-10-13 18:30:07 +00001983
Chandler Carruth0539c072012-03-31 12:42:41 +00001984 DEBUG(CA.dump());
1985
1986 // Check if there was a reason to force inlining or no inlining.
1987 if (!ShouldInline && CA.getCost() < CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001988 return InlineCost::getNever();
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001989 if (ShouldInline && CA.getCost() >= CA.getThreshold())
Dan Gohman4552e3c2009-10-13 18:30:07 +00001990 return InlineCost::getAlways();
Andrew Trickcaa500b2011-10-01 01:27:56 +00001991
Chandler Carruth0539c072012-03-31 12:42:41 +00001992 return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
Dan Gohman4552e3c2009-10-13 18:30:07 +00001993}
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001994
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001995bool llvm::isInlineViable(Function &F) {
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001996 bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001997 for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001998 // Disallow inlining of functions which contain indirect branches or
1999 // blockaddresses.
2000 if (isa<IndirectBrInst>(BI->getTerminator()) || BI->hasAddressTaken())
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002001 return false;
2002
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00002003 for (auto &II : *BI) {
2004 CallSite CS(&II);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002005 if (!CS)
2006 continue;
2007
2008 // Disallow recursive calls.
2009 if (&F == CS.getCalledFunction())
2010 return false;
2011
2012 // Disallow calls which expose returns-twice to a function not previously
2013 // attributed as such.
2014 if (!ReturnsTwice && CS.isCall() &&
2015 cast<CallInst>(CS.getInstruction())->canReturnTwice())
2016 return false;
Reid Kleckner223de262015-04-14 20:38:14 +00002017
Reid Kleckner60381792015-07-07 22:25:32 +00002018 // Disallow inlining functions that call @llvm.localescape. Doing this
Reid Kleckner223de262015-04-14 20:38:14 +00002019 // correctly would require major changes to the inliner.
2020 if (CS.getCalledFunction() &&
2021 CS.getCalledFunction()->getIntrinsicID() ==
Reid Kleckner60381792015-07-07 22:25:32 +00002022 llvm::Intrinsic::localescape)
Reid Kleckner223de262015-04-14 20:38:14 +00002023 return false;
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002024 }
2025 }
2026
2027 return true;
2028}
Easwaran Raman1c57cc22016-08-10 00:48:04 +00002029
2030// APIs to create InlineParams based on command line flags and/or other
2031// parameters.
2032
2033InlineParams llvm::getInlineParams(int Threshold) {
2034 InlineParams Params;
2035
2036 // This field is the threshold to use for a callee by default. This is
2037 // derived from one or more of:
2038 // * optimization or size-optimization levels,
2039 // * a value passed to createFunctionInliningPass function, or
2040 // * the -inline-threshold flag.
2041 // If the -inline-threshold flag is explicitly specified, that is used
2042 // irrespective of anything else.
2043 if (InlineThreshold.getNumOccurrences() > 0)
2044 Params.DefaultThreshold = InlineThreshold;
2045 else
2046 Params.DefaultThreshold = Threshold;
2047
2048 // Set the HintThreshold knob from the -inlinehint-threshold.
2049 Params.HintThreshold = HintThreshold;
2050
2051 // Set the HotCallSiteThreshold knob from the -hot-callsite-threshold.
2052 Params.HotCallSiteThreshold = HotCallSiteThreshold;
2053
Easwaran Raman974d4ee2017-08-03 22:23:33 +00002054 // If the -locally-hot-callsite-threshold is explicitly specified, use it to
2055 // populate LocallyHotCallSiteThreshold. Later, we populate
2056 // Params.LocallyHotCallSiteThreshold from -locally-hot-callsite-threshold if
2057 // we know that optimization level is O3 (in the getInlineParams variant that
2058 // takes the opt and size levels).
2059 // FIXME: Remove this check (and make the assignment unconditional) after
2060 // addressing size regression issues at O2.
2061 if (LocallyHotCallSiteThreshold.getNumOccurrences() > 0)
2062 Params.LocallyHotCallSiteThreshold = LocallyHotCallSiteThreshold;
2063
Easwaran Raman12585b02017-01-20 22:44:04 +00002064 // Set the ColdCallSiteThreshold knob from the -inline-cold-callsite-threshold.
2065 Params.ColdCallSiteThreshold = ColdCallSiteThreshold;
2066
Easwaran Raman1c57cc22016-08-10 00:48:04 +00002067 // Set the OptMinSizeThreshold and OptSizeThreshold params only if the
Easwaran Raman1c57cc22016-08-10 00:48:04 +00002068 // -inline-threshold commandline option is not explicitly given. If that
2069 // option is present, then its value applies even for callees with size and
2070 // minsize attributes.
2071 // If the -inline-threshold is not specified, set the ColdThreshold from the
2072 // -inlinecold-threshold even if it is not explicitly passed. If
2073 // -inline-threshold is specified, then -inlinecold-threshold needs to be
2074 // explicitly specified to set the ColdThreshold knob
2075 if (InlineThreshold.getNumOccurrences() == 0) {
2076 Params.OptMinSizeThreshold = InlineConstants::OptMinSizeThreshold;
2077 Params.OptSizeThreshold = InlineConstants::OptSizeThreshold;
2078 Params.ColdThreshold = ColdThreshold;
2079 } else if (ColdThreshold.getNumOccurrences() > 0) {
2080 Params.ColdThreshold = ColdThreshold;
2081 }
2082 return Params;
2083}
2084
2085InlineParams llvm::getInlineParams() {
2086 return getInlineParams(InlineThreshold);
2087}
2088
2089// Compute the default threshold for inlining based on the opt level and the
2090// size opt level.
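// For example, getInlineParams(3, 0) (roughly -O3) picks
// OptAggressiveThreshold, getInlineParams(2, 2) (-Oz) picks
// OptMinSizeThreshold, and everything else falls back to the
// -inline-threshold default.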
2091static int computeThresholdFromOptLevels(unsigned OptLevel,
2092 unsigned SizeOptLevel) {
2093 if (OptLevel > 2)
2094 return InlineConstants::OptAggressiveThreshold;
2095 if (SizeOptLevel == 1) // -Os
2096 return InlineConstants::OptSizeThreshold;
2097 if (SizeOptLevel == 2) // -Oz
2098 return InlineConstants::OptMinSizeThreshold;
2099 return InlineThreshold;
2100}
2101
2102InlineParams llvm::getInlineParams(unsigned OptLevel, unsigned SizeOptLevel) {
Easwaran Raman974d4ee2017-08-03 22:23:33 +00002103 auto Params =
2104 getInlineParams(computeThresholdFromOptLevels(OptLevel, SizeOptLevel));
2105 // At O3, use the value of -locally-hot-callsite-threshold option to populate
2106 // Params.LocallyHotCallSiteThreshold. Below O3, this flag has effect only
2107 // when it is specified explicitly.
2108 if (OptLevel > 2)
2109 Params.LocallyHotCallSiteThreshold = LocallyHotCallSiteThreshold;
2110 return Params;
Easwaran Raman1c57cc22016-08-10 00:48:04 +00002111}