//===- InlineCost.cpp - Cost analysis for inliner -------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements inline cost analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/InlineCost.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "inline-cost"

STATISTIC(NumCallsAnalyzed, "Number of call sites analyzed");

static cl::opt<int> InlineThreshold(
    "inline-threshold", cl::Hidden, cl::init(225), cl::ZeroOrMore,
    cl::desc("Control the amount of inlining to perform (default = 225)"));

static cl::opt<int> HintThreshold(
    "inlinehint-threshold", cl::Hidden, cl::init(325), cl::ZeroOrMore,
    cl::desc("Threshold for inlining functions with inline hint"));

static cl::opt<int>
    ColdCallSiteThreshold("inline-cold-callsite-threshold", cl::Hidden,
                          cl::init(45), cl::ZeroOrMore,
                          cl::desc("Threshold for inlining cold callsites"));

// We introduce this threshold to help performance of instrumentation based
// PGO before we actually hook up inliner with analysis passes such as BPI and
// BFI.
static cl::opt<int> ColdThreshold(
    "inlinecold-threshold", cl::Hidden, cl::init(45), cl::ZeroOrMore,
    cl::desc("Threshold for inlining functions with cold attribute"));

static cl::opt<int>
    HotCallSiteThreshold("hot-callsite-threshold", cl::Hidden, cl::init(3000),
                         cl::ZeroOrMore,
                         cl::desc("Threshold for hot callsites "));

static cl::opt<int> LocallyHotCallSiteThreshold(
    "locally-hot-callsite-threshold", cl::Hidden, cl::init(525), cl::ZeroOrMore,
    cl::desc("Threshold for locally hot callsites "));

static cl::opt<int> ColdCallSiteRelFreq(
    "cold-callsite-rel-freq", cl::Hidden, cl::init(2), cl::ZeroOrMore,
    cl::desc("Maximum block frequency, expressed as a percentage of caller's "
             "entry frequency, for a callsite to be cold in the absence of "
             "profile information."));

static cl::opt<int> HotCallSiteRelFreq(
    "hot-callsite-rel-freq", cl::Hidden, cl::init(60), cl::ZeroOrMore,
    cl::desc("Minimum block frequency, expressed as a multiple of caller's "
             "entry frequency, for a callsite to be hot in the absence of "
             "profile information."));

static cl::opt<bool> OptComputeFullInlineCost(
    "inline-cost-full", cl::Hidden, cl::init(false), cl::ZeroOrMore,
    cl::desc("Compute the full inline cost of a call site even when the cost "
             "exceeds the threshold."));

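// The knobs above are ordinary cl::opt flags, so they can be overridden from
// the command line when experimenting with inliner behavior. Illustrative
// invocation only (the exact driver and pass-manager flags depend on the
// build and are assumptions here):
//   opt -O2 -inline-threshold=500 -inlinehint-threshold=750 input.ll -S
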
namespace {

class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
  typedef InstVisitor<CallAnalyzer, bool> Base;
  friend class InstVisitor<CallAnalyzer, bool>;

  /// The TargetTransformInfo available for this compilation.
  const TargetTransformInfo &TTI;

  /// Getter for the cache of @llvm.assume intrinsics.
  std::function<AssumptionCache &(Function &)> &GetAssumptionCache;

  /// Getter for BlockFrequencyInfo
  Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI;

  /// Profile summary information.
  ProfileSummaryInfo *PSI;

  /// The called function.
  Function &F;

  // Cache the DataLayout since we use it a lot.
  const DataLayout &DL;

  /// The OptimizationRemarkEmitter available for this compilation.
  OptimizationRemarkEmitter *ORE;

  /// The candidate callsite being analyzed. Please do not use this to do
  /// analysis in the caller function; we want the inline cost query to be
  /// easily cacheable. Instead, use the cover function paramHasAttr.
  CallSite CandidateCS;

  /// Tunable parameters that control the analysis.
  const InlineParams &Params;

  int Threshold;
  int Cost;
  bool ComputeFullInlineCost;

  bool IsCallerRecursive;
  bool IsRecursiveCall;
  bool ExposesReturnsTwice;
  bool HasDynamicAlloca;
  bool ContainsNoDuplicateCall;
  bool HasReturn;
  bool HasIndirectBr;
  bool HasUninlineableIntrinsic;
  bool InitsVargArgs;

  /// Number of bytes allocated statically by the callee.
  uint64_t AllocatedSize;
  unsigned NumInstructions, NumVectorInstructions;
  int VectorBonus, TenPercentVectorBonus;
  // Bonus to be applied when the callee has only one reachable basic block.
  int SingleBBBonus;

  /// While we walk the potentially-inlined instructions, we build up and
  /// maintain a mapping of simplified values specific to this callsite. The
  /// idea is to propagate any special information we have about arguments to
  /// this call through the inlinable section of the function, and account for
  /// likely simplifications post-inlining. The most important aspect we track
  /// is CFG altering simplifications -- when we prove a basic block dead, that
  /// can cause dramatic shifts in the cost of inlining a function.
  DenseMap<Value *, Constant *> SimplifiedValues;

  /// Keep track of the values which map back (through function arguments) to
  /// allocas on the caller stack which could be simplified through SROA.
  DenseMap<Value *, Value *> SROAArgValues;

  /// The mapping of caller Alloca values to their accumulated cost savings. If
  /// we have to disable SROA for one of the allocas, this tells us how much
  /// cost must be added.
  DenseMap<Value *, int> SROAArgCosts;

  /// Keep track of values which map to a pointer base and constant offset.
  DenseMap<Value *, std::pair<Value *, APInt>> ConstantOffsetPtrs;

  /// Keep track of dead blocks due to the constant arguments.
  SetVector<BasicBlock *> DeadBlocks;

  /// The mapping of the blocks to their known unique successors due to the
  /// constant arguments.
  DenseMap<BasicBlock *, BasicBlock *> KnownSuccessors;

  /// Model the elimination of repeated loads that is expected to happen
  /// whenever we simplify away the stores that would otherwise cause them to be
  /// loads.
  bool EnableLoadElimination;
  SmallPtrSet<Value *, 16> LoadAddrSet;
  int LoadEliminationCost;

  // Custom simplification helper routines.
  bool isAllocaDerivedArg(Value *V);
  bool lookupSROAArgAndCost(Value *V, Value *&Arg,
                            DenseMap<Value *, int>::iterator &CostIt);
  void disableSROA(DenseMap<Value *, int>::iterator CostIt);
  void disableSROA(Value *V);
  void findDeadBlocks(BasicBlock *CurrBB, BasicBlock *NextBB);
  void accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                          int InstructionCost);
  void disableLoadElimination();
  bool isGEPFree(GetElementPtrInst &GEP);
  bool canFoldInboundsGEP(GetElementPtrInst &I);
  bool accumulateGEPOffset(GEPOperator &GEP, APInt &Offset);
  bool simplifyCallSite(Function *F, CallSite CS);
  template <typename Callable>
  bool simplifyInstruction(Instruction &I, Callable Evaluate);
  ConstantInt *stripAndComputeInBoundsConstantOffsets(Value *&V);

  /// Return true if the given argument to the function being considered for
  /// inlining has the given attribute set either at the call site or the
  /// function declaration. Primarily used to inspect call site specific
  /// attributes since these can be more precise than the ones on the callee
  /// itself.
  bool paramHasAttr(Argument *A, Attribute::AttrKind Attr);

  /// Return true if the given value is known non null within the callee if
  /// inlined through this particular callsite.
  bool isKnownNonNullInCallee(Value *V);

  /// Update Threshold based on callsite properties such as callee
  /// attributes and callee hotness for PGO builds. The Callee is explicitly
  /// passed to support analyzing indirect calls whose target is inferred by
  /// analysis.
  void updateThreshold(CallSite CS, Function &Callee);

  /// Return true if size growth is allowed when inlining the callee at CS.
  bool allowSizeGrowth(CallSite CS);

  /// Return true if \p CS is a cold callsite.
  bool isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI);

  /// Return a higher threshold if \p CS is a hot callsite.
  Optional<int> getHotCallSiteThreshold(CallSite CS,
                                        BlockFrequencyInfo *CallerBFI);

  // Custom analysis routines.
  InlineResult analyzeBlock(BasicBlock *BB,
                            SmallPtrSetImpl<const Value *> &EphValues);

  // Disable several entry points to the visitor so we don't accidentally use
  // them by declaring but not defining them here.
  void visit(Module *);
  void visit(Module &);
  void visit(Function *);
  void visit(Function &);
  void visit(BasicBlock *);
  void visit(BasicBlock &);

  // Provide base case for our instruction visit.
  bool visitInstruction(Instruction &I);

  // Our visit overrides.
  bool visitAlloca(AllocaInst &I);
  bool visitPHI(PHINode &I);
  bool visitGetElementPtr(GetElementPtrInst &I);
  bool visitBitCast(BitCastInst &I);
  bool visitPtrToInt(PtrToIntInst &I);
  bool visitIntToPtr(IntToPtrInst &I);
  bool visitCastInst(CastInst &I);
  bool visitUnaryInstruction(UnaryInstruction &I);
  bool visitCmpInst(CmpInst &I);
  bool visitSub(BinaryOperator &I);
  bool visitBinaryOperator(BinaryOperator &I);
  bool visitLoad(LoadInst &I);
  bool visitStore(StoreInst &I);
  bool visitExtractValue(ExtractValueInst &I);
  bool visitInsertValue(InsertValueInst &I);
  bool visitCallSite(CallSite CS);
  bool visitReturnInst(ReturnInst &RI);
  bool visitBranchInst(BranchInst &BI);
  bool visitSelectInst(SelectInst &SI);
  bool visitSwitchInst(SwitchInst &SI);
  bool visitIndirectBrInst(IndirectBrInst &IBI);
  bool visitResumeInst(ResumeInst &RI);
  bool visitCleanupReturnInst(CleanupReturnInst &RI);
  bool visitCatchReturnInst(CatchReturnInst &RI);
  bool visitUnreachableInst(UnreachableInst &I);

public:
  CallAnalyzer(const TargetTransformInfo &TTI,
               std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
               Optional<function_ref<BlockFrequencyInfo &(Function &)>> &GetBFI,
               ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE,
               Function &Callee, CallSite CSArg, const InlineParams &Params)
      : TTI(TTI), GetAssumptionCache(GetAssumptionCache), GetBFI(GetBFI),
        PSI(PSI), F(Callee), DL(F.getParent()->getDataLayout()), ORE(ORE),
        CandidateCS(CSArg), Params(Params), Threshold(Params.DefaultThreshold),
        Cost(0), ComputeFullInlineCost(OptComputeFullInlineCost ||
                                       Params.ComputeFullInlineCost || ORE),
        IsCallerRecursive(false), IsRecursiveCall(false),
        ExposesReturnsTwice(false), HasDynamicAlloca(false),
        ContainsNoDuplicateCall(false), HasReturn(false), HasIndirectBr(false),
        HasUninlineableIntrinsic(false), InitsVargArgs(false), AllocatedSize(0),
        NumInstructions(0), NumVectorInstructions(0), VectorBonus(0),
        SingleBBBonus(0), EnableLoadElimination(true), LoadEliminationCost(0),
        NumConstantArgs(0), NumConstantOffsetPtrArgs(0), NumAllocaArgs(0),
        NumConstantPtrCmps(0), NumConstantPtrDiffs(0),
        NumInstructionsSimplified(0), SROACostSavings(0),
        SROACostSavingsLost(0) {}

  InlineResult analyzeCall(CallSite CS);

  int getThreshold() { return Threshold; }
  int getCost() { return Cost; }

  // Keep a bunch of stats about the cost savings found so we can print them
  // out when debugging.
  unsigned NumConstantArgs;
  unsigned NumConstantOffsetPtrArgs;
  unsigned NumAllocaArgs;
  unsigned NumConstantPtrCmps;
  unsigned NumConstantPtrDiffs;
  unsigned NumInstructionsSimplified;
  unsigned SROACostSavings;
  unsigned SROACostSavingsLost;

  void dump();
};

} // namespace

/// Test whether the given value is an Alloca-derived function argument.
bool CallAnalyzer::isAllocaDerivedArg(Value *V) {
  return SROAArgValues.count(V);
}

/// Lookup the SROA-candidate argument and cost iterator which V maps to.
/// Returns false if V does not map to a SROA-candidate.
bool CallAnalyzer::lookupSROAArgAndCost(
    Value *V, Value *&Arg, DenseMap<Value *, int>::iterator &CostIt) {
  if (SROAArgValues.empty() || SROAArgCosts.empty())
    return false;

  DenseMap<Value *, Value *>::iterator ArgIt = SROAArgValues.find(V);
  if (ArgIt == SROAArgValues.end())
    return false;

  Arg = ArgIt->second;
  CostIt = SROAArgCosts.find(Arg);
  return CostIt != SROAArgCosts.end();
}

/// Disable SROA for the candidate marked by this cost iterator.
///
/// This marks the candidate as no longer viable for SROA, and adds the cost
/// savings associated with it back into the inline cost measurement.
void CallAnalyzer::disableSROA(DenseMap<Value *, int>::iterator CostIt) {
  // If we're no longer able to perform SROA we need to undo its cost savings
  // and prevent subsequent analysis.
  Cost += CostIt->second;
  SROACostSavings -= CostIt->second;
  SROACostSavingsLost += CostIt->second;
  SROAArgCosts.erase(CostIt);
  disableLoadElimination();
}

/// If 'V' maps to a SROA candidate, disable SROA for it.
void CallAnalyzer::disableSROA(Value *V) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(V, SROAArg, CostIt))
    disableSROA(CostIt);
}

/// Accumulate the given cost for a particular SROA candidate.
void CallAnalyzer::accumulateSROACost(DenseMap<Value *, int>::iterator CostIt,
                                      int InstructionCost) {
  CostIt->second += InstructionCost;
  SROACostSavings += InstructionCost;
}

void CallAnalyzer::disableLoadElimination() {
  if (EnableLoadElimination) {
    Cost += LoadEliminationCost;
    LoadEliminationCost = 0;
    EnableLoadElimination = false;
  }
}

/// Accumulate a constant GEP offset into an APInt if possible.
///
/// Returns false if unable to compute the offset for any reason. Respects any
/// simplified values known during the analysis of this callsite.
bool CallAnalyzer::accumulateGEPOffset(GEPOperator &GEP, APInt &Offset) {
  unsigned IntPtrWidth = DL.getIndexTypeSizeInBits(GEP.getType());
  assert(IntPtrWidth == Offset.getBitWidth());

  for (gep_type_iterator GTI = gep_type_begin(GEP), GTE = gep_type_end(GEP);
       GTI != GTE; ++GTI) {
    ConstantInt *OpC = dyn_cast<ConstantInt>(GTI.getOperand());
    if (!OpC)
      if (Constant *SimpleOp = SimplifiedValues.lookup(GTI.getOperand()))
        OpC = dyn_cast<ConstantInt>(SimpleOp);
    if (!OpC)
      return false;
    if (OpC->isZero())
      continue;

    // Handle a struct index, which adds its field offset to the pointer.
    if (StructType *STy = GTI.getStructTypeOrNull()) {
      unsigned ElementIdx = OpC->getZExtValue();
      const StructLayout *SL = DL.getStructLayout(STy);
      Offset += APInt(IntPtrWidth, SL->getElementOffset(ElementIdx));
      continue;
    }

    APInt TypeSize(IntPtrWidth, DL.getTypeAllocSize(GTI.getIndexedType()));
    Offset += OpC->getValue().sextOrTrunc(IntPtrWidth) * TypeSize;
  }
  return true;
}
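
// Illustrative example for the loop above (hypothetical IR, typical 64-bit
// data layout): for
//   %f = getelementptr inbounds {i32, i64}, {i32, i64}* %p, i32 0, i32 1
// the struct-index case consults StructLayout and adds the field offset of
// element 1 (8 bytes here, after padding) into Offset, while an array index
// contributes index * the alloc size of the indexed element type.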

/// Use TTI to check whether a GEP is free.
///
/// Respects any simplified values known during the analysis of this callsite.
bool CallAnalyzer::isGEPFree(GetElementPtrInst &GEP) {
  SmallVector<Value *, 4> Operands;
  Operands.push_back(GEP.getOperand(0));
  for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
    if (Constant *SimpleOp = SimplifiedValues.lookup(*I))
      Operands.push_back(SimpleOp);
    else
      Operands.push_back(*I);
  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&GEP, Operands);
}

bool CallAnalyzer::visitAlloca(AllocaInst &I) {
  // Check whether inlining will turn a dynamic alloca into a static
  // alloca and handle that case.
  if (I.isArrayAllocation()) {
    Constant *Size = SimplifiedValues.lookup(I.getArraySize());
    if (auto *AllocSize = dyn_cast_or_null<ConstantInt>(Size)) {
      Type *Ty = I.getAllocatedType();
      AllocatedSize = SaturatingMultiplyAdd(
          AllocSize->getLimitedValue(), DL.getTypeAllocSize(Ty), AllocatedSize);
      return Base::visitAlloca(I);
    }
  }

  // Accumulate the allocated size.
  if (I.isStaticAlloca()) {
    Type *Ty = I.getAllocatedType();
    AllocatedSize = SaturatingAdd(DL.getTypeAllocSize(Ty), AllocatedSize);
  }

  // We will happily inline static alloca instructions.
  if (I.isStaticAlloca())
    return Base::visitAlloca(I);

  // FIXME: This is overly conservative. Dynamic allocas are inefficient for
  // a variety of reasons, and so we would like to not inline them into
  // functions which don't currently have a dynamic alloca. This simply
  // disables inlining altogether in the presence of a dynamic alloca.
  HasDynamicAlloca = true;
  return false;
}

bool CallAnalyzer::visitPHI(PHINode &I) {
  // FIXME: We need to propagate SROA *disabling* through phi nodes, even
  // though we don't want to propagate its bonuses. The idea is to disable
  // SROA if it *might* be used in an inappropriate manner.

  // Phi nodes are always zero-cost.
  // FIXME: Pointer sizes may differ between different address spaces, so do we
  // need to use correct address space in the call to getPointerSizeInBits here?
  // Or could we skip the getPointerSizeInBits call completely? As far as I can
  // see the ZeroOffset is used as a dummy value, so we can probably use any
  // bit width for the ZeroOffset?
  APInt ZeroOffset = APInt::getNullValue(DL.getPointerSizeInBits(0));
  bool CheckSROA = I.getType()->isPointerTy();

  // Track the constant or pointer with constant offset we've seen so far.
  Constant *FirstC = nullptr;
  std::pair<Value *, APInt> FirstBaseAndOffset = {nullptr, ZeroOffset};
  Value *FirstV = nullptr;

  for (unsigned i = 0, e = I.getNumIncomingValues(); i != e; ++i) {
    BasicBlock *Pred = I.getIncomingBlock(i);
    // If the incoming block is dead, skip the incoming block.
    if (DeadBlocks.count(Pred))
      continue;
    // If the parent block of phi is not the known successor of the incoming
    // block, skip the incoming block.
    BasicBlock *KnownSuccessor = KnownSuccessors[Pred];
    if (KnownSuccessor && KnownSuccessor != I.getParent())
      continue;

    Value *V = I.getIncomingValue(i);
    // If the incoming value is this phi itself, skip the incoming value.
    if (&I == V)
      continue;

    Constant *C = dyn_cast<Constant>(V);
    if (!C)
      C = SimplifiedValues.lookup(V);

    std::pair<Value *, APInt> BaseAndOffset = {nullptr, ZeroOffset};
    if (!C && CheckSROA)
      BaseAndOffset = ConstantOffsetPtrs.lookup(V);

    if (!C && !BaseAndOffset.first)
      // The incoming value is neither a constant nor a pointer with constant
      // offset, exit early.
      return true;

    if (FirstC) {
      if (FirstC == C)
        // If we've seen a constant incoming value before and it is the same
        // constant we see this time, continue checking the next incoming value.
        continue;
      // Otherwise early exit because we either see a different constant or saw
      // a constant before but we have a pointer with constant offset this time.
      return true;
    }

    if (FirstV) {
      // The same logic as above, but check pointer with constant offset here.
      if (FirstBaseAndOffset == BaseAndOffset)
        continue;
      return true;
    }

    if (C) {
      // This is the 1st time we've seen a constant, record it.
      FirstC = C;
      continue;
    }

    // The remaining case is that this is the 1st time we've seen a pointer with
    // constant offset, record it.
    FirstV = V;
    FirstBaseAndOffset = BaseAndOffset;
  }

  // Check if we can map phi to a constant.
  if (FirstC) {
    SimplifiedValues[&I] = FirstC;
    return true;
  }

  // Check if we can map phi to a pointer with constant offset.
  if (FirstBaseAndOffset.first) {
    ConstantOffsetPtrs[&I] = FirstBaseAndOffset;

    Value *SROAArg;
    DenseMap<Value *, int>::iterator CostIt;
    if (lookupSROAArgAndCost(FirstV, SROAArg, CostIt))
      SROAArgValues[&I] = SROAArg;
  }

  return true;
}

/// Check we can fold GEPs of constant-offset call site argument pointers.
/// This requires target data and inbounds GEPs.
///
/// \return true if the specified GEP can be folded.
bool CallAnalyzer::canFoldInboundsGEP(GetElementPtrInst &I) {
  // Check if we have a base + offset for the pointer.
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getPointerOperand());
  if (!BaseAndOffset.first)
    return false;

  // Check if the offset of this GEP is constant, and if so accumulate it
  // into Offset.
  if (!accumulateGEPOffset(cast<GEPOperator>(I), BaseAndOffset.second))
    return false;

  // Add the result as a new mapping to Base + Offset.
  ConstantOffsetPtrs[&I] = BaseAndOffset;

  return true;
}

bool CallAnalyzer::visitGetElementPtr(GetElementPtrInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  bool SROACandidate =
      lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt);

  // Lambda to check whether a GEP's indices are all constant.
  auto IsGEPOffsetConstant = [&](GetElementPtrInst &GEP) {
    for (User::op_iterator I = GEP.idx_begin(), E = GEP.idx_end(); I != E; ++I)
      if (!isa<Constant>(*I) && !SimplifiedValues.lookup(*I))
        return false;
    return true;
  };

  if ((I.isInBounds() && canFoldInboundsGEP(I)) || IsGEPOffsetConstant(I)) {
    if (SROACandidate)
      SROAArgValues[&I] = SROAArg;

    // Constant GEPs are modeled as free.
    return true;
  }

  // Variable GEPs will require math and will disable SROA.
  if (SROACandidate)
    disableSROA(CostIt);
  return isGEPFree(I);
}

/// Simplify \p I if its operands are constants and update SimplifiedValues.
/// \p Evaluate is a callable specific to instruction type that evaluates the
/// instruction when all the operands are constants.
template <typename Callable>
bool CallAnalyzer::simplifyInstruction(Instruction &I, Callable Evaluate) {
  SmallVector<Constant *, 2> COps;
  for (Value *Op : I.operands()) {
    Constant *COp = dyn_cast<Constant>(Op);
    if (!COp)
      COp = SimplifiedValues.lookup(Op);
    if (!COp)
      return false;
    COps.push_back(COp);
  }
  auto *C = Evaluate(COps);
  if (!C)
    return false;
  SimplifiedValues[&I] = C;
  return true;
}

bool CallAnalyzer::visitBitCast(BitCastInst &I) {
  // Propagate constants through bitcasts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getBitCast(COps[0], I.getType());
      }))
    return true;

  // Track base/offsets through casts
  std::pair<Value *, APInt> BaseAndOffset =
      ConstantOffsetPtrs.lookup(I.getOperand(0));
  // Casts don't change the offset, just wrap it up.
  if (BaseAndOffset.first)
    ConstantOffsetPtrs[&I] = BaseAndOffset;

  // Also look for SROA candidates here.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  // Bitcasts are always zero cost.
  return true;
}

bool CallAnalyzer::visitPtrToInt(PtrToIntInst &I) {
  // Propagate constants through ptrtoint.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getPtrToInt(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when converted to a plain integer provided the
  // integer is large enough to represent the pointer.
  unsigned IntegerSize = I.getType()->getScalarSizeInBits();
  unsigned AS = I.getOperand(0)->getType()->getPointerAddressSpace();
  if (IntegerSize >= DL.getPointerSizeInBits(AS)) {
    std::pair<Value *, APInt> BaseAndOffset =
        ConstantOffsetPtrs.lookup(I.getOperand(0));
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // This is really weird. Technically, ptrtoint will disable SROA. However,
  // unless that ptrtoint is *used* somewhere in the live basic blocks after
  // inlining, it will be nuked, and SROA should proceed. All of the uses which
  // would block SROA would also block SROA if applied directly to a pointer,
  // and so we can just add the integer in here. The only places where SROA is
  // preserved either cannot fire on an integer, or won't in-and-of themselves
  // disable SROA (ext) w/o some later use that we would see and disable.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitIntToPtr(IntToPtrInst &I) {
  // Propagate constants through inttoptr.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getIntToPtr(COps[0], I.getType());
      }))
    return true;

  // Track base/offset pairs when round-tripped through a pointer without
  // modifications provided the integer is not too large.
  Value *Op = I.getOperand(0);
  unsigned IntegerSize = Op->getType()->getScalarSizeInBits();
  if (IntegerSize <= DL.getPointerTypeSizeInBits(I.getType())) {
    std::pair<Value *, APInt> BaseAndOffset = ConstantOffsetPtrs.lookup(Op);
    if (BaseAndOffset.first)
      ConstantOffsetPtrs[&I] = BaseAndOffset;
  }

  // "Propagate" SROA here in the same manner as we do for ptrtoint above.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(Op, SROAArg, CostIt))
    SROAArgValues[&I] = SROAArg;

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitCastInst(CastInst &I) {
  // Propagate constants through casts.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCast(I.getOpcode(), COps[0], I.getType());
      }))
    return true;

  // Disable SROA in the face of arbitrary casts we don't whitelist elsewhere.
  disableSROA(I.getOperand(0));

  // If this is a floating-point cast, and the target says this operation
  // is expensive, this may eventually become a library call. Treat the cost
  // as such.
  switch (I.getOpcode()) {
  case Instruction::FPTrunc:
  case Instruction::FPExt:
  case Instruction::UIToFP:
  case Instruction::SIToFP:
  case Instruction::FPToUI:
  case Instruction::FPToSI:
    if (TTI.getFPOpCost(I.getType()) == TargetTransformInfo::TCC_Expensive)
      Cost += InlineConstants::CallPenalty;
    break;
  default:
    break;
  }

  return TargetTransformInfo::TCC_Free == TTI.getUserCost(&I);
}

bool CallAnalyzer::visitUnaryInstruction(UnaryInstruction &I) {
  Value *Operand = I.getOperand(0);
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantFoldInstOperands(&I, COps[0], DL);
      }))
    return true;

  // Disable any SROA on the argument to arbitrary unary operators.
  disableSROA(Operand);

  return false;
}

bool CallAnalyzer::paramHasAttr(Argument *A, Attribute::AttrKind Attr) {
  return CandidateCS.paramHasAttr(A->getArgNo(), Attr);
}

bool CallAnalyzer::isKnownNonNullInCallee(Value *V) {
  // Does the *call site* have the NonNull attribute set on an argument? We
  // use the attribute on the call site to memoize any analysis done in the
  // caller. This will also trip if the callee function has a non-null
  // parameter attribute, but that's a less interesting case because hopefully
  // the callee would already have been simplified based on that.
  if (Argument *A = dyn_cast<Argument>(V))
    if (paramHasAttr(A, Attribute::NonNull))
      return true;

  // Is this an alloca in the caller? This is distinct from the attribute case
  // above because attributes aren't updated within the inliner itself and we
  // always want to catch the alloca derived case.
  if (isAllocaDerivedArg(V))
    // We can actually predict the result of comparisons between an
    // alloca-derived value and null. Note that this fires regardless of
    // SROA firing.
    return true;

  return false;
}

bool CallAnalyzer::allowSizeGrowth(CallSite CS) {
  // If the normal destination of the invoke or the parent block of the call
  // site is unreachable-terminated, there is little point in inlining this
  // unless there is literally zero cost.
  // FIXME: Note that it is possible that an unreachable-terminated block has a
  // hot entry. For example, in below scenario inlining hot_call_X() may be
  // beneficial :
  // main() {
  //   hot_call_1();
  //   ...
  //   hot_call_N()
  //   exit(0);
  // }
  // For now, we are not handling this corner case here as it is rare in real
  // code. In future, we should elaborate this based on BPI and BFI in more
  // general threshold adjusting heuristics in updateThreshold().
  Instruction *Instr = CS.getInstruction();
  if (InvokeInst *II = dyn_cast<InvokeInst>(Instr)) {
    if (isa<UnreachableInst>(II->getNormalDest()->getTerminator()))
      return false;
  } else if (isa<UnreachableInst>(Instr->getParent()->getTerminator()))
    return false;

  return true;
}

bool CallAnalyzer::isColdCallSite(CallSite CS, BlockFrequencyInfo *CallerBFI) {
  // If global profile summary is available, then callsite's coldness is
  // determined based on that.
  if (PSI && PSI->hasProfileSummary())
    return PSI->isColdCallSite(CS, CallerBFI);

  // Otherwise we need BFI to be available.
  if (!CallerBFI)
    return false;

  // Determine if the callsite is cold relative to caller's entry. We could
  // potentially cache the computation of scaled entry frequency, but the added
  // complexity is not worth it unless this scaling shows up high in the
  // profiles.
  const BranchProbability ColdProb(ColdCallSiteRelFreq, 100);
  auto CallSiteBB = CS.getInstruction()->getParent();
  auto CallSiteFreq = CallerBFI->getBlockFreq(CallSiteBB);
  auto CallerEntryFreq =
      CallerBFI->getBlockFreq(&(CS.getCaller()->getEntryBlock()));
  return CallSiteFreq < CallerEntryFreq * ColdProb;
}

Optional<int>
CallAnalyzer::getHotCallSiteThreshold(CallSite CS,
                                      BlockFrequencyInfo *CallerBFI) {

  // If global profile summary is available, then callsite's hotness is
  // determined based on that.
  if (PSI && PSI->hasProfileSummary() && PSI->isHotCallSite(CS, CallerBFI))
    return Params.HotCallSiteThreshold;

  // Otherwise we need BFI to be available and to have a locally hot callsite
  // threshold.
  if (!CallerBFI || !Params.LocallyHotCallSiteThreshold)
    return None;

  // Determine if the callsite is hot relative to caller's entry. We could
  // potentially cache the computation of scaled entry frequency, but the added
  // complexity is not worth it unless this scaling shows up high in the
  // profiles.
  auto CallSiteBB = CS.getInstruction()->getParent();
  auto CallSiteFreq = CallerBFI->getBlockFreq(CallSiteBB).getFrequency();
  auto CallerEntryFreq = CallerBFI->getEntryFreq();
  if (CallSiteFreq >= CallerEntryFreq * HotCallSiteRelFreq)
    return Params.LocallyHotCallSiteThreshold;

  // Otherwise treat it normally.
  return None;
}
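
// Reading the two relative-frequency heuristics above with their default knob
// values (used only when no profile summary is available): a callsite is
// treated as cold when its block frequency is below ColdCallSiteRelFreq (2)
// percent of the caller's entry frequency, and as locally hot when it is at
// least HotCallSiteRelFreq (60) times the caller's entry frequency.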

void CallAnalyzer::updateThreshold(CallSite CS, Function &Callee) {
  // If no size growth is allowed for this inlining, set Threshold to 0.
  if (!allowSizeGrowth(CS)) {
    Threshold = 0;
    return;
  }

  Function *Caller = CS.getCaller();

  // return min(A, B) if B is valid.
  auto MinIfValid = [](int A, Optional<int> B) {
    return B ? std::min(A, B.getValue()) : A;
  };

  // return max(A, B) if B is valid.
  auto MaxIfValid = [](int A, Optional<int> B) {
    return B ? std::max(A, B.getValue()) : A;
  };

  // Various bonus percentages. These are multiplied by Threshold to get the
  // bonus values.
  // SingleBBBonus: This bonus is applied if the callee has a single reachable
  // basic block at the given callsite context. This is speculatively applied
  // and withdrawn if more than one basic block is seen.
  //
  // Vector bonuses: We want to more aggressively inline vector-dense kernels
  // and apply this bonus based on the percentage of vector instructions. A
  // bonus is applied if the vector instructions exceed 50% and half that amount
  // is applied if it exceeds 10%. Note that these bonuses are somewhat
  // arbitrary and evolved over time by accident as much as because they are
  // principled bonuses.
  // FIXME: It would be nice to base the bonus values on something more
  // scientific.
  //
  // LastCallToStaticBonus: This large bonus is applied to ensure the inlining
  // of the last call to a static function as inlining such functions is
  // guaranteed to reduce code size.
  //
  // These bonus percentages may be set to 0 based on properties of the caller
  // and the callsite.
  int SingleBBBonusPercent = 50;
  int VectorBonusPercent = 150;
  int LastCallToStaticBonus = InlineConstants::LastCallToStaticBonus;

  // Lambda to set all the above bonus and bonus percentages to 0.
  auto DisallowAllBonuses = [&]() {
    SingleBBBonusPercent = 0;
    VectorBonusPercent = 0;
    LastCallToStaticBonus = 0;
  };

  // Use the OptMinSizeThreshold or OptSizeThreshold knob if they are available
  // and reduce the threshold if the caller has the necessary attribute.
  if (Caller->optForMinSize()) {
    Threshold = MinIfValid(Threshold, Params.OptMinSizeThreshold);
    // For minsize, we want to disable the single BB bonus and the vector
    // bonuses, but not the last-call-to-static bonus. Inlining the last call to
    // a static function will, at the minimum, eliminate the parameter setup and
    // call/return instructions.
    SingleBBBonusPercent = 0;
    VectorBonusPercent = 0;
  } else if (Caller->optForSize())
    Threshold = MinIfValid(Threshold, Params.OptSizeThreshold);

  // Adjust the threshold based on inlinehint attribute and profile based
  // hotness information if the caller does not have MinSize attribute.
  if (!Caller->optForMinSize()) {
    if (Callee.hasFnAttribute(Attribute::InlineHint))
      Threshold = MaxIfValid(Threshold, Params.HintThreshold);

    // FIXME: After switching to the new passmanager, simplify the logic below
    // by checking only the callsite hotness/coldness as we will reliably
    // have local profile information.
    //
    // Callsite hotness and coldness can be determined if sample profile is
    // used (which adds hotness metadata to calls) or if caller's
    // BlockFrequencyInfo is available.
    BlockFrequencyInfo *CallerBFI = GetBFI ? &((*GetBFI)(*Caller)) : nullptr;
    auto HotCallSiteThreshold = getHotCallSiteThreshold(CS, CallerBFI);
    if (!Caller->optForSize() && HotCallSiteThreshold) {
      LLVM_DEBUG(dbgs() << "Hot callsite.\n");
      // FIXME: This should update the threshold only if it exceeds the
      // current threshold, but AutoFDO + ThinLTO currently relies on this
      // behavior to prevent inlining of hot callsites during ThinLTO
      // compile phase.
      Threshold = HotCallSiteThreshold.getValue();
    } else if (isColdCallSite(CS, CallerBFI)) {
      LLVM_DEBUG(dbgs() << "Cold callsite.\n");
      // Do not apply bonuses for a cold callsite including the
      // LastCallToStatic bonus. While this bonus might result in code size
      // reduction, it can cause the size of a non-cold caller to increase
      // preventing it from being inlined.
      DisallowAllBonuses();
      Threshold = MinIfValid(Threshold, Params.ColdCallSiteThreshold);
    } else if (PSI) {
      // Use callee's global profile information only if we have no way of
      // determining this via callsite information.
      if (PSI->isFunctionEntryHot(&Callee)) {
        LLVM_DEBUG(dbgs() << "Hot callee.\n");
        // If callsite hotness can not be determined, we may still know
        // that the callee is hot and treat it as a weaker hint for threshold
        // increase.
        Threshold = MaxIfValid(Threshold, Params.HintThreshold);
      } else if (PSI->isFunctionEntryCold(&Callee)) {
        LLVM_DEBUG(dbgs() << "Cold callee.\n");
        // Do not apply bonuses for a cold callee including the
        // LastCallToStatic bonus. While this bonus might result in code size
        // reduction, it can cause the size of a non-cold caller to increase
        // preventing it from being inlined.
        DisallowAllBonuses();
        Threshold = MinIfValid(Threshold, Params.ColdThreshold);
      }
    }
  }

  // Finally, take the target-specific inlining threshold multiplier into
  // account.
  Threshold *= TTI.getInliningThresholdMultiplier();

  SingleBBBonus = Threshold * SingleBBBonusPercent / 100;
  VectorBonus = Threshold * VectorBonusPercent / 100;
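  // Worked example (assuming the default threshold of 225 and no adjustments
  // above): SingleBBBonus = 225 * 50 / 100 = 112 and
  // VectorBonus = 225 * 150 / 100 = 337. Both are applied speculatively in
  // analyzeCall and withdrawn, in whole or in part, if the callee turns out to
  // have multiple reachable blocks or too few vector instructions.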

  bool OnlyOneCallAndLocalLinkage =
      F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
  // If there is only one call of the function, and it has internal linkage,
  // the cost of inlining it drops dramatically. It may seem odd to update
  // Cost in updateThreshold, but the bonus depends on the logic in this method.
  if (OnlyOneCallAndLocalLinkage)
    Cost -= LastCallToStaticBonus;
}

bool CallAnalyzer::visitCmpInst(CmpInst &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  // First try to handle simplified comparisons.
  if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
        return ConstantExpr::getCompare(I.getPredicate(), COps[0], COps[1]);
      }))
    return true;

  if (I.getOpcode() == Instruction::FCmp)
    return false;

  // Otherwise look for a comparison between constant offset pointers with
  // a common base.
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the icmp to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getICmp(I.getPredicate(), CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrCmps;
        return true;
      }
    }
  }

  // If the comparison is an equality comparison with null, we can simplify it
  // if we know the value (argument) can't be null
  if (I.isEquality() && isa<ConstantPointerNull>(I.getOperand(1)) &&
      isKnownNonNullInCallee(I.getOperand(0))) {
    bool IsNotEqual = I.getPredicate() == CmpInst::ICMP_NE;
    SimplifiedValues[&I] = IsNotEqual ? ConstantInt::getTrue(I.getType())
                                      : ConstantInt::getFalse(I.getType());
    return true;
  }
  // Finally check for SROA candidates in comparisons.
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getOperand(0), SROAArg, CostIt)) {
    if (isa<ConstantPointerNull>(I.getOperand(1))) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  return false;
}

bool CallAnalyzer::visitSub(BinaryOperator &I) {
  // Try to handle a special case: we can fold computing the difference of two
  // constant-related pointers.
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Value *LHSBase, *RHSBase;
  APInt LHSOffset, RHSOffset;
  std::tie(LHSBase, LHSOffset) = ConstantOffsetPtrs.lookup(LHS);
  if (LHSBase) {
    std::tie(RHSBase, RHSOffset) = ConstantOffsetPtrs.lookup(RHS);
    if (RHSBase && LHSBase == RHSBase) {
      // We have common bases, fold the subtract to a constant based on the
      // offsets.
      Constant *CLHS = ConstantInt::get(LHS->getContext(), LHSOffset);
      Constant *CRHS = ConstantInt::get(RHS->getContext(), RHSOffset);
      if (Constant *C = ConstantExpr::getSub(CLHS, CRHS)) {
        SimplifiedValues[&I] = C;
        ++NumConstantPtrDiffs;
        return true;
      }
    }
  }

  // Otherwise, fall back to the generic logic for simplifying and handling
  // instructions.
  return Base::visitSub(I);
}

bool CallAnalyzer::visitBinaryOperator(BinaryOperator &I) {
  Value *LHS = I.getOperand(0), *RHS = I.getOperand(1);
  Constant *CLHS = dyn_cast<Constant>(LHS);
  if (!CLHS)
    CLHS = SimplifiedValues.lookup(LHS);
  Constant *CRHS = dyn_cast<Constant>(RHS);
  if (!CRHS)
    CRHS = SimplifiedValues.lookup(RHS);

  Value *SimpleV = nullptr;
  if (auto FI = dyn_cast<FPMathOperator>(&I))
    SimpleV = SimplifyFPBinOp(I.getOpcode(), CLHS ? CLHS : LHS,
                              CRHS ? CRHS : RHS, FI->getFastMathFlags(), DL);
  else
    SimpleV =
        SimplifyBinOp(I.getOpcode(), CLHS ? CLHS : LHS, CRHS ? CRHS : RHS, DL);

  if (Constant *C = dyn_cast_or_null<Constant>(SimpleV))
    SimplifiedValues[&I] = C;

  if (SimpleV)
    return true;

  // Disable any SROA on arguments to arbitrary, unsimplified binary operators.
  disableSROA(LHS);
  disableSROA(RHS);

  // If the instruction is floating point, and the target says this operation
  // is expensive, this may eventually become a library call. Treat the cost
  // as such.
  if (I.getType()->isFloatingPointTy() &&
      TTI.getFPOpCost(I.getType()) == TargetTransformInfo::TCC_Expensive)
    Cost += InlineConstants::CallPenalty;

  return false;
}

bool CallAnalyzer::visitLoad(LoadInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  // If the data is already loaded from this address and hasn't been clobbered
  // by any stores or calls, this load is likely to be redundant and can be
  // eliminated.
  if (EnableLoadElimination &&
      !LoadAddrSet.insert(I.getPointerOperand()).second && I.isUnordered()) {
    LoadEliminationCost += InlineConstants::InstrCost;
    return true;
  }

  return false;
}

bool CallAnalyzer::visitStore(StoreInst &I) {
  Value *SROAArg;
  DenseMap<Value *, int>::iterator CostIt;
  if (lookupSROAArgAndCost(I.getPointerOperand(), SROAArg, CostIt)) {
    if (I.isSimple()) {
      accumulateSROACost(CostIt, InlineConstants::InstrCost);
      return true;
    }

    disableSROA(CostIt);
  }

  // The store can potentially clobber loads and prevent repeated loads from
  // being eliminated.
  // FIXME:
1136 // 1. We can probably keep an initial set of eliminatable loads subtracted
1137 // from the cost even when we finally see a store. We just need to disable
1138 // *further* accumulation of elimination savings.
1139 // 2. We should probably at some point thread MemorySSA for the callee into
1140 // this and then use that to actually compute *really* precise savings.
1141 disableLoadElimination();
Chandler Carruth0539c072012-03-31 12:42:41 +00001142 return false;
1143}
1144
Chandler Carruth753e21d2012-12-28 14:23:32 +00001145bool CallAnalyzer::visitExtractValue(ExtractValueInst &I) {
1146 // Constant folding for extract value is trivial.
Easwaran Raman617f6362017-02-18 17:22:52 +00001147 if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
1148 return ConstantExpr::getExtractValue(COps[0], I.getIndices());
1149 }))
Chandler Carruth753e21d2012-12-28 14:23:32 +00001150 return true;
Chandler Carruth753e21d2012-12-28 14:23:32 +00001151
1152 // SROA can look through these but give them a cost.
1153 return false;
1154}
1155
1156bool CallAnalyzer::visitInsertValue(InsertValueInst &I) {
1157 // Constant folding for insert value is trivial.
Easwaran Raman617f6362017-02-18 17:22:52 +00001158 if (simplifyInstruction(I, [&](SmallVectorImpl<Constant *> &COps) {
1159 return ConstantExpr::getInsertValue(/*AggregateOperand*/ COps[0],
1160 /*InsertedValueOperand*/ COps[1],
1161 I.getIndices());
1162 }))
Chandler Carruth753e21d2012-12-28 14:23:32 +00001163 return true;
Chandler Carruth753e21d2012-12-28 14:23:32 +00001164
1165 // SROA can look through these but give them a cost.
1166 return false;
1167}
1168
Adrian Prantl5f8f34e42018-05-01 15:54:18 +00001169/// Try to simplify a call site.
Chandler Carruth753e21d2012-12-28 14:23:32 +00001170///
1171/// Takes a concrete function and callsite and tries to actually simplify it by
1172/// analyzing the arguments and call itself with instsimplify. Returns true if
1173/// it has simplified the callsite to some other entity (a constant), making it
1174/// free.
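/// For example (assuming a constant-foldable callee such as a pure math
/// intrinsic), a call whose arguments have all simplified to constants, e.g.
/// @llvm.sqrt.f64 of a constant, folds to its result and is charged no cost.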
1175bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
1176 // FIXME: Using the instsimplify logic directly for this is inefficient
1177 // because we have to continually rebuild the argument list even when no
1178 // simplifications can be performed. Until that is fixed with remapping
1179 // inside of instsimplify, directly constant fold calls here.
Chandler Carruth751d95f2019-02-11 07:51:44 +00001180 if (!canConstantFoldCallTo(cast<CallBase>(CS.getInstruction()), F))
Chandler Carruth753e21d2012-12-28 14:23:32 +00001181 return false;
1182
1183 // Try to re-map the arguments to constants.
1184 SmallVector<Constant *, 4> ConstantArgs;
1185 ConstantArgs.reserve(CS.arg_size());
Chad Rosier567556a2016-04-28 14:47:23 +00001186 for (CallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); I != E;
1187 ++I) {
Chandler Carruth753e21d2012-12-28 14:23:32 +00001188 Constant *C = dyn_cast<Constant>(*I);
1189 if (!C)
1190 C = dyn_cast_or_null<Constant>(SimplifiedValues.lookup(*I));
1191 if (!C)
1192 return false; // This argument doesn't map to a constant.
1193
1194 ConstantArgs.push_back(C);
1195 }
Chandler Carruth751d95f2019-02-11 07:51:44 +00001196 if (Constant *C = ConstantFoldCall(cast<CallBase>(CS.getInstruction()), F,
1197 ConstantArgs)) {
Chandler Carruth753e21d2012-12-28 14:23:32 +00001198 SimplifiedValues[CS.getInstruction()] = C;
1199 return true;
1200 }
1201
1202 return false;
1203}
1204
Chandler Carruth0539c072012-03-31 12:42:41 +00001205bool CallAnalyzer::visitCallSite(CallSite CS) {
Chandler Carruth37d25de2013-12-13 08:00:01 +00001206 if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00001207 !F.hasFnAttribute(Attribute::ReturnsTwice)) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001208 // This aborts the entire analysis.
1209 ExposesReturnsTwice = true;
1210 return false;
1211 }
Chad Rosier567556a2016-04-28 14:47:23 +00001212 if (CS.isCall() && cast<CallInst>(CS.getInstruction())->cannotDuplicate())
James Molloy4f6fb952012-12-20 16:04:27 +00001213 ContainsNoDuplicateCall = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001214
Chandler Carruth0539c072012-03-31 12:42:41 +00001215 if (Function *F = CS.getCalledFunction()) {
Chandler Carruth753e21d2012-12-28 14:23:32 +00001216 // When we have a concrete function, first try to simplify it directly.
1217 if (simplifyCallSite(F, CS))
1218 return true;
1219
1220 // Next check if it is an intrinsic we know about.
1221 // FIXME: Lift this into part of the InstVisitor.
1222 if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
1223 switch (II->getIntrinsicID()) {
1224 default:
Haicheng Wua4461512017-12-15 14:34:41 +00001225 if (!CS.onlyReadsMemory() && !isAssumeLikeIntrinsic(II))
1226 disableLoadElimination();
Chandler Carruth753e21d2012-12-28 14:23:32 +00001227 return Base::visitCallSite(CS);
1228
Peter Collingbourne7dd8dbf2016-04-22 21:18:02 +00001229 case Intrinsic::load_relative:
1230 // This is normally lowered to 4 LLVM instructions.
1231 Cost += 3 * InlineConstants::InstrCost;
1232 return false;
1233
Chandler Carruth753e21d2012-12-28 14:23:32 +00001234 case Intrinsic::memset:
1235 case Intrinsic::memcpy:
1236 case Intrinsic::memmove:
Haicheng Wua4461512017-12-15 14:34:41 +00001237 disableLoadElimination();
Chandler Carruth753e21d2012-12-28 14:23:32 +00001238 // SROA can usually chew through these intrinsics, but they aren't free.
1239 return false;
Vitaly Buka4296ea72018-04-04 21:46:27 +00001240 case Intrinsic::icall_branch_funnel:
Reid Kleckner60381792015-07-07 22:25:32 +00001241 case Intrinsic::localescape:
Vitaly Buka4296ea72018-04-04 21:46:27 +00001242 HasUninlineableIntrinsic = true;
Reid Kleckner223de262015-04-14 20:38:14 +00001243 return false;
Florian Hahn80788d82018-01-06 19:45:40 +00001244 case Intrinsic::vastart:
Sameer AbuAsal77beee42018-09-20 18:39:34 +00001245 InitsVargArgs = true;
Florian Hahn80788d82018-01-06 19:45:40 +00001246 return false;
Chandler Carruth753e21d2012-12-28 14:23:32 +00001247 }
1248 }
1249
Davide Italiano9d939c82017-11-30 22:10:35 +00001250 if (F == CS.getInstruction()->getFunction()) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001251 // This flag will fully abort the analysis, so don't bother with anything
1252 // else.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001253 IsRecursiveCall = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001254 return false;
1255 }
1256
Chandler Carruth0ba8db42013-01-22 11:26:02 +00001257 if (TTI.isLoweredToCall(F)) {
Chandler Carruth0539c072012-03-31 12:42:41 +00001258 // We account for the average 1 instruction per call argument setup
1259 // here.
1260 Cost += CS.arg_size() * InlineConstants::InstrCost;
1261
1262 // Everything other than inline ASM will also have a significant cost
1263 // merely from making the call.
1264 if (!isa<InlineAsm>(CS.getCalledValue()))
1265 Cost += InlineConstants::CallPenalty;
1266 }
1267
Haicheng Wua4461512017-12-15 14:34:41 +00001268 if (!CS.onlyReadsMemory())
1269 disableLoadElimination();
Chandler Carruth0539c072012-03-31 12:42:41 +00001270 return Base::visitCallSite(CS);
1271 }
1272
1273 // Otherwise we're in a very special case -- an indirect function call. See
1274 // if we can be particularly clever about this.
1275 Value *Callee = CS.getCalledValue();
1276
1277 // First, pay the price of the argument setup. We account for the average
1278 // 1 instruction per call argument setup here.
1279 Cost += CS.arg_size() * InlineConstants::InstrCost;
1280
1281 // Next, check if this happens to be an indirect function call to a known
1282 // function in this inline context. If not, we've done all we can.
1283 Function *F = dyn_cast_or_null<Function>(SimplifiedValues.lookup(Callee));
Haicheng Wua4461512017-12-15 14:34:41 +00001284 if (!F) {
1285 if (!CS.onlyReadsMemory())
1286 disableLoadElimination();
Chandler Carruth0539c072012-03-31 12:42:41 +00001287 return Base::visitCallSite(CS);
Haicheng Wua4461512017-12-15 14:34:41 +00001288 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001289
1290 // If we have a constant that we are calling as a function, we can peer
1291 // through it and see the function target. This happens not infrequently
1292 // during devirtualization and so we want to give it a hefty bonus for
1293 // inlining, but cap that bonus in the event that inlining wouldn't pan
1294 // out. Pretend to inline the function, with a custom threshold.
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001295 auto IndirectCallParams = Params;
1296 IndirectCallParams.DefaultThreshold = InlineConstants::IndirectCallThreshold;
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001297 CallAnalyzer CA(TTI, GetAssumptionCache, GetBFI, PSI, ORE, *F, CS,
Easwaran Raman12585b02017-01-20 22:44:04 +00001298 IndirectCallParams);
Chandler Carruth0539c072012-03-31 12:42:41 +00001299 if (CA.analyzeCall(CS)) {
1300 // We were able to inline the indirect call! Subtract the cost from the
Easwaran Raman6d90d9f2015-12-07 21:21:20 +00001301 // threshold to get the bonus we want to apply, but don't go below zero.
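    // For example, if the nested analysis computes a cost of 40 against a
    // threshold of 100, this call site receives a 60-point discount.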
1302 Cost -= std::max(0, CA.getThreshold() - CA.getCost());
Chandler Carruth0539c072012-03-31 12:42:41 +00001303 }
1304
Haicheng Wua4461512017-12-15 14:34:41 +00001305 if (!F->onlyReadsMemory())
1306 disableLoadElimination();
Chandler Carruth0539c072012-03-31 12:42:41 +00001307 return Base::visitCallSite(CS);
1308}
1309
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001310bool CallAnalyzer::visitReturnInst(ReturnInst &RI) {
1311 // At least one return instruction will be free after inlining.
1312 bool Free = !HasReturn;
1313 HasReturn = true;
1314 return Free;
1315}
1316
1317bool CallAnalyzer::visitBranchInst(BranchInst &BI) {
1318 // We model unconditional branches as essentially free -- they really
1319 // shouldn't exist at all, but handling them makes the behavior of the
1320 // inliner more regular and predictable. Interestingly, conditional branches
1321 // which will fold away are also free.
1322 return BI.isUnconditional() || isa<ConstantInt>(BI.getCondition()) ||
1323 dyn_cast_or_null<ConstantInt>(
1324 SimplifiedValues.lookup(BI.getCondition()));
1325}
1326
Haicheng Wu3ec848b2017-09-27 14:44:56 +00001327bool CallAnalyzer::visitSelectInst(SelectInst &SI) {
1328 bool CheckSROA = SI.getType()->isPointerTy();
1329 Value *TrueVal = SI.getTrueValue();
1330 Value *FalseVal = SI.getFalseValue();
1331
1332 Constant *TrueC = dyn_cast<Constant>(TrueVal);
1333 if (!TrueC)
1334 TrueC = SimplifiedValues.lookup(TrueVal);
1335 Constant *FalseC = dyn_cast<Constant>(FalseVal);
1336 if (!FalseC)
1337 FalseC = SimplifiedValues.lookup(FalseVal);
1338 Constant *CondC =
1339 dyn_cast_or_null<Constant>(SimplifiedValues.lookup(SI.getCondition()));
1340
1341 if (!CondC) {
1342 // Select C, X, X => X
1343 if (TrueC == FalseC && TrueC) {
1344 SimplifiedValues[&SI] = TrueC;
1345 return true;
1346 }
1347
1348 if (!CheckSROA)
1349 return Base::visitSelectInst(SI);
1350
1351 std::pair<Value *, APInt> TrueBaseAndOffset =
1352 ConstantOffsetPtrs.lookup(TrueVal);
1353 std::pair<Value *, APInt> FalseBaseAndOffset =
1354 ConstantOffsetPtrs.lookup(FalseVal);
1355 if (TrueBaseAndOffset == FalseBaseAndOffset && TrueBaseAndOffset.first) {
1356 ConstantOffsetPtrs[&SI] = TrueBaseAndOffset;
1357
1358 Value *SROAArg;
1359 DenseMap<Value *, int>::iterator CostIt;
1360 if (lookupSROAArgAndCost(TrueVal, SROAArg, CostIt))
1361 SROAArgValues[&SI] = SROAArg;
1362 return true;
1363 }
1364
1365 return Base::visitSelectInst(SI);
1366 }
1367
1368 // Select condition is a constant.
1369 Value *SelectedV = CondC->isAllOnesValue()
1370 ? TrueVal
1371 : (CondC->isNullValue()) ? FalseVal : nullptr;
1372 if (!SelectedV) {
1373 // Condition is a vector constant that is not all 1s or all 0s. If all
1374 // operands are constants, ConstantExpr::getSelect() can handle cases such
1375 // as vector selects.
1376 if (TrueC && FalseC) {
1377 if (auto *C = ConstantExpr::getSelect(CondC, TrueC, FalseC)) {
1378 SimplifiedValues[&SI] = C;
1379 return true;
1380 }
1381 }
1382 return Base::visitSelectInst(SI);
1383 }
1384
1385 // Condition is either all 1s or all 0s. SI can be simplified.
1386 if (Constant *SelectedC = dyn_cast<Constant>(SelectedV)) {
1387 SimplifiedValues[&SI] = SelectedC;
1388 return true;
1389 }
1390
1391 if (!CheckSROA)
1392 return true;
1393
1394 std::pair<Value *, APInt> BaseAndOffset =
1395 ConstantOffsetPtrs.lookup(SelectedV);
1396 if (BaseAndOffset.first) {
1397 ConstantOffsetPtrs[&SI] = BaseAndOffset;
1398
1399 Value *SROAArg;
1400 DenseMap<Value *, int>::iterator CostIt;
1401 if (lookupSROAArgAndCost(SelectedV, SROAArg, CostIt))
1402 SROAArgValues[&SI] = SROAArg;
1403 }
1404
1405 return true;
1406}
1407
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001408bool CallAnalyzer::visitSwitchInst(SwitchInst &SI) {
1409 // We model unconditional switches as free, see the comments on handling
1410 // branches.
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001411 if (isa<ConstantInt>(SI.getCondition()))
1412 return true;
1413 if (Value *V = SimplifiedValues.lookup(SI.getCondition()))
1414 if (isa<ConstantInt>(V))
1415 return true;
1416
Eric Christopher7ad02ee2017-06-28 21:10:31 +00001417 // Assume the most general case where the switch is lowered into
Jun Bum Lim2960d412017-06-02 20:42:54 +00001418 // either a jump table, bit test, or a balanced binary tree consisting of
1419 // case clusters without merging adjacent clusters with the same
1420 // destination. We do not consider the switches that are lowered with a mix
1421 // of jump table/bit test/binary search tree. The cost of the switch is
1422 // proportional to the size of the tree or the size of jump table range.
1423 //
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001424 // NB: We convert large switches which are just used to initialize large phi
1425 // nodes to lookup tables instead in simplify-cfg, so this shouldn't prevent
1426 // inlining those. It will prevent inlining in cases where the optimization
1427 // does not (yet) fire.
Jun Bum Lim2960d412017-06-02 20:42:54 +00001428
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001429 // Maximum valid cost increased in this function.
1430 int CostUpperBound = INT_MAX - InlineConstants::InstrCost - 1;
1431
Jun Bum Lim2960d412017-06-02 20:42:54 +00001432 // Exit early for a large switch, assuming one case needs at least one
1433 // instruction.
1434 // FIXME: This is not true for a bit test, but ignore such cases for now to
1435 // save compile-time.
1436 int64_t CostLowerBound =
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001437 std::min((int64_t)CostUpperBound,
Jun Bum Lim2960d412017-06-02 20:42:54 +00001438 (int64_t)SI.getNumCases() * InlineConstants::InstrCost + Cost);
1439
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001440 if (CostLowerBound > Threshold && !ComputeFullInlineCost) {
Jun Bum Lim2960d412017-06-02 20:42:54 +00001441 Cost = CostLowerBound;
1442 return false;
1443 }
1444
1445 unsigned JumpTableSize = 0;
1446 unsigned NumCaseCluster =
1447 TTI.getEstimatedNumberOfCaseClusters(SI, JumpTableSize);
1448
1449 // If suitable for a jump table, consider the cost for the table size and
1450 // branch to destination.
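  // For example, a jump table with 64 entries is modeled as (64 + 4) *
  // InstrCost added on top of the running Cost, capped at CostUpperBound.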
1451 if (JumpTableSize) {
1452 int64_t JTCost = (int64_t)JumpTableSize * InlineConstants::InstrCost +
1453 4 * InlineConstants::InstrCost;
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001454
1455 Cost = std::min((int64_t)CostUpperBound, JTCost + Cost);
Jun Bum Lim2960d412017-06-02 20:42:54 +00001456 return false;
1457 }
1458
1459 // Considering forming a binary search, we should find the number of nodes
1460 // which is the same as the number of comparisons when lowered. For a given
1461 // number of clusters, n, we can define a recursive function, f(n), to find
1462 // the number of nodes in the tree. The recursion is:
1463 // f(n) = 1 + f(n/2) + f(n - n/2), when n > 3,
1464 // and f(n) = n, when n <= 3.
1465 // This will lead to a binary tree where each leaf is either f(2) or f(3)
1466 // when n > 3. So, the number of comparisons from leaves should be n, while
1467 // the number from non-leaf nodes should be:
1468 // 2^(log2(n) - 1) - 1
1469 // = 2^log2(n) * 2^-1 - 1
1470 // = n / 2 - 1.
1471 // Considering comparisons from leaf and non-leaf nodes, we can estimate the
1472 // number of comparisons in a simple closed form:
1473 // n + n / 2 - 1 = n * 3 / 2 - 1
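  // For example, with 8 case clusters (more than 3) this estimates
  // 3 * 8 / 2 - 1 = 11 compare-and-branch pairs, i.e. 22 * InstrCost added
  // below, again capped at CostUpperBound.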
1474 if (NumCaseCluster <= 3) {
1475 // Suppose a comparison includes one compare and one conditional branch.
1476 Cost += NumCaseCluster * 2 * InlineConstants::InstrCost;
1477 return false;
1478 }
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001479
1480 int64_t ExpectedNumberOfCompare = 3 * (int64_t)NumCaseCluster / 2 - 1;
1481 int64_t SwitchCost =
Jun Bum Lim2960d412017-06-02 20:42:54 +00001482 ExpectedNumberOfCompare * 2 * InlineConstants::InstrCost;
Jun Bum Lim506cfb72017-06-23 16:12:37 +00001483
1484 Cost = std::min((int64_t)CostUpperBound, SwitchCost + Cost);
Chandler Carruthe01fd5f2014-04-28 08:52:44 +00001485 return false;
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001486}
1487
1488bool CallAnalyzer::visitIndirectBrInst(IndirectBrInst &IBI) {
1489 // We never want to inline functions that contain an indirectbr. Inlining
1490 // them would be incorrect because all the blockaddresses (in static global
1491 // initializers for example) would still refer to the original function, and
1492 // the indirect jump would jump from the inlined copy of the function into
1493 // the original function, which is undefined behavior.
1494 // FIXME: This logic isn't really right; we can safely inline functions with
1495 // indirectbr's as long as no other function or global references the
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001496 // blockaddress of a block within the current function.
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001497 HasIndirectBr = true;
1498 return false;
1499}
1500
1501bool CallAnalyzer::visitResumeInst(ResumeInst &RI) {
1502 // FIXME: It's not clear that a single instruction is an accurate model for
1503 // the inline cost of a resume instruction.
1504 return false;
1505}
1506
David Majnemer654e1302015-07-31 17:58:14 +00001507bool CallAnalyzer::visitCleanupReturnInst(CleanupReturnInst &CRI) {
1508 // FIXME: It's not clear that a single instruction is an accurate model for
1509 // the inline cost of a cleanupret instruction.
1510 return false;
1511}
1512
1513bool CallAnalyzer::visitCatchReturnInst(CatchReturnInst &CRI) {
1514 // FIXME: It's not clear that a single instruction is an accurate model for
Joseph Tremoulet8220bcc2015-08-23 00:26:33 +00001515 // the inline cost of a catchret instruction.
David Majnemer654e1302015-07-31 17:58:14 +00001516 return false;
1517}
1518
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001519bool CallAnalyzer::visitUnreachableInst(UnreachableInst &I) {
1520 // FIXME: It might be reasonable to discount the cost of instructions leading
1521 // to unreachable as they have the lowest possible impact on both runtime and
1522 // code size.
1523 return true; // No actual code is needed for unreachable.
1524}
1525
Chandler Carruth0539c072012-03-31 12:42:41 +00001526bool CallAnalyzer::visitInstruction(Instruction &I) {
Chandler Carruthda7513a2012-05-04 00:58:03 +00001527 // Some instructions are free. All of the free intrinsics can also be
1528 // handled by SROA, etc.
Chandler Carruthb8cf5102013-01-21 12:05:16 +00001529 if (TargetTransformInfo::TCC_Free == TTI.getUserCost(&I))
Chandler Carruthda7513a2012-05-04 00:58:03 +00001530 return true;
1531
Chandler Carruth0539c072012-03-31 12:42:41 +00001532 // We found something we don't understand or can't handle. Mark any SROA-able
1533 // values in the operand list as no longer viable.
1534 for (User::op_iterator OI = I.op_begin(), OE = I.op_end(); OI != OE; ++OI)
1535 disableSROA(*OI);
1536
1537 return false;
1538}
1539
Adrian Prantl5f8f34e42018-05-01 15:54:18 +00001540/// Analyze a basic block for its contribution to the inline cost.
Chandler Carruth0539c072012-03-31 12:42:41 +00001541///
1542/// This method walks the analyzer over every instruction in the given basic
1543/// block and accounts for their cost during inlining at this callsite. It
1544/// aborts early if the threshold has been exceeded or an impossible to inline
1545/// construct has been detected. It returns false if inlining is no longer
1546/// viable, and true if inlining remains viable.
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001547InlineResult
1548CallAnalyzer::analyzeBlock(BasicBlock *BB,
1549 SmallPtrSetImpl<const Value *> &EphValues) {
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001550 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
Chandler Carruth6b4cc8b2014-02-01 10:38:17 +00001551 // FIXME: Currently, the number of instructions in a function, regardless of
1552 // our ability to simplify them during inlining to constants or dead code,
1553 // is actually used by the vector bonus heuristic. As long as that's true,
1554 // we have to special case debug intrinsics here to prevent differences in
1555 // inlining due to debug symbols. Eventually, the number of unsimplified
1556 // instructions shouldn't factor into the cost computation, but until then,
1557 // hack around it here.
1558 if (isa<DbgInfoIntrinsic>(I))
1559 continue;
1560
Hal Finkel57f03dd2014-09-07 13:49:57 +00001561 // Skip ephemeral values.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001562 if (EphValues.count(&*I))
Hal Finkel57f03dd2014-09-07 13:49:57 +00001563 continue;
1564
Chandler Carruth0539c072012-03-31 12:42:41 +00001565 ++NumInstructions;
1566 if (isa<ExtractElementInst>(I) || I->getType()->isVectorTy())
1567 ++NumVectorInstructions;
1568
1569 // If the instruction simplified to a constant, there is no cost to this
1570 // instruction. Visit the instructions using our InstVisitor to account for
1571 // all of the per-instruction logic. The visit tree returns true if we
1572 // consumed the instruction in any way, and false if the instruction's base
1573 // cost should count against inlining.
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001574 if (Base::visit(&*I))
Chandler Carruth0539c072012-03-31 12:42:41 +00001575 ++NumInstructionsSimplified;
1576 else
1577 Cost += InlineConstants::InstrCost;
1578
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001579 using namespace ore;
Chandler Carruth0539c072012-03-31 12:42:41 +00001580 // If visiting this instruction detected an uninlinable pattern, abort.
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001581 InlineResult IR;
1582 if (IsRecursiveCall)
1583 IR = "recursive";
1584 else if (ExposesReturnsTwice)
1585 IR = "exposes returns twice";
1586 else if (HasDynamicAlloca)
1587 IR = "dynamic alloca";
1588 else if (HasIndirectBr)
1589 IR = "indirect branch";
1590 else if (HasUninlineableIntrinsic)
1591 IR = "uninlinable intrinsic";
Sameer AbuAsal77beee42018-09-20 18:39:34 +00001592 else if (InitsVargArgs)
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001593 IR = "varargs";
1594 if (!IR) {
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001595 if (ORE)
Vivek Pandya95906582017-10-11 17:12:59 +00001596 ORE->emit([&]() {
1597 return OptimizationRemarkMissed(DEBUG_TYPE, "NeverInline",
1598 CandidateCS.getInstruction())
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001599 << NV("Callee", &F) << " has uninlinable pattern ("
1600 << NV("InlineResult", IR.message)
1601 << ") and cost is not fully computed";
Vivek Pandya95906582017-10-11 17:12:59 +00001602 });
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001603 return IR;
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001604 }
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001605
1606 // If the caller is a recursive function then we don't want to inline
1607 // functions which allocate a lot of stack space because it would increase
1608 // the caller stack usage dramatically.
1609 if (IsCallerRecursive &&
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001610 AllocatedSize > InlineConstants::TotalAllocaSizeRecursiveCaller) {
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001611 InlineResult IR = "recursive and allocates too much stack space";
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001612 if (ORE)
Vivek Pandya95906582017-10-11 17:12:59 +00001613 ORE->emit([&]() {
1614 return OptimizationRemarkMissed(DEBUG_TYPE, "NeverInline",
1615 CandidateCS.getInstruction())
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001616 << NV("Callee", &F) << " is " << NV("InlineResult", IR.message)
1617 << ". Cost is not fully computed";
Vivek Pandya95906582017-10-11 17:12:59 +00001618 });
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001619 return IR;
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001620 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001621
Chandler Carrutha004f222015-05-27 02:49:05 +00001622 // Check if we've passed the maximum possible threshold so we don't spin in
1623 // huge basic blocks that will never inline.
Haicheng Wu61995362017-08-25 19:00:33 +00001624 if (Cost >= Threshold && !ComputeFullInlineCost)
Chandler Carruth0539c072012-03-31 12:42:41 +00001625 return false;
1626 }
1627
1628 return true;
1629}
1630
Adrian Prantl5f8f34e42018-05-01 15:54:18 +00001631/// Compute the base pointer and cumulative constant offsets for V.
Chandler Carruth0539c072012-03-31 12:42:41 +00001632///
1633/// This strips all constant offsets off of V, leaving it the base pointer, and
1634/// accumulates the total constant offset applied in the returned constant. It
1635/// returns 0 if V is not a pointer, and returns the constant '0' if there are
1636/// no constant offsets applied.
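/// For example (hypothetical IR): for V = "getelementptr inbounds i8, i8* %arg,
/// i64 12", this returns the constant 12 and rewrites V to point at %arg.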
1637ConstantInt *CallAnalyzer::stripAndComputeInBoundsConstantOffsets(Value *&V) {
Mehdi Aminia28d91d2015-03-10 02:37:25 +00001638 if (!V->getType()->isPointerTy())
Craig Topper353eda42014-04-24 06:44:33 +00001639 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001640
Bjorn Pettersson77f32992018-01-04 18:23:40 +00001641 unsigned AS = V->getType()->getPointerAddressSpace();
Elena Demikhovsky945b7e52018-02-14 06:58:08 +00001642 unsigned IntPtrWidth = DL.getIndexSizeInBits(AS);
Chandler Carruth0539c072012-03-31 12:42:41 +00001643 APInt Offset = APInt::getNullValue(IntPtrWidth);
1644
1645 // Even though we don't look through PHI nodes, we could be called on an
1646 // instruction in an unreachable block, which may be on a cycle.
1647 SmallPtrSet<Value *, 4> Visited;
1648 Visited.insert(V);
1649 do {
1650 if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
1651 if (!GEP->isInBounds() || !accumulateGEPOffset(*GEP, Offset))
Craig Topper353eda42014-04-24 06:44:33 +00001652 return nullptr;
Chandler Carruth0539c072012-03-31 12:42:41 +00001653 V = GEP->getPointerOperand();
1654 } else if (Operator::getOpcode(V) == Instruction::BitCast) {
1655 V = cast<Operator>(V)->getOperand(0);
1656 } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
Sanjoy Das5ce32722016-04-08 00:48:30 +00001657 if (GA->isInterposable())
Chandler Carruth0539c072012-03-31 12:42:41 +00001658 break;
1659 V = GA->getAliasee();
1660 } else {
1661 break;
1662 }
1663 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
David Blaikie70573dc2014-11-19 07:49:26 +00001664 } while (Visited.insert(V).second);
Chandler Carruth0539c072012-03-31 12:42:41 +00001665
Bjorn Pettersson77f32992018-01-04 18:23:40 +00001666 Type *IntPtrTy = DL.getIntPtrType(V->getContext(), AS);
Chandler Carruth0539c072012-03-31 12:42:41 +00001667 return cast<ConstantInt>(ConstantInt::get(IntPtrTy, Offset));
1668}
1669
Adrian Prantl5f8f34e42018-05-01 15:54:18 +00001670/// Find dead blocks due to deleted CFG edges during inlining.
Haicheng Wu3739e142017-12-14 14:36:18 +00001671///
1672/// If we know the successor of the current block, \p CurrBB, has to be \p
1673/// NextBB, the other successors of \p CurrBB are dead if these successors have
1674/// no live incoming CFG edges. If one block is found to be dead, we can
1675/// continue growing the dead block list by checking the successors of the dead
1676/// blocks to see if all their incoming edges are dead or not.
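/// For example, if \p CurrBB ends in a conditional branch whose condition has
/// simplified to a constant, \p NextBB is the taken successor; the untaken
/// successor is added to DeadBlocks here provided all of its other incoming
/// edges are dead as well.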
1677void CallAnalyzer::findDeadBlocks(BasicBlock *CurrBB, BasicBlock *NextBB) {
1678 auto IsEdgeDead = [&](BasicBlock *Pred, BasicBlock *Succ) {
Hiroshi Inoue02a2bb22019-02-05 08:30:48 +00001679 // A CFG edge is dead if the predecessor is dead or the predecessor has a
Haicheng Wu3739e142017-12-14 14:36:18 +00001680 // known successor which is not the one under exam.
1681 return (DeadBlocks.count(Pred) ||
1682 (KnownSuccessors[Pred] && KnownSuccessors[Pred] != Succ));
1683 };
1684
1685 auto IsNewlyDead = [&](BasicBlock *BB) {
1686 // If all the edges to a block are dead, the block is also dead.
1687 return (!DeadBlocks.count(BB) &&
1688 llvm::all_of(predecessors(BB),
1689 [&](BasicBlock *P) { return IsEdgeDead(P, BB); }));
1690 };
1691
1692 for (BasicBlock *Succ : successors(CurrBB)) {
1693 if (Succ == NextBB || !IsNewlyDead(Succ))
1694 continue;
1695 SmallVector<BasicBlock *, 4> NewDead;
1696 NewDead.push_back(Succ);
1697 while (!NewDead.empty()) {
1698 BasicBlock *Dead = NewDead.pop_back_val();
1699 if (DeadBlocks.insert(Dead))
1700 // Continue growing the dead block lists.
1701 for (BasicBlock *S : successors(Dead))
1702 if (IsNewlyDead(S))
1703 NewDead.push_back(S);
1704 }
1705 }
1706}
1707
Adrian Prantl5f8f34e42018-05-01 15:54:18 +00001708/// Analyze a call site for potential inlining.
Chandler Carruth0539c072012-03-31 12:42:41 +00001709///
1710/// Returns true if inlining this call is viable, and false if it is not
1711/// viable. It computes the cost and adjusts the threshold based on numerous
1712/// factors and heuristics. If this method returns false but the computed cost
1713/// is below the computed threshold, then inlining was forcibly disabled by
Bob Wilson266802d2012-11-19 07:04:30 +00001714/// some artifact of the routine.
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001715InlineResult CallAnalyzer::analyzeCall(CallSite CS) {
Chandler Carruth7ae90d42012-04-11 10:15:10 +00001716 ++NumCallsAnalyzed;
1717
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001718 // Perform some tweaks to the cost and threshold based on the direct
1719 // callsite information.
Chandler Carruth0539c072012-03-31 12:42:41 +00001720
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001721 // We want to more aggressively inline vector-dense kernels, so up the
1722 // threshold, and we'll lower it if the % of vector instructions gets too
Chandler Carrutha004f222015-05-27 02:49:05 +00001723 // low. Note that these bonuses are some what arbitrary and evolved over time
1724 // by accident as much as because they are principled bonuses.
1725 //
1726 // FIXME: It would be nice to remove all such bonuses. At least it would be
1727 // nice to base the bonus values on something more scientific.
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001728 assert(NumInstructions == 0);
1729 assert(NumVectorInstructions == 0);
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00001730
1731 // Update the threshold based on callsite properties
1732 updateThreshold(CS, F);
1733
Easwaran Ramaned279752019-01-09 19:26:17 +00001734 // While Threshold depends on commandline options that can take negative
1735 // values, we want to enforce the invariant that the computed threshold and
1736 // bonuses are non-negative.
1737 assert(Threshold >= 0);
1738 assert(SingleBBBonus >= 0);
1739 assert(VectorBonus >= 0);
1740
Chandler Carrutha004f222015-05-27 02:49:05 +00001741 // Speculatively apply all possible bonuses to Threshold. If cost exceeds
1742 // this Threshold at any time (and cost cannot decrease), we can stop processing
1743 // the rest of the function body.
Easwaran Raman51b809b2017-07-28 21:47:36 +00001744 Threshold += (SingleBBBonus + VectorBonus);
Chandler Carrutha004f222015-05-27 02:49:05 +00001745
Xinliang David Li351d9b02017-05-02 05:38:41 +00001746 // Give out bonuses for the callsite, as the instructions setting them up
1747 // will be gone after inlining.
1748 Cost -= getCallsiteCost(CS, DL);
Benjamin Kramerc99d0e92012-08-07 11:13:19 +00001749
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001750 // If this function uses the coldcc calling convention, prefer not to inline
1751 // it.
1752 if (F.getCallingConv() == CallingConv::Cold)
1753 Cost += InlineConstants::ColdccPenalty;
1754
1755 // Check if we're done. This can happen due to bonuses and penalties.
Haicheng Wu61995362017-08-25 19:00:33 +00001756 if (Cost >= Threshold && !ComputeFullInlineCost)
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001757 return "high cost";
Bob Wilsona5b0dc82012-11-19 07:04:35 +00001758
Chandler Carruth0539c072012-03-31 12:42:41 +00001759 if (F.empty())
1760 return true;
1761
Davide Italiano9d939c82017-11-30 22:10:35 +00001762 Function *Caller = CS.getInstruction()->getFunction();
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001763 // Check if the caller function is recursive itself.
Chandler Carruthcdf47882014-03-09 03:16:01 +00001764 for (User *U : Caller->users()) {
1765 CallSite Site(U);
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001766 if (!Site)
1767 continue;
1768 Instruction *I = Site.getInstruction();
Davide Italiano9d939c82017-11-30 22:10:35 +00001769 if (I->getFunction() == Caller) {
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001770 IsCallerRecursive = true;
1771 break;
1772 }
1773 }
1774
Chandler Carruth0539c072012-03-31 12:42:41 +00001775 // Populate our simplified values by mapping from function arguments to call
1776 // arguments with known important simplifications.
1777 CallSite::arg_iterator CAI = CS.arg_begin();
1778 for (Function::arg_iterator FAI = F.arg_begin(), FAE = F.arg_end();
1779 FAI != FAE; ++FAI, ++CAI) {
1780 assert(CAI != CS.arg_end());
1781 if (Constant *C = dyn_cast<Constant>(CAI))
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001782 SimplifiedValues[&*FAI] = C;
Chandler Carruth0539c072012-03-31 12:42:41 +00001783
1784 Value *PtrArg = *CAI;
1785 if (ConstantInt *C = stripAndComputeInBoundsConstantOffsets(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001786 ConstantOffsetPtrs[&*FAI] = std::make_pair(PtrArg, C->getValue());
Chandler Carruth0539c072012-03-31 12:42:41 +00001787
1788 // We can SROA any pointer arguments derived from alloca instructions.
1789 if (isa<AllocaInst>(PtrArg)) {
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00001790 SROAArgValues[&*FAI] = PtrArg;
Chandler Carruth0539c072012-03-31 12:42:41 +00001791 SROAArgCosts[PtrArg] = 0;
1792 }
1793 }
1794 }
1795 NumConstantArgs = SimplifiedValues.size();
1796 NumConstantOffsetPtrArgs = ConstantOffsetPtrs.size();
1797 NumAllocaArgs = SROAArgValues.size();
1798
Hal Finkel57f03dd2014-09-07 13:49:57 +00001799 // FIXME: If a caller has multiple calls to a callee, we end up recomputing
1800 // the ephemeral values multiple times (and they're completely determined by
1801 // the callee, so this is purely duplicate work).
1802 SmallPtrSet<const Value *, 32> EphValues;
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001803 CodeMetrics::collectEphemeralValues(&F, &GetAssumptionCache(F), EphValues);
Hal Finkel57f03dd2014-09-07 13:49:57 +00001804
Chandler Carruth0539c072012-03-31 12:42:41 +00001805 // The worklist of live basic blocks in the callee *after* inlining. We avoid
1806 // adding basic blocks of the callee which can be proven to be dead for this
1807 // particular call site in order to get more accurate cost estimates. This
1808 // requires a somewhat heavyweight iteration pattern: we need to walk the
1809 // basic blocks in a breadth-first order as we insert live successors. To
1810 // accomplish this, and because we prioritize small iteration counts (we exit
1811 // after crossing our threshold), we use a small-size optimized SetVector.
1812 typedef SetVector<BasicBlock *, SmallVector<BasicBlock *, 16>,
Chad Rosier567556a2016-04-28 14:47:23 +00001813 SmallPtrSet<BasicBlock *, 16>>
1814 BBSetVector;
Chandler Carruth0539c072012-03-31 12:42:41 +00001815 BBSetVector BBWorklist;
1816 BBWorklist.insert(&F.getEntryBlock());
Easwaran Raman51b809b2017-07-28 21:47:36 +00001817 bool SingleBB = true;
Chandler Carruth0539c072012-03-31 12:42:41 +00001818 // Note that we *must not* cache the size, this loop grows the worklist.
1819 for (unsigned Idx = 0; Idx != BBWorklist.size(); ++Idx) {
1820 // Bail out the moment we cross the threshold. This means we'll under-count
1821 // the cost, but only when undercounting doesn't matter.
Haicheng Wu61995362017-08-25 19:00:33 +00001822 if (Cost >= Threshold && !ComputeFullInlineCost)
Chandler Carruth0539c072012-03-31 12:42:41 +00001823 break;
1824
1825 BasicBlock *BB = BBWorklist[Idx];
1826 if (BB->empty())
Chandler Carruth4d1d34f2012-03-14 23:19:53 +00001827 continue;
Dan Gohman4552e3c2009-10-13 18:30:07 +00001828
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001829 // Disallow inlining a blockaddress. A blockaddress only has defined
1830 // behavior for an indirect branch in the same function, and we do not
1831 // currently support inlining indirect branches. But, the inliner may not
1832 // see an indirect branch that ends up being dead code at a particular call
1833 // site. If the blockaddress escapes the function, e.g., via a global
1834 // variable, inlining may lead to an invalid cross-function reference.
Nick Desaulniers6a84cd32019-02-14 23:42:21 +00001835 if (BB->hasAddressTaken())
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001836 return "blockaddress";
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00001837
Chandler Carruth0539c072012-03-31 12:42:41 +00001838 // Analyze the cost of this block. If we blow through the threshold, this
1839 // returns false, and we can bail on out.
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001840 InlineResult IR = analyzeBlock(BB, EphValues);
1841 if (!IR)
1842 return IR;
Eric Christopher46308e62011-02-01 01:16:32 +00001843
Chandler Carruthedb12a82018-10-15 10:04:59 +00001844 Instruction *TI = BB->getTerminator();
Chandler Carruth0814d2a2013-12-13 07:59:56 +00001845
Chandler Carruth0539c072012-03-31 12:42:41 +00001846 // Add in the live successors by first checking whether we have a terminator
1847 // that may be simplified based on the values simplified by this call.
1848 if (BranchInst *BI = dyn_cast<BranchInst>(TI)) {
1849 if (BI->isConditional()) {
1850 Value *Cond = BI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001851 if (ConstantInt *SimpleCond =
1852 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Haicheng Wu3739e142017-12-14 14:36:18 +00001853 BasicBlock *NextBB = BI->getSuccessor(SimpleCond->isZero() ? 1 : 0);
1854 BBWorklist.insert(NextBB);
1855 KnownSuccessors[BB] = NextBB;
1856 findDeadBlocks(BB, NextBB);
Chandler Carruth0539c072012-03-31 12:42:41 +00001857 continue;
Eric Christopher46308e62011-02-01 01:16:32 +00001858 }
Chandler Carruth0539c072012-03-31 12:42:41 +00001859 }
1860 } else if (SwitchInst *SI = dyn_cast<SwitchInst>(TI)) {
1861 Value *Cond = SI->getCondition();
Chad Rosier567556a2016-04-28 14:47:23 +00001862 if (ConstantInt *SimpleCond =
1863 dyn_cast_or_null<ConstantInt>(SimplifiedValues.lookup(Cond))) {
Haicheng Wu3739e142017-12-14 14:36:18 +00001864 BasicBlock *NextBB = SI->findCaseValue(SimpleCond)->getCaseSuccessor();
1865 BBWorklist.insert(NextBB);
1866 KnownSuccessors[BB] = NextBB;
1867 findDeadBlocks(BB, NextBB);
Chandler Carruth0539c072012-03-31 12:42:41 +00001868 continue;
1869 }
1870 }
Eric Christopher46308e62011-02-01 01:16:32 +00001871
Chandler Carruth0539c072012-03-31 12:42:41 +00001872 // If we're unable to select a particular successor, just count all of
1873 // them.
Nadav Rotem4eb3d4b2012-09-19 08:08:04 +00001874 for (unsigned TIdx = 0, TSize = TI->getNumSuccessors(); TIdx != TSize;
1875 ++TIdx)
Chandler Carruth0539c072012-03-31 12:42:41 +00001876 BBWorklist.insert(TI->getSuccessor(TIdx));
1877
1878 // If we had any successors at this point, then post-inlining is likely to
1879 // have them as well. Note that we assume any basic blocks which existed
1880 // due to branches or switches which folded above will also fold after
1881 // inlining.
1882 if (SingleBB && TI->getNumSuccessors() > 1) {
1883 // Take off the bonus we applied to the threshold.
1884 Threshold -= SingleBBBonus;
1885 SingleBB = false;
Eric Christopher46308e62011-02-01 01:16:32 +00001886 }
1887 }
Andrew Trickcaa500b2011-10-01 01:27:56 +00001888
Easwaran Raman51b809b2017-07-28 21:47:36 +00001889 bool OnlyOneCallAndLocalLinkage =
1890 F.hasLocalLinkage() && F.hasOneUse() && &F == CS.getCalledFunction();
Chandler Carruthcb5beb32013-12-12 11:59:26 +00001891 // If this is a noduplicate call, we can still inline as long as
James Molloy4f6fb952012-12-20 16:04:27 +00001892 // inlining this would cause the removal of the caller (so the instruction
1893 // is not actually duplicated, just moved).
1894 if (!OnlyOneCallAndLocalLinkage && ContainsNoDuplicateCall)
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00001895 return "noduplicate";
James Molloy4f6fb952012-12-20 16:04:27 +00001896
David Greenba9f2452018-11-05 14:54:34 +00001897 // Loops generally act a lot like calls in that they act like barriers to
1898 // movement, require a certain amount of setup, etc. So when optimizing for
1899 // size, we penalize any call site whose callee contains loops. We do this
1900 // after all other costs here, so we will likely only be dealing with
1901 // relatively small functions (and hence DT and LI will hopefully be cheap).
1902 if (Caller->optForMinSize()) {
1903 DominatorTree DT(F);
1904 LoopInfo LI(DT);
1905 int NumLoops = 0;
1906 for (Loop *L : LI) {
1907 // Ignore loops that will not be executed
1908 if (DeadBlocks.count(L->getHeader()))
1909 continue;
1910 NumLoops++;
1911 }
1912 Cost += NumLoops * InlineConstants::CallPenalty;
1913 }
1914
Chandler Carrutha004f222015-05-27 02:49:05 +00001915 // We applied the maximum possible vector bonus at the beginning. Now,
1916 // subtract the excess bonus, if any, from the Threshold before
1917 // comparing against Cost.
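  // For example, if at most a tenth of the counted instructions are vector
  // instructions, the full VectorBonus is taken back; if at most half are,
  // half of it is taken back; otherwise the callee keeps the whole bonus.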
1918 if (NumVectorInstructions <= NumInstructions / 10)
Easwaran Raman51b809b2017-07-28 21:47:36 +00001919 Threshold -= VectorBonus;
Chandler Carrutha004f222015-05-27 02:49:05 +00001920 else if (NumVectorInstructions <= NumInstructions / 2)
Easwaran Raman51b809b2017-07-28 21:47:36 +00001921 Threshold -= VectorBonus/2;
Chandler Carruth0539c072012-03-31 12:42:41 +00001922
Hans Wennborg00ab73d2016-02-05 20:32:42 +00001923 return Cost < std::max(1, Threshold);
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001924}
1925
Aaron Ballman615eb472017-10-15 14:32:27 +00001926#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
Adrian Prantl5f8f34e42018-05-01 15:54:18 +00001927/// Dump stats about this call's analysis.
Yaron Kereneb2a2542016-01-29 20:50:44 +00001928LLVM_DUMP_METHOD void CallAnalyzer::dump() {
Eric Christophera13839f2014-02-26 23:27:16 +00001929#define DEBUG_PRINT_STAT(x) dbgs() << " " #x ": " << x << "\n"
Chandler Carruth0539c072012-03-31 12:42:41 +00001930 DEBUG_PRINT_STAT(NumConstantArgs);
1931 DEBUG_PRINT_STAT(NumConstantOffsetPtrArgs);
1932 DEBUG_PRINT_STAT(NumAllocaArgs);
1933 DEBUG_PRINT_STAT(NumConstantPtrCmps);
1934 DEBUG_PRINT_STAT(NumConstantPtrDiffs);
1935 DEBUG_PRINT_STAT(NumInstructionsSimplified);
Chandler Carrutha004f222015-05-27 02:49:05 +00001936 DEBUG_PRINT_STAT(NumInstructions);
Chandler Carruth0539c072012-03-31 12:42:41 +00001937 DEBUG_PRINT_STAT(SROACostSavings);
1938 DEBUG_PRINT_STAT(SROACostSavingsLost);
Haicheng Wua4461512017-12-15 14:34:41 +00001939 DEBUG_PRINT_STAT(LoadEliminationCost);
James Molloy4f6fb952012-12-20 16:04:27 +00001940 DEBUG_PRINT_STAT(ContainsNoDuplicateCall);
Chandler Carruth394e34f2014-01-31 22:32:32 +00001941 DEBUG_PRINT_STAT(Cost);
1942 DEBUG_PRINT_STAT(Threshold);
Chandler Carruth0539c072012-03-31 12:42:41 +00001943#undef DEBUG_PRINT_STAT
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001944}
Manman Renc3366cc2012-09-06 19:55:56 +00001945#endif
Eric Christopher2dfbd7e2011-02-05 00:49:15 +00001946
Adrian Prantl5f8f34e42018-05-01 15:54:18 +00001947/// Test that there are no attribute conflicts between Caller and Callee
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001948/// that prevent inlining.
1949static bool functionsHaveCompatibleAttributes(Function *Caller,
Eric Christopher4371b132015-07-02 01:11:47 +00001950 Function *Callee,
1951 TargetTransformInfo &TTI) {
Eric Christopherd566fb12015-07-29 22:09:48 +00001952 return TTI.areInlineCompatible(Caller, Callee) &&
Akira Hatanaka1cb242e2015-12-22 23:57:37 +00001953 AttributeFuncs::areInlineCompatible(*Caller, *Callee);
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00001954}
1955
Xinliang David Li351d9b02017-05-02 05:38:41 +00001956int llvm::getCallsiteCost(CallSite CS, const DataLayout &DL) {
1957 int Cost = 0;
1958 for (unsigned I = 0, E = CS.arg_size(); I != E; ++I) {
1959 if (CS.isByValArgument(I)) {
1960 // We approximate the number of loads and stores needed by dividing the
1961 // size of the byval type by the target's pointer size.
1962 PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
1963 unsigned TypeSize = DL.getTypeSizeInBits(PTy->getElementType());
Bjorn Pettersson77f32992018-01-04 18:23:40 +00001964 unsigned AS = PTy->getAddressSpace();
1965 unsigned PointerSize = DL.getPointerSizeInBits(AS);
Xinliang David Li351d9b02017-05-02 05:38:41 +00001966 // Ceiling division.
1967 unsigned NumStores = (TypeSize + PointerSize - 1) / PointerSize;
1968
1969 // If it generates more than 8 stores it is likely to be expanded as an
1970 // inline memcpy so we take that as an upper bound. Otherwise we assume
1971 // one load and one store per word copied.
1972 // FIXME: The maxStoresPerMemcpy setting from the target should be used
1973 // here instead of a magic number of 8, but it's not available via
1974 // DataLayout.
1975 NumStores = std::min(NumStores, 8U);
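      // For example, a 32-byte (256-bit) byval struct with 64-bit pointers
      // yields NumStores = 4 (under the cap of 8), adding 2 * 4 * InstrCost
      // to the estimate below.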
1976
1977 Cost += 2 * NumStores * InlineConstants::InstrCost;
1978 } else {
1979 // For non-byval arguments subtract off one instruction per call
1980 // argument.
1981 Cost += InlineConstants::InstrCost;
1982 }
1983 }
1984 // The call instruction also disappears after inlining.
1985 Cost += InlineConstants::InstrCost + InlineConstants::CallPenalty;
1986 return Cost;
1987}
1988
Sean Silvaab6a6832016-07-23 04:22:50 +00001989InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001990 CallSite CS, const InlineParams &Params, TargetTransformInfo &CalleeTTI,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001991 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman12585b02017-01-20 22:44:04 +00001992 Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001993 ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE) {
Daniel Jasperaec2fa32016-12-19 08:22:17 +00001994 return getInlineCost(CS, CS.getCalledFunction(), Params, CalleeTTI,
Haicheng Wu0812c5b2017-08-21 20:00:09 +00001995 GetAssumptionCache, GetBFI, PSI, ORE);
Easwaran Ramanb9f71202015-12-28 20:28:19 +00001996}
1997
Sean Silvaab6a6832016-07-23 04:22:50 +00001998InlineCost llvm::getInlineCost(
Easwaran Raman1c57cc22016-08-10 00:48:04 +00001999 CallSite CS, Function *Callee, const InlineParams &Params,
Daniel Jasperaec2fa32016-12-19 08:22:17 +00002000 TargetTransformInfo &CalleeTTI,
2001 std::function<AssumptionCache &(Function &)> &GetAssumptionCache,
Easwaran Raman12585b02017-01-20 22:44:04 +00002002 Optional<function_ref<BlockFrequencyInfo &(Function &)>> GetBFI,
Haicheng Wu0812c5b2017-08-21 20:00:09 +00002003 ProfileSummaryInfo *PSI, OptimizationRemarkEmitter *ORE) {
Easwaran Ramanf4bb2f02016-01-14 23:16:29 +00002004
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002005 // Cannot inline indirect calls.
2006 if (!Callee)
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00002007 return llvm::InlineCost::getNever("indirect call");
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002008
Bjorn Pettersson38514962018-01-10 13:01:18 +00002009 // Never inline calls with byval arguments that do not have the alloca
2010 // address space. Since byval arguments can be replaced with a copy to an
2011 // alloca, the inlined code would need to be adjusted to handle that the
2012 // argument is in the alloca address space (so it is a little bit complicated
2013 // to solve).
2014 unsigned AllocaAS = Callee->getParent()->getDataLayout().getAllocaAddrSpace();
2015 for (unsigned I = 0, E = CS.arg_size(); I != E; ++I)
2016 if (CS.isByValArgument(I)) {
2017 PointerType *PTy = cast<PointerType>(CS.getArgument(I)->getType());
2018 if (PTy->getAddressSpace() != AllocaAS)
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00002019 return llvm::InlineCost::getNever("byval arguments without alloca"
2020 " address space");
Bjorn Pettersson38514962018-01-10 13:01:18 +00002021 }
2022
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002023 // Calls to functions with always-inline attributes should be inlined
2024 // whenever possible.
Peter Collingbourne68a88972014-05-19 18:25:54 +00002025 if (CS.hasFnAttr(Attribute::AlwaysInline)) {
Yevgeny Rouban15b17d02019-02-01 10:44:43 +00002026 auto IsViable = isInlineViable(*Callee);
2027 if (IsViable)
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00002028 return llvm::InlineCost::getAlways("always inline attribute");
Yevgeny Rouban15b17d02019-02-01 10:44:43 +00002029 return llvm::InlineCost::getNever(IsViable.message);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002030 }
2031
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00002032 // Never inline functions with conflicting attributes (unless callee has
2033 // always-inline attribute).
Chad Rosier5ce28f42017-08-02 14:50:27 +00002034 Function *Caller = CS.getCaller();
2035 if (!functionsHaveCompatibleAttributes(Caller, Callee, CalleeTTI))
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00002036 return llvm::InlineCost::getNever("conflicting attributes");
Evgeniy Stepanov2ad36982013-08-08 08:22:39 +00002037
Paul Robinsondcbe35b2013-11-18 21:44:03 +00002038 // Don't inline this call if the caller has the optnone attribute.
Chad Rosier5ce28f42017-08-02 14:50:27 +00002039 if (Caller->hasFnAttribute(Attribute::OptimizeNone))
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00002040 return llvm::InlineCost::getNever("optnone attribute");
Paul Robinsondcbe35b2013-11-18 21:44:03 +00002041
Manoj Gupta77eeac32018-07-09 22:27:23 +00002042 // Don't inline a function that treats null pointer as valid into a caller
2043 // that does not have this attribute.
2044 if (!Caller->nullPointerIsDefined() && Callee->nullPointerIsDefined())
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00002045 return llvm::InlineCost::getNever("nullptr definitions incompatible");
Manoj Gupta77eeac32018-07-09 22:27:23 +00002046
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00002047 // Don't inline functions which can be interposed at link-time.
2048 if (Callee->isInterposable())
2049 return llvm::InlineCost::getNever("interposable");
2050
2051 // Don't inline functions marked noinline.
2052 if (Callee->hasFnAttribute(Attribute::NoInline))
2053 return llvm::InlineCost::getNever("noinline function attribute");
2054
2055 // Don't inline call sites marked noinline.
2056 if (CS.isNoInline())
2057 return llvm::InlineCost::getNever("noinline call site attribute");
Dan Gohman4552e3c2009-10-13 18:30:07 +00002058
Nicola Zaghend34e60c2018-05-14 12:53:11 +00002059 LLVM_DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
2060 << "... (caller:" << Caller->getName() << ")\n");
Andrew Trickcaa500b2011-10-01 01:27:56 +00002061
Haicheng Wu0812c5b2017-08-21 20:00:09 +00002062 CallAnalyzer CA(CalleeTTI, GetAssumptionCache, GetBFI, PSI, ORE, *Callee, CS,
Easwaran Raman12585b02017-01-20 22:44:04 +00002063 Params);
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00002064 InlineResult ShouldInline = CA.analyzeCall(CS);
Dan Gohman4552e3c2009-10-13 18:30:07 +00002065
Nicola Zaghend34e60c2018-05-14 12:53:11 +00002066 LLVM_DEBUG(CA.dump());
Chandler Carruth0539c072012-03-31 12:42:41 +00002067
2068 // Check if there was a reason to force inlining or no inlining.
2069 if (!ShouldInline && CA.getCost() < CA.getThreshold())
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00002070 return InlineCost::getNever(ShouldInline.message);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002071 if (ShouldInline && CA.getCost() >= CA.getThreshold())
David Bolvanskyc0aa4b72018-08-05 14:53:08 +00002072 return InlineCost::getAlways("empty function");
Andrew Trickcaa500b2011-10-01 01:27:56 +00002073
Chandler Carruth0539c072012-03-31 12:42:41 +00002074 return llvm::InlineCost::get(CA.getCost(), CA.getThreshold());
Dan Gohman4552e3c2009-10-13 18:30:07 +00002075}
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002076
Yevgeny Rouban15b17d02019-02-01 10:44:43 +00002077InlineResult llvm::isInlineViable(Function &F) {
Duncan P. N. Exon Smithb3fc83c2015-02-14 00:12:15 +00002078 bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002079 for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
Gerolf Hoflehner734f4c82014-07-01 00:19:34 +00002080 // Disallow inlining of functions which contain indirect branches or
2081 // blockaddresses.
Yevgeny Rouban15b17d02019-02-01 10:44:43 +00002082 if (isa<IndirectBrInst>(BI->getTerminator()))
2083 return "contains indirect branches";
2084
Nick Desaulniers6a84cd32019-02-14 23:42:21 +00002085 if (BI->hasAddressTaken())
Yevgeny Rouban15b17d02019-02-01 10:44:43 +00002086 return "uses block address";
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002087
Duncan P. N. Exon Smith5a82c912015-10-10 00:53:03 +00002088 for (auto &II : *BI) {
2089 CallSite CS(&II);
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002090 if (!CS)
2091 continue;
2092
2093 // Disallow recursive calls.
2094 if (&F == CS.getCalledFunction())
Yevgeny Rouban15b17d02019-02-01 10:44:43 +00002095 return "recursive call";
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002096
2097 // Disallow calls which expose returns-twice to a function not previously
2098 // attributed as such.
2099 if (!ReturnsTwice && CS.isCall() &&
2100 cast<CallInst>(CS.getInstruction())->canReturnTwice())
Yevgeny Rouban15b17d02019-02-01 10:44:43 +00002101 return "exposes returns-twice attribute";
Reid Kleckner223de262015-04-14 20:38:14 +00002102
Florian Hahn16366512018-01-28 19:11:49 +00002103 if (CS.getCalledFunction())
2104 switch (CS.getCalledFunction()->getIntrinsicID()) {
2105 default:
2106 break;
Vitaly Buka4296ea72018-04-04 21:46:27 +00002107 // Disallow inlining of @llvm.icall.branch.funnel because the current
2108 // backend can't separate call targets from call arguments.

2109 case llvm::Intrinsic::icall_branch_funnel:
Yevgeny Rouban15b17d02019-02-01 10:44:43 +00002110 return "disallowed inlining of @llvm.icall.branch.funnel";
Florian Hahn16366512018-01-28 19:11:49 +00002111 // Disallow inlining functions that call @llvm.localescape. Doing this
2112 // correctly would require major changes to the inliner.
2113 case llvm::Intrinsic::localescape:
Yevgeny Rouban15b17d02019-02-01 10:44:43 +00002114 return "disallowed inlining of @llvm.localescape";
Sameer AbuAsal77beee42018-09-20 18:39:34 +00002115 // Disallow inlining of functions that initialize VarArgs with va_start.
Florian Hahn16366512018-01-28 19:11:49 +00002116 case llvm::Intrinsic::vastart:
Yevgeny Rouban15b17d02019-02-01 10:44:43 +00002117 return "contains VarArgs initialized with va_start";
Florian Hahn16366512018-01-28 19:11:49 +00002118 }
Bob Wilsona5b0dc82012-11-19 07:04:35 +00002119 }
2120 }
2121
2122 return true;
2123}
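// Illustrative sketch (hypothetical diagnostic helper, not upstream code):
// InlineResult converts to bool (true means viable) and carries the failure
// string in its `message` member, exactly as the early returns above build it.
#if 0
static void diagnoseViability(Function &F) {
  InlineResult Viable = isInlineViable(F);
  if (!Viable)
    errs() << F.getName() << " is not inlinable: " << Viable.message << "\n";
}
#endif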
Easwaran Raman1c57cc22016-08-10 00:48:04 +00002124
2125// APIs to create InlineParams based on command line flags and/or other
2126// parameters.
2127
2128InlineParams llvm::getInlineParams(int Threshold) {
2129 InlineParams Params;
2130
2131 // This field is the threshold to use for a callee by default. This is
2132 // derived from one or more of:
2133 // * optimization or size-optimization levels,
2134 // * a value passed to the createFunctionInliningPass function, or
2135 // * the -inline-threshold flag.
2136 // If the -inline-threshold flag is explicitly specified, that is used
2137 // irrespective of anything else.
2138 if (InlineThreshold.getNumOccurrences() > 0)
2139 Params.DefaultThreshold = InlineThreshold;
2140 else
2141 Params.DefaultThreshold = Threshold;
2142
2143 // Set the HintThreshold knob from the -inlinehint-threshold.
2144 Params.HintThreshold = HintThreshold;
2145
2146 // Set the HotCallSiteThreshold knob from the -hot-callsite-threshold.
2147 Params.HotCallSiteThreshold = HotCallSiteThreshold;
2148
Easwaran Raman974d4ee2017-08-03 22:23:33 +00002149 // If the -locally-hot-callsite-threshold is explicitly specified, use it to
2150 // populate LocallyHotCallSiteThreshold. Later, we populate
2151 // Params.LocallyHotCallSiteThreshold from -locally-hot-callsite-threshold if
2152 // we know that the optimization level is O3 (in the getInlineParams
2153 // variant that takes the opt and size levels).
2154 // FIXME: Remove this check (and make the assignment unconditional) after
2155 // addressing size regression issues at O2.
2156 if (LocallyHotCallSiteThreshold.getNumOccurrences() > 0)
2157 Params.LocallyHotCallSiteThreshold = LocallyHotCallSiteThreshold;
2158
Easwaran Raman12585b02017-01-20 22:44:04 +00002159 // Set the ColdCallSiteThreshold knob from the -inline-cold-callsite-threshold.
2160 Params.ColdCallSiteThreshold = ColdCallSiteThreshold;
2161
Easwaran Raman1c57cc22016-08-10 00:48:04 +00002162 // Set the OptMinSizeThreshold and OptSizeThreshold params only if the
Easwaran Raman1c57cc22016-08-10 00:48:04 +00002163 // -inline-threshold command-line option is not explicitly given. If that
2164 // option is present, then its value applies even for callers with size and
2165 // minsize attributes.
2166 // If the -inline-threshold is not specified, set the ColdThreshold from the
2167 // -inlinecold-threshold even if it is not explicitly passed. If
2168 // -inline-threshold is specified, then -inlinecold-threshold needs to be
2169 // explicitly specified to set the ColdThreshold knob.
2170 if (InlineThreshold.getNumOccurrences() == 0) {
2171 Params.OptMinSizeThreshold = InlineConstants::OptMinSizeThreshold;
2172 Params.OptSizeThreshold = InlineConstants::OptSizeThreshold;
2173 Params.ColdThreshold = ColdThreshold;
2174 } else if (ColdThreshold.getNumOccurrences() > 0) {
2175 Params.ColdThreshold = ColdThreshold;
2176 }
2177 return Params;
2178}
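// Illustrative sketch (hypothetical base threshold, not upstream code): the
// precedence implemented above, with a made-up argument of 400. An explicit
// -inline-threshold flag wins over the argument, and once that flag is given
// the size- and cold-specific knobs are set only from their own flags.
#if 0
InlineParams Custom = getInlineParams(/*Threshold=*/400);
// With no flags passed:       Custom.DefaultThreshold == 400.
// With -inline-threshold=100: Custom.DefaultThreshold == 100, and
// OptSizeThreshold/OptMinSizeThreshold/ColdThreshold remain unset unless
// their own flags were also specified.
#endif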
2179
2180InlineParams llvm::getInlineParams() {
2181 return getInlineParams(InlineThreshold);
2182}
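// Illustrative note (hypothetical usage, not upstream code): the zero-argument
// overload simply reuses the current value of the -inline-threshold flag,
// which defaults to 225 when the flag is not passed.
#if 0
InlineParams Defaults = getInlineParams(); // DefaultThreshold == 225 unless
                                           // -inline-threshold was passed.
#endif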
2183
2184// Compute the default threshold for inlining based on the opt level and the
2185// size opt level.
2186static int computeThresholdFromOptLevels(unsigned OptLevel,
2187 unsigned SizeOptLevel) {
2188 if (OptLevel > 2)
2189 return InlineConstants::OptAggressiveThreshold;
2190 if (SizeOptLevel == 1) // -Os
2191 return InlineConstants::OptSizeThreshold;
2192 if (SizeOptLevel == 2) // -Oz
2193 return InlineConstants::OptMinSizeThreshold;
2194 return InlineThreshold;
2195}
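// Illustrative sketch (hypothetical spot checks, not upstream code): the
// mapping implemented above. Note that the OptLevel > 2 test comes first, so
// opt level 3 selects OptAggressiveThreshold regardless of any size level;
// the named constants are defined in llvm/Analysis/InlineCost.h. assert()
// would need <cassert>.
#if 0
void checkThresholdMapping() {
  assert(computeThresholdFromOptLevels(3, 0) ==
         InlineConstants::OptAggressiveThreshold);                // -O3
  assert(computeThresholdFromOptLevels(2, 1) ==
         InlineConstants::OptSizeThreshold);                      // -O2 -Os
  assert(computeThresholdFromOptLevels(2, 2) ==
         InlineConstants::OptMinSizeThreshold);                   // -O2 -Oz
  assert(computeThresholdFromOptLevels(2, 0) == InlineThreshold); // -O2
}
#endif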
2196
2197InlineParams llvm::getInlineParams(unsigned OptLevel, unsigned SizeOptLevel) {
Easwaran Raman974d4ee2017-08-03 22:23:33 +00002198 auto Params =
2199 getInlineParams(computeThresholdFromOptLevels(OptLevel, SizeOptLevel));
2200 // At O3, use the value of the -locally-hot-callsite-threshold option to
2201 // populate Params.LocallyHotCallSiteThreshold. Below O3, this flag takes
2202 // effect only when it is specified explicitly.
2203 if (OptLevel > 2)
2204 Params.LocallyHotCallSiteThreshold = LocallyHotCallSiteThreshold;
2205 return Params;
Easwaran Raman1c57cc22016-08-10 00:48:04 +00002206}
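// Illustrative sketch (hypothetical pipeline-builder usage, not upstream
// code): calling the opt-level overload above. At OptLevel 3 the locally-hot
// call-site threshold is always populated; below that it is populated only
// when -locally-hot-callsite-threshold is passed explicitly.
#if 0
InlineParams O3Params = getInlineParams(/*OptLevel=*/3, /*SizeOptLevel=*/0);
bool UsesLocallyHot = O3Params.LocallyHotCallSiteThreshold.hasValue(); // true
#endif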