//===- MLInlineAdvisor.cpp - machine learned InlineAdvisor ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the interface between the inliner and a learned model.
// It delegates model evaluation to either the AOT compiled model (the
// 'release' mode) or a runtime-loaded model (the 'development' case).
//
//===----------------------------------------------------------------------===//
#include "llvm/Config/config.h"
#if defined(LLVM_HAVE_TF_AOT) || defined(LLVM_HAVE_TF_API)

#include <limits>
#include <unordered_map>
#include <unordered_set>

#include "llvm/ADT/SCCIterator.h"
#include "llvm/Analysis/CallGraph.h"
#include "llvm/Analysis/FunctionPropertiesAnalysis.h"
#include "llvm/Analysis/InlineCost.h"
#include "llvm/Analysis/MLInlineAdvisor.h"
#include "llvm/Analysis/MLModelRunner.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/PassManager.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Path.h"

using namespace llvm;

#define DEBUG_TYPE "inline-ml"

static cl::opt<float> SizeIncreaseThreshold(
    "ml-advisor-size-increase-threshold", cl::Hidden,
    cl::desc("Maximum factor by which expected native size may increase before "
             "blocking any further inlining."),
    cl::init(2.0));

const std::array<std::string, NumberOfFeatures> llvm::FeatureNameMap{
#define POPULATE_NAMES(INDEX_NAME, NAME, COMMENT) NAME,
    INLINE_FEATURE_ITERATOR(POPULATE_NAMES)
#undef POPULATE_NAMES
};

const char *const llvm::DecisionName = "inlining_decision";
const char *const llvm::DefaultDecisionName = "inlining_default";
const char *const llvm::RewardName = "delta_size";

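// Return the call site if it is a direct call to a function whose definition
// is available in this module (i.e. one the inliner could act on); return
// nullptr otherwise.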
CallBase *getInlinableCS(Instruction &I) {
  if (auto *CS = dyn_cast<CallBase>(&I))
    if (Function *Callee = CS->getCalledFunction()) {
      if (!Callee->isDeclaration()) {
        return CS;
      }
    }
  return nullptr;
}

MLInlineAdvisor::MLInlineAdvisor(Module &M, ModuleAnalysisManager &MAM,
                                 std::unique_ptr<MLModelRunner> Runner)
    : InlineAdvisor(
          MAM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager()),
      M(M), ModelRunner(std::move(Runner)), CG(new CallGraph(M)),
      InitialIRSize(getModuleIRSize()), CurrentIRSize(InitialIRSize) {
  assert(ModelRunner);

  // Extract the 'call site height' feature - the position of a call site
  // relative to the farthest statically reachable SCC node. We don't mutate
  // this value while inlining happens. Empirically, this feature proved
  // critical in behavioral cloning - i.e. training a model to mimic the manual
  // heuristic's decisions - and, thus, equally important for training for
  // improvement.
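  // For illustration: given a call chain main() -> foo() -> bar() with no
  // cycles, the bottom-up SCC walk below assigns bar level 0, foo level 1 and
  // main level 2, so call sites inside main report a height of 2.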
  for (auto I = scc_begin(CG.get()); !I.isAtEnd(); ++I) {
    const std::vector<CallGraphNode *> &CGNodes = *I;
    unsigned Level = 0;
    for (auto *CGNode : CGNodes) {
      Function *F = CGNode->getFunction();
      if (!F || F->isDeclaration())
        continue;
      for (auto &I : instructions(F)) {
        if (auto *CS = getInlinableCS(I)) {
          auto *Called = CS->getCalledFunction();
          auto Pos = FunctionLevels.find(Called);
          // In bottom-up traversal, an inlinable callee is either in the same
          // SCC, or in an already-visited SCC. So not finding its level means
          // we haven't visited it yet, i.e. it's in this SCC.
          if (Pos == FunctionLevels.end())
            continue;
          Level = std::max(Level, Pos->second + 1);
        }
      }
    }
    for (auto *CGNode : CGNodes) {
      Function *F = CGNode->getFunction();
      if (F && !F->isDeclaration())
        FunctionLevels[F] = Level;
    }
  }
}

void MLInlineAdvisor::onPassEntry() {
  // Function passes executed between InlinerPass runs may have changed the
  // module-wide features.
  NodeCount = 0;
  EdgeCount = 0;
  for (auto &F : M)
    if (!F.isDeclaration()) {
      ++NodeCount;
      EdgeCount += getLocalCalls(F);
    }
}

int64_t MLInlineAdvisor::getLocalCalls(Function &F) {
  return FAM.getResult<FunctionPropertiesAnalysis>(F)
      .DirectCallsToDefinedFunctions;
}

// Update the internal state of the advisor, and force invalidate feature
// analysis. Currently, we maintain minimal (and very simple) global state - the
// number of functions and the number of static calls. We also keep track of the
// total IR size in this module, to stop misbehaving policies at a certain bloat
// factor (SizeIncreaseThreshold).
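// Note: with the default SizeIncreaseThreshold of 2.0, further inlining is
// blocked (ForceStop) once the expected native size of the module exceeds
// twice its initial size.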
void MLInlineAdvisor::onSuccessfulInlining(const MLInlineAdvice &Advice,
                                           bool CalleeWasDeleted) {
  assert(!ForceStop);
  Function *Caller = Advice.getCaller();
  Function *Callee = Advice.getCallee();

  // The caller features aren't valid anymore.
  FAM.invalidate<FunctionPropertiesAnalysis>(*Caller);
  int64_t IRSizeAfter =
      getIRSize(*Caller) + (CalleeWasDeleted ? 0 : Advice.CalleeIRSize);
  CurrentIRSize += IRSizeAfter - (Advice.CallerIRSize + Advice.CalleeIRSize);
  if (CurrentIRSize > SizeIncreaseThreshold * InitialIRSize)
    ForceStop = true;

  // We can delta-update module-wide features. We know the inlining only changed
  // the caller, and maybe the callee (by deleting the latter).
  // Nodes are simple to update.
  // For edges, we 'forget' the edges that the caller and callee used to have
  // before inlining, and add back what they currently have together.
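  // Worked example (illustration only): if the caller had 3 local calls and
  // the callee had 2 before inlining (CallerAndCalleeEdges == 5), and after
  // inlining the caller has 4 while the surviving callee still has 2, then
  // EdgeCount is adjusted by (4 + 2) - 5 = +1.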
  int64_t NewCallerAndCalleeEdges =
      FAM.getResult<FunctionPropertiesAnalysis>(*Caller)
          .DirectCallsToDefinedFunctions;

  if (CalleeWasDeleted)
    --NodeCount;
  else
    NewCallerAndCalleeEdges +=
        FAM.getResult<FunctionPropertiesAnalysis>(*Callee)
            .DirectCallsToDefinedFunctions;
  EdgeCount += (NewCallerAndCalleeEdges - Advice.CallerAndCalleeEdges);
  assert(CurrentIRSize >= 0 && EdgeCount >= 0 && NodeCount >= 0);
}

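// Sum the IR sizes of all functions defined in this module; used as the
// baseline against which size growth is measured.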
int64_t MLInlineAdvisor::getModuleIRSize() const {
  int64_t Ret = 0;
  for (auto &F : CG->getModule())
    if (!F.isDeclaration())
      Ret += getIRSize(F);
  return Ret;
}

std::unique_ptr<InlineAdvice> MLInlineAdvisor::getAdvice(CallBase &CB) {
  auto &Caller = *CB.getCaller();
  auto &Callee = *CB.getCalledFunction();

  auto GetAssumptionCache = [&](Function &F) -> AssumptionCache & {
    return FAM.getResult<AssumptionAnalysis>(F);
  };
  auto &TIR = FAM.getResult<TargetIRAnalysis>(Callee);
  auto &ORE = FAM.getResult<OptimizationRemarkEmitterAnalysis>(Caller);

  auto MandatoryKind = MandatoryInlineAdvisor::getMandatoryKind(CB, FAM, ORE);
  // If this is a "never inline" case, there won't be any changes to internal
  // state we need to track, so we can just return the base InlineAdvice, which
  // will do nothing interesting.
  // Same thing if this is a recursive case.
  if (MandatoryKind == MandatoryInlineAdvisor::MandatoryInliningKind::Never ||
      &Caller == &Callee)
    return std::make_unique<InlineAdvice>(this, CB, ORE, false);

  bool Mandatory =
      MandatoryKind == MandatoryInlineAdvisor::MandatoryInliningKind::Always;

  // If we need to stop, we no longer want to track any state changes, so we
  // just return the base InlineAdvice, which acts as a noop.
  if (ForceStop) {
    ORE.emit([&] {
      return OptimizationRemarkMissed(DEBUG_TYPE, "ForceStop", &CB)
             << "Won't attempt inlining because module size grew too much.";
    });
    return std::make_unique<InlineAdvice>(this, CB, ORE, Mandatory);
  }

  int CostEstimate = 0;
  if (!Mandatory) {
    auto IsCallSiteInlinable =
        llvm::getInliningCostEstimate(CB, TIR, GetAssumptionCache);
    if (!IsCallSiteInlinable) {
      // We can't inline this for correctness reasons, so return the base
      // InlineAdvice, as we don't care about tracking any state changes (which
      // won't happen).
      return std::make_unique<InlineAdvice>(this, CB, ORE, false);
    }
    CostEstimate = *IsCallSiteInlinable;
  }

  if (Mandatory)
    return getMandatoryAdvice(CB, ORE);

  auto NrCtantParams = 0;
  for (auto I = CB.arg_begin(), E = CB.arg_end(); I != E; ++I) {
    NrCtantParams += (isa<Constant>(*I));
  }

  auto &CallerBefore = FAM.getResult<FunctionPropertiesAnalysis>(Caller);
  auto &CalleeBefore = FAM.getResult<FunctionPropertiesAnalysis>(Callee);

  ModelRunner->setFeature(FeatureIndex::CalleeBasicBlockCount,
                          CalleeBefore.BasicBlockCount);
  ModelRunner->setFeature(FeatureIndex::CallSiteHeight,
                          FunctionLevels[&Caller]);
  ModelRunner->setFeature(FeatureIndex::NodeCount, NodeCount);
  ModelRunner->setFeature(FeatureIndex::NrCtantParams, NrCtantParams);
  ModelRunner->setFeature(FeatureIndex::CostEstimate, CostEstimate);
  ModelRunner->setFeature(FeatureIndex::EdgeCount, EdgeCount);
  ModelRunner->setFeature(FeatureIndex::CallerUsers, CallerBefore.Uses);
  ModelRunner->setFeature(FeatureIndex::CallerConditionallyExecutedBlocks,
                          CallerBefore.BlocksReachedFromConditionalInstruction);
  ModelRunner->setFeature(FeatureIndex::CallerBasicBlockCount,
                          CallerBefore.BasicBlockCount);
  ModelRunner->setFeature(FeatureIndex::CalleeConditionallyExecutedBlocks,
                          CalleeBefore.BlocksReachedFromConditionalInstruction);
  ModelRunner->setFeature(FeatureIndex::CalleeUsers, CalleeBefore.Uses);
  return getAdviceFromModel(CB, ORE);
}

std::unique_ptr<MLInlineAdvice>
MLInlineAdvisor::getAdviceFromModel(CallBase &CB,
                                    OptimizationRemarkEmitter &ORE) {
  return std::make_unique<MLInlineAdvice>(this, CB, ORE, ModelRunner->run());
}

std::unique_ptr<MLInlineAdvice>
MLInlineAdvisor::getMandatoryAdvice(CallBase &CB,
                                    OptimizationRemarkEmitter &ORE) {
  return std::make_unique<MLInlineAdvice>(this, CB, ORE, true);
}

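// Attach the callee name, the current feature values, and the decision to the
// remark, so the model's inputs can be inspected in optimization records.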
void MLInlineAdvice::reportContextForRemark(
    DiagnosticInfoOptimizationBase &OR) {
  using namespace ore;
  OR << NV("Callee", Callee->getName());
  for (size_t I = 0; I < NumberOfFeatures; ++I)
    OR << NV(FeatureNameMap[I], getAdvisor()->getModelRunner().getFeature(I));
  OR << NV("ShouldInline", isInliningRecommended());
}

void MLInlineAdvice::recordInliningImpl() {
  ORE.emit([&]() {
    OptimizationRemark R(DEBUG_TYPE, "InliningSuccess", DLoc, Block);
    reportContextForRemark(R);
    return R;
  });
  getAdvisor()->onSuccessfulInlining(*this, /*CalleeWasDeleted*/ false);
}

void MLInlineAdvice::recordInliningWithCalleeDeletedImpl() {
  ORE.emit([&]() {
    OptimizationRemark R(DEBUG_TYPE, "InliningSuccessWithCalleeDeleted", DLoc,
                         Block);
    reportContextForRemark(R);
    return R;
  });
  getAdvisor()->onSuccessfulInlining(*this, /*CalleeWasDeleted*/ true);
}

void MLInlineAdvice::recordUnsuccessfulInliningImpl(
    const InlineResult &Result) {
  ORE.emit([&]() {
    OptimizationRemarkMissed R(DEBUG_TYPE, "InliningAttemptedAndUnsuccessful",
                               DLoc, Block);
    reportContextForRemark(R);
    return R;
  });
}

void MLInlineAdvice::recordUnattemptedInliningImpl() {
  ORE.emit([&]() {
    OptimizationRemarkMissed R(DEBUG_TYPE, "InliningNotAttempted", DLoc, Block);
    reportContextForRemark(R);
    return R;
  });
}
#endif // defined(LLVM_HAVE_TF_AOT) || defined(LLVM_HAVE_TF_API)