//===- CGSCCPassManager.cpp - Managing & running CGSCC passes ------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/PassManager.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <iterator>

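// The debug type used by the LLVM_DEBUG statements below; their output can be
// selected with -debug-only=cgscc.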
#define DEBUG_TYPE "cgscc"

using namespace llvm;

// Explicit template instantiations and specialization definitions for core
// template typedefs.
namespace llvm {

// Explicit instantiations for the core proxy templates.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC, LazyCallGraph &>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;

/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
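///
/// As a sketch of typical usage (\c SomeCGSCCPass stands in for any real
/// CGSCC pass, and \c M / \c MAM for a module and its analysis manager), this
/// run method is reached by nesting the CGSCC pass manager into a module
/// pipeline via the post-order adaptor:
/// \code
///   CGSCCPassManager CGPM;
///   CGPM.addPass(SomeCGSCCPass());
///   ModulePassManager MPM;
///   MPM.addPass(createModuleToPostOrderCGSCCPassAdaptor(std::move(CGPM)));
///   MPM.run(M, MAM);
/// \endcode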
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  PreservedAnalyses PA = PreservedAnalyses::all();

  if (DebugLogging)
    dbgs() << "Starting CGSCC pass manager run.\n";

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  for (auto &Pass : Passes) {
    if (DebugLogging)
      dbgs() << "Running pass: " << Pass->name() << " on " << *C << "\n";

    PreservedAnalyses PassPA = Pass->run(*C, AM, G, UR);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may have been invalidated entirely; if so, skip it.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }
    // Check that we didn't miss any update scenario.
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses.
    AM.invalidate(*C, PassPA);

    // Finally, intersect the pass's preserved analyses into the aggregate
    // preserved set for this pass manager.
    PA.intersect(std::move(PassPA));

    // FIXME: Historically, the pass managers all called the LLVM context's
    // yield function here. We don't have a generic way to acquire the
    // context and it isn't yet clear what the right pattern is for yielding
    // in the new pass manager so it is currently omitted.
    // ...getContext().yield();
  }

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();

  if (DebugLogging)
    dbgs() << "Finished CGSCC pass manager run.\n";

  return PA;
}

bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
    Module &M, const PreservedAnalyses &PA,
    ModuleAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy or the call graph is going to be invalidated, we also need
  // to clear all the keys coming from that analysis.
  //
  // We also directly invalidate the FAM's module proxy if necessary, and if
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
  // it to handle module -> function analysis invalidation in the face of
  // structural changes and so if it's unavailable we conservatively clear the
  // entire SCC layer as well rather than trying to do invalidation ourselves.
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
    InnerAM->clear();

    // And the proxy itself should be marked as invalid so that we can observe
    // the new call graph. This isn't strictly necessary because we cheat
    // above, but is still useful.
    return true;
  }

  // Directly check if the relevant set is preserved so we can short circuit
  // invalidating SCCs below.
  bool AreSCCAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();

  // Ok, we have a graph, so we can propagate the invalidation down into it.
  G->buildRefSCCs();
  for (auto &RC : G->postorder_ref_sccs())
    for (auto &C : RC) {
      Optional<PreservedAnalyses> InnerPA;

      // Check to see whether the preserved set needs to be adjusted based on
      // module-level analysis invalidation triggering deferred invalidation
      // for this SCC.
      if (auto *OuterProxy =
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
        for (const auto &OuterInvalidationPair :
             OuterProxy->getOuterInvalidations()) {
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
            if (!InnerPA)
              InnerPA = PA;
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
              InnerPA->abandon(InnerAnalysisID);
          }
        }

      // Check if we needed a custom PA set. If so we'll need to run the inner
      // invalidation.
      if (InnerPA) {
        InnerAM->invalidate(C, *InnerPA);
        continue;
      }

      // Otherwise we only need to do invalidation if the original PA set didn't
      // preserve all SCC analyses.
      if (!AreSCCAnalysesPreserved)
        InnerAM->invalidate(C, PA);
    }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

template <>
CGSCCAnalysisManagerModuleProxy::Result
CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
  // Force the Function analysis manager to also be available so that it can
  // be accessed in an SCC analysis and proxied onward to function passes.
  // FIXME: It is pretty awkward to just drop the result here and assert that
  // we can find it again later.
  (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);

  return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
}

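// The address of this static is what uniquely identifies the function
// analysis manager proxy as an analysis within the CGSCC analysis manager.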
AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;

FunctionAnalysisManagerCGSCCProxy::Result
FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
                                       CGSCCAnalysisManager &AM,
                                       LazyCallGraph &CG) {
  // Collect the FunctionAnalysisManager from the Module layer and use that to
  // build the proxy result.
  //
  // This allows us to rely on the FunctionAnalysisManagerModuleProxy to
  // invalidate the function analyses.
  auto &MAM = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG).getManager();
  Module &M = *C.begin()->getFunction().getParent();
  auto *FAMProxy = MAM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M);
  assert(FAMProxy && "The CGSCC pass manager requires that the FAM module "
                     "proxy is run on the module prior to entering the CGSCC "
                     "walk.");

  // Note that we special-case invalidation handling of this proxy in the CGSCC
  // analysis manager's Module proxy. This avoids the need to do anything
  // special here to recompute all of this if ever the FAM's module proxy goes
  // away.
  return Result(FAMProxy->getManager());
}

bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
    CGSCCAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy isn't marked as preserved, then even if the result remains
  // valid, the key itself may no longer be valid, so we clear everything.
  //
  // Note that in order to preserve this proxy, a module pass must ensure that
  // the FAM has been completely updated to handle the deletion of functions.
  // Specifically, any FAM-cached results for those functions need to have been
  // forcibly cleared. When preserved, this proxy will only invalidate results
  // cached on functions *still in the module* at the end of the module pass.
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
  if (!PAC.preserved() &&
      !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
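    // Clear the cached results for each function in this SCC individually;
    // results cached for functions outside the SCC remain untouched.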
    for (LazyCallGraph::Node &N : C)
      FAM->clear(N.getFunction(), N.getFunction().getName());

    return true;
  }

  // Directly check if the relevant set is preserved.
  bool AreFunctionAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();

  // Now walk all the functions to see if any inner analysis invalidation is
  // necessary.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    Optional<PreservedAnalyses> FunctionPA;

    // Check to see whether the preserved set needs to be pruned based on
    // SCC-level analysis invalidation that triggers deferred invalidation
    // registered with the outer analysis manager proxy for this function.
    if (auto *OuterProxy =
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
      for (const auto &OuterInvalidationPair :
           OuterProxy->getOuterInvalidations()) {
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
          if (!FunctionPA)
            FunctionPA = PA;
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
            FunctionPA->abandon(InnerAnalysisID);
        }
      }

    // Check if we needed a custom PA set, and if so we'll need to run the
    // inner invalidation.
    if (FunctionPA) {
      FAM->invalidate(F, *FunctionPA);
      continue;
    }

    // Otherwise we only need to do invalidation if the original PA set didn't
    // preserve all function analyses.
    if (!AreFunctionAnalysesPreserved)
      FAM->invalidate(F, PA);
  }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

} // end namespace llvm

/// When a new SCC is created for the graph and there might be function
/// analysis results cached for the functions now in that SCC, two forms of
/// updates are required.
///
/// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
/// created so that any subsequent invalidation events to the SCC are
/// propagated to the function analysis results cached for functions within it.
///
/// Second, if any of the functions within the SCC have analysis results with
/// outer analysis dependencies, then those dependencies would point to the
/// *wrong* SCC's analysis result. We forcibly invalidate the necessary
/// function analyses so that they don't retain stale handles.
static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
                                         LazyCallGraph &G,
                                         CGSCCAnalysisManager &AM) {
  // Get the relevant function analysis manager.
  auto &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).getManager();

  // Now walk the functions in this SCC and invalidate any function analysis
  // results that might have outer dependencies on an SCC analysis.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();

    auto *OuterProxy =
        FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
    if (!OuterProxy)
      // No outer analyses were queried, nothing to do.
      continue;

    // Forcibly abandon all the inner analyses with dependencies, but
    // invalidate nothing else.
    auto PA = PreservedAnalyses::all();
    for (const auto &OuterInvalidationPair :
         OuterProxy->getOuterInvalidations()) {
      const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
      for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
        PA.abandon(InnerAnalysisID);
    }

    // Now invalidate anything we found.
    FAM.invalidate(F, PA);
  }
}

/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
template <typename SCCRangeT>
static LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using SCC = LazyCallGraph::SCC;

  if (NewSCCRange.begin() == NewSCCRange.end())
    return C;

  // Add the current SCC to the worklist as its shape has changed.
  UR.CWorklist.insert(C);
  LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C
                    << "\n");

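  // Remember the original SCC so we can distinguish it from the SCCs split
  // out of it below.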
  SCC *OldC = C;

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

  // If we had a cached FAM proxy originally, we will want to create more of
  // them for each SCC that was split off.
  bool NeedFAMProxy =
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC) != nullptr;

  // We need to propagate an invalidation call to all but the newly current SCC
  // because the outer pass manager won't do that for us after splitting them.
375 // FIXME: We should accept a PreservedAnalysis from the CG updater so that if
Vedant Kumard3196742018-02-28 19:08:52 +0000376 // there are preserved analysis we can avoid invalidating them here for
Chandler Carruthbd9c2902017-07-09 03:59:31 +0000377 // split-off SCCs.
  // We know however that this will preserve any FAM proxy so go ahead and mark
  // that.
  PreservedAnalyses PA;
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  AM.invalidate(*OldC, PA);

  // Ensure the now-current SCC's function analyses are updated.
  if (NeedFAMProxy)
    updateNewSCCFunctionAnalyses(*C, G, AM);

  for (SCC &NewC : llvm::reverse(make_range(std::next(NewSCCRange.begin()),
                                            NewSCCRange.end()))) {
    assert(C != &NewC && "No need to re-visit the current SCC!");
    assert(OldC != &NewC && "Already handled the original SCC!");
    UR.CWorklist.insert(&NewC);
    LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");

    // Ensure new SCCs' function analyses are updated.
    if (NeedFAMProxy)
      updateNewSCCFunctionAnalyses(NewC, G, AM);

    // Also propagate a normal invalidation to the new SCC as only the current
    // SCC will get one from the pass manager infrastructure.
    AM.invalidate(NewC, PA);
  }
  return C;
}

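// See the comments on the corresponding declaration in CGSCCPassManager.h for
// the contract here: this walks the body of N's function and updates the call
// graph and analysis managers to reflect edges the pass removed, promoted
// (ref to call), or demoted (call to ref).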
LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using Node = LazyCallGraph::Node;
  using Edge = LazyCallGraph::Edge;
  using SCC = LazyCallGraph::SCC;
  using RefSCC = LazyCallGraph::RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  SmallPtrSet<Node *, 16> RetainedEdges;
  SmallSetVector<Node *, 4> PromotedRefTargets;
  SmallSetVector<Node *, 4> DemotedCallTargets;

  // First walk the function and handle all called functions. We do this first
  // because if there is a single call edge, whether there are ref edges is
  // irrelevant.
  for (Instruction &I : instructions(F))
    if (auto CS = CallSite(&I))
      if (Function *Callee = CS.getCalledFunction())
        if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
          Node &CalleeN = *G.lookup(*Callee);
          Edge *E = N->lookup(CalleeN);
          // FIXME: We should really handle adding new calls. While it will
          // make downstream usage more complex, there is no fundamental
          // limitation and it will allow passes within the CGSCC to be a bit
          // more flexible in what transforms they can do. Until then, we
          // verify that new calls haven't been introduced.
          assert(E && "No function transformations should introduce *new* "
                      "call edges! Any new calls should be modeled as "
                      "promoted existing ref edges!");
          bool Inserted = RetainedEdges.insert(&CalleeN).second;
          (void)Inserted;
          assert(Inserted && "We should never visit a function twice.");
          if (!E->isCall())
            PromotedRefTargets.insert(&CalleeN);
        }

  // Now walk all references.
  for (Instruction &I : instructions(F))
    for (Value *Op : I.operand_values())
      if (auto *C = dyn_cast<Constant>(Op))
        if (Visited.insert(C).second)
          Worklist.push_back(C);

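  // Helper to handle a single referenced function: record the retained ref
  // edge, and note any existing call edge that must now be demoted to a ref
  // edge.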
  auto VisitRef = [&](Function &Referee) {
    Node &RefereeN = *G.lookup(Referee);
    Edge *E = N->lookup(RefereeN);
    // FIXME: Similarly to new calls, we also currently preclude
    // introducing new references. See above for details.
    assert(E && "No function transformations should introduce *new* ref "
                "edges! Any new ref edges would require IPO which "
                "function passes aren't allowed to do!");
    bool Inserted = RetainedEdges.insert(&RefereeN).second;
    (void)Inserted;
    assert(Inserted && "We should never visit a function twice.");
    if (E->isCall())
      DemotedCallTargets.insert(&RefereeN);
  };
  LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);

  // Include synthetic reference edges to known, defined lib functions.
  for (auto *F : G.getLibFunctions())
    // While the list of lib functions doesn't have repeats, don't re-visit
    // anything handled above.
    if (!Visited.count(F))
      VisitRef(*F);

  // First remove all of the edges that are no longer present in this function.
  // The first step makes these edges uniformly ref edges and accumulates them
  // into a separate data structure so removal doesn't invalidate anything.
  SmallVector<Node *, 4> DeadTargets;
  for (Edge &E : *N) {
    if (RetainedEdges.count(&E.getNode()))
      continue;

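    // A dead call edge within this RefSCC must first be demoted to a ref edge
    // (potentially splitting the current SCC) before the underlying ref edge
    // can be removed below.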
    SCC &TargetC = *G.lookupSCC(E.getNode());
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    if (&TargetRC == RC && E.isCall()) {
      if (C != &TargetC) {
        // For separate SCCs this is trivial.
        RC->switchTrivialInternalEdgeToRef(N, E.getNode());
      } else {
        // Now update the call graph.
        C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
                                   G, N, C, AM, UR);
      }
    }

    // Now that this is ready for actual removal, put it into our list.
    DeadTargets.push_back(&E.getNode());
  }
  // Remove the easy cases quickly and actually pull them out of our list.
  DeadTargets.erase(
      llvm::remove_if(DeadTargets,
                      [&](Node *TargetN) {
                        SCC &TargetC = *G.lookupSCC(*TargetN);
                        RefSCC &TargetRC = TargetC.getOuterRefSCC();

                        // We can't trivially remove internal targets, so skip
                        // those.
                        if (&TargetRC == RC)
                          return false;

                        RC->removeOutgoingEdge(N, *TargetN);
                        LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '"
                                          << N << "' to '" << *TargetN
                                          << "'\n");
                        return true;
                      }),
      DeadTargets.end());

  // Now do a batch removal of the internal ref edges left.
  auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
  if (!NewRefSCCs.empty()) {
    // The old RefSCC is dead, mark it as such.
    UR.InvalidatedRefSCCs.insert(RC);

    // Note that we don't bother to invalidate analyses as ref-edge
    // connectivity is not really observable in any way and is intended
    // exclusively to be used for ordering of transforms rather than for
    // analysis conclusions.

    // Update RC to the "bottom".
    assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
    RC = &C->getOuterRefSCC();
    assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");

    // The RC worklist is in reverse postorder, so we enqueue the new ones in
    // RPO except for the one which contains the source node as that is the
    // "bottom" we will continue processing in the bottom-up walk.
    assert(NewRefSCCs.front() == RC &&
           "New current RefSCC not first in the returned list!");
    for (RefSCC *NewRC : llvm::reverse(make_range(std::next(NewRefSCCs.begin()),
                                                  NewRefSCCs.end()))) {
      assert(NewRC != RC && "Should not encounter the current RefSCC further "
                            "in the postorder list of new RefSCCs.");
      UR.RCWorklist.insert(NewRC);
      LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
                        << *NewRC << "\n");
    }
  }

  // Next demote all the call edges that are now ref edges. This helps make
  // the SCCs small which should minimize the work below as we don't want to
  // form cycles that this would break.
  for (Node *RefTarget : DemotedCallTargets) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToRef(N, *RefTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
                        << "' to '" << *RefTarget << "'\n");
      continue;
    }

    // We are switching an internal call edge to a ref edge. This may split up
    // some SCCs.
    if (C != &TargetC) {
      // For separate SCCs this is trivial.
      RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
      continue;
    }

    // Now update the call graph.
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
                               C, AM, UR);
  }

  // Now promote ref edges into call edges.
  for (Node *CallTarget : PromotedRefTargets) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToCall(N, *CallTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
                        << "' to '" << *CallTarget << "'\n");
      continue;
    }
    LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '"
                      << N << "' to '" << *CallTarget << "'\n");

    // Otherwise we are switching an internal ref edge to a call edge. This
    // may merge away some SCCs, and we add those to the UpdateResult. We also
    // need to make sure to update the worklist in the event SCCs have moved
    // before the current one in the post-order sequence.
    bool HasFunctionAnalysisProxy = false;
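    // Remember where the current SCC sits in this RefSCC's postorder list of
    // SCCs so we can detect SCCs that the merge reorders relative to it.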
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
    bool FormedCycle = RC->switchInternalEdgeToCall(
        N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
          for (SCC *MergedC : MergedSCCs) {
            assert(MergedC != &TargetC && "Cannot merge away the target SCC!");

            HasFunctionAnalysisProxy |=
                AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
                    *MergedC) != nullptr;

            // Mark that this SCC will no longer be valid.
            UR.InvalidatedSCCs.insert(MergedC);

            // FIXME: We should really do a 'clear' here to forcibly release
            // memory, but we don't have a good way of doing that and
            // preserving the function analyses.
            auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
            PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
            AM.invalidate(*MergedC, PA);
          }
        });

    // If we formed a cycle by creating this call, we need to update more data
    // structures.
    if (FormedCycle) {
      C = &TargetC;
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

      // If one of the invalidated SCCs had a cached proxy to a function
      // analysis manager, we need to create a proxy in the new current SCC as
      // the invalidated SCCs had their functions moved.
      if (HasFunctionAnalysisProxy)
        AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);

      // Any analyses cached for this SCC are no longer precise as the shape
      // has changed by introducing this cycle. However, we have taken care to
      // update the proxies so they remain valid.
      auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
      PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
      AM.invalidate(*C, PA);
    }
    auto NewSCCIndex = RC->find(*C) - RC->begin();
    // If we have actually moved an SCC to be topologically "below" the current
    // one due to merging, we will need to revisit the current SCC after
    // visiting those moved SCCs.
    //
    // It is critical that we *do not* revisit the current SCC unless we
    // actually move SCCs in the process of merging because otherwise we may
    // form a cycle where an SCC is split apart, merged, split, merged and so
    // on infinitely.
    if (InitialSCCIndex < NewSCCIndex) {
      // Put our current SCC back onto the worklist as we'll visit other SCCs
      // that are now definitively ordered prior to the current one in the
      // post-order sequence, and may end up observing more precise context to
      // optimize the current SCC.
      UR.CWorklist.insert(C);
      LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
                        << "\n");
      // Enqueue in reverse order as we pop off the back of the worklist.
      for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
                                                  RC->begin() + NewSCCIndex))) {
        UR.CWorklist.insert(&MovedC);
        LLVM_DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: "
                          << MovedC << "\n");
      }
    }
  }

  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");

  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
  // manager now that all the updates have been applied.
  if (RC != &InitialRC)
    UR.UpdatedRC = RC;
  if (C != &InitialC)
    UR.UpdatedC = C;

  return *C;
}