//===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/InstIterator.h"

#define DEBUG_TYPE "cgscc"

using namespace llvm;

// Explicit template instantiations and specialization definitions for core
// template typedefs.
namespace llvm {

// Explicit instantiations for the core proxy templates.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC, LazyCallGraph &>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;

/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
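///
/// As a rough sketch of how this specialization gets exercised (the CGSCC pass
/// named here is hypothetical), a pipeline might be assembled as:
///
/// \code
///   CGSCCPassManager CGPM;
///   CGPM.addPass(SomeCGSCCPass());
///   ModulePassManager MPM;
///   MPM.addPass(createModuleToPostOrderCGSCCPassAdaptor(std::move(CGPM)));
/// \endcode
///
/// The adaptor walks the call graph SCCs in post-order and invokes this run
/// method once per SCC, threading the \c CGSCCUpdateResult through each pass.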
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  PreservedAnalyses PA = PreservedAnalyses::all();

  if (DebugLogging)
    dbgs() << "Starting CGSCC pass manager run.\n";

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  for (auto &Pass : Passes) {
    if (DebugLogging)
      dbgs() << "Running pass: " << Pass->name() << " on " << *C << "\n";

    PreservedAnalyses PassPA = Pass->run(*C, AM, G, UR);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;

    // Check that we didn't miss any update scenario.
    assert(!UR.InvalidatedSCCs.count(C) && "Processing an invalid SCC!");
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses.
    AM.invalidate(*C, PassPA);

    // Finally, we intersect the final preserved analyses to compute the
    // aggregate preserved set for this pass manager.
    PA.intersect(std::move(PassPA));

    // FIXME: Historically, the pass managers all called the LLVM context's
    // yield function here. We don't have a generic way to acquire the
    // context and it isn't yet clear what the right pattern is for yielding
    // in the new pass manager so it is currently omitted.
    // ...getContext().yield();
  }

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();

  if (DebugLogging)
    dbgs() << "Finished CGSCC pass manager run.\n";

  return PA;
}

bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
    Module &M, const PreservedAnalyses &PA,
    ModuleAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy or the call graph is going to be invalidated, we also need
  // to clear all the keys coming from that analysis.
  //
  // We also directly invalidate the FAM's module proxy if necessary, and if
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
  // it to handle module -> function analysis invalidation in the face of
  // structural changes and so if it's unavailable we conservatively clear the
  // entire SCC layer as well rather than trying to do invalidation ourselves.
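  //
  // As a rough illustration, a module pass that wants this entire CGSCC layer
  // to survive would need to report something along the lines of:
  //
  //   PreservedAnalyses PA;
  //   PA.preserve<CGSCCAnalysisManagerModuleProxy>();
  //   PA.preserve<LazyCallGraphAnalysis>();
  //   PA.preserve<FunctionAnalysisManagerModuleProxy>();
  //
  // so that none of the checks below trigger the wholesale clear.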
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
    InnerAM->clear();

    // And the proxy itself should be marked as invalid so that we can observe
    // the new call graph. This isn't strictly necessary because we cheat
    // above, but is still useful.
    return true;
  }

  // Directly check if the relevant set is preserved so we can short circuit
  // invalidating SCCs below.
  bool AreSCCAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();

  // Ok, we have a graph, so we can propagate the invalidation down into it.
  G->buildRefSCCs();
  for (auto &RC : G->postorder_ref_sccs())
    for (auto &C : RC) {
      Optional<PreservedAnalyses> InnerPA;

      // Check to see whether the preserved set needs to be adjusted based on
      // module-level analysis invalidation triggering deferred invalidation
      // for this SCC.
      if (auto *OuterProxy =
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
        for (const auto &OuterInvalidationPair :
             OuterProxy->getOuterInvalidations()) {
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
            if (!InnerPA)
              InnerPA = PA;
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
              InnerPA->abandon(InnerAnalysisID);
          }
        }

      // Check if we needed a custom PA set. If so we'll need to run the inner
      // invalidation.
      if (InnerPA) {
        InnerAM->invalidate(C, *InnerPA);
        continue;
      }

      // Otherwise we only need to do invalidation if the original PA set didn't
      // preserve all SCC analyses.
      if (!AreSCCAnalysesPreserved)
        InnerAM->invalidate(C, PA);
    }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

template <>
CGSCCAnalysisManagerModuleProxy::Result
CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
  // Force the Function analysis manager to also be available so that it can
  // be accessed in an SCC analysis and proxied onward to function passes.
  // FIXME: It is pretty awkward to just drop the result here and assert that
  // we can find it again later.
  (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);

  return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
}

AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;

FunctionAnalysisManagerCGSCCProxy::Result
FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
                                       CGSCCAnalysisManager &AM,
                                       LazyCallGraph &CG) {
  // Collect the FunctionAnalysisManager from the Module layer and use that to
  // build the proxy result.
  //
  // This allows us to rely on the FunctionAnalysisManagerModuleProxy to
  // invalidate the function analyses.
  auto &MAM = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG).getManager();
  Module &M = *C.begin()->getFunction().getParent();
  auto *FAMProxy = MAM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M);
  assert(FAMProxy && "The CGSCC pass manager requires that the FAM module "
                     "proxy is run on the module prior to entering the CGSCC "
                     "walk.");

  // Note that we special-case invalidation handling of this proxy in the CGSCC
  // analysis manager's Module proxy. This avoids the need to do anything
  // special here to recompute all of this if ever the FAM's module proxy goes
  // away.
  return Result(FAMProxy->getManager());
}

bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
    CGSCCAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy isn't marked as preserved, then even if the result remains
  // valid, the key itself may no longer be valid, so we clear everything.
  //
  // Note that in order to preserve this proxy, a module pass must ensure that
  // the FAM has been completely updated to handle the deletion of functions.
  // Specifically, any FAM-cached results for those functions need to have been
  // forcibly cleared. When preserved, this proxy will only invalidate results
  // cached on functions *still in the module* at the end of the module pass.
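  //
  // As a rough sketch (FAM and DeadF are placeholder names for the module's
  // FunctionAnalysisManager and a function being removed), such a module pass
  // would need to do something like:
  //
  //   FAM.clear(DeadF);
  //   DeadF.eraseFromParent();
  //   ...
  //   PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  //
  // before reporting its preserved analyses.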
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
  if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
    for (LazyCallGraph::Node &N : C)
      FAM->clear(N.getFunction());

    return true;
  }

  // Directly check if the relevant set is preserved.
  bool AreFunctionAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();

  // Now walk all the functions to see if any inner analysis invalidation is
  // necessary.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    Optional<PreservedAnalyses> FunctionPA;

    // Check to see whether the preserved set needs to be pruned based on
    // SCC-level analysis invalidation that triggers deferred invalidation
    // registered with the outer analysis manager proxy for this function.
    if (auto *OuterProxy =
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
      for (const auto &OuterInvalidationPair :
           OuterProxy->getOuterInvalidations()) {
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
          if (!FunctionPA)
            FunctionPA = PA;
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
            FunctionPA->abandon(InnerAnalysisID);
        }
      }

    // Check if we needed a custom PA set, and if so we'll need to run the
    // inner invalidation.
    if (FunctionPA) {
      FAM->invalidate(F, *FunctionPA);
      continue;
    }

    // Otherwise we only need to do invalidation if the original PA set didn't
    // preserve all function analyses.
    if (!AreFunctionAnalysesPreserved)
      FAM->invalidate(F, PA);
  }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

} // End llvm namespace

/// When a new SCC is created for the graph and there might be function
/// analysis results cached for the functions now in that SCC, two forms of
/// updates are required.
///
/// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
/// created so that any subsequent invalidation events to the SCC are
/// propagated to the function analysis results cached for functions within it.
///
/// Second, if any of the functions within the SCC have analysis results with
/// outer analysis dependencies, then those dependencies would point to the
/// *wrong* SCC's analysis result. We forcibly invalidate the necessary
/// function analyses so that they don't retain stale handles.
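///
/// For example (purely illustrative), if an SCC {f, g} is split and g lands in
/// a brand new SCC, a function analysis result for g that registered a
/// dependency on an SCC analysis of the old {f, g} SCC must be invalidated
/// here: that outer result no longer describes the SCC g now belongs to.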
static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
                                         LazyCallGraph &G,
                                         CGSCCAnalysisManager &AM) {
  // Get the relevant function analysis manager.
  auto &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).getManager();

  // Now walk the functions in this SCC and invalidate any function analysis
  // results that might have outer dependencies on an SCC analysis.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();

    auto *OuterProxy =
        FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
    if (!OuterProxy)
      // No outer analyses were queried, nothing to do.
      continue;

    // Forcibly abandon all the inner analyses with dependencies, but
    // invalidate nothing else.
    auto PA = PreservedAnalyses::all();
    for (const auto &OuterInvalidationPair :
         OuterProxy->getOuterInvalidations()) {
      const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
      for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
        PA.abandon(InnerAnalysisID);
    }

    // Now invalidate anything we found.
    FAM.invalidate(F, PA);
  }
}

namespace {
/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
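///
/// A typical use later in this file looks roughly like (TargetN standing in
/// for the target node of the edge being mutated):
///
/// \code
///   C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, TargetN), G, N,
///                              C, AM, UR);
/// \endcode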
template <typename SCCRangeT>
LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  typedef LazyCallGraph::SCC SCC;

  if (NewSCCRange.begin() == NewSCCRange.end())
    return C;

  // Add the current SCC to the worklist as its shape has changed.
  UR.CWorklist.insert(C);
  DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C << "\n");

  SCC *OldC = C;

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

  // If we had a cached FAM proxy originally, we will want to create more of
  // them for each SCC that was split off.
  bool NeedFAMProxy =
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC) != nullptr;

  // We need to propagate an invalidation call to all but the newly current SCC
  // because the outer pass manager won't do that for us after splitting them.
  // FIXME: We should accept a PreservedAnalyses from the CG updater so that if
  // there are preserved analyses we can avoid invalidating them here for
  // split-off SCCs.
  // We know however that this will preserve any FAM proxy so go ahead and mark
  // that.
  PreservedAnalyses PA;
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  AM.invalidate(*OldC, PA);

  // Ensure the now-current SCC's function analyses are updated.
  if (NeedFAMProxy)
    updateNewSCCFunctionAnalyses(*C, G, AM);

  for (SCC &NewC :
       reverse(make_range(std::next(NewSCCRange.begin()), NewSCCRange.end()))) {
    assert(C != &NewC && "No need to re-visit the current SCC!");
    assert(OldC != &NewC && "Already handled the original SCC!");
    UR.CWorklist.insert(&NewC);
    DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");

    // Ensure new SCCs' function analyses are updated.
    if (NeedFAMProxy)
      updateNewSCCFunctionAnalyses(NewC, G, AM);

    // Also propagate a normal invalidation to the new SCC as only the current
    // SCC will get one from the pass manager infrastructure.
    AM.invalidate(NewC, PA);
  }
  return C;
}
}

LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  typedef LazyCallGraph::Node Node;
  typedef LazyCallGraph::Edge Edge;
  typedef LazyCallGraph::SCC SCC;
  typedef LazyCallGraph::RefSCC RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
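  // (For example, and purely as an illustration: devirtualizing an indirect
  // call into a direct call promotes an existing ref edge to a call edge,
  // while replacing a direct call with code that merely takes the callee's
  // address demotes a call edge to a ref edge.)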
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  SmallPtrSet<Node *, 16> RetainedEdges;
  SmallSetVector<Node *, 4> PromotedRefTargets;
  SmallSetVector<Node *, 4> DemotedCallTargets;

  // First walk the function and handle all called functions. We do this first
  // because if there is a single call edge, whether there are ref edges is
  // irrelevant.
  for (Instruction &I : instructions(F))
    if (auto CS = CallSite(&I))
      if (Function *Callee = CS.getCalledFunction())
        if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
          Node &CalleeN = *G.lookup(*Callee);
          Edge *E = N->lookup(CalleeN);
          // FIXME: We should really handle adding new calls. While it will
          // make downstream usage more complex, there is no fundamental
          // limitation and it will allow passes within the CGSCC to be a bit
          // more flexible in what transforms they can do. Until then, we
          // verify that new calls haven't been introduced.
          assert(E && "No function transformations should introduce *new* "
                      "call edges! Any new calls should be modeled as "
                      "promoted existing ref edges!");
          bool Inserted = RetainedEdges.insert(&CalleeN).second;
          (void)Inserted;
          assert(Inserted && "We should never visit a function twice.");
          if (!E->isCall())
            PromotedRefTargets.insert(&CalleeN);
        }

  // Now walk all references.
  for (Instruction &I : instructions(F))
    for (Value *Op : I.operand_values())
      if (Constant *C = dyn_cast<Constant>(Op))
        if (Visited.insert(C).second)
          Worklist.push_back(C);

  auto VisitRef = [&](Function &Referee) {
    Node &RefereeN = *G.lookup(Referee);
    Edge *E = N->lookup(RefereeN);
    // FIXME: Similarly to new calls, we also currently preclude
    // introducing new references. See above for details.
    assert(E && "No function transformations should introduce *new* ref "
                "edges! Any new ref edges would require IPO which "
                "function passes aren't allowed to do!");
    bool Inserted = RetainedEdges.insert(&RefereeN).second;
    (void)Inserted;
    assert(Inserted && "We should never visit a function twice.");
    if (E->isCall())
      DemotedCallTargets.insert(&RefereeN);
  };
  LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);

  // Include synthetic reference edges to known, defined lib functions.
  for (auto *F : G.getLibFunctions())
    // While the list of lib functions doesn't have repeats, don't re-visit
    // anything handled above.
    if (!Visited.count(F))
      VisitRef(*F);

  // First remove all of the edges that are no longer present in this function.
  // The first step makes these edges uniformly ref edges and accumulates them
  // into a separate data structure so removal doesn't invalidate anything.
  SmallVector<Node *, 4> DeadTargets;
  for (Edge &E : *N) {
    if (RetainedEdges.count(&E.getNode()))
      continue;

    SCC &TargetC = *G.lookupSCC(E.getNode());
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    if (&TargetRC == RC && E.isCall()) {
      if (C != &TargetC) {
        // For separate SCCs this is trivial.
        RC->switchTrivialInternalEdgeToRef(N, E.getNode());
      } else {
        // Now update the call graph.
        C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
                                   G, N, C, AM, UR);
      }
    }

    // Now that this is ready for actual removal, put it into our list.
    DeadTargets.push_back(&E.getNode());
  }
  // Remove the easy cases quickly and actually pull them out of our list.
  DeadTargets.erase(
      llvm::remove_if(DeadTargets,
                      [&](Node *TargetN) {
                        SCC &TargetC = *G.lookupSCC(*TargetN);
                        RefSCC &TargetRC = TargetC.getOuterRefSCC();

                        // We can't trivially remove internal targets, so skip
                        // those.
                        if (&TargetRC == RC)
                          return false;

                        RC->removeOutgoingEdge(N, *TargetN);
                        DEBUG(dbgs() << "Deleting outgoing edge from '" << N
                                     << "' to '" << TargetN << "'\n");
                        return true;
                      }),
      DeadTargets.end());

  // Now do a batch removal of the internal ref edges left.
  auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
  if (!NewRefSCCs.empty()) {
    // The old RefSCC is dead, mark it as such.
    UR.InvalidatedRefSCCs.insert(RC);

    // Note that we don't bother to invalidate analyses as ref-edge
    // connectivity is not really observable in any way and is intended
    // exclusively to be used for ordering of transforms rather than for
    // analysis conclusions.

    // Update RC to the "bottom".
    assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
    RC = &C->getOuterRefSCC();
    assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");

    // The RC worklist is in reverse postorder, so we enqueue the new ones in
    // RPO except for the one which contains the source node as that is the
    // "bottom" we will continue processing in the bottom-up walk.
    assert(NewRefSCCs.front() == RC &&
           "New current RefSCC not first in the returned list!");
    for (RefSCC *NewRC :
         reverse(make_range(std::next(NewRefSCCs.begin()), NewRefSCCs.end()))) {
      assert(NewRC != RC && "Should not encounter the current RefSCC further "
                            "in the postorder list of new RefSCCs.");
      UR.RCWorklist.insert(NewRC);
      DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
                   << *NewRC << "\n");
    }
  }

  // Next demote all the call edges that are now ref edges. This helps make
  // the SCCs small which should minimize the work below as we don't want to
  // form cycles that this would break.
  for (Node *RefTarget : DemotedCallTargets) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToRef(N, *RefTarget);
      DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
                   << "' to '" << *RefTarget << "'\n");
      continue;
    }

    // We are switching an internal call edge to a ref edge. This may split up
    // some SCCs.
    if (C != &TargetC) {
      // For separate SCCs this is trivial.
      RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
      continue;
    }

    // Now update the call graph.
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
                               C, AM, UR);
  }

  // Now promote ref edges into call edges.
  for (Node *CallTarget : PromotedRefTargets) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToCall(N, *CallTarget);
      DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
                   << "' to '" << *CallTarget << "'\n");
      continue;
    }
    DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '" << N
                 << "' to '" << *CallTarget << "'\n");

    // Otherwise we are switching an internal ref edge to a call edge. This
    // may merge away some SCCs, and we add those to the UpdateResult. We also
    // need to make sure to update the worklist in the event SCCs have moved
    // before the current one in the post-order sequence.
    bool HasFunctionAnalysisProxy = false;
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
    bool FormedCycle = RC->switchInternalEdgeToCall(
        N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
          for (SCC *MergedC : MergedSCCs) {
            assert(MergedC != &TargetC && "Cannot merge away the target SCC!");

            HasFunctionAnalysisProxy |=
                AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
                    *MergedC) != nullptr;

            // Mark that this SCC will no longer be valid.
            UR.InvalidatedSCCs.insert(MergedC);

            // FIXME: We should really do a 'clear' here to forcibly release
            // memory, but we don't have a good way of doing that and
            // preserving the function analyses.
            auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
            PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
            AM.invalidate(*MergedC, PA);
          }
        });

    // If we formed a cycle by creating this call, we need to update more data
    // structures.
    if (FormedCycle) {
      C = &TargetC;
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

      // If one of the invalidated SCCs had a cached proxy to a function
      // analysis manager, we need to create a proxy in the new current SCC as
      // the invalidated SCCs had their functions moved.
      if (HasFunctionAnalysisProxy)
        AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);

      // Any analyses cached for this SCC are no longer precise as the shape
      // has changed by introducing this cycle. However, we have taken care to
      // update the proxies so they remain valid.
      auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
      PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
      AM.invalidate(*C, PA);
    }
    auto NewSCCIndex = RC->find(*C) - RC->begin();
    // If we have actually moved an SCC to be topologically "below" the current
    // one due to merging, we will need to revisit the current SCC after
    // visiting those moved SCCs.
    //
    // It is critical that we *do not* revisit the current SCC unless we
    // actually move SCCs in the process of merging because otherwise we may
    // form a cycle where an SCC is split apart, merged, split, merged and so
    // on infinitely.
    if (InitialSCCIndex < NewSCCIndex) {
      // Put our current SCC back onto the worklist as we'll visit other SCCs
      // that are now definitively ordered prior to the current one in the
      // post-order sequence, and may end up observing more precise context to
      // optimize the current SCC.
      UR.CWorklist.insert(C);
      DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
                   << "\n");
      // Enqueue in reverse order as we pop off the back of the worklist.
      for (SCC &MovedC : reverse(make_range(RC->begin() + InitialSCCIndex,
                                            RC->begin() + NewSCCIndex))) {
        UR.CWorklist.insert(&MovedC);
        DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: "
                     << MovedC << "\n");
      }
    }
  }

  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");

  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
  // manager now that all the updates have been applied.
  if (RC != &InitialRC)
    UR.UpdatedRC = RC;
  if (C != &InitialC)
    UR.UpdatedC = C;

  return *C;
}