//===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/PassManager.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <iterator>

#define DEBUG_TYPE "cgscc"

using namespace llvm;

// Explicit template instantiations and specialization definitions for core
// template typedefs.
namespace llvm {

// Explicit instantiations for the core proxy templates.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC, LazyCallGraph &>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;

/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
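  // Start with all analyses preserved; each pass run below intersects its own
  // preserved set into this value.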
  PreservedAnalyses PA = PreservedAnalyses::all();

  if (DebugLogging)
    dbgs() << "Starting CGSCC pass manager run.\n";

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  for (auto &Pass : Passes) {
    if (DebugLogging)
      dbgs() << "Running pass: " << Pass->name() << " on " << *C << "\n";

    PreservedAnalyses PassPA = Pass->run(*C, AM, G, UR);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;

    // Check that we didn't miss any update scenario.
    assert(!UR.InvalidatedSCCs.count(C) && "Processing an invalid SCC!");
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses.
    AM.invalidate(*C, PassPA);

    // Finally, we intersect the final preserved analyses to compute the
    // aggregate preserved set for this pass manager.
    PA.intersect(std::move(PassPA));

    // FIXME: Historically, the pass managers all called the LLVM context's
    // yield function here. We don't have a generic way to acquire the
    // context and it isn't yet clear what the right pattern is for yielding
    // in the new pass manager so it is currently omitted.
    // ...getContext().yield();
  }

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();

  if (DebugLogging)
    dbgs() << "Finished CGSCC pass manager run.\n";

  return PA;
}

bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
    Module &M, const PreservedAnalyses &PA,
    ModuleAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy or the call graph is going to be invalidated, we also need
  // to clear all the keys coming from that analysis.
  //
  // We also directly invalidate the FAM's module proxy if necessary, and if
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
  // it to handle module -> function analysis invalidation in the face of
  // structural changes and so if it's unavailable we conservatively clear the
  // entire SCC layer as well rather than trying to do invalidation ourselves.
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
    InnerAM->clear();

    // And the proxy itself should be marked as invalid so that we can observe
    // the new call graph. This isn't strictly necessary because we cheat
    // above, but is still useful.
    return true;
  }

  // Directly check if the relevant set is preserved so we can short circuit
  // invalidating SCCs below.
  bool AreSCCAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();

  // Ok, we have a graph, so we can propagate the invalidation down into it.
  G->buildRefSCCs();
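  // Walk every SCC of every RefSCC in the graph in postorder, applying the
  // appropriate invalidation to each one.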
  for (auto &RC : G->postorder_ref_sccs())
    for (auto &C : RC) {
      Optional<PreservedAnalyses> InnerPA;

      // Check to see whether the preserved set needs to be adjusted based on
      // module-level analysis invalidation triggering deferred invalidation
      // for this SCC.
      if (auto *OuterProxy =
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
        for (const auto &OuterInvalidationPair :
             OuterProxy->getOuterInvalidations()) {
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
            if (!InnerPA)
              InnerPA = PA;
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
              InnerPA->abandon(InnerAnalysisID);
          }
        }

      // Check if we needed a custom PA set. If so we'll need to run the inner
      // invalidation.
      if (InnerPA) {
        InnerAM->invalidate(C, *InnerPA);
        continue;
      }

      // Otherwise we only need to do invalidation if the original PA set
      // didn't preserve all SCC analyses.
      if (!AreSCCAnalysesPreserved)
        InnerAM->invalidate(C, PA);
    }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

template <>
CGSCCAnalysisManagerModuleProxy::Result
CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
  // Force the Function analysis manager to also be available so that it can
  // be accessed in an SCC analysis and proxied onward to function passes.
  // FIXME: It is pretty awkward to just drop the result here and assert that
  // we can find it again later.
  (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);

  return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
}

AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;

FunctionAnalysisManagerCGSCCProxy::Result
FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
                                       CGSCCAnalysisManager &AM,
                                       LazyCallGraph &CG) {
  // Collect the FunctionAnalysisManager from the Module layer and use that to
  // build the proxy result.
  //
  // This allows us to rely on the FunctionAnalysisManagerModuleProxy to
  // invalidate the function analyses.
  auto &MAM = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG).getManager();
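  // All the functions in an SCC live in a single module, so any member of the
  // SCC suffices to reach it.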
  Module &M = *C.begin()->getFunction().getParent();
  auto *FAMProxy = MAM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M);
  assert(FAMProxy && "The CGSCC pass manager requires that the FAM module "
                     "proxy is run on the module prior to entering the CGSCC "
                     "walk.");

  // Note that we special-case invalidation handling of this proxy in the CGSCC
  // analysis manager's Module proxy. This avoids the need to do anything
  // special here to recompute all of this if ever the FAM's module proxy goes
  // away.
  return Result(FAMProxy->getManager());
}

bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
    CGSCCAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy isn't marked as preserved, then even if the result remains
  // valid, the key itself may no longer be valid, so we clear everything.
  //
  // Note that in order to preserve this proxy, a module pass must ensure that
  // the FAM has been completely updated to handle the deletion of functions.
  // Specifically, any FAM-cached results for those functions need to have been
  // forcibly cleared. When preserved, this proxy will only invalidate results
  // cached on functions *still in the module* at the end of the module pass.
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
  if (!PAC.preserved() &&
      !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
    for (LazyCallGraph::Node &N : C)
      FAM->clear(N.getFunction());

    return true;
  }

  // Directly check if the relevant set is preserved.
  bool AreFunctionAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();

  // Now walk all the functions to see if any inner analysis invalidation is
  // necessary.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    Optional<PreservedAnalyses> FunctionPA;

    // Check to see whether the preserved set needs to be pruned based on
    // SCC-level analysis invalidation that triggers deferred invalidation
    // registered with the outer analysis manager proxy for this function.
    if (auto *OuterProxy =
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
      for (const auto &OuterInvalidationPair :
           OuterProxy->getOuterInvalidations()) {
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
          if (!FunctionPA)
            FunctionPA = PA;
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
            FunctionPA->abandon(InnerAnalysisID);
        }
      }

    // Check if we needed a custom PA set, and if so we'll need to run the
    // inner invalidation.
    if (FunctionPA) {
      FAM->invalidate(F, *FunctionPA);
      continue;
    }

    // Otherwise we only need to do invalidation if the original PA set didn't
    // preserve all function analyses.
    if (!AreFunctionAnalysesPreserved)
      FAM->invalidate(F, PA);
  }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

} // end namespace llvm

/// When a new SCC is created for the graph and there might be function
/// analysis results cached for the functions now in that SCC, two forms of
/// updates are required.
///
/// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
/// created so that any subsequent invalidation events to the SCC are
/// propagated to the function analysis results cached for functions within it.
///
/// Second, if any of the functions within the SCC have analysis results with
/// outer analysis dependencies, then those dependencies would point to the
/// *wrong* SCC's analysis result. We forcibly invalidate the necessary
/// function analyses so that they don't retain stale handles.
static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
                                         LazyCallGraph &G,
                                         CGSCCAnalysisManager &AM) {
  // Get the relevant function analysis manager.
  auto &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).getManager();

  // Now walk the functions in this SCC and invalidate any function analysis
  // results that might have outer dependencies on an SCC analysis.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();

    auto *OuterProxy =
        FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
    if (!OuterProxy)
      // No outer analyses were queried, nothing to do.
      continue;

    // Forcibly abandon all the inner analyses with dependencies, but
    // invalidate nothing else.
    auto PA = PreservedAnalyses::all();
    for (const auto &OuterInvalidationPair :
         OuterProxy->getOuterInvalidations()) {
      const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
      for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
        PA.abandon(InnerAnalysisID);
    }

    // Now invalidate anything we found.
    FAM.invalidate(F, PA);
  }
}

/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
template <typename SCCRangeT>
static LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using SCC = LazyCallGraph::SCC;

  if (NewSCCRange.begin() == NewSCCRange.end())
    return C;

  // Add the current SCC to the worklist as its shape has changed.
  UR.CWorklist.insert(C);
  DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
               << "\n");

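  // Remember the original SCC so we can distinguish it from the SCCs split off
  // of it and direct invalidation at it below.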
  SCC *OldC = C;

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

  // If we had a cached FAM proxy originally, we will want to create more of
  // them for each SCC that was split off.
  bool NeedFAMProxy =
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC) != nullptr;

  // We need to propagate an invalidation call to all but the newly current SCC
  // because the outer pass manager won't do that for us after splitting them.
  // FIXME: We should accept a PreservedAnalyses from the CG updater so that if
  // there are preserved analyses we can avoid invalidating them here for
  // split-off SCCs.
  // We know however that this will preserve any FAM proxy so go ahead and mark
  // that.
  PreservedAnalyses PA;
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  AM.invalidate(*OldC, PA);

  // Ensure the now-current SCC's function analyses are updated.
  if (NeedFAMProxy)
    updateNewSCCFunctionAnalyses(*C, G, AM);

  for (SCC &NewC : llvm::reverse(make_range(std::next(NewSCCRange.begin()),
                                            NewSCCRange.end()))) {
    assert(C != &NewC && "No need to re-visit the current SCC!");
    assert(OldC != &NewC && "Already handled the original SCC!");
    UR.CWorklist.insert(&NewC);
    DEBUG(dbgs() << "Enqueuing a newly formed SCC: " << NewC << "\n");

    // Ensure new SCCs' function analyses are updated.
    if (NeedFAMProxy)
      updateNewSCCFunctionAnalyses(NewC, G, AM);

    // Also propagate a normal invalidation to the new SCC as only the current
    // will get one from the pass manager infrastructure.
    AM.invalidate(NewC, PA);
  }
  return C;
}

LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using Node = LazyCallGraph::Node;
  using Edge = LazyCallGraph::Edge;
  using SCC = LazyCallGraph::SCC;
  using RefSCC = LazyCallGraph::RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  SmallPtrSet<Node *, 16> RetainedEdges;
  SmallSetVector<Node *, 4> PromotedRefTargets;
  SmallSetVector<Node *, 4> DemotedCallTargets;

  // First walk the function and handle all called functions. We do this first
  // because if there is a single call edge, whether there are ref edges is
  // irrelevant.
  for (Instruction &I : instructions(F))
    if (auto CS = CallSite(&I))
      if (Function *Callee = CS.getCalledFunction())
        if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
          Node &CalleeN = *G.lookup(*Callee);
          Edge *E = N->lookup(CalleeN);
          // FIXME: We should really handle adding new calls. While it will
          // make downstream usage more complex, there is no fundamental
          // limitation and it will allow passes within the CGSCC to be a bit
          // more flexible in what transforms they can do. Until then, we
          // verify that new calls haven't been introduced.
          assert(E && "No function transformations should introduce *new* "
                      "call edges! Any new calls should be modeled as "
                      "promoted existing ref edges!");
          bool Inserted = RetainedEdges.insert(&CalleeN).second;
          (void)Inserted;
          assert(Inserted && "We should never visit a function twice.");
          if (!E->isCall())
            PromotedRefTargets.insert(&CalleeN);
        }

  // Now walk all references.
  for (Instruction &I : instructions(F))
    for (Value *Op : I.operand_values())
      if (auto *C = dyn_cast<Constant>(Op))
        if (Visited.insert(C).second)
          Worklist.push_back(C);

  auto VisitRef = [&](Function &Referee) {
    Node &RefereeN = *G.lookup(Referee);
    Edge *E = N->lookup(RefereeN);
    // FIXME: Similarly to new calls, we also currently preclude
    // introducing new references. See above for details.
    assert(E && "No function transformations should introduce *new* ref "
                "edges! Any new ref edges would require IPO which "
                "function passes aren't allowed to do!");
    bool Inserted = RetainedEdges.insert(&RefereeN).second;
    (void)Inserted;
    assert(Inserted && "We should never visit a function twice.");
    if (E->isCall())
      DemotedCallTargets.insert(&RefereeN);
  };
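  // Walk the transitive closure of the constants gathered above, invoking
  // VisitRef on every defined function they reference.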
  LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);

  // Include synthetic reference edges to known, defined lib functions.
  for (auto *F : G.getLibFunctions())
    // While the list of lib functions doesn't have repeats, don't re-visit
    // anything handled above.
    if (!Visited.count(F))
      VisitRef(*F);

  // First remove all of the edges that are no longer present in this function.
  // The first step makes these edges uniformly ref edges and accumulates them
  // into a separate data structure so removal doesn't invalidate anything.
  SmallVector<Node *, 4> DeadTargets;
  for (Edge &E : *N) {
    if (RetainedEdges.count(&E.getNode()))
      continue;

    SCC &TargetC = *G.lookupSCC(E.getNode());
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    if (&TargetRC == RC && E.isCall()) {
      if (C != &TargetC) {
        // For separate SCCs this is trivial.
        RC->switchTrivialInternalEdgeToRef(N, E.getNode());
      } else {
        // Now update the call graph.
        C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
                                   G, N, C, AM, UR);
      }
    }

    // Now that this is ready for actual removal, put it into our list.
    DeadTargets.push_back(&E.getNode());
  }
  // Remove the easy cases quickly and actually pull them out of our list.
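  // Note that the remove_if predicate below is deliberately side-effecting:
  // it is what actually removes each dead outgoing edge from the graph.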
  DeadTargets.erase(
      llvm::remove_if(DeadTargets,
                      [&](Node *TargetN) {
                        SCC &TargetC = *G.lookupSCC(*TargetN);
                        RefSCC &TargetRC = TargetC.getOuterRefSCC();

                        // We can't trivially remove internal targets, so skip
                        // those.
                        if (&TargetRC == RC)
                          return false;

                        RC->removeOutgoingEdge(N, *TargetN);
                        DEBUG(dbgs() << "Deleting outgoing edge from '" << N
                                     << "' to '" << *TargetN << "'\n");
                        return true;
                      }),
      DeadTargets.end());

  // Now do a batch removal of the internal ref edges left.
  auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
  if (!NewRefSCCs.empty()) {
    // The old RefSCC is dead, mark it as such.
    UR.InvalidatedRefSCCs.insert(RC);

    // Note that we don't bother to invalidate analyses as ref-edge
    // connectivity is not really observable in any way and is intended
    // exclusively to be used for ordering of transforms rather than for
    // analysis conclusions.

    // Update RC to the "bottom".
    assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
    RC = &C->getOuterRefSCC();
    assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");

    // The RC worklist is in reverse postorder, so we enqueue the new ones in
    // RPO except for the one which contains the source node as that is the
    // "bottom" we will continue processing in the bottom-up walk.
    assert(NewRefSCCs.front() == RC &&
           "New current RefSCC not first in the returned list!");
    for (RefSCC *NewRC : llvm::reverse(make_range(std::next(NewRefSCCs.begin()),
                                                  NewRefSCCs.end()))) {
      assert(NewRC != RC && "Should not encounter the current RefSCC further "
                            "in the postorder list of new RefSCCs.");
      UR.RCWorklist.insert(NewRC);
      DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
                   << *NewRC << "\n");
    }
  }

  // Next demote all the call edges that are now ref edges. This helps make
  // the SCCs small which should minimize the work below as we don't want to
  // form cycles that this would break.
  for (Node *RefTarget : DemotedCallTargets) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToRef(N, *RefTarget);
      DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
                   << "' to '" << *RefTarget << "'\n");
      continue;
    }

    // We are switching an internal call edge to a ref edge. This may split up
    // some SCCs.
    if (C != &TargetC) {
      // For separate SCCs this is trivial.
      RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
      continue;
    }

    // Now update the call graph.
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
                               C, AM, UR);
  }

  // Now promote ref edges into call edges.
  for (Node *CallTarget : PromotedRefTargets) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToCall(N, *CallTarget);
      DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
                   << "' to '" << *CallTarget << "'\n");
      continue;
    }
    DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '" << N
                 << "' to '" << *CallTarget << "'\n");

    // Otherwise we are switching an internal ref edge to a call edge. This
    // may merge away some SCCs, and we add those to the UpdateResult. We also
    // need to make sure to update the worklist in the event SCCs have moved
    // before the current one in the post-order sequence.
    bool HasFunctionAnalysisProxy = false;
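    // Record where the current SCC sits within its RefSCC's postorder
    // sequence so we can detect below whether the merge moved any SCCs across
    // it.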
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
    bool FormedCycle = RC->switchInternalEdgeToCall(
        N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
          for (SCC *MergedC : MergedSCCs) {
            assert(MergedC != &TargetC && "Cannot merge away the target SCC!");

            HasFunctionAnalysisProxy |=
                AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
                    *MergedC) != nullptr;

            // Mark that this SCC will no longer be valid.
            UR.InvalidatedSCCs.insert(MergedC);

            // FIXME: We should really do a 'clear' here to forcibly release
            // memory, but we don't have a good way of doing that and
            // preserving the function analyses.
            auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
            PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
            AM.invalidate(*MergedC, PA);
          }
        });

    // If we formed a cycle by creating this call, we need to update more data
    // structures.
    if (FormedCycle) {
      C = &TargetC;
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

      // If one of the invalidated SCCs had a cached proxy to a function
      // analysis manager, we need to create a proxy in the new current SCC as
      // the invalidated SCCs had their functions moved.
      if (HasFunctionAnalysisProxy)
        AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);

      // Any analyses cached for this SCC are no longer precise as the shape
      // has changed by introducing this cycle. However, we have taken care to
      // update the proxies so it remains valid.
      auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
      PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
      AM.invalidate(*C, PA);
    }
    auto NewSCCIndex = RC->find(*C) - RC->begin();
    // If we have actually moved an SCC to be topologically "below" the current
    // one due to merging, we will need to revisit the current SCC after
    // visiting those moved SCCs.
    //
    // It is critical that we *do not* revisit the current SCC unless we
    // actually move SCCs in the process of merging because otherwise we may
    // form a cycle where an SCC is split apart, merged, split, merged and so
    // on infinitely.
    if (InitialSCCIndex < NewSCCIndex) {
      // Put our current SCC back onto the worklist as we'll visit other SCCs
      // that are now definitively ordered prior to the current one in the
      // post-order sequence, and may end up observing more precise context to
      // optimize the current SCC.
      UR.CWorklist.insert(C);
      DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
                   << "\n");
      // Enqueue in reverse order as we pop off the back of the worklist.
      for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
                                                  RC->begin() + NewSCCIndex))) {
        UR.CWorklist.insert(&MovedC);
        DEBUG(dbgs() << "Enqueuing an SCC newly earlier in post-order: "
                     << MovedC << "\n");
      }
    }
  }

  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");

  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
  // manager now that all the updates have been applied.
  if (RC != &InitialRC)
    UR.UpdatedRC = RC;
  if (C != &InitialC)
    UR.UpdatedC = C;

  return *C;
}