//===- CGSCCPassManager.cpp - Managing & running CGSCC passes ------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/IR/CallSite.h"

using namespace llvm;

namespace llvm {

// Explicit instantiations for the core proxy templates.
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC>;
template class InnerAnalysisManagerProxy<FunctionAnalysisManager,
                                         LazyCallGraph::SCC>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;

/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  PreservedAnalyses PA = PreservedAnalyses::all();

  if (DebugLogging)
    dbgs() << "Starting CGSCC pass manager run.\n";

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  for (auto &Pass : Passes) {
    if (DebugLogging)
      dbgs() << "Running pass: " << Pass->name() << " on " << *C << "\n";

    PreservedAnalyses PassPA = Pass->run(*C, AM, G, UR);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;

    // Check that we didn't miss any update scenario.
    assert(!UR.InvalidatedSCCs.count(C) && "Processing an invalid SCC!");
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses. We also update the preserved set of analyses
    // based on what analyses we have already handled the invalidation for
    // here and don't need to invalidate when finished.
    PassPA = AM.invalidate(*C, std::move(PassPA));

    // Finally, we intersect the final preserved analyses to compute the
    // aggregate preserved set for this pass manager.
    PA.intersect(std::move(PassPA));

    // FIXME: Historically, the pass managers all called the LLVM context's
    // yield function here. We don't have a generic way to acquire the
    // context and it isn't yet clear what the right pattern is for yielding
    // in the new pass manager so it is currently omitted.
    // ...getContext().yield();
  }

  if (DebugLogging)
    dbgs() << "Finished CGSCC pass manager run.\n";

  return PA;
}

} // End llvm namespace

namespace {
/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
template <typename SCCRangeT>
LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
                       bool DebugLogging = false) {
  typedef LazyCallGraph::SCC SCC;

  if (NewSCCRange.begin() == NewSCCRange.end())
    return C;

  // Invalidate the analyses of the current SCC and add it to the worklist
  // since it has changed its shape.
  AM.invalidate(*C, PreservedAnalyses::none());
  UR.CWorklist.insert(C);
  if (DebugLogging)
    dbgs() << "Enqueuing the existing SCC in the worklist:" << *C << "\n";

  SCC *OldC = C;
  (void)OldC;
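  // Note: OldC is only consulted by the asserts below; the (void) cast keeps
  // builds with asserts compiled out free of unused-variable warnings.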

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

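  // Enqueue the remaining new SCCs (every one except the SCC now containing
  // N) in reverse order so that, popping off the back of the worklist, they
  // are visited in postorder.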
  for (SCC &NewC :
       reverse(make_range(std::next(NewSCCRange.begin()), NewSCCRange.end()))) {
    assert(C != &NewC && "No need to re-visit the current SCC!");
    assert(OldC != &NewC && "Already handled the original SCC!");
    UR.CWorklist.insert(&NewC);
    if (DebugLogging)
      dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n";
  }
  return C;
}
}

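// Update the call graph \p G and the analysis manager \p AM to reflect any
// changes a function pass made to the body of \p N's function, and return the
// SCC that now contains \p N. Declared in llvm/Analysis/CGSCCPassManager.h.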
LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, bool DebugLogging) {
  typedef LazyCallGraph::Node Node;
  typedef LazyCallGraph::Edge Edge;
  typedef LazyCallGraph::SCC SCC;
  typedef LazyCallGraph::RefSCC RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  SmallPtrSet<Function *, 16> RetainedEdges;
  SmallSetVector<Function *, 4> PromotedRefTargets;
  SmallSetVector<Function *, 4> DemotedCallTargets;
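  // Here "promoted" means a target that used to be merely referenced but is
  // now called, and "demoted" means a target that used to be called but is
  // now only referenced.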
  // First walk the function and handle all called functions. We do this first
  // because if there is a single call edge, whether there are ref edges is
  // irrelevant.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      if (auto CS = CallSite(&I))
        if (Function *Callee = CS.getCalledFunction())
          if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
            const Edge *E = N.lookup(*Callee);
            // FIXME: We should really handle adding new calls. While it will
            // make downstream usage more complex, there is no fundamental
            // limitation and it will allow passes within the CGSCC to be a bit
            // more flexible in what transforms they can do. Until then, we
            // verify that new calls haven't been introduced.
            assert(E && "No function transformations should introduce *new* "
                        "call edges! Any new calls should be modeled as "
                        "promoted existing ref edges!");
            RetainedEdges.insert(Callee);
            if (!E->isCall())
              PromotedRefTargets.insert(Callee);
          }

  // Now walk all references.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB) {
      for (Value *Op : I.operand_values())
        if (Constant *C = dyn_cast<Constant>(Op))
          if (Visited.insert(C).second)
            Worklist.push_back(C);

      LazyCallGraph::visitReferences(Worklist, Visited, [&](Function &Referee) {
        // Skip declarations.
        if (Referee.isDeclaration())
          return;

        const Edge *E = N.lookup(Referee);
        // FIXME: Similarly to new calls, we also currently preclude
        // introducing new references. See above for details.
        assert(E && "No function transformations should introduce *new* ref "
                    "edges! Any new ref edges would require IPO which "
                    "function passes aren't allowed to do!");
        RetainedEdges.insert(&Referee);
        if (E->isCall())
          DemotedCallTargets.insert(&Referee);
      });
    }

  // First remove all of the edges that are no longer present in this function.
  // We have to build a list of dead targets first and then remove them as the
  // data structures will all be invalidated by removing them.
  SmallVector<PointerIntPair<Node *, 1, Edge::Kind>, 4> DeadTargets;
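  // Each dead-target entry packs the target node together with the original
  // edge kind so we still know whether the edge being removed was a call or
  // a ref edge once the edge itself is gone.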
  for (Edge &E : N)
    if (!RetainedEdges.count(&E.getFunction()))
      DeadTargets.push_back({E.getNode(), E.getKind()});
  for (auto DeadTarget : DeadTargets) {
    Node &TargetN = *DeadTarget.getPointer();
    bool IsCall = DeadTarget.getInt() == Edge::Call;
    SCC &TargetC = *G.lookupSCC(TargetN);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    if (&TargetRC != RC) {
      RC->removeOutgoingEdge(N, TargetN);
      if (DebugLogging)
        dbgs() << "Deleting outgoing edge from '" << N << "' to '" << TargetN
               << "'\n";
      continue;
    }
    if (DebugLogging)
      dbgs() << "Deleting internal " << (IsCall ? "call" : "ref")
             << " edge from '" << N << "' to '" << TargetN << "'\n";

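    // A dead internal call edge is first demoted to a ref edge, which may
    // split the current SCC; the removal below then only has to deal with a
    // ref edge.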
    if (IsCall)
      C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, TargetN), G, N,
                                 C, AM, UR, DebugLogging);

    auto NewRefSCCs = RC->removeInternalRefEdge(N, TargetN);
    if (!NewRefSCCs.empty()) {
      // Note that we don't bother to invalidate analyses as ref-edge
      // connectivity is not really observable in any way and is intended
      // exclusively to be used for ordering of transforms rather than for
      // analysis conclusions.

      // The RC worklist is in reverse postorder, so we first enqueue the
      // current RefSCC as it will remain the parent of all split RefSCCs, then
      // we enqueue the new ones in RPO except for the one which contains the
      // source node as that is the "bottom" we will continue processing in the
      // bottom-up walk.
      UR.RCWorklist.insert(RC);
      if (DebugLogging)
        dbgs() << "Enqueuing the existing RefSCC in the update worklist: "
               << *RC << "\n";
      // Update the RC to the "bottom".
      assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
      RC = &C->getOuterRefSCC();
      assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");
      for (RefSCC *NewRC : reverse(NewRefSCCs))
        if (NewRC != RC) {
          UR.RCWorklist.insert(NewRC);
          if (DebugLogging)
            dbgs() << "Enqueuing a new RefSCC in the update worklist: "
                   << *NewRC << "\n";
        }
    }
  }

  // Next demote all the call edges that are now ref edges. This helps make
  // the SCCs small which should minimize the work below as we don't want to
  // form cycles that this would break.
  for (Function *RefTarget : DemotedCallTargets) {
    Node &TargetN = *G.lookup(*RefTarget);
    SCC &TargetC = *G.lookupSCC(TargetN);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToRef(N, TargetN);
      if (DebugLogging)
        dbgs() << "Switch outgoing call edge to a ref edge from '" << N
               << "' to '" << TargetN << "'\n";
      continue;
    }

    // Otherwise we are switching an internal call edge to a ref edge. This
    // may split up some SCCs.
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, TargetN), G, N, C,
                               AM, UR, DebugLogging);
  }

  // Now promote ref edges into call edges.
  for (Function *CallTarget : PromotedRefTargets) {
    Node &TargetN = *G.lookup(*CallTarget);
    SCC &TargetC = *G.lookupSCC(TargetN);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToCall(N, TargetN);
      if (DebugLogging)
        dbgs() << "Switch outgoing ref edge to a call edge from '" << N
               << "' to '" << TargetN << "'\n";
      continue;
    }
    if (DebugLogging)
      dbgs() << "Switch an internal ref edge to a call edge from '" << N
             << "' to '" << TargetN << "'\n";

    // Otherwise we are switching an internal ref edge to a call edge. This
    // may merge away some SCCs, and we add those to the UpdateResult. We also
    // need to make sure to update the worklist in the event SCCs have moved
    // before the current one in the post-order sequence.
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
    auto InvalidatedSCCs = RC->switchInternalEdgeToCall(N, TargetN);
    if (!InvalidatedSCCs.empty()) {
      C = &TargetC;
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

      // Any analyses cached for this SCC are no longer precise as the shape
      // has changed by introducing this cycle.
      AM.invalidate(*C, PreservedAnalyses::none());

      for (SCC *InvalidatedC : InvalidatedSCCs) {
        assert(InvalidatedC != C && "Cannot invalidate the current SCC!");
        UR.InvalidatedSCCs.insert(InvalidatedC);

        // Also clear any cached analyses for the SCCs that are dead. This
        // isn't really necessary for correctness but can release memory.
        AM.clear(*InvalidatedC);
      }
    }
    auto NewSCCIndex = RC->find(*C) - RC->begin();
    if (InitialSCCIndex < NewSCCIndex) {
      // Put our current SCC back onto the worklist as we'll visit other SCCs
      // that are now definitively ordered prior to the current one in the
      // post-order sequence, and may end up observing more precise context to
      // optimize the current SCC.
      UR.CWorklist.insert(C);
      if (DebugLogging)
        dbgs() << "Enqueuing the existing SCC in the worklist: " << *C << "\n";
      // Enqueue in reverse order as we pop off the back of the worklist.
      for (SCC &MovedC : reverse(make_range(RC->begin() + InitialSCCIndex,
                                            RC->begin() + NewSCCIndex))) {
        UR.CWorklist.insert(&MovedC);
        if (DebugLogging)
          dbgs() << "Enqueuing a newly earlier in post-order SCC: " << MovedC
                 << "\n";
      }
    }
  }

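  // Sanity check: after all of the updates above, the current SCC and RefSCC
  // must still be live and must agree with each other.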
  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");

  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
  // manager now that all the updates have been applied.
  if (RC != &InitialRC)
    UR.UpdatedRC = RC;
  if (C != &InitialC)
    UR.UpdatedC = C;

  return *C;
}