//===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/IR/CallSite.h"

using namespace llvm;

namespace llvm {

// Explicit instantiations for the core proxy templates.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC>;
template class InnerAnalysisManagerProxy<FunctionAnalysisManager,
                                         LazyCallGraph::SCC>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;
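
// Illustrative sketch (not used by this file) of how this pass manager is
// typically driven: it is itself a CGSCC pass, so it gets nested into a
// ModulePassManager through the adaptor declared in CGSCCPassManager.h.
// "SomeCGSCCPass", "M", and "MAM" are placeholders for whatever a real
// pipeline provides, and the analysis managers are assumed to already have
// the usual proxy analyses registered (e.g. by PassBuilder).
//
// \code
//   CGSCCPassManager CGPM(/*DebugLogging*/ true);
//   CGPM.addPass(SomeCGSCCPass());
//   ModulePassManager MPM(/*DebugLogging*/ true);
//   MPM.addPass(createModuleToPostOrderCGSCCPassAdaptor(std::move(CGPM)));
//   MPM.run(M, MAM); // M: the Module, MAM: a ModuleAnalysisManager.
// \endcode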

/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  PreservedAnalyses PA = PreservedAnalyses::all();

  if (DebugLogging)
    dbgs() << "Starting CGSCC pass manager run.\n";

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  for (auto &Pass : Passes) {
    if (DebugLogging)
      dbgs() << "Running pass: " << Pass->name() << " on " << *C << "\n";

    PreservedAnalyses PassPA = Pass->run(*C, AM, G, UR);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;

    // Check that we didn't miss any update scenario.
    assert(!UR.InvalidatedSCCs.count(C) && "Processing an invalid SCC!");
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses.
    AM.invalidate(*C, PassPA);

    // Finally, we intersect the final preserved analyses to compute the
    // aggregate preserved set for this pass manager.
    PA.intersect(std::move(PassPA));

    // FIXME: Historically, the pass managers all called the LLVM context's
    // yield function here. We don't have a generic way to acquire the
    // context and it isn't yet clear what the right pattern is for yielding
    // in the new pass manager so it is currently omitted.
    // ...getContext().yield();
  }

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserve<AllAnalysesOn<LazyCallGraph::SCC>>();

  if (DebugLogging)
    dbgs() << "Finished CGSCC pass manager run.\n";

  return PA;
}
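
// A rough sketch of the update contract the loop above relies on: when a
// CGSCC pass (or the function pass adaptor) refines the call graph, it must
// record the SCC that now contains the visited functions in \c UR.UpdatedC so
// the next pass in this manager runs over the right SCC. Within this file,
// updateCGAndAnalysisManagerForFunctionPass below is what fills that field
// in. "ExampleCGSCCPass" and "NewCurrentSCC" are purely hypothetical names.
//
// \code
//   PreservedAnalyses ExampleCGSCCPass::run(LazyCallGraph::SCC &C,
//                                           CGSCCAnalysisManager &AM,
//                                           LazyCallGraph &G,
//                                           CGSCCUpdateResult &UR) {
//     // ... mutate functions in C and update the graph G ...
//     // If the current SCC was split or merged, report the replacement:
//     //   UR.UpdatedC = &NewCurrentSCC;
//     return PreservedAnalyses::none();
//   }
// \endcode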

} // End llvm namespace

namespace {
/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
template <typename SCCRangeT>
LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
                       bool DebugLogging = false) {
  typedef LazyCallGraph::SCC SCC;

  if (NewSCCRange.begin() == NewSCCRange.end())
    return C;

  // Invalidate the analyses of the current SCC and add it to the worklist
  // since it has changed its shape.
  AM.invalidate(*C, PreservedAnalyses::none());
  UR.CWorklist.insert(C);
  if (DebugLogging)
    dbgs() << "Enqueuing the existing SCC in the worklist: " << *C << "\n";

  SCC *OldC = C;
  (void)OldC;

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

  for (SCC &NewC :
       reverse(make_range(std::next(NewSCCRange.begin()), NewSCCRange.end()))) {
    assert(C != &NewC && "No need to re-visit the current SCC!");
    assert(OldC != &NewC && "Already handled the original SCC!");
    UR.CWorklist.insert(&NewC);
    if (DebugLogging)
      dbgs() << "Enqueuing a newly formed SCC: " << NewC << "\n";
  }
  return C;
}
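
// Worked example (hypothetical, for illustration only): suppose the current
// SCC C is {f, g, h} held together by mutual calls, and a function pass
// running over g deletes the call from g to f while the call cycle between g
// and h survives. Demoting the now-dead call edge can split C into {f} and
// {g, h}. The helper above then invalidates the analyses cached for the old
// C, re-enqueues it along with every new SCC not containing N = g onto
// \c UR.CWorklist, and returns the SCC that does contain N ({g, h}) as the
// new current SCC, mirroring how the call sites below use it:
//
// \code
//   C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, TargetN), G, N,
//                              C, AM, UR, DebugLogging);
// \endcode
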
} // end anonymous namespace

LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, bool DebugLogging) {
  typedef LazyCallGraph::Node Node;
  typedef LazyCallGraph::Edge Edge;
  typedef LazyCallGraph::SCC SCC;
  typedef LazyCallGraph::RefSCC RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  SmallPtrSet<Function *, 16> RetainedEdges;
  SmallSetVector<Function *, 4> PromotedRefTargets;
  SmallSetVector<Function *, 4> DemotedCallTargets;
  // First walk the function and handle all called functions. We do this first
  // because if there is a single call edge, whether there are ref edges is
  // irrelevant.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      if (auto CS = CallSite(&I))
        if (Function *Callee = CS.getCalledFunction())
          if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
            const Edge *E = N.lookup(*Callee);
            // FIXME: We should really handle adding new calls. While it will
            // make downstream usage more complex, there is no fundamental
            // limitation and it will allow passes within the CGSCC to be a bit
            // more flexible in what transforms they can do. Until then, we
            // verify that new calls haven't been introduced.
            assert(E && "No function transformations should introduce *new* "
                        "call edges! Any new calls should be modeled as "
                        "promoted existing ref edges!");
            RetainedEdges.insert(Callee);
            if (!E->isCall())
              PromotedRefTargets.insert(Callee);
          }

  // Now walk all references.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB) {
      for (Value *Op : I.operand_values())
        if (Constant *C = dyn_cast<Constant>(Op))
          if (Visited.insert(C).second)
            Worklist.push_back(C);

      LazyCallGraph::visitReferences(Worklist, Visited, [&](Function &Referee) {
        // Skip declarations.
        if (Referee.isDeclaration())
          return;

        const Edge *E = N.lookup(Referee);
        // FIXME: Similarly to new calls, we also currently preclude
        // introducing new references. See above for details.
        assert(E && "No function transformations should introduce *new* ref "
                    "edges! Any new ref edges would require IPO which "
                    "function passes aren't allowed to do!");
        RetainedEdges.insert(&Referee);
        if (E->isCall())
          DemotedCallTargets.insert(&Referee);
      });
    }

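  // To make the edge categories above concrete (hypothetical examples): if a
  // pass turns an indirect call through a function pointer whose target F
  // already takes the address of into a direct call, an existing ref edge is
  // promoted to a call edge; if it replaces a direct call with a mere use of
  // the callee's address, that call edge is demoted to a ref edge; and if it
  // deletes the last use of a callee entirely, the edge never makes it into
  // RetainedEdges and is removed below.
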
  // First remove all of the edges that are no longer present in this function.
  // We have to build a list of dead targets first and then remove them as the
  // data structures will all be invalidated by removing them.
  SmallVector<PointerIntPair<Node *, 1, Edge::Kind>, 4> DeadTargets;
  for (Edge &E : N)
    if (!RetainedEdges.count(&E.getFunction()))
      DeadTargets.push_back({E.getNode(), E.getKind()});
  for (auto DeadTarget : DeadTargets) {
    Node &TargetN = *DeadTarget.getPointer();
    bool IsCall = DeadTarget.getInt() == Edge::Call;
    SCC &TargetC = *G.lookupSCC(TargetN);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    if (&TargetRC != RC) {
      RC->removeOutgoingEdge(N, TargetN);
      if (DebugLogging)
        dbgs() << "Deleting outgoing edge from '" << N << "' to '" << TargetN
               << "'\n";
      continue;
    }
    if (DebugLogging)
      dbgs() << "Deleting internal " << (IsCall ? "call" : "ref")
             << " edge from '" << N << "' to '" << TargetN << "'\n";

    if (IsCall)
      C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, TargetN), G, N,
                                 C, AM, UR, DebugLogging);

    auto NewRefSCCs = RC->removeInternalRefEdge(N, TargetN);
    if (!NewRefSCCs.empty()) {
      // Note that we don't bother to invalidate analyses as ref-edge
      // connectivity is not really observable in any way and is intended
      // exclusively to be used for ordering of transforms rather than for
      // analysis conclusions.

      // The RC worklist is in reverse postorder, so we first enqueue the
      // current RefSCC as it will remain the parent of all split RefSCCs, then
      // we enqueue the new ones in RPO except for the one which contains the
      // source node as that is the "bottom" we will continue processing in the
      // bottom-up walk.
      UR.RCWorklist.insert(RC);
      if (DebugLogging)
        dbgs() << "Enqueuing the existing RefSCC in the update worklist: "
               << *RC << "\n";
      // Update the RC to the "bottom".
      assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
      RC = &C->getOuterRefSCC();
      assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");
      for (RefSCC *NewRC : reverse(NewRefSCCs))
        if (NewRC != RC) {
          UR.RCWorklist.insert(NewRC);
          if (DebugLogging)
            dbgs() << "Enqueuing a new RefSCC in the update worklist: "
                   << *NewRC << "\n";
        }
    }
  }

  // Next demote all the call edges that are now ref edges. This helps make
  // the SCCs small which should minimize the work below as we don't want to
  // form cycles that this would break.
  for (Function *RefTarget : DemotedCallTargets) {
    Node &TargetN = *G.lookup(*RefTarget);
    SCC &TargetC = *G.lookupSCC(TargetN);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToRef(N, TargetN);
      if (DebugLogging)
        dbgs() << "Switch outgoing call edge to a ref edge from '" << N
               << "' to '" << TargetN << "'\n";
      continue;
    }

    // Otherwise we are switching an internal call edge to a ref edge. This
    // may split up some SCCs.
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, TargetN), G, N, C,
                               AM, UR, DebugLogging);
  }

  // Now promote ref edges into call edges.
  for (Function *CallTarget : PromotedRefTargets) {
    Node &TargetN = *G.lookup(*CallTarget);
    SCC &TargetC = *G.lookupSCC(TargetN);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToCall(N, TargetN);
      if (DebugLogging)
        dbgs() << "Switch outgoing ref edge to a call edge from '" << N
               << "' to '" << TargetN << "'\n";
      continue;
    }
    if (DebugLogging)
      dbgs() << "Switch an internal ref edge to a call edge from '" << N
             << "' to '" << TargetN << "'\n";

    // Otherwise we are switching an internal ref edge to a call edge. This
    // may merge away some SCCs, and we add those to the UpdateResult. We also
    // need to make sure to update the worklist in the event SCCs have moved
    // before the current one in the post-order sequence.
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
    auto InvalidatedSCCs = RC->switchInternalEdgeToCall(N, TargetN);
    if (!InvalidatedSCCs.empty()) {
      C = &TargetC;
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

      // Any analyses cached for this SCC are no longer precise as the shape
      // has changed by introducing this cycle.
      AM.invalidate(*C, PreservedAnalyses::none());

      for (SCC *InvalidatedC : InvalidatedSCCs) {
        assert(InvalidatedC != C && "Cannot invalidate the current SCC!");
        UR.InvalidatedSCCs.insert(InvalidatedC);

        // Also clear any cached analyses for the SCCs that are dead. This
        // isn't really necessary for correctness but can release memory.
        AM.clear(*InvalidatedC);
      }
    }
    auto NewSCCIndex = RC->find(*C) - RC->begin();
    if (InitialSCCIndex < NewSCCIndex) {
      // Put our current SCC back onto the worklist as we'll visit other SCCs
      // that are now definitively ordered prior to the current one in the
      // post-order sequence, and may end up observing more precise context to
      // optimize the current SCC.
      UR.CWorklist.insert(C);
      if (DebugLogging)
        dbgs() << "Enqueuing the existing SCC in the worklist: " << *C << "\n";
      // Enqueue in reverse order as we pop off the back of the worklist.
      for (SCC &MovedC : reverse(make_range(RC->begin() + InitialSCCIndex,
                                            RC->begin() + NewSCCIndex))) {
        UR.CWorklist.insert(&MovedC);
        if (DebugLogging)
          dbgs() << "Enqueuing an SCC that is newly earlier in post-order: "
                 << MovedC << "\n";
      }
    }
  }

  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");

  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
  // manager now that all the updates have been applied.
  if (RC != &InitialRC)
    UR.UpdatedRC = RC;
  if (C != &InitialC)
    UR.UpdatedC = C;

  return *C;
}