//===- LazyCallGraph.cpp - Analysis of a Module's call graph -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/PassManager.h"
#include "llvm/InstVisitor.h"
#include "llvm/Support/CallSite.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

static void findCallees(
    SmallVectorImpl<Constant *> &Worklist, SmallPtrSetImpl<Constant *> &Visited,
    SmallVectorImpl<PointerUnion<Function *, LazyCallGraph::Node *> > &Callees,
    SmallPtrSetImpl<Function *> &CalleeSet) {
  while (!Worklist.empty()) {
    Constant *C = Worklist.pop_back_val();

    if (Function *F = dyn_cast<Function>(C)) {
      // Note that we consider *any* function with a definition to be a viable
      // edge. Even if the function's definition is subject to replacement by
      // some other module (say, a weak definition), there may still be
      // optimizations which essentially speculate based on the definition and
      // a way to check that the specific definition is in fact the one being
      // used. For example, this could be done by moving the weak definition to
      // a strong (internal) definition and making the weak definition be an
      // alias. Then a test of the address of the weak function against the new
      // strong definition's address would be an effective way to determine the
      // safety of optimizing a direct call edge.
      if (!F->isDeclaration() && CalleeSet.insert(F))
        Callees.push_back(F);
      continue;
    }

    for (User::value_op_iterator OI = C->value_op_begin(),
                                 OE = C->value_op_end();
         OI != OE; ++OI)
      if (Visited.insert(cast<Constant>(*OI)))
        Worklist.push_back(cast<Constant>(*OI));
  }
}

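// Illustrative sketch (hypothetical variables, mirroring the seeding done in
// the LazyCallGraph constructor below): a function referenced only through a
// constant expression, such as a pointer cast inside a global initializer,
// is still reached by the operand walk above.
//
//   SmallVector<Constant *, 16> Worklist;
//   SmallPtrSet<Constant *, 16> Visited;
//   if (GV.hasInitializer() && Visited.insert(GV.getInitializer()))
//     Worklist.push_back(GV.getInitializer());
//   findCallees(Worklist, Visited, Callees, CalleeSet);
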
LazyCallGraph::Node::Node(LazyCallGraph &G, Function &F) : G(G), F(F) {
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  // Find all the potential callees in this function. First walk the
  // instructions and add every operand which is a constant to the worklist.
  for (Function::iterator BBI = F.begin(), BBE = F.end(); BBI != BBE; ++BBI)
    for (BasicBlock::iterator II = BBI->begin(), IE = BBI->end(); II != IE;
         ++II)
      for (User::value_op_iterator OI = II->value_op_begin(),
                                   OE = II->value_op_end();
           OI != OE; ++OI)
        if (Constant *C = dyn_cast<Constant>(*OI))
          if (Visited.insert(C))
            Worklist.push_back(C);

  // We've collected all the constant (and thus potentially function or
  // function-containing) operands to all of the instructions in the function.
  // Process them (recursively) collecting every function found.
  findCallees(Worklist, Visited, Callees, CalleeSet);
}

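// Illustrative note: for a direct call such as 'call void @f()', the callee
// @f is a constant operand of the CallInst, so the loops above push it onto
// the worklist directly; a function address merely stored to memory, e.g.
// 'store void ()* @g, void ()** %slot', reaches @g the same way via the
// store's operand list.
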
LazyCallGraph::Node::Node(LazyCallGraph &G, const Node &OtherN)
    : G(G), F(OtherN.F), CalleeSet(OtherN.CalleeSet) {
  // Loop over the other node's callees, adding the Function*s to our list
  // directly, and recursing to add the Node*s.
  Callees.reserve(OtherN.Callees.size());
  for (NodeVectorImplT::const_iterator OI = OtherN.Callees.begin(),
                                       OE = OtherN.Callees.end();
       OI != OE; ++OI)
    if (Function *Callee = OI->dyn_cast<Function *>())
      Callees.push_back(Callee);
    else
      Callees.push_back(G.copyInto(*OI->get<Node *>()));
}

#if LLVM_HAS_RVALUE_REFERENCES
LazyCallGraph::Node::Node(LazyCallGraph &G, Node &&OtherN)
    : G(G), F(OtherN.F), Callees(std::move(OtherN.Callees)),
      CalleeSet(std::move(OtherN.CalleeSet)) {
  // Loop over our Callees. They've been moved from another node, but we need
  // to move the Node*s to live under our bump ptr allocator.
  for (NodeVectorImplT::iterator CI = Callees.begin(), CE = Callees.end();
       CI != CE; ++CI)
    if (Node *ChildN = CI->dyn_cast<Node *>())
      *CI = G.moveInto(std::move(*ChildN));
}
#endif

LazyCallGraph::LazyCallGraph(Module &M) : M(M) {
  for (Module::iterator FI = M.begin(), FE = M.end(); FI != FE; ++FI)
    if (!FI->isDeclaration() && !FI->hasLocalLinkage())
      if (EntryNodeSet.insert(&*FI))
        EntryNodes.push_back(&*FI);

  // Now add entry nodes for functions reachable via initializers to globals.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  for (Module::global_iterator GI = M.global_begin(), GE = M.global_end();
       GI != GE; ++GI)
    if (GI->hasInitializer())
      if (Visited.insert(GI->getInitializer()))
        Worklist.push_back(GI->getInitializer());

  findCallees(Worklist, Visited, EntryNodes, EntryNodeSet);
}

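// Usage sketch (illustrative only, assuming a Module M is in scope):
// construction is cheap because only entry points are recorded here; Node
// objects are built lazily as the edges are actually walked.
//
//   LazyCallGraph G(M);
//   for (LazyCallGraph::iterator I = G.begin(), E = G.end(); I != E; ++I)
//     (*I)->getFunction().getName(); // materializes each entry Node
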
LazyCallGraph::LazyCallGraph(const LazyCallGraph &G)
    : M(G.M), EntryNodeSet(G.EntryNodeSet) {
  EntryNodes.reserve(G.EntryNodes.size());
  for (NodeVectorImplT::const_iterator EI = G.EntryNodes.begin(),
                                       EE = G.EntryNodes.end();
       EI != EE; ++EI)
    if (Function *Callee = EI->dyn_cast<Function *>())
      EntryNodes.push_back(Callee);
    else
      EntryNodes.push_back(copyInto(*EI->get<Node *>()));
}

#if LLVM_HAS_RVALUE_REFERENCES
// FIXME: This would be crazy simpler if BumpPtrAllocator were movable without
// invalidating any of the allocated memory. We should make that be the case at
// some point and delete this.
LazyCallGraph::LazyCallGraph(LazyCallGraph &&G)
    : M(G.M), EntryNodes(std::move(G.EntryNodes)),
      EntryNodeSet(std::move(G.EntryNodeSet)) {
  // Loop over our EntryNodes. They've been moved from another graph, but we
  // need to move the Node*s to live under our bump ptr allocator.
  for (NodeVectorImplT::iterator EI = EntryNodes.begin(), EE = EntryNodes.end();
       EI != EE; ++EI)
    if (Node *EntryN = EI->dyn_cast<Node *>())
      *EI = moveInto(std::move(*EntryN));
}
#endif

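// Allocate and construct a Node for a function out of the graph's bump
// pointer allocator. The NodeMap slot (MappedN) is written before the
// placement-new runs, so the map already points at the node's storage while
// the node is being constructed.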
LazyCallGraph::Node *LazyCallGraph::insertInto(Function &F, Node *&MappedN) {
  return new (MappedN = BPA.Allocate()) Node(*this, F);
}

LazyCallGraph::Node *LazyCallGraph::copyInto(const Node &OtherN) {
  Node *&N = NodeMap[&OtherN.F];
  if (N)
    return N;

  return new (N = BPA.Allocate()) Node(*this, OtherN);
}

#if LLVM_HAS_RVALUE_REFERENCES
LazyCallGraph::Node *LazyCallGraph::moveInto(Node &&OtherN) {
  Node *&N = NodeMap[&OtherN.F];
  if (N)
    return N;

  return new (N = BPA.Allocate()) Node(*this, std::move(OtherN));
}
#endif

char LazyCallGraphAnalysis::PassID;

LazyCallGraphPrinterPass::LazyCallGraphPrinterPass(raw_ostream &OS) : OS(OS) {}

static void printNodes(raw_ostream &OS, LazyCallGraph::Node &N,
                       SmallPtrSetImpl<LazyCallGraph::Node *> &Printed) {
  // Recurse depth first through the nodes.
  for (LazyCallGraph::iterator I = N.begin(), E = N.end(); I != E; ++I)
    if (Printed.insert(*I))
      printNodes(OS, **I, Printed);

  OS << "  Call edges in function: " << N.getFunction().getName() << "\n";
  for (LazyCallGraph::iterator I = N.begin(), E = N.end(); I != E; ++I)
    OS << "    -> " << I->getFunction().getName() << "\n";

  OS << "\n";
}

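// For a function 'foo' that calls 'bar' and 'baz', the loops above print
// roughly:
//
//   Call edges in function: foo
//     -> bar
//     -> baz
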
PreservedAnalyses LazyCallGraphPrinterPass::run(Module *M,
                                                ModuleAnalysisManager *AM) {
  LazyCallGraph &G = AM->getResult<LazyCallGraphAnalysis>(M);

  OS << "Printing the call graph for module: " << M->getModuleIdentifier()
     << "\n\n";

  SmallPtrSet<LazyCallGraph::Node *, 16> Printed;
  for (LazyCallGraph::iterator I = G.begin(), E = G.end(); I != E; ++I)
    if (Printed.insert(*I))
      printNodes(OS, **I, Printed);

  return PreservedAnalyses::all();
}
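
// Usage sketch (illustrative; the exact pass manager setup of this era may
// differ, assuming a Module M is in scope): registering the analysis and
// running the printer looks roughly like:
//
//   ModuleAnalysisManager MAM;
//   MAM.registerPass(LazyCallGraphAnalysis());
//   ModulePassManager MPM;
//   MPM.addPass(LazyCallGraphPrinterPass(outs()));
//   MPM.run(&M, &MAM);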